summary refs log tree commit diff stats
path: root/third_party/rust/ash/src
diff options
context:
space:
mode:
author: Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-07 19:33:14 +0000
committer: Daniel Baumann <daniel.baumann@progress-linux.org> 2024-04-07 19:33:14 +0000
commit: 36d22d82aa202bb199967e9512281e9a53db42c9 (patch)
tree: 105e8c98ddea1c1e4784a60a5a6410fa416be2de /third_party/rust/ash/src
parent: Initial commit. (diff)
download: firefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.tar.xz
download: firefox-esr-36d22d82aa202bb199967e9512281e9a53db42c9.zip
Adding upstream version 115.7.0esr. (tag: upstream/115.7.0esr)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/rust/ash/src')
-rw-r--r--third_party/rust/ash/src/device.rs2707
-rw-r--r--third_party/rust/ash/src/entry.rs402
-rw-r--r--third_party/rust/ash/src/extensions/experimental/amd.rs722
-rw-r--r--third_party/rust/ash/src/extensions/experimental/mod.rs1
-rw-r--r--third_party/rust/ash/src/extensions/ext/acquire_drm_display.rs55
-rw-r--r--third_party/rust/ash/src/extensions/ext/buffer_device_address.rs44
-rw-r--r--third_party/rust/ash/src/extensions/ext/calibrated_timestamps.rs72
-rwxr-xr-xthird_party/rust/ash/src/extensions/ext/debug_marker.rs71
-rwxr-xr-xthird_party/rust/ash/src/extensions/ext/debug_report.rs68
-rwxr-xr-xthird_party/rust/ash/src/extensions/ext/debug_utils.rs173
-rw-r--r--third_party/rust/ash/src/extensions/ext/descriptor_buffer.rs211
-rw-r--r--third_party/rust/ash/src/extensions/ext/extended_dynamic_state.rs196
-rw-r--r--third_party/rust/ash/src/extensions/ext/extended_dynamic_state2.rs85
-rw-r--r--third_party/rust/ash/src/extensions/ext/extended_dynamic_state3.rs409
-rw-r--r--third_party/rust/ash/src/extensions/ext/full_screen_exclusive.rs86
-rw-r--r--third_party/rust/ash/src/extensions/ext/headless_surface.rs55
-rw-r--r--third_party/rust/ash/src/extensions/ext/image_compression_control.rs47
-rw-r--r--third_party/rust/ash/src/extensions/ext/image_drm_format_modifier.rs48
-rw-r--r--third_party/rust/ash/src/extensions/ext/mesh_shader.rs94
-rw-r--r--third_party/rust/ash/src/extensions/ext/metal_surface.rs54
-rw-r--r--third_party/rust/ash/src/extensions/ext/mod.rs45
-rw-r--r--third_party/rust/ash/src/extensions/ext/physical_device_drm.rs26
-rw-r--r--third_party/rust/ash/src/extensions/ext/private_data.rs105
-rw-r--r--third_party/rust/ash/src/extensions/ext/sample_locations.rs54
-rw-r--r--third_party/rust/ash/src/extensions/ext/tooling_info.rs40
-rw-r--r--third_party/rust/ash/src/extensions/khr/acceleration_structure.rs322
-rwxr-xr-xthird_party/rust/ash/src/extensions/khr/android_surface.rs54
-rw-r--r--third_party/rust/ash/src/extensions/khr/buffer_device_address.rs62
-rw-r--r--third_party/rust/ash/src/extensions/khr/copy_commands2.rs84
-rw-r--r--third_party/rust/ash/src/extensions/khr/create_render_pass2.rs90
-rw-r--r--third_party/rust/ash/src/extensions/khr/deferred_host_operations.rs93
-rw-r--r--third_party/rust/ash/src/extensions/khr/device_group.rs169
-rw-r--r--third_party/rust/ash/src/extensions/khr/device_group_creation.rs66
-rwxr-xr-xthird_party/rust/ash/src/extensions/khr/display.rs143
-rwxr-xr-xthird_party/rust/ash/src/extensions/khr/display_swapchain.rs57
-rw-r--r--third_party/rust/ash/src/extensions/khr/draw_indirect_count.rs74
-rw-r--r--third_party/rust/ash/src/extensions/khr/dynamic_rendering.rs44
-rw-r--r--third_party/rust/ash/src/extensions/khr/external_fence_fd.rs49
-rw-r--r--third_party/rust/ash/src/extensions/khr/external_fence_win32.rs58
-rw-r--r--third_party/rust/ash/src/extensions/khr/external_memory_fd.rs60
-rw-r--r--third_party/rust/ash/src/extensions/khr/external_memory_win32.rs66
-rw-r--r--third_party/rust/ash/src/extensions/khr/external_semaphore_fd.rs52
-rw-r--r--third_party/rust/ash/src/extensions/khr/external_semaphore_win32.rs58
-rw-r--r--third_party/rust/ash/src/extensions/khr/get_memory_requirements2.rs92
-rw-r--r--third_party/rust/ash/src/extensions/khr/get_physical_device_properties2.rs167
-rw-r--r--third_party/rust/ash/src/extensions/khr/get_surface_capabilities2.rs84
-rw-r--r--third_party/rust/ash/src/extensions/khr/maintenance1.rs45
-rw-r--r--third_party/rust/ash/src/extensions/khr/maintenance3.rs45
-rw-r--r--third_party/rust/ash/src/extensions/khr/maintenance4.rs91
-rw-r--r--third_party/rust/ash/src/extensions/khr/mod.rs75
-rw-r--r--third_party/rust/ash/src/extensions/khr/pipeline_executable_properties.rs84
-rw-r--r--third_party/rust/ash/src/extensions/khr/present_wait.rs47
-rw-r--r--third_party/rust/ash/src/extensions/khr/push_descriptor.rs68
-rw-r--r--third_party/rust/ash/src/extensions/khr/ray_tracing_maintenance1.rs42
-rw-r--r--third_party/rust/ash/src/extensions/khr/ray_tracing_pipeline.rs194
-rwxr-xr-xthird_party/rust/ash/src/extensions/khr/surface.rs110
-rwxr-xr-xthird_party/rust/ash/src/extensions/khr/swapchain.rs211
-rw-r--r--third_party/rust/ash/src/extensions/khr/synchronization2.rs101
-rw-r--r--third_party/rust/ash/src/extensions/khr/timeline_semaphore.rs60
-rwxr-xr-xthird_party/rust/ash/src/extensions/khr/wayland_surface.rs71
-rwxr-xr-xthird_party/rust/ash/src/extensions/khr/win32_surface.rs69
-rwxr-xr-xthird_party/rust/ash/src/extensions/khr/xcb_surface.rs73
-rwxr-xr-xthird_party/rust/ash/src/extensions/khr/xlib_surface.rs73
-rw-r--r--third_party/rust/ash/src/extensions/mod.rs6
-rwxr-xr-xthird_party/rust/ash/src/extensions/mvk/ios_surface.rs54
-rwxr-xr-xthird_party/rust/ash/src/extensions/mvk/macos_surface.rs54
-rw-r--r--third_party/rust/ash/src/extensions/mvk/mod.rs5
-rw-r--r--third_party/rust/ash/src/extensions/nn/mod.rs3
-rw-r--r--third_party/rust/ash/src/extensions/nn/vi_surface.rs54
-rw-r--r--third_party/rust/ash/src/extensions/nv/coverage_reduction_mode.rs70
-rw-r--r--third_party/rust/ash/src/extensions/nv/device_diagnostic_checkpoints.rs63
-rwxr-xr-xthird_party/rust/ash/src/extensions/nv/mesh_shader.rs81
-rw-r--r--third_party/rust/ash/src/extensions/nv/mod.rs9
-rwxr-xr-xthird_party/rust/ash/src/extensions/nv/ray_tracing.rs271
-rw-r--r--third_party/rust/ash/src/instance.rs535
-rw-r--r--third_party/rust/ash/src/lib.rs197
-rw-r--r--third_party/rust/ash/src/prelude.rs122
-rw-r--r--third_party/rust/ash/src/util.rs139
-rw-r--r--third_party/rust/ash/src/vk.rs52
-rw-r--r--third_party/rust/ash/src/vk/aliases.rs251
-rw-r--r--third_party/rust/ash/src/vk/bitflags.rs1643
-rw-r--r--third_party/rust/ash/src/vk/const_debugs.rs7167
-rw-r--r--third_party/rust/ash/src/vk/constants.rs27
-rw-r--r--third_party/rust/ash/src/vk/definitions.rs73178
-rw-r--r--third_party/rust/ash/src/vk/enums.rs2917
-rw-r--r--third_party/rust/ash/src/vk/extensions.rs25322
-rw-r--r--third_party/rust/ash/src/vk/feature_extensions.rs451
-rw-r--r--third_party/rust/ash/src/vk/features.rs5353
-rw-r--r--third_party/rust/ash/src/vk/macros.rs162
-rw-r--r--third_party/rust/ash/src/vk/native.rs8998
-rw-r--r--third_party/rust/ash/src/vk/platform_types.rs41
-rw-r--r--third_party/rust/ash/src/vk/prelude.rs61
92 files changed, 136559 insertions, 0 deletions
diff --git a/third_party/rust/ash/src/device.rs b/third_party/rust/ash/src/device.rs
new file mode 100644
index 0000000000..60a91ae155
--- /dev/null
+++ b/third_party/rust/ash/src/device.rs
@@ -0,0 +1,2707 @@
+#![allow(clippy::trivially_copy_pass_by_ref)]
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use std::mem;
+use std::os::raw::c_void;
+use std::ptr;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDevice.html>
+#[derive(Clone)]
+pub struct Device {
+ pub(crate) handle: vk::Device,
+
+ pub(crate) device_fn_1_0: vk::DeviceFnV1_0,
+ pub(crate) device_fn_1_1: vk::DeviceFnV1_1,
+ pub(crate) device_fn_1_2: vk::DeviceFnV1_2,
+ pub(crate) device_fn_1_3: vk::DeviceFnV1_3,
+}
+
+impl Device {
+ pub unsafe fn load(instance_fn: &vk::InstanceFnV1_0, device: vk::Device) -> Self {
+ let load_fn = |name: &std::ffi::CStr| {
+ mem::transmute((instance_fn.get_device_proc_addr)(device, name.as_ptr()))
+ };
+
+ Self {
+ handle: device,
+
+ device_fn_1_0: vk::DeviceFnV1_0::load(load_fn),
+ device_fn_1_1: vk::DeviceFnV1_1::load(load_fn),
+ device_fn_1_2: vk::DeviceFnV1_2::load(load_fn),
+ device_fn_1_3: vk::DeviceFnV1_3::load(load_fn),
+ }
+ }
+
+ #[inline]
+ pub fn handle(&self) -> vk::Device {
+ self.handle
+ }
+}
+
+/// Vulkan core 1.3
+#[allow(non_camel_case_types)]
+impl Device {
+ #[inline]
+ pub fn fp_v1_3(&self) -> &vk::DeviceFnV1_3 {
+ &self.device_fn_1_3
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreatePrivateDataSlot.html>
+ #[inline]
+ pub unsafe fn create_private_data_slot(
+ &self,
+ create_info: &vk::PrivateDataSlotCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::PrivateDataSlot> {
+ let mut private_data_slot = mem::zeroed();
+ (self.device_fn_1_3.create_private_data_slot)(
+ self.handle,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut private_data_slot,
+ )
+ .result_with_success(private_data_slot)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyPrivateDataSlot.html>
+ #[inline]
+ pub unsafe fn destroy_private_data_slot(
+ &self,
+ private_data_slot: vk::PrivateDataSlot,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) {
+ (self.device_fn_1_3.destroy_private_data_slot)(
+ self.handle,
+ private_data_slot,
+ allocation_callbacks.as_raw_ptr(),
+ )
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkSetPrivateData.html>
+ #[inline]
+ pub unsafe fn set_private_data<T: vk::Handle>(
+ &self,
+ object: T,
+ private_data_slot: vk::PrivateDataSlot,
+ data: u64,
+ ) -> VkResult<()> {
+ (self.device_fn_1_3.set_private_data)(
+ self.handle,
+ T::TYPE,
+ object.as_raw(),
+ private_data_slot,
+ data,
+ )
+ .result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPrivateData.html>
+ #[inline]
+ pub unsafe fn get_private_data<T: vk::Handle>(
+ &self,
+ object: T,
+ private_data_slot: vk::PrivateDataSlot,
+ ) -> u64 {
+ let mut data = mem::zeroed();
+ (self.device_fn_1_3.get_private_data)(
+ self.handle,
+ T::TYPE,
+ object.as_raw(),
+ private_data_slot,
+ &mut data,
+ );
+ data
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdPipelineBarrier2.html>
+ #[inline]
+ pub unsafe fn cmd_pipeline_barrier2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ dependency_info: &vk::DependencyInfo,
+ ) {
+ (self.device_fn_1_3.cmd_pipeline_barrier2)(command_buffer, dependency_info)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdResetEvent2.html>
+ #[inline]
+ pub unsafe fn cmd_reset_event2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ event: vk::Event,
+ stage_mask: vk::PipelineStageFlags2,
+ ) {
+ (self.device_fn_1_3.cmd_reset_event2)(command_buffer, event, stage_mask)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetEvent2.html>
+ #[inline]
+ pub unsafe fn cmd_set_event2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ event: vk::Event,
+ dependency_info: &vk::DependencyInfo,
+ ) {
+ (self.device_fn_1_3.cmd_set_event2)(command_buffer, event, dependency_info)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdWaitEvents2.html>
+ #[inline]
+ pub unsafe fn cmd_wait_events2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ events: &[vk::Event],
+ dependency_infos: &[vk::DependencyInfo],
+ ) {
+ assert_eq!(events.len(), dependency_infos.len());
+ (self.device_fn_1_3.cmd_wait_events2)(
+ command_buffer,
+ events.len() as u32,
+ events.as_ptr(),
+ dependency_infos.as_ptr(),
+ )
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdWriteTimestamp2.html>
+ #[inline]
+ pub unsafe fn cmd_write_timestamp2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ stage: vk::PipelineStageFlags2,
+ query_pool: vk::QueryPool,
+ query: u32,
+ ) {
+ (self.device_fn_1_3.cmd_write_timestamp2)(command_buffer, stage, query_pool, query)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkQueueSubmit2.html>
+ #[inline]
+ pub unsafe fn queue_submit2(
+ &self,
+ queue: vk::Queue,
+ submits: &[vk::SubmitInfo2],
+ fence: vk::Fence,
+ ) -> VkResult<()> {
+ (self.device_fn_1_3.queue_submit2)(queue, submits.len() as u32, submits.as_ptr(), fence)
+ .result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyBuffer2.html>
+ #[inline]
+ pub unsafe fn cmd_copy_buffer2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ copy_buffer_info: &vk::CopyBufferInfo2,
+ ) {
+ (self.device_fn_1_3.cmd_copy_buffer2)(command_buffer, copy_buffer_info)
+ }
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyImage2.html>
+ #[inline]
+ pub unsafe fn cmd_copy_image2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ copy_image_info: &vk::CopyImageInfo2,
+ ) {
+ (self.device_fn_1_3.cmd_copy_image2)(command_buffer, copy_image_info)
+ }
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyBufferToImage2.html>
+ #[inline]
+ pub unsafe fn cmd_copy_buffer_to_image2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ copy_buffer_to_image_info: &vk::CopyBufferToImageInfo2,
+ ) {
+ (self.device_fn_1_3.cmd_copy_buffer_to_image2)(command_buffer, copy_buffer_to_image_info)
+ }
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyImageToBuffer2.html>
+ #[inline]
+ pub unsafe fn cmd_copy_image_to_buffer2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ copy_image_to_buffer_info: &vk::CopyImageToBufferInfo2,
+ ) {
+ (self.device_fn_1_3.cmd_copy_image_to_buffer2)(command_buffer, copy_image_to_buffer_info)
+ }
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBlitImage2.html>
+ #[inline]
+ pub unsafe fn cmd_blit_image2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ blit_image_info: &vk::BlitImageInfo2,
+ ) {
+ (self.device_fn_1_3.cmd_blit_image2)(command_buffer, blit_image_info)
+ }
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdResolveImage2.html>
+ #[inline]
+ pub unsafe fn cmd_resolve_image2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ resolve_image_info: &vk::ResolveImageInfo2,
+ ) {
+ (self.device_fn_1_3.cmd_resolve_image2)(command_buffer, resolve_image_info)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBeginRendering.html>
+ #[inline]
+ pub unsafe fn cmd_begin_rendering(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ rendering_info: &vk::RenderingInfo,
+ ) {
+ (self.device_fn_1_3.cmd_begin_rendering)(command_buffer, rendering_info)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdEndRendering.html>
+ #[inline]
+ pub unsafe fn cmd_end_rendering(&self, command_buffer: vk::CommandBuffer) {
+ (self.device_fn_1_3.cmd_end_rendering)(command_buffer)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetCullMode.html>
+ #[inline]
+ pub unsafe fn cmd_set_cull_mode(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ cull_mode: vk::CullModeFlags,
+ ) {
+ (self.device_fn_1_3.cmd_set_cull_mode)(command_buffer, cull_mode)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetFrontFace.html>
+ #[inline]
+ pub unsafe fn cmd_set_front_face(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ front_face: vk::FrontFace,
+ ) {
+ (self.device_fn_1_3.cmd_set_front_face)(command_buffer, front_face)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetPrimitiveTopology.html>
+ #[inline]
+ pub unsafe fn cmd_set_primitive_topology(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ primitive_topology: vk::PrimitiveTopology,
+ ) {
+ (self.device_fn_1_3.cmd_set_primitive_topology)(command_buffer, primitive_topology)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetViewportWithCount.html>
+ #[inline]
+ pub unsafe fn cmd_set_viewport_with_count(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ viewports: &[vk::Viewport],
+ ) {
+ (self.device_fn_1_3.cmd_set_viewport_with_count)(
+ command_buffer,
+ viewports.len() as u32,
+ viewports.as_ptr(),
+ )
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetScissorWithCount.html>
+ #[inline]
+ pub unsafe fn cmd_set_scissor_with_count(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ scissors: &[vk::Rect2D],
+ ) {
+ (self.device_fn_1_3.cmd_set_scissor_with_count)(
+ command_buffer,
+ scissors.len() as u32,
+ scissors.as_ptr(),
+ )
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBindVertexBuffers2.html>
+ #[inline]
+ pub unsafe fn cmd_bind_vertex_buffers2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ first_binding: u32,
+ buffers: &[vk::Buffer],
+ offsets: &[vk::DeviceSize],
+ sizes: Option<&[vk::DeviceSize]>,
+ strides: Option<&[vk::DeviceSize]>,
+ ) {
+ assert_eq!(offsets.len(), buffers.len());
+ let p_sizes = if let Some(sizes) = sizes {
+ assert_eq!(sizes.len(), buffers.len());
+ sizes.as_ptr()
+ } else {
+ ptr::null()
+ };
+ let p_strides = if let Some(strides) = strides {
+ assert_eq!(strides.len(), buffers.len());
+ strides.as_ptr()
+ } else {
+ ptr::null()
+ };
+ (self.device_fn_1_3.cmd_bind_vertex_buffers2)(
+ command_buffer,
+ first_binding,
+ buffers.len() as u32,
+ buffers.as_ptr(),
+ offsets.as_ptr(),
+ p_sizes,
+ p_strides,
+ )
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthTestEnable.html>
+ #[inline]
+ pub unsafe fn cmd_set_depth_test_enable(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ depth_test_enable: bool,
+ ) {
+ (self.device_fn_1_3.cmd_set_depth_test_enable)(command_buffer, depth_test_enable.into())
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthWriteEnable.html>
+ #[inline]
+ pub unsafe fn cmd_set_depth_write_enable(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ depth_write_enable: bool,
+ ) {
+ (self.device_fn_1_3.cmd_set_depth_write_enable)(command_buffer, depth_write_enable.into())
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthCompareOp.html>
+ #[inline]
+ pub unsafe fn cmd_set_depth_compare_op(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ depth_compare_op: vk::CompareOp,
+ ) {
+ (self.device_fn_1_3.cmd_set_depth_compare_op)(command_buffer, depth_compare_op)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthBoundsTestEnable.html>
+ #[inline]
+ pub unsafe fn cmd_set_depth_bounds_test_enable(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ depth_bounds_test_enable: bool,
+ ) {
+ (self.device_fn_1_3.cmd_set_depth_bounds_test_enable)(
+ command_buffer,
+ depth_bounds_test_enable.into(),
+ )
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilTestEnable.html>
+ #[inline]
+ pub unsafe fn cmd_set_stencil_test_enable(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ stencil_test_enable: bool,
+ ) {
+ (self.device_fn_1_3.cmd_set_stencil_test_enable)(command_buffer, stencil_test_enable.into())
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilOp.html>
+ #[inline]
+ pub unsafe fn cmd_set_stencil_op(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ face_mask: vk::StencilFaceFlags,
+ fail_op: vk::StencilOp,
+ pass_op: vk::StencilOp,
+ depth_fail_op: vk::StencilOp,
+ compare_op: vk::CompareOp,
+ ) {
+ (self.device_fn_1_3.cmd_set_stencil_op)(
+ command_buffer,
+ face_mask,
+ fail_op,
+ pass_op,
+ depth_fail_op,
+ compare_op,
+ )
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetRasterizerDiscardEnable.html>
+ #[inline]
+ pub unsafe fn cmd_set_rasterizer_discard_enable(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ rasterizer_discard_enable: bool,
+ ) {
+ (self.device_fn_1_3.cmd_set_rasterizer_discard_enable)(
+ command_buffer,
+ rasterizer_discard_enable.into(),
+ )
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthBiasEnable.html>
+ #[inline]
+ pub unsafe fn cmd_set_depth_bias_enable(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ depth_bias_enable: bool,
+ ) {
+ (self.device_fn_1_3.cmd_set_depth_bias_enable)(command_buffer, depth_bias_enable.into())
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetPrimitiveRestartEnable.html>
+ #[inline]
+ pub unsafe fn cmd_set_primitive_restart_enable(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ primitive_restart_enable: bool,
+ ) {
+ (self.device_fn_1_3.cmd_set_primitive_restart_enable)(
+ command_buffer,
+ primitive_restart_enable.into(),
+ )
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceBufferMemoryRequirements.html>
+ #[inline]
+ pub unsafe fn get_device_buffer_memory_requirements(
+ &self,
+ create_info: &vk::DeviceBufferMemoryRequirements,
+ out: &mut vk::MemoryRequirements2,
+ ) {
+ (self.device_fn_1_3.get_device_buffer_memory_requirements)(self.handle, create_info, out)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceImageMemoryRequirements.html>
+ #[inline]
+ pub unsafe fn get_device_image_memory_requirements(
+ &self,
+ create_info: &vk::DeviceImageMemoryRequirements,
+ out: &mut vk::MemoryRequirements2,
+ ) {
+ (self.device_fn_1_3.get_device_image_memory_requirements)(self.handle, create_info, out)
+ }
+
+ /// Retrieve the number of elements to pass to [`get_device_image_sparse_memory_requirements()`][Self::get_device_image_sparse_memory_requirements()]
+ #[inline]
+ pub unsafe fn get_device_image_sparse_memory_requirements_len(
+ &self,
+ create_info: &vk::DeviceImageMemoryRequirements,
+ ) -> usize {
+ let mut count = 0;
+ (self
+ .device_fn_1_3
+ .get_device_image_sparse_memory_requirements)(
+ self.handle,
+ create_info,
+ &mut count,
+ std::ptr::null_mut(),
+ );
+ count as usize
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceImageSparseMemoryRequirements.html>
+ ///
+ /// Call [`get_device_image_sparse_memory_requirements_len()`][Self::get_device_image_sparse_memory_requirements_len()] to query the number of elements to pass to `out`.
+ /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
+ #[inline]
+ pub unsafe fn get_device_image_sparse_memory_requirements(
+ &self,
+ create_info: &vk::DeviceImageMemoryRequirements,
+ out: &mut [vk::SparseImageMemoryRequirements2],
+ ) {
+ let mut count = out.len() as u32;
+ (self
+ .device_fn_1_3
+ .get_device_image_sparse_memory_requirements)(
+ self.handle,
+ create_info,
+ &mut count,
+ out.as_mut_ptr(),
+ );
+ assert_eq!(count as usize, out.len());
+ }
+}
+
+/// Vulkan core 1.2
+#[allow(non_camel_case_types)]
+impl Device {
+ #[inline]
+ pub fn fp_v1_2(&self) -> &vk::DeviceFnV1_2 {
+ &self.device_fn_1_2
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndirectCount.html>
+ #[inline]
+ pub unsafe fn cmd_draw_indirect_count(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ buffer: vk::Buffer,
+ offset: vk::DeviceSize,
+ count_buffer: vk::Buffer,
+ count_buffer_offset: vk::DeviceSize,
+ max_draw_count: u32,
+ stride: u32,
+ ) {
+ (self.device_fn_1_2.cmd_draw_indirect_count)(
+ command_buffer,
+ buffer,
+ offset,
+ count_buffer,
+ count_buffer_offset,
+ max_draw_count,
+ stride,
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndexedIndirectCount.html>
+ #[inline]
+ pub unsafe fn cmd_draw_indexed_indirect_count(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ buffer: vk::Buffer,
+ offset: vk::DeviceSize,
+ count_buffer: vk::Buffer,
+ count_buffer_offset: vk::DeviceSize,
+ max_draw_count: u32,
+ stride: u32,
+ ) {
+ (self.device_fn_1_2.cmd_draw_indexed_indirect_count)(
+ command_buffer,
+ buffer,
+ offset,
+ count_buffer,
+ count_buffer_offset,
+ max_draw_count,
+ stride,
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateRenderPass2.html>
+ #[inline]
+ pub unsafe fn create_render_pass2(
+ &self,
+ create_info: &vk::RenderPassCreateInfo2,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::RenderPass> {
+ let mut renderpass = mem::zeroed();
+ (self.device_fn_1_2.create_render_pass2)(
+ self.handle(),
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut renderpass,
+ )
+ .result_with_success(renderpass)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBeginRenderPass2.html>
+ #[inline]
+ pub unsafe fn cmd_begin_render_pass2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ render_pass_begin_info: &vk::RenderPassBeginInfo,
+ subpass_begin_info: &vk::SubpassBeginInfo,
+ ) {
+ (self.device_fn_1_2.cmd_begin_render_pass2)(
+ command_buffer,
+ render_pass_begin_info,
+ subpass_begin_info,
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdNextSubpass2.html>
+ #[inline]
+ pub unsafe fn cmd_next_subpass2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ subpass_begin_info: &vk::SubpassBeginInfo,
+ subpass_end_info: &vk::SubpassEndInfo,
+ ) {
+ (self.device_fn_1_2.cmd_next_subpass2)(
+ command_buffer,
+ subpass_begin_info,
+ subpass_end_info,
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdEndRenderPass2.html>
+ #[inline]
+ pub unsafe fn cmd_end_render_pass2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ subpass_end_info: &vk::SubpassEndInfo,
+ ) {
+ (self.device_fn_1_2.cmd_end_render_pass2)(command_buffer, subpass_end_info);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkResetQueryPool.html>
+ #[inline]
+ pub unsafe fn reset_query_pool(
+ &self,
+ query_pool: vk::QueryPool,
+ first_query: u32,
+ query_count: u32,
+ ) {
+ (self.device_fn_1_2.reset_query_pool)(self.handle(), query_pool, first_query, query_count);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetSemaphoreCounterValue.html>
+ #[inline]
+ pub unsafe fn get_semaphore_counter_value(&self, semaphore: vk::Semaphore) -> VkResult<u64> {
+ let mut value = 0;
+ (self.device_fn_1_2.get_semaphore_counter_value)(self.handle(), semaphore, &mut value)
+ .result_with_success(value)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkWaitSemaphores.html>
+ #[inline]
+ pub unsafe fn wait_semaphores(
+ &self,
+ wait_info: &vk::SemaphoreWaitInfo,
+ timeout: u64,
+ ) -> VkResult<()> {
+ (self.device_fn_1_2.wait_semaphores)(self.handle(), wait_info, timeout).result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkSignalSemaphore.html>
+ #[inline]
+ pub unsafe fn signal_semaphore(&self, signal_info: &vk::SemaphoreSignalInfo) -> VkResult<()> {
+ (self.device_fn_1_2.signal_semaphore)(self.handle(), signal_info).result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetBufferDeviceAddress.html>
+ #[inline]
+ pub unsafe fn get_buffer_device_address(
+ &self,
+ info: &vk::BufferDeviceAddressInfo,
+ ) -> vk::DeviceAddress {
+ (self.device_fn_1_2.get_buffer_device_address)(self.handle(), info)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetBufferOpaqueCaptureAddress.html>
+ #[inline]
+ pub unsafe fn get_buffer_opaque_capture_address(
+ &self,
+ info: &vk::BufferDeviceAddressInfo,
+ ) -> u64 {
+ (self.device_fn_1_2.get_buffer_opaque_capture_address)(self.handle(), info)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceMemoryOpaqueCaptureAddress.html>
+ #[inline]
+ pub unsafe fn get_device_memory_opaque_capture_address(
+ &self,
+ info: &vk::DeviceMemoryOpaqueCaptureAddressInfo,
+ ) -> u64 {
+ (self.device_fn_1_2.get_device_memory_opaque_capture_address)(self.handle(), info)
+ }
+}
+
+/// Vulkan core 1.1
+#[allow(non_camel_case_types)]
+impl Device {
+ #[inline]
+ pub fn fp_v1_1(&self) -> &vk::DeviceFnV1_1 {
+ &self.device_fn_1_1
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkBindBufferMemory2.html>
+ #[inline]
+ pub unsafe fn bind_buffer_memory2(
+ &self,
+ bind_infos: &[vk::BindBufferMemoryInfo],
+ ) -> VkResult<()> {
+ (self.device_fn_1_1.bind_buffer_memory2)(
+ self.handle(),
+ bind_infos.len() as _,
+ bind_infos.as_ptr(),
+ )
+ .result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkBindImageMemory2.html>
+ #[inline]
+ pub unsafe fn bind_image_memory2(
+ &self,
+ bind_infos: &[vk::BindImageMemoryInfo],
+ ) -> VkResult<()> {
+ (self.device_fn_1_1.bind_image_memory2)(
+ self.handle(),
+ bind_infos.len() as _,
+ bind_infos.as_ptr(),
+ )
+ .result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceGroupPeerMemoryFeatures.html>
+ #[inline]
+ pub unsafe fn get_device_group_peer_memory_features(
+ &self,
+ heap_index: u32,
+ local_device_index: u32,
+ remote_device_index: u32,
+ ) -> vk::PeerMemoryFeatureFlags {
+ let mut peer_memory_features = mem::zeroed();
+ (self.device_fn_1_1.get_device_group_peer_memory_features)(
+ self.handle(),
+ heap_index,
+ local_device_index,
+ remote_device_index,
+ &mut peer_memory_features,
+ );
+ peer_memory_features
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDeviceMask.html>
+ #[inline]
+ pub unsafe fn cmd_set_device_mask(&self, command_buffer: vk::CommandBuffer, device_mask: u32) {
+ (self.device_fn_1_1.cmd_set_device_mask)(command_buffer, device_mask);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDispatchBase.html>
+ #[inline]
+ pub unsafe fn cmd_dispatch_base(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ base_group_x: u32,
+ base_group_y: u32,
+ base_group_z: u32,
+ group_count_x: u32,
+ group_count_y: u32,
+ group_count_z: u32,
+ ) {
+ (self.device_fn_1_1.cmd_dispatch_base)(
+ command_buffer,
+ base_group_x,
+ base_group_y,
+ base_group_z,
+ group_count_x,
+ group_count_y,
+ group_count_z,
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetImageMemoryRequirements2.html>
+ #[inline]
+ pub unsafe fn get_image_memory_requirements2(
+ &self,
+ info: &vk::ImageMemoryRequirementsInfo2,
+ out: &mut vk::MemoryRequirements2,
+ ) {
+ (self.device_fn_1_1.get_image_memory_requirements2)(self.handle(), info, out);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetBufferMemoryRequirements2.html>
+ #[inline]
+ pub unsafe fn get_buffer_memory_requirements2(
+ &self,
+ info: &vk::BufferMemoryRequirementsInfo2,
+ out: &mut vk::MemoryRequirements2,
+ ) {
+ (self.device_fn_1_1.get_buffer_memory_requirements2)(self.handle(), info, out);
+ }
+
+ /// Retrieve the number of elements to pass to [`get_image_sparse_memory_requirements2()`][Self::get_image_sparse_memory_requirements2()]
+ #[inline]
+ pub unsafe fn get_image_sparse_memory_requirements2_len(
+ &self,
+ info: &vk::ImageSparseMemoryRequirementsInfo2,
+ ) -> usize {
+ let mut count = 0;
+ (self.device_fn_1_1.get_image_sparse_memory_requirements2)(
+ self.handle(),
+ info,
+ &mut count,
+ ptr::null_mut(),
+ );
+ count as usize
+ }
+
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetImageSparseMemoryRequirements2.html>
///
/// Call [`get_image_sparse_memory_requirements2_len()`][Self::get_image_sparse_memory_requirements2_len()] to query the number of elements to pass to `out`.
/// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
#[inline]
pub unsafe fn get_image_sparse_memory_requirements2(
    &self,
    info: &vk::ImageSparseMemoryRequirementsInfo2,
    out: &mut [vk::SparseImageMemoryRequirements2],
) {
    let mut count = out.len() as u32;
    (self.device_fn_1_1.get_image_sparse_memory_requirements2)(
        self.handle(),
        info,
        &mut count,
        out.as_mut_ptr(),
    );
    // The driver may write fewer elements than requested; the caller is expected
    // to have sized `out` via the `_len` query, so a mismatch is a logic error.
    assert_eq!(count as usize, out.len());
}

/// Release unused internal allocations of a command pool back to the system.
///
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkTrimCommandPool.html>
#[inline]
pub unsafe fn trim_command_pool(
    &self,
    command_pool: vk::CommandPool,
    flags: vk::CommandPoolTrimFlags,
) {
    (self.device_fn_1_1.trim_command_pool)(self.handle(), command_pool, flags);
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateSamplerYcbcrConversion.html>
#[inline]
pub unsafe fn create_sampler_ycbcr_conversion(
    &self,
    create_info: &vk::SamplerYcbcrConversionCreateInfo,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::SamplerYcbcrConversion> {
    // Handles are plain (non-)dispatchable IDs, so a zeroed value is a valid
    // placeholder that the driver overwrites on success.
    let mut ycbcr_conversion = mem::zeroed();
    (self.device_fn_1_1.create_sampler_ycbcr_conversion)(
        self.handle(),
        create_info,
        allocation_callbacks.as_raw_ptr(),
        &mut ycbcr_conversion,
    )
    .result_with_success(ycbcr_conversion)
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroySamplerYcbcrConversion.html>
#[inline]
pub unsafe fn destroy_sampler_ycbcr_conversion(
    &self,
    ycbcr_conversion: vk::SamplerYcbcrConversion,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_1.destroy_sampler_ycbcr_conversion)(
        self.handle(),
        ycbcr_conversion,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateDescriptorUpdateTemplate.html>
#[inline]
pub unsafe fn create_descriptor_update_template(
    &self,
    create_info: &vk::DescriptorUpdateTemplateCreateInfo,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::DescriptorUpdateTemplate> {
    let mut descriptor_update_template = mem::zeroed();
    (self.device_fn_1_1.create_descriptor_update_template)(
        self.handle(),
        create_info,
        allocation_callbacks.as_raw_ptr(),
        &mut descriptor_update_template,
    )
    .result_with_success(descriptor_update_template)
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyDescriptorUpdateTemplate.html>
#[inline]
pub unsafe fn destroy_descriptor_update_template(
    &self,
    descriptor_update_template: vk::DescriptorUpdateTemplate,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_1.destroy_descriptor_update_template)(
        self.handle(),
        descriptor_update_template,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkUpdateDescriptorSetWithTemplate.html>
#[inline]
pub unsafe fn update_descriptor_set_with_template(
    &self,
    descriptor_set: vk::DescriptorSet,
    descriptor_update_template: vk::DescriptorUpdateTemplate,
    // Raw pointer to caller-managed data whose layout must match the template's
    // update entries; no lifetime or layout checking is possible here.
    data: *const c_void,
) {
    (self.device_fn_1_1.update_descriptor_set_with_template)(
        self.handle(),
        descriptor_set,
        descriptor_update_template,
        data,
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDescriptorSetLayoutSupport.html>
#[inline]
pub unsafe fn get_descriptor_set_layout_support(
    &self,
    create_info: &vk::DescriptorSetLayoutCreateInfo,
    out: &mut vk::DescriptorSetLayoutSupport,
) {
    (self.device_fn_1_1.get_descriptor_set_layout_support)(self.handle(), create_info, out);
}
+}
+
+/// Vulkan core 1.0
+#[allow(non_camel_case_types)]
+impl Device {
#[inline]
// Expose the raw Vulkan 1.0 function-pointer table for callers that need to
// invoke entry points not wrapped by this struct.
pub fn fp_v1_0(&self) -> &vk::DeviceFnV1_0 {
    &self.device_fn_1_0
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyDevice.html>
#[inline]
pub unsafe fn destroy_device(&self, allocation_callbacks: Option<&vk::AllocationCallbacks>) {
    (self.device_fn_1_0.destroy_device)(self.handle(), allocation_callbacks.as_raw_ptr());
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroySampler.html>
#[inline]
pub unsafe fn destroy_sampler(
    &self,
    sampler: vk::Sampler,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_sampler)(
        self.handle(),
        sampler,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkFreeMemory.html>
#[inline]
pub unsafe fn free_memory(
    &self,
    memory: vk::DeviceMemory,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.free_memory)(self.handle(), memory, allocation_callbacks.as_raw_ptr());
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkFreeCommandBuffers.html>
#[inline]
pub unsafe fn free_command_buffers(
    &self,
    command_pool: vk::CommandPool,
    command_buffers: &[vk::CommandBuffer],
) {
    // Slice is split into the (count, pointer) pair the C API expects.
    (self.device_fn_1_0.free_command_buffers)(
        self.handle(),
        command_pool,
        command_buffers.len() as u32,
        command_buffers.as_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateEvent.html>
#[inline]
pub unsafe fn create_event(
    &self,
    create_info: &vk::EventCreateInfo,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::Event> {
    let mut event = mem::zeroed();
    (self.device_fn_1_0.create_event)(
        self.handle(),
        create_info,
        allocation_callbacks.as_raw_ptr(),
        &mut event,
    )
    .result_with_success(event)
}

/// Returns [`true`] if the event was set, and [`false`] if the event was reset, otherwise it will
/// return the error code.
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetEventStatus.html>
#[inline]
pub unsafe fn get_event_status(&self, event: vk::Event) -> VkResult<bool> {
    let err_code = (self.device_fn_1_0.get_event_status)(self.handle(), event);
    // EVENT_SET / EVENT_RESET are success codes, not errors, so they are
    // mapped to Ok(bool) rather than being treated as failures.
    match err_code {
        vk::Result::EVENT_SET => Ok(true),
        vk::Result::EVENT_RESET => Ok(false),
        _ => Err(err_code),
    }
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkSetEvent.html>
#[inline]
pub unsafe fn set_event(&self, event: vk::Event) -> VkResult<()> {
    (self.device_fn_1_0.set_event)(self.handle(), event).result()
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkResetEvent.html>
#[inline]
pub unsafe fn reset_event(&self, event: vk::Event) -> VkResult<()> {
    (self.device_fn_1_0.reset_event)(self.handle(), event).result()
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetEvent.html>
#[inline]
pub unsafe fn cmd_set_event(
    &self,
    command_buffer: vk::CommandBuffer,
    event: vk::Event,
    stage_mask: vk::PipelineStageFlags,
) {
    (self.device_fn_1_0.cmd_set_event)(command_buffer, event, stage_mask);
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdResetEvent.html>
#[inline]
pub unsafe fn cmd_reset_event(
    &self,
    command_buffer: vk::CommandBuffer,
    event: vk::Event,
    stage_mask: vk::PipelineStageFlags,
) {
    (self.device_fn_1_0.cmd_reset_event)(command_buffer, event, stage_mask);
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdWaitEvents.html>
#[inline]
pub unsafe fn cmd_wait_events(
    &self,
    command_buffer: vk::CommandBuffer,
    events: &[vk::Event],
    src_stage_mask: vk::PipelineStageFlags,
    dst_stage_mask: vk::PipelineStageFlags,
    memory_barriers: &[vk::MemoryBarrier],
    buffer_memory_barriers: &[vk::BufferMemoryBarrier],
    image_memory_barriers: &[vk::ImageMemoryBarrier],
) {
    // Each slice becomes a (count, pointer) pair; the three barrier arrays are
    // independent and may each be empty.
    (self.device_fn_1_0.cmd_wait_events)(
        command_buffer,
        events.len() as _,
        events.as_ptr(),
        src_stage_mask,
        dst_stage_mask,
        memory_barriers.len() as _,
        memory_barriers.as_ptr(),
        buffer_memory_barriers.len() as _,
        buffer_memory_barriers.as_ptr(),
        image_memory_barriers.len() as _,
        image_memory_barriers.as_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyFence.html>
#[inline]
pub unsafe fn destroy_fence(
    &self,
    fence: vk::Fence,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_fence)(self.handle(), fence, allocation_callbacks.as_raw_ptr());
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyEvent.html>
#[inline]
pub unsafe fn destroy_event(
    &self,
    event: vk::Event,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_event)(self.handle(), event, allocation_callbacks.as_raw_ptr());
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyImage.html>
#[inline]
pub unsafe fn destroy_image(
    &self,
    image: vk::Image,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_image)(self.handle(), image, allocation_callbacks.as_raw_ptr());
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyCommandPool.html>
#[inline]
pub unsafe fn destroy_command_pool(
    &self,
    pool: vk::CommandPool,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_command_pool)(
        self.handle(),
        pool,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyImageView.html>
#[inline]
pub unsafe fn destroy_image_view(
    &self,
    image_view: vk::ImageView,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_image_view)(
        self.handle(),
        image_view,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyRenderPass.html>
#[inline]
pub unsafe fn destroy_render_pass(
    &self,
    renderpass: vk::RenderPass,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_render_pass)(
        self.handle(),
        renderpass,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyFramebuffer.html>
#[inline]
pub unsafe fn destroy_framebuffer(
    &self,
    framebuffer: vk::Framebuffer,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_framebuffer)(
        self.handle(),
        framebuffer,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyPipelineLayout.html>
#[inline]
pub unsafe fn destroy_pipeline_layout(
    &self,
    pipeline_layout: vk::PipelineLayout,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_pipeline_layout)(
        self.handle(),
        pipeline_layout,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyPipelineCache.html>
#[inline]
pub unsafe fn destroy_pipeline_cache(
    &self,
    pipeline_cache: vk::PipelineCache,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_pipeline_cache)(
        self.handle(),
        pipeline_cache,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyBuffer.html>
#[inline]
pub unsafe fn destroy_buffer(
    &self,
    buffer: vk::Buffer,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_buffer)(
        self.handle(),
        buffer,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyShaderModule.html>
#[inline]
pub unsafe fn destroy_shader_module(
    &self,
    shader: vk::ShaderModule,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_shader_module)(
        self.handle(),
        shader,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyPipeline.html>
#[inline]
pub unsafe fn destroy_pipeline(
    &self,
    pipeline: vk::Pipeline,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_pipeline)(
        self.handle(),
        pipeline,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroySemaphore.html>
#[inline]
pub unsafe fn destroy_semaphore(
    &self,
    semaphore: vk::Semaphore,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_semaphore)(
        self.handle(),
        semaphore,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyDescriptorPool.html>
#[inline]
pub unsafe fn destroy_descriptor_pool(
    &self,
    pool: vk::DescriptorPool,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_descriptor_pool)(
        self.handle(),
        pool,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyQueryPool.html>
#[inline]
pub unsafe fn destroy_query_pool(
    &self,
    pool: vk::QueryPool,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_query_pool)(
        self.handle(),
        pool,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyDescriptorSetLayout.html>
#[inline]
pub unsafe fn destroy_descriptor_set_layout(
    &self,
    layout: vk::DescriptorSetLayout,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) {
    (self.device_fn_1_0.destroy_descriptor_set_layout)(
        self.handle(),
        layout,
        allocation_callbacks.as_raw_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkFreeDescriptorSets.html>
#[inline]
pub unsafe fn free_descriptor_sets(
    &self,
    pool: vk::DescriptorPool,
    descriptor_sets: &[vk::DescriptorSet],
) -> VkResult<()> {
    (self.device_fn_1_0.free_descriptor_sets)(
        self.handle(),
        pool,
        descriptor_sets.len() as u32,
        descriptor_sets.as_ptr(),
    )
    .result()
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkUpdateDescriptorSets.html>
#[inline]
pub unsafe fn update_descriptor_sets(
    &self,
    descriptor_writes: &[vk::WriteDescriptorSet],
    descriptor_copies: &[vk::CopyDescriptorSet],
) {
    (self.device_fn_1_0.update_descriptor_sets)(
        self.handle(),
        descriptor_writes.len() as u32,
        descriptor_writes.as_ptr(),
        descriptor_copies.len() as u32,
        descriptor_copies.as_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateSampler.html>
#[inline]
pub unsafe fn create_sampler(
    &self,
    create_info: &vk::SamplerCreateInfo,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::Sampler> {
    let mut sampler = mem::zeroed();
    (self.device_fn_1_0.create_sampler)(
        self.handle(),
        create_info,
        allocation_callbacks.as_raw_ptr(),
        &mut sampler,
    )
    .result_with_success(sampler)
}
+
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBlitImage.html>
#[inline]
pub unsafe fn cmd_blit_image(
    &self,
    command_buffer: vk::CommandBuffer,
    src_image: vk::Image,
    src_image_layout: vk::ImageLayout,
    dst_image: vk::Image,
    dst_image_layout: vk::ImageLayout,
    regions: &[vk::ImageBlit],
    filter: vk::Filter,
) {
    (self.device_fn_1_0.cmd_blit_image)(
        command_buffer,
        src_image,
        src_image_layout,
        dst_image,
        dst_image_layout,
        regions.len() as _,
        regions.as_ptr(),
        filter,
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdResolveImage.html>
#[inline]
pub unsafe fn cmd_resolve_image(
    &self,
    command_buffer: vk::CommandBuffer,
    src_image: vk::Image,
    src_image_layout: vk::ImageLayout,
    dst_image: vk::Image,
    dst_image_layout: vk::ImageLayout,
    regions: &[vk::ImageResolve],
) {
    (self.device_fn_1_0.cmd_resolve_image)(
        command_buffer,
        src_image,
        src_image_layout,
        dst_image,
        dst_image_layout,
        regions.len() as u32,
        regions.as_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdFillBuffer.html>
#[inline]
pub unsafe fn cmd_fill_buffer(
    &self,
    command_buffer: vk::CommandBuffer,
    buffer: vk::Buffer,
    offset: vk::DeviceSize,
    size: vk::DeviceSize,
    data: u32,
) {
    (self.device_fn_1_0.cmd_fill_buffer)(command_buffer, buffer, offset, size, data);
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdUpdateBuffer.html>
#[inline]
pub unsafe fn cmd_update_buffer(
    &self,
    command_buffer: vk::CommandBuffer,
    buffer: vk::Buffer,
    offset: vk::DeviceSize,
    data: &[u8],
) {
    // The byte slice is passed as (VkDeviceSize length, *const c_void) as the
    // C API requires.
    (self.device_fn_1_0.cmd_update_buffer)(
        command_buffer,
        buffer,
        offset,
        data.len() as u64,
        data.as_ptr() as _,
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyBuffer.html>
#[inline]
pub unsafe fn cmd_copy_buffer(
    &self,
    command_buffer: vk::CommandBuffer,
    src_buffer: vk::Buffer,
    dst_buffer: vk::Buffer,
    regions: &[vk::BufferCopy],
) {
    (self.device_fn_1_0.cmd_copy_buffer)(
        command_buffer,
        src_buffer,
        dst_buffer,
        regions.len() as u32,
        regions.as_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyImageToBuffer.html>
#[inline]
pub unsafe fn cmd_copy_image_to_buffer(
    &self,
    command_buffer: vk::CommandBuffer,
    src_image: vk::Image,
    src_image_layout: vk::ImageLayout,
    dst_buffer: vk::Buffer,
    regions: &[vk::BufferImageCopy],
) {
    (self.device_fn_1_0.cmd_copy_image_to_buffer)(
        command_buffer,
        src_image,
        src_image_layout,
        dst_buffer,
        regions.len() as u32,
        regions.as_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyBufferToImage.html>
#[inline]
pub unsafe fn cmd_copy_buffer_to_image(
    &self,
    command_buffer: vk::CommandBuffer,
    src_buffer: vk::Buffer,
    dst_image: vk::Image,
    dst_image_layout: vk::ImageLayout,
    regions: &[vk::BufferImageCopy],
) {
    (self.device_fn_1_0.cmd_copy_buffer_to_image)(
        command_buffer,
        src_buffer,
        dst_image,
        dst_image_layout,
        regions.len() as u32,
        regions.as_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyImage.html>
#[inline]
pub unsafe fn cmd_copy_image(
    &self,
    command_buffer: vk::CommandBuffer,
    src_image: vk::Image,
    src_image_layout: vk::ImageLayout,
    dst_image: vk::Image,
    dst_image_layout: vk::ImageLayout,
    regions: &[vk::ImageCopy],
) {
    (self.device_fn_1_0.cmd_copy_image)(
        command_buffer,
        src_image,
        src_image_layout,
        dst_image,
        dst_image_layout,
        regions.len() as u32,
        regions.as_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkAllocateDescriptorSets.html>
#[inline]
pub unsafe fn allocate_descriptor_sets(
    &self,
    create_info: &vk::DescriptorSetAllocateInfo,
) -> VkResult<Vec<vk::DescriptorSet>> {
    // The Vec is created with the exact capacity the driver will fill, then the
    // length is committed with `set_len` only after the call succeeds — on
    // error the Vec is dropped while still logically empty.
    let mut desc_set = Vec::with_capacity(create_info.descriptor_set_count as usize);
    (self.device_fn_1_0.allocate_descriptor_sets)(
        self.handle(),
        create_info,
        desc_set.as_mut_ptr(),
    )
    .result()?;

    desc_set.set_len(create_info.descriptor_set_count as usize);
    Ok(desc_set)
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateDescriptorSetLayout.html>
#[inline]
pub unsafe fn create_descriptor_set_layout(
    &self,
    create_info: &vk::DescriptorSetLayoutCreateInfo,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::DescriptorSetLayout> {
    let mut layout = mem::zeroed();
    (self.device_fn_1_0.create_descriptor_set_layout)(
        self.handle(),
        create_info,
        allocation_callbacks.as_raw_ptr(),
        &mut layout,
    )
    .result_with_success(layout)
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDeviceWaitIdle.html>
#[inline]
pub unsafe fn device_wait_idle(&self) -> VkResult<()> {
    (self.device_fn_1_0.device_wait_idle)(self.handle()).result()
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateDescriptorPool.html>
#[inline]
pub unsafe fn create_descriptor_pool(
    &self,
    create_info: &vk::DescriptorPoolCreateInfo,
    allocation_callbacks: Option<&vk::AllocationCallbacks>,
) -> VkResult<vk::DescriptorPool> {
    let mut pool = mem::zeroed();
    (self.device_fn_1_0.create_descriptor_pool)(
        self.handle(),
        create_info,
        allocation_callbacks.as_raw_ptr(),
        &mut pool,
    )
    .result_with_success(pool)
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkResetDescriptorPool.html>
#[inline]
pub unsafe fn reset_descriptor_pool(
    &self,
    pool: vk::DescriptorPool,
    flags: vk::DescriptorPoolResetFlags,
) -> VkResult<()> {
    (self.device_fn_1_0.reset_descriptor_pool)(self.handle(), pool, flags).result()
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkResetCommandPool.html>
#[inline]
pub unsafe fn reset_command_pool(
    &self,
    command_pool: vk::CommandPool,
    flags: vk::CommandPoolResetFlags,
) -> VkResult<()> {
    (self.device_fn_1_0.reset_command_pool)(self.handle(), command_pool, flags).result()
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkResetCommandBuffer.html>
#[inline]
pub unsafe fn reset_command_buffer(
    &self,
    command_buffer: vk::CommandBuffer,
    flags: vk::CommandBufferResetFlags,
) -> VkResult<()> {
    // Note: command buffers are dispatchable handles, so no VkDevice argument
    // is needed here.
    (self.device_fn_1_0.reset_command_buffer)(command_buffer, flags).result()
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkResetFences.html>
#[inline]
pub unsafe fn reset_fences(&self, fences: &[vk::Fence]) -> VkResult<()> {
    (self.device_fn_1_0.reset_fences)(self.handle(), fences.len() as u32, fences.as_ptr())
        .result()
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBindIndexBuffer.html>
#[inline]
pub unsafe fn cmd_bind_index_buffer(
    &self,
    command_buffer: vk::CommandBuffer,
    buffer: vk::Buffer,
    offset: vk::DeviceSize,
    index_type: vk::IndexType,
) {
    (self.device_fn_1_0.cmd_bind_index_buffer)(command_buffer, buffer, offset, index_type);
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdClearColorImage.html>
#[inline]
pub unsafe fn cmd_clear_color_image(
    &self,
    command_buffer: vk::CommandBuffer,
    image: vk::Image,
    image_layout: vk::ImageLayout,
    clear_color_value: &vk::ClearColorValue,
    ranges: &[vk::ImageSubresourceRange],
) {
    (self.device_fn_1_0.cmd_clear_color_image)(
        command_buffer,
        image,
        image_layout,
        clear_color_value,
        ranges.len() as u32,
        ranges.as_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdClearDepthStencilImage.html>
#[inline]
pub unsafe fn cmd_clear_depth_stencil_image(
    &self,
    command_buffer: vk::CommandBuffer,
    image: vk::Image,
    image_layout: vk::ImageLayout,
    clear_depth_stencil_value: &vk::ClearDepthStencilValue,
    ranges: &[vk::ImageSubresourceRange],
) {
    (self.device_fn_1_0.cmd_clear_depth_stencil_image)(
        command_buffer,
        image,
        image_layout,
        clear_depth_stencil_value,
        ranges.len() as u32,
        ranges.as_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdClearAttachments.html>
#[inline]
pub unsafe fn cmd_clear_attachments(
    &self,
    command_buffer: vk::CommandBuffer,
    attachments: &[vk::ClearAttachment],
    rects: &[vk::ClearRect],
) {
    (self.device_fn_1_0.cmd_clear_attachments)(
        command_buffer,
        attachments.len() as u32,
        attachments.as_ptr(),
        rects.len() as u32,
        rects.as_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndexed.html>
#[inline]
pub unsafe fn cmd_draw_indexed(
    &self,
    command_buffer: vk::CommandBuffer,
    index_count: u32,
    instance_count: u32,
    first_index: u32,
    vertex_offset: i32,
    first_instance: u32,
) {
    (self.device_fn_1_0.cmd_draw_indexed)(
        command_buffer,
        index_count,
        instance_count,
        first_index,
        vertex_offset,
        first_instance,
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndexedIndirect.html>
#[inline]
pub unsafe fn cmd_draw_indexed_indirect(
    &self,
    command_buffer: vk::CommandBuffer,
    buffer: vk::Buffer,
    offset: vk::DeviceSize,
    draw_count: u32,
    stride: u32,
) {
    (self.device_fn_1_0.cmd_draw_indexed_indirect)(
        command_buffer,
        buffer,
        offset,
        draw_count,
        stride,
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdExecuteCommands.html>
#[inline]
pub unsafe fn cmd_execute_commands(
    &self,
    primary_command_buffer: vk::CommandBuffer,
    secondary_command_buffers: &[vk::CommandBuffer],
) {
    (self.device_fn_1_0.cmd_execute_commands)(
        primary_command_buffer,
        secondary_command_buffers.len() as u32,
        secondary_command_buffers.as_ptr(),
    );
}
+
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBindDescriptorSets.html>
#[inline]
pub unsafe fn cmd_bind_descriptor_sets(
    &self,
    command_buffer: vk::CommandBuffer,
    pipeline_bind_point: vk::PipelineBindPoint,
    layout: vk::PipelineLayout,
    first_set: u32,
    descriptor_sets: &[vk::DescriptorSet],
    dynamic_offsets: &[u32],
) {
    (self.device_fn_1_0.cmd_bind_descriptor_sets)(
        command_buffer,
        pipeline_bind_point,
        layout,
        first_set,
        descriptor_sets.len() as u32,
        descriptor_sets.as_ptr(),
        dynamic_offsets.len() as u32,
        dynamic_offsets.as_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyQueryPoolResults.html>
#[inline]
pub unsafe fn cmd_copy_query_pool_results(
    &self,
    command_buffer: vk::CommandBuffer,
    query_pool: vk::QueryPool,
    first_query: u32,
    query_count: u32,
    dst_buffer: vk::Buffer,
    dst_offset: vk::DeviceSize,
    stride: vk::DeviceSize,
    flags: vk::QueryResultFlags,
) {
    (self.device_fn_1_0.cmd_copy_query_pool_results)(
        command_buffer,
        query_pool,
        first_query,
        query_count,
        dst_buffer,
        dst_offset,
        stride,
        flags,
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdPushConstants.html>
#[inline]
pub unsafe fn cmd_push_constants(
    &self,
    command_buffer: vk::CommandBuffer,
    layout: vk::PipelineLayout,
    stage_flags: vk::ShaderStageFlags,
    offset: u32,
    constants: &[u8],
) {
    // The byte slice becomes (u32 size, *const c_void); `offset` and the slice
    // length are in bytes.
    (self.device_fn_1_0.cmd_push_constants)(
        command_buffer,
        layout,
        stage_flags,
        offset,
        constants.len() as _,
        constants.as_ptr() as _,
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBeginRenderPass.html>
#[inline]
pub unsafe fn cmd_begin_render_pass(
    &self,
    command_buffer: vk::CommandBuffer,
    create_info: &vk::RenderPassBeginInfo,
    contents: vk::SubpassContents,
) {
    (self.device_fn_1_0.cmd_begin_render_pass)(command_buffer, create_info, contents);
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdNextSubpass.html>
#[inline]
pub unsafe fn cmd_next_subpass(
    &self,
    command_buffer: vk::CommandBuffer,
    contents: vk::SubpassContents,
) {
    (self.device_fn_1_0.cmd_next_subpass)(command_buffer, contents);
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBindPipeline.html>
#[inline]
pub unsafe fn cmd_bind_pipeline(
    &self,
    command_buffer: vk::CommandBuffer,
    pipeline_bind_point: vk::PipelineBindPoint,
    pipeline: vk::Pipeline,
) {
    (self.device_fn_1_0.cmd_bind_pipeline)(command_buffer, pipeline_bind_point, pipeline);
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetScissor.html>
#[inline]
pub unsafe fn cmd_set_scissor(
    &self,
    command_buffer: vk::CommandBuffer,
    first_scissor: u32,
    scissors: &[vk::Rect2D],
) {
    (self.device_fn_1_0.cmd_set_scissor)(
        command_buffer,
        first_scissor,
        scissors.len() as u32,
        scissors.as_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetLineWidth.html>
#[inline]
pub unsafe fn cmd_set_line_width(&self, command_buffer: vk::CommandBuffer, line_width: f32) {
    (self.device_fn_1_0.cmd_set_line_width)(command_buffer, line_width);
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBindVertexBuffers.html>
#[inline]
pub unsafe fn cmd_bind_vertex_buffers(
    &self,
    command_buffer: vk::CommandBuffer,
    first_binding: u32,
    buffers: &[vk::Buffer],
    offsets: &[vk::DeviceSize],
) {
    // The C API reads `buffers.len()` elements from both arrays, so the two
    // slices must be the same length (checked only in debug builds).
    debug_assert_eq!(buffers.len(), offsets.len());
    (self.device_fn_1_0.cmd_bind_vertex_buffers)(
        command_buffer,
        first_binding,
        buffers.len() as u32,
        buffers.as_ptr(),
        offsets.as_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdEndRenderPass.html>
#[inline]
pub unsafe fn cmd_end_render_pass(&self, command_buffer: vk::CommandBuffer) {
    (self.device_fn_1_0.cmd_end_render_pass)(command_buffer);
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDraw.html>
#[inline]
pub unsafe fn cmd_draw(
    &self,
    command_buffer: vk::CommandBuffer,
    vertex_count: u32,
    instance_count: u32,
    first_vertex: u32,
    first_instance: u32,
) {
    (self.device_fn_1_0.cmd_draw)(
        command_buffer,
        vertex_count,
        instance_count,
        first_vertex,
        first_instance,
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndirect.html>
#[inline]
pub unsafe fn cmd_draw_indirect(
    &self,
    command_buffer: vk::CommandBuffer,
    buffer: vk::Buffer,
    offset: vk::DeviceSize,
    draw_count: u32,
    stride: u32,
) {
    (self.device_fn_1_0.cmd_draw_indirect)(command_buffer, buffer, offset, draw_count, stride);
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDispatch.html>
#[inline]
pub unsafe fn cmd_dispatch(
    &self,
    command_buffer: vk::CommandBuffer,
    group_count_x: u32,
    group_count_y: u32,
    group_count_z: u32,
) {
    (self.device_fn_1_0.cmd_dispatch)(
        command_buffer,
        group_count_x,
        group_count_y,
        group_count_z,
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDispatchIndirect.html>
#[inline]
pub unsafe fn cmd_dispatch_indirect(
    &self,
    command_buffer: vk::CommandBuffer,
    buffer: vk::Buffer,
    offset: vk::DeviceSize,
) {
    (self.device_fn_1_0.cmd_dispatch_indirect)(command_buffer, buffer, offset);
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetViewport.html>
#[inline]
pub unsafe fn cmd_set_viewport(
    &self,
    command_buffer: vk::CommandBuffer,
    first_viewport: u32,
    viewports: &[vk::Viewport],
) {
    (self.device_fn_1_0.cmd_set_viewport)(
        command_buffer,
        first_viewport,
        viewports.len() as u32,
        viewports.as_ptr(),
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthBias.html>
#[inline]
pub unsafe fn cmd_set_depth_bias(
    &self,
    command_buffer: vk::CommandBuffer,
    constant_factor: f32,
    clamp: f32,
    slope_factor: f32,
) {
    (self.device_fn_1_0.cmd_set_depth_bias)(
        command_buffer,
        constant_factor,
        clamp,
        slope_factor,
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetBlendConstants.html>
#[inline]
pub unsafe fn cmd_set_blend_constants(
    &self,
    command_buffer: vk::CommandBuffer,
    blend_constants: &[f32; 4],
) {
    (self.device_fn_1_0.cmd_set_blend_constants)(command_buffer, blend_constants);
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthBounds.html>
#[inline]
pub unsafe fn cmd_set_depth_bounds(
    &self,
    command_buffer: vk::CommandBuffer,
    min_depth_bounds: f32,
    max_depth_bounds: f32,
) {
    (self.device_fn_1_0.cmd_set_depth_bounds)(
        command_buffer,
        min_depth_bounds,
        max_depth_bounds,
    );
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilCompareMask.html>
#[inline]
pub unsafe fn cmd_set_stencil_compare_mask(
    &self,
    command_buffer: vk::CommandBuffer,
    face_mask: vk::StencilFaceFlags,
    compare_mask: u32,
) {
    (self.device_fn_1_0.cmd_set_stencil_compare_mask)(command_buffer, face_mask, compare_mask);
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilWriteMask.html>
#[inline]
pub unsafe fn cmd_set_stencil_write_mask(
    &self,
    command_buffer: vk::CommandBuffer,
    face_mask: vk::StencilFaceFlags,
    write_mask: u32,
) {
    (self.device_fn_1_0.cmd_set_stencil_write_mask)(command_buffer, face_mask, write_mask);
}

/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilReference.html>
#[inline]
pub unsafe fn cmd_set_stencil_reference(
    &self,
    command_buffer: vk::CommandBuffer,
    face_mask: vk::StencilFaceFlags,
    reference: u32,
) {
    (self.device_fn_1_0.cmd_set_stencil_reference)(command_buffer, face_mask, reference);
}
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetQueryPoolResults.html>
+ ///
+ /// Copies `query_count` results into `data`, one `T` per query (the stride is
+ /// fixed to `size_of::<T>()`). `T` should be `u32` or `u64` depending on
+ /// whether `flags` contains `QueryResultFlags::TYPE_64` — TODO confirm against callers.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `data` is shorter than `query_count` elements.
+ #[inline]
+ pub unsafe fn get_query_pool_results<T>(
+ &self,
+ query_pool: vk::QueryPool,
+ first_query: u32,
+ query_count: u32,
+ data: &mut [T],
+ flags: vk::QueryResultFlags,
+ ) -> VkResult<()> {
+ let data_length = query_count as usize;
+ // Guard against the driver writing past the end of the caller's slice.
+ assert!(
+ data_length <= data.len(),
+ "query_count was higher than the length of the slice"
+ );
+ let data_size = mem::size_of::<T>() * data_length;
+ (self.device_fn_1_0.get_query_pool_results)(
+ self.handle(),
+ query_pool,
+ first_query,
+ query_count,
+ data_size,
+ data.as_mut_ptr().cast(),
+ mem::size_of::<T>() as _,
+ flags,
+ )
+ .result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBeginQuery.html>
+ ///
+ /// Begins a query for slot `query` of `query_pool`.
+ #[inline]
+ pub unsafe fn cmd_begin_query(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ query_pool: vk::QueryPool,
+ query: u32,
+ flags: vk::QueryControlFlags,
+ ) {
+ (self.device_fn_1_0.cmd_begin_query)(command_buffer, query_pool, query, flags);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdEndQuery.html>
+ ///
+ /// Ends the active query in slot `query` of `query_pool`.
+ #[inline]
+ pub unsafe fn cmd_end_query(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ query_pool: vk::QueryPool,
+ query: u32,
+ ) {
+ (self.device_fn_1_0.cmd_end_query)(command_buffer, query_pool, query);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdResetQueryPool.html>
+ ///
+ /// Resets `query_count` queries starting at `first_query`.
+ #[inline]
+ pub unsafe fn cmd_reset_query_pool(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ pool: vk::QueryPool,
+ first_query: u32,
+ query_count: u32,
+ ) {
+ (self.device_fn_1_0.cmd_reset_query_pool)(command_buffer, pool, first_query, query_count);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdWriteTimestamp.html>
+ ///
+ /// Writes a timestamp into `query` after the given pipeline stage completes.
+ #[inline]
+ pub unsafe fn cmd_write_timestamp(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ pipeline_stage: vk::PipelineStageFlags,
+ query_pool: vk::QueryPool,
+ query: u32,
+ ) {
+ (self.device_fn_1_0.cmd_write_timestamp)(command_buffer, pipeline_stage, query_pool, query);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateSemaphore.html>
+ ///
+ /// Creates a semaphore, returning its handle on success.
+ #[inline]
+ pub unsafe fn create_semaphore(
+ &self,
+ create_info: &vk::SemaphoreCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::Semaphore> {
+ let mut semaphore = mem::zeroed();
+ (self.device_fn_1_0.create_semaphore)(
+ self.handle(),
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut semaphore,
+ )
+ .result_with_success(semaphore)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateGraphicsPipelines.html>
+ ///
+ /// Creates one pipeline per element of `create_infos`. On failure the
+ /// already-written handles are returned alongside the error code so the
+ /// caller can destroy them.
+ #[inline]
+ pub unsafe fn create_graphics_pipelines(
+ &self,
+ pipeline_cache: vk::PipelineCache,
+ create_infos: &[vk::GraphicsPipelineCreateInfo],
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> Result<Vec<vk::Pipeline>, (Vec<vk::Pipeline>, vk::Result)> {
+ let mut pipelines = Vec::with_capacity(create_infos.len());
+ let err_code = (self.device_fn_1_0.create_graphics_pipelines)(
+ self.handle(),
+ pipeline_cache,
+ create_infos.len() as u32,
+ create_infos.as_ptr(),
+ allocation_callbacks.as_raw_ptr(),
+ pipelines.as_mut_ptr(),
+ );
+ // Length is set even on error: the output array is filled by the driver
+ // (failed entries are NULL handles per the spec) and is handed back below.
+ pipelines.set_len(create_infos.len());
+ match err_code {
+ vk::Result::SUCCESS => Ok(pipelines),
+ _ => Err((pipelines, err_code)),
+ }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateComputePipelines.html>
+ ///
+ /// Compute-pipeline analogue of [`create_graphics_pipelines`](Self::create_graphics_pipelines).
+ #[inline]
+ pub unsafe fn create_compute_pipelines(
+ &self,
+ pipeline_cache: vk::PipelineCache,
+ create_infos: &[vk::ComputePipelineCreateInfo],
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> Result<Vec<vk::Pipeline>, (Vec<vk::Pipeline>, vk::Result)> {
+ let mut pipelines = Vec::with_capacity(create_infos.len());
+ let err_code = (self.device_fn_1_0.create_compute_pipelines)(
+ self.handle(),
+ pipeline_cache,
+ create_infos.len() as u32,
+ create_infos.as_ptr(),
+ allocation_callbacks.as_raw_ptr(),
+ pipelines.as_mut_ptr(),
+ );
+ // See create_graphics_pipelines: handles are returned even on failure.
+ pipelines.set_len(create_infos.len());
+ match err_code {
+ vk::Result::SUCCESS => Ok(pipelines),
+ _ => Err((pipelines, err_code)),
+ }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateBuffer.html>
+ ///
+ /// Creates a buffer, returning its handle on success.
+ #[inline]
+ pub unsafe fn create_buffer(
+ &self,
+ create_info: &vk::BufferCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::Buffer> {
+ let mut buffer = mem::zeroed();
+ (self.device_fn_1_0.create_buffer)(
+ self.handle(),
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut buffer,
+ )
+ .result_with_success(buffer)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreatePipelineLayout.html>
+ ///
+ /// Creates a pipeline layout, returning its handle on success.
+ #[inline]
+ pub unsafe fn create_pipeline_layout(
+ &self,
+ create_info: &vk::PipelineLayoutCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::PipelineLayout> {
+ let mut pipeline_layout = mem::zeroed();
+ (self.device_fn_1_0.create_pipeline_layout)(
+ self.handle(),
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut pipeline_layout,
+ )
+ .result_with_success(pipeline_layout)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreatePipelineCache.html>
+ ///
+ /// Creates a pipeline cache, returning its handle on success.
+ #[inline]
+ pub unsafe fn create_pipeline_cache(
+ &self,
+ create_info: &vk::PipelineCacheCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::PipelineCache> {
+ let mut pipeline_cache = mem::zeroed();
+ (self.device_fn_1_0.create_pipeline_cache)(
+ self.handle(),
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut pipeline_cache,
+ )
+ .result_with_success(pipeline_cache)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPipelineCacheData.html>
+ ///
+ /// Fetches the serialized pipeline-cache blob via the two-call
+ /// size-then-fill pattern handled by `read_into_uninitialized_vector`.
+ #[inline]
+ pub unsafe fn get_pipeline_cache_data(
+ &self,
+ pipeline_cache: vk::PipelineCache,
+ ) -> VkResult<Vec<u8>> {
+ read_into_uninitialized_vector(|count, data| {
+ (self.device_fn_1_0.get_pipeline_cache_data)(
+ self.handle(),
+ pipeline_cache,
+ count,
+ data as _,
+ )
+ })
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkMergePipelineCaches.html>
+ ///
+ /// Merges the contents of `src_caches` into `dst_cache`.
+ #[inline]
+ pub unsafe fn merge_pipeline_caches(
+ &self,
+ dst_cache: vk::PipelineCache,
+ src_caches: &[vk::PipelineCache],
+ ) -> VkResult<()> {
+ (self.device_fn_1_0.merge_pipeline_caches)(
+ self.handle(),
+ dst_cache,
+ src_caches.len() as u32,
+ src_caches.as_ptr(),
+ )
+ .result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkMapMemory.html>
+ ///
+ /// Maps a range of host-visible device memory, returning the host pointer on success.
+ #[inline]
+ pub unsafe fn map_memory(
+ &self,
+ memory: vk::DeviceMemory,
+ offset: vk::DeviceSize,
+ size: vk::DeviceSize,
+ flags: vk::MemoryMapFlags,
+ ) -> VkResult<*mut c_void> {
+ let mut data: *mut c_void = ptr::null_mut();
+ (self.device_fn_1_0.map_memory)(self.handle(), memory, offset, size, flags, &mut data)
+ .result_with_success(data)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkUnmapMemory.html>
+ ///
+ /// Unmaps memory previously mapped with [`map_memory`](Self::map_memory).
+ #[inline]
+ pub unsafe fn unmap_memory(&self, memory: vk::DeviceMemory) {
+ (self.device_fn_1_0.unmap_memory)(self.handle(), memory);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkInvalidateMappedMemoryRanges.html>
+ ///
+ /// Makes device writes in the given mapped ranges visible to the host.
+ #[inline]
+ pub unsafe fn invalidate_mapped_memory_ranges(
+ &self,
+ ranges: &[vk::MappedMemoryRange],
+ ) -> VkResult<()> {
+ (self.device_fn_1_0.invalidate_mapped_memory_ranges)(
+ self.handle(),
+ ranges.len() as u32,
+ ranges.as_ptr(),
+ )
+ .result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkFlushMappedMemoryRanges.html>
+ ///
+ /// Makes host writes in the given mapped ranges visible to the device.
+ #[inline]
+ pub unsafe fn flush_mapped_memory_ranges(
+ &self,
+ ranges: &[vk::MappedMemoryRange],
+ ) -> VkResult<()> {
+ (self.device_fn_1_0.flush_mapped_memory_ranges)(
+ self.handle(),
+ ranges.len() as u32,
+ ranges.as_ptr(),
+ )
+ .result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateFramebuffer.html>
+ ///
+ /// Creates a framebuffer, returning its handle on success.
+ #[inline]
+ pub unsafe fn create_framebuffer(
+ &self,
+ create_info: &vk::FramebufferCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::Framebuffer> {
+ let mut framebuffer = mem::zeroed();
+ (self.device_fn_1_0.create_framebuffer)(
+ self.handle(),
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut framebuffer,
+ )
+ .result_with_success(framebuffer)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceQueue.html>
+ ///
+ /// Retrieves the handle of queue `queue_index` in family `queue_family_index`.
+ #[inline]
+ pub unsafe fn get_device_queue(&self, queue_family_index: u32, queue_index: u32) -> vk::Queue {
+ let mut queue = mem::zeroed();
+ (self.device_fn_1_0.get_device_queue)(
+ self.handle(),
+ queue_family_index,
+ queue_index,
+ &mut queue,
+ );
+ queue
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdPipelineBarrier.html>
+ ///
+ /// Records a pipeline barrier; each barrier count is derived from the
+ /// corresponding slice length.
+ #[inline]
+ pub unsafe fn cmd_pipeline_barrier(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ src_stage_mask: vk::PipelineStageFlags,
+ dst_stage_mask: vk::PipelineStageFlags,
+ dependency_flags: vk::DependencyFlags,
+ memory_barriers: &[vk::MemoryBarrier],
+ buffer_memory_barriers: &[vk::BufferMemoryBarrier],
+ image_memory_barriers: &[vk::ImageMemoryBarrier],
+ ) {
+ (self.device_fn_1_0.cmd_pipeline_barrier)(
+ command_buffer,
+ src_stage_mask,
+ dst_stage_mask,
+ dependency_flags,
+ memory_barriers.len() as u32,
+ memory_barriers.as_ptr(),
+ buffer_memory_barriers.len() as u32,
+ buffer_memory_barriers.as_ptr(),
+ image_memory_barriers.len() as u32,
+ image_memory_barriers.as_ptr(),
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateRenderPass.html>
+ ///
+ /// Creates a render pass, returning its handle on success.
+ #[inline]
+ pub unsafe fn create_render_pass(
+ &self,
+ create_info: &vk::RenderPassCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::RenderPass> {
+ let mut renderpass = mem::zeroed();
+ (self.device_fn_1_0.create_render_pass)(
+ self.handle(),
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut renderpass,
+ )
+ .result_with_success(renderpass)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkBeginCommandBuffer.html>
+ ///
+ /// Puts the command buffer into the recording state.
+ #[inline]
+ pub unsafe fn begin_command_buffer(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ begin_info: &vk::CommandBufferBeginInfo,
+ ) -> VkResult<()> {
+ (self.device_fn_1_0.begin_command_buffer)(command_buffer, begin_info).result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkEndCommandBuffer.html>
+ ///
+ /// Finishes recording, making the command buffer submittable.
+ #[inline]
+ pub unsafe fn end_command_buffer(&self, command_buffer: vk::CommandBuffer) -> VkResult<()> {
+ (self.device_fn_1_0.end_command_buffer)(command_buffer).result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkWaitForFences.html>
+ ///
+ /// Blocks until all (or, if `wait_all` is false, any) of `fences` signal,
+ /// or `timeout` nanoseconds elapse.
+ #[inline]
+ pub unsafe fn wait_for_fences(
+ &self,
+ fences: &[vk::Fence],
+ wait_all: bool,
+ timeout: u64,
+ ) -> VkResult<()> {
+ (self.device_fn_1_0.wait_for_fences)(
+ self.handle(),
+ fences.len() as u32,
+ fences.as_ptr(),
+ // bool -> VkBool32 (0 or 1).
+ wait_all as u32,
+ timeout,
+ )
+ .result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetFenceStatus.html>
+ ///
+ /// Returns `Ok(true)` if the fence is signaled, `Ok(false)` if not yet
+ /// signaled (`NOT_READY`), and `Err` for anything else.
+ #[inline]
+ pub unsafe fn get_fence_status(&self, fence: vk::Fence) -> VkResult<bool> {
+ let err_code = (self.device_fn_1_0.get_fence_status)(self.handle(), fence);
+ match err_code {
+ vk::Result::SUCCESS => Ok(true),
+ vk::Result::NOT_READY => Ok(false),
+ _ => Err(err_code),
+ }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkQueueWaitIdle.html>
+ ///
+ /// Blocks until all work submitted to `queue` has completed.
+ #[inline]
+ pub unsafe fn queue_wait_idle(&self, queue: vk::Queue) -> VkResult<()> {
+ (self.device_fn_1_0.queue_wait_idle)(queue).result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkQueueSubmit.html>
+ ///
+ /// Submits the batches in `submits` to `queue`, optionally signaling `fence`.
+ #[inline]
+ pub unsafe fn queue_submit(
+ &self,
+ queue: vk::Queue,
+ submits: &[vk::SubmitInfo],
+ fence: vk::Fence,
+ ) -> VkResult<()> {
+ (self.device_fn_1_0.queue_submit)(queue, submits.len() as u32, submits.as_ptr(), fence)
+ .result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkQueueBindSparse.html>
+ ///
+ /// Submits sparse binding operations to `queue`, optionally signaling `fence`.
+ #[inline]
+ pub unsafe fn queue_bind_sparse(
+ &self,
+ queue: vk::Queue,
+ bind_info: &[vk::BindSparseInfo],
+ fence: vk::Fence,
+ ) -> VkResult<()> {
+ (self.device_fn_1_0.queue_bind_sparse)(
+ queue,
+ bind_info.len() as u32,
+ bind_info.as_ptr(),
+ fence,
+ )
+ .result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateBufferView.html>
+ ///
+ /// Creates a buffer view, returning its handle on success.
+ #[inline]
+ pub unsafe fn create_buffer_view(
+ &self,
+ create_info: &vk::BufferViewCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::BufferView> {
+ let mut buffer_view = mem::zeroed();
+ (self.device_fn_1_0.create_buffer_view)(
+ self.handle(),
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut buffer_view,
+ )
+ .result_with_success(buffer_view)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyBufferView.html>
+ ///
+ /// Destroys a buffer view created with the matching allocation callbacks.
+ #[inline]
+ pub unsafe fn destroy_buffer_view(
+ &self,
+ buffer_view: vk::BufferView,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) {
+ (self.device_fn_1_0.destroy_buffer_view)(
+ self.handle(),
+ buffer_view,
+ allocation_callbacks.as_raw_ptr(),
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateImageView.html>
+ ///
+ /// Creates an image view, returning its handle on success.
+ #[inline]
+ pub unsafe fn create_image_view(
+ &self,
+ create_info: &vk::ImageViewCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::ImageView> {
+ let mut image_view = mem::zeroed();
+ (self.device_fn_1_0.create_image_view)(
+ self.handle(),
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut image_view,
+ )
+ .result_with_success(image_view)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkAllocateCommandBuffers.html>
+ ///
+ /// Allocates `create_info.command_buffer_count` command buffers from the pool.
+ #[inline]
+ pub unsafe fn allocate_command_buffers(
+ &self,
+ create_info: &vk::CommandBufferAllocateInfo,
+ ) -> VkResult<Vec<vk::CommandBuffer>> {
+ let mut buffers = Vec::with_capacity(create_info.command_buffer_count as usize);
+ (self.device_fn_1_0.allocate_command_buffers)(
+ self.handle(),
+ create_info,
+ buffers.as_mut_ptr(),
+ )
+ .result()?;
+ // `?` above returns early on failure, so the length is only set once the
+ // driver has written every handle.
+ buffers.set_len(create_info.command_buffer_count as usize);
+ Ok(buffers)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateCommandPool.html>
+ ///
+ /// Creates a command pool, returning its handle on success.
+ #[inline]
+ pub unsafe fn create_command_pool(
+ &self,
+ create_info: &vk::CommandPoolCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::CommandPool> {
+ let mut pool = mem::zeroed();
+ (self.device_fn_1_0.create_command_pool)(
+ self.handle(),
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut pool,
+ )
+ .result_with_success(pool)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateQueryPool.html>
+ ///
+ /// Creates a query pool, returning its handle on success.
+ #[inline]
+ pub unsafe fn create_query_pool(
+ &self,
+ create_info: &vk::QueryPoolCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::QueryPool> {
+ let mut pool = mem::zeroed();
+ (self.device_fn_1_0.create_query_pool)(
+ self.handle(),
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut pool,
+ )
+ .result_with_success(pool)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateImage.html>
+ ///
+ /// Creates an image, returning its handle on success.
+ #[inline]
+ pub unsafe fn create_image(
+ &self,
+ create_info: &vk::ImageCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::Image> {
+ let mut image = mem::zeroed();
+ (self.device_fn_1_0.create_image)(
+ self.handle(),
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut image,
+ )
+ .result_with_success(image)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetImageSubresourceLayout.html>
+ ///
+ /// Queries the memory layout of one subresource of a linear-tiled image.
+ #[inline]
+ pub unsafe fn get_image_subresource_layout(
+ &self,
+ image: vk::Image,
+ subresource: vk::ImageSubresource,
+ ) -> vk::SubresourceLayout {
+ let mut layout = mem::zeroed();
+ (self.device_fn_1_0.get_image_subresource_layout)(
+ self.handle(),
+ image,
+ &subresource,
+ &mut layout,
+ );
+ layout
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetImageMemoryRequirements.html>
+ ///
+ /// Queries the size/alignment/memory-type requirements of `image`.
+ #[inline]
+ pub unsafe fn get_image_memory_requirements(&self, image: vk::Image) -> vk::MemoryRequirements {
+ let mut mem_req = mem::zeroed();
+ (self.device_fn_1_0.get_image_memory_requirements)(self.handle(), image, &mut mem_req);
+ mem_req
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetBufferMemoryRequirements.html>
+ ///
+ /// Queries the size/alignment/memory-type requirements of `buffer`.
+ #[inline]
+ pub unsafe fn get_buffer_memory_requirements(
+ &self,
+ buffer: vk::Buffer,
+ ) -> vk::MemoryRequirements {
+ let mut mem_req = mem::zeroed();
+ (self.device_fn_1_0.get_buffer_memory_requirements)(self.handle(), buffer, &mut mem_req);
+ mem_req
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkAllocateMemory.html>
+ ///
+ /// Allocates device memory, returning its handle on success.
+ #[inline]
+ pub unsafe fn allocate_memory(
+ &self,
+ create_info: &vk::MemoryAllocateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::DeviceMemory> {
+ let mut memory = mem::zeroed();
+ (self.device_fn_1_0.allocate_memory)(
+ self.handle(),
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut memory,
+ )
+ .result_with_success(memory)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateShaderModule.html>
+ ///
+ /// Creates a shader module from the SPIR-V code in `create_info`.
+ #[inline]
+ pub unsafe fn create_shader_module(
+ &self,
+ create_info: &vk::ShaderModuleCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::ShaderModule> {
+ let mut shader = mem::zeroed();
+ (self.device_fn_1_0.create_shader_module)(
+ self.handle(),
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut shader,
+ )
+ .result_with_success(shader)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateFence.html>
+ ///
+ /// Creates a fence, returning its handle on success.
+ #[inline]
+ pub unsafe fn create_fence(
+ &self,
+ create_info: &vk::FenceCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::Fence> {
+ let mut fence = mem::zeroed();
+ (self.device_fn_1_0.create_fence)(
+ self.handle(),
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut fence,
+ )
+ .result_with_success(fence)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkBindBufferMemory.html>
+ ///
+ /// Binds `device_memory` at `offset` as the backing store of `buffer`.
+ #[inline]
+ pub unsafe fn bind_buffer_memory(
+ &self,
+ buffer: vk::Buffer,
+ device_memory: vk::DeviceMemory,
+ offset: vk::DeviceSize,
+ ) -> VkResult<()> {
+ (self.device_fn_1_0.bind_buffer_memory)(self.handle(), buffer, device_memory, offset)
+ .result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkBindImageMemory.html>
+ ///
+ /// Binds `device_memory` at `offset` as the backing store of `image`.
+ #[inline]
+ pub unsafe fn bind_image_memory(
+ &self,
+ image: vk::Image,
+ device_memory: vk::DeviceMemory,
+ offset: vk::DeviceSize,
+ ) -> VkResult<()> {
+ (self.device_fn_1_0.bind_image_memory)(self.handle(), image, device_memory, offset).result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetRenderAreaGranularity.html>
+ ///
+ /// Queries the optimal render-area granularity for `render_pass`.
+ #[inline]
+ pub unsafe fn get_render_area_granularity(&self, render_pass: vk::RenderPass) -> vk::Extent2D {
+ let mut granularity = mem::zeroed();
+ (self.device_fn_1_0.get_render_area_granularity)(
+ self.handle(),
+ render_pass,
+ &mut granularity,
+ );
+ granularity
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceMemoryCommitment.html>
+ ///
+ /// Queries the number of bytes currently committed to a lazily-allocated memory object.
+ #[inline]
+ pub unsafe fn get_device_memory_commitment(&self, memory: vk::DeviceMemory) -> vk::DeviceSize {
+ let mut committed_memory_in_bytes = 0;
+ (self.device_fn_1_0.get_device_memory_commitment)(
+ self.handle(),
+ memory,
+ &mut committed_memory_in_bytes,
+ );
+ committed_memory_in_bytes
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetImageSparseMemoryRequirements.html>
+ ///
+ /// Queries the sparse memory requirements of `image` via the two-call
+ /// size-then-fill helper; the underlying command cannot fail.
+ #[inline]
+ pub unsafe fn get_image_sparse_memory_requirements(
+ &self,
+ image: vk::Image,
+ ) -> Vec<vk::SparseImageMemoryRequirements> {
+ read_into_uninitialized_vector(|count, data| {
+ (self.device_fn_1_0.get_image_sparse_memory_requirements)(
+ self.handle(),
+ image,
+ count,
+ data,
+ );
+ vk::Result::SUCCESS
+ })
+ // The closure always returns SUCCESS
+ .unwrap()
+ }
+}
diff --git a/third_party/rust/ash/src/entry.rs b/third_party/rust/ash/src/entry.rs
new file mode 100644
index 0000000000..67e6c9a739
--- /dev/null
+++ b/third_party/rust/ash/src/entry.rs
@@ -0,0 +1,402 @@
+use crate::instance::Instance;
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use std::ffi::CStr;
+#[cfg(feature = "loaded")]
+use std::ffi::OsStr;
+use std::mem;
+use std::os::raw::c_char;
+use std::os::raw::c_void;
+use std::ptr;
+#[cfg(feature = "loaded")]
+use std::sync::Arc;
+
+#[cfg(feature = "loaded")]
+use libloading::Library;
+
+ /// Holds the Vulkan functions independent of a particular instance
+ #[derive(Clone)]
+ pub struct Entry {
+ // vkGetInstanceProcAddr, from which everything else is resolved.
+ static_fn: vk::StaticFn,
+ // Per-core-version tables of global (instance-independent) entry points.
+ entry_fn_1_0: vk::EntryFnV1_0,
+ entry_fn_1_1: vk::EntryFnV1_1,
+ entry_fn_1_2: vk::EntryFnV1_2,
+ entry_fn_1_3: vk::EntryFnV1_3,
+ // Keeps the dynamically loaded Vulkan library alive while its function
+ // pointers are in use; `None` when linked statically or built from a raw table.
+ #[cfg(feature = "loaded")]
+ _lib_guard: Option<Arc<Library>>,
+ }
+
+ /// Vulkan core 1.0
+ #[allow(non_camel_case_types)]
+ impl Entry {
+ /// Load default Vulkan library for the current platform
+ ///
+ /// Prefer this over [`linked`](Self::linked) when your application can gracefully handle
+ /// environments that lack Vulkan support, and when the build environment might not have Vulkan
+ /// development packages installed (e.g. the Vulkan SDK, or Ubuntu's `libvulkan-dev`).
+ ///
+ /// # Safety
+ /// `dlopen`ing native libraries is inherently unsafe. The safety guidelines
+ /// for [`Library::new()`] and [`Library::get()`] apply here.
+ ///
+ /// ```no_run
+ /// use ash::{vk, Entry};
+ /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
+ /// let entry = unsafe { Entry::load()? };
+ /// let app_info = vk::ApplicationInfo {
+ /// api_version: vk::make_api_version(0, 1, 0, 0),
+ /// ..Default::default()
+ /// };
+ /// let create_info = vk::InstanceCreateInfo {
+ /// p_application_info: &app_info,
+ /// ..Default::default()
+ /// };
+ /// let instance = unsafe { entry.create_instance(&create_info, None)? };
+ /// # Ok(()) }
+ /// ```
+ #[cfg(feature = "loaded")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "loaded")))]
+ pub unsafe fn load() -> Result<Self, LoadingError> {
+ // Platform-specific default name of the Vulkan loader shared library.
+ #[cfg(windows)]
+ const LIB_PATH: &str = "vulkan-1.dll";
+
+ #[cfg(all(
+ unix,
+ not(any(target_os = "macos", target_os = "ios", target_os = "android"))
+ ))]
+ const LIB_PATH: &str = "libvulkan.so.1";
+
+ #[cfg(target_os = "android")]
+ const LIB_PATH: &str = "libvulkan.so";
+
+ #[cfg(any(target_os = "macos", target_os = "ios"))]
+ const LIB_PATH: &str = "libvulkan.dylib";
+
+ Self::load_from(LIB_PATH)
+ }
+
+ /// Load entry points from a Vulkan loader linked at compile time
+ ///
+ /// Compared to [`load`](Self::load), this is infallible, but requires that the build
+ /// environment have Vulkan development packages installed (e.g. the Vulkan SDK, or Ubuntu's
+ /// `libvulkan-dev`), and prevents the resulting binary from starting in environments that do not
+ /// support Vulkan.
+ ///
+ /// Note that instance/device functions are still fetched via `vkGetInstanceProcAddr` and
+ /// `vkGetDeviceProcAddr` for maximum performance.
+ ///
+ /// ```no_run
+ /// use ash::{vk, Entry};
+ /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
+ /// let entry = Entry::linked();
+ /// let app_info = vk::ApplicationInfo {
+ /// api_version: vk::make_api_version(0, 1, 0, 0),
+ /// ..Default::default()
+ /// };
+ /// let create_info = vk::InstanceCreateInfo {
+ /// p_application_info: &app_info,
+ /// ..Default::default()
+ /// };
+ /// let instance = unsafe { entry.create_instance(&create_info, None)? };
+ /// # Ok(()) }
+ /// ```
+ #[cfg(feature = "linked")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "linked")))]
+ pub fn linked() -> Self {
+ // Sound because we're linking to Vulkan, which provides a vkGetInstanceProcAddr that has
+ // defined behavior in this use.
+ unsafe {
+ Self::from_static_fn(vk::StaticFn {
+ get_instance_proc_addr: vkGetInstanceProcAddr,
+ })
+ }
+ }
+
+ /// Load Vulkan library at `path`
+ ///
+ /// # Safety
+ /// `dlopen`ing native libraries is inherently unsafe. The safety guidelines
+ /// for [`Library::new()`] and [`Library::get()`] apply here.
+ #[cfg(feature = "loaded")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "loaded")))]
+ pub unsafe fn load_from(path: impl AsRef<OsStr>) -> Result<Self, LoadingError> {
+ let lib = Library::new(path)
+ .map_err(LoadingError::LibraryLoadFailure)
+ .map(Arc::new)?;
+
+ // Symbols missing from the library resolve to null; `load_checked`
+ // rejects a null `vkGetInstanceProcAddr`.
+ let static_fn = vk::StaticFn::load_checked(|name| {
+ lib.get(name.to_bytes_with_nul())
+ .map(|symbol| *symbol)
+ .unwrap_or(ptr::null_mut())
+ })?;
+
+ Ok(Self {
+ _lib_guard: Some(lib),
+ ..Self::from_static_fn(static_fn)
+ })
+ }
+
+ /// Load entry points based on an already-loaded [`vk::StaticFn`]
+ ///
+ /// # Safety
+ /// `static_fn` must contain valid function pointers that comply with the semantics specified by
+ /// Vulkan 1.0, which must remain valid for at least the lifetime of the returned [`Entry`].
+ pub unsafe fn from_static_fn(static_fn: vk::StaticFn) -> Self {
+ // Global commands are queried with a null instance handle.
+ let load_fn = |name: &std::ffi::CStr| {
+ mem::transmute((static_fn.get_instance_proc_addr)(
+ vk::Instance::null(),
+ name.as_ptr(),
+ ))
+ };
+ let entry_fn_1_0 = vk::EntryFnV1_0::load(load_fn);
+ let entry_fn_1_1 = vk::EntryFnV1_1::load(load_fn);
+ let entry_fn_1_2 = vk::EntryFnV1_2::load(load_fn);
+ let entry_fn_1_3 = vk::EntryFnV1_3::load(load_fn);
+
+ Self {
+ static_fn,
+ entry_fn_1_0,
+ entry_fn_1_1,
+ entry_fn_1_2,
+ entry_fn_1_3,
+ #[cfg(feature = "loaded")]
+ _lib_guard: None,
+ }
+ }
+
+ /// Returns the raw Vulkan 1.0 global function-pointer table.
+ #[inline]
+ pub fn fp_v1_0(&self) -> &vk::EntryFnV1_0 {
+ &self.entry_fn_1_0
+ }
+
+ /// Returns the raw static function-pointer table (`vkGetInstanceProcAddr`).
+ #[inline]
+ pub fn static_fn(&self) -> &vk::StaticFn {
+ &self.static_fn
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkEnumerateInstanceVersion.html>
+ /// ```no_run
+ /// # use ash::{Entry, vk};
+ /// # fn main() -> Result<(), Box<dyn std::error::Error>> {
+ /// let entry = Entry::linked();
+ /// match entry.try_enumerate_instance_version()? {
+ /// // Vulkan 1.1+
+ /// Some(version) => {
+ /// let major = vk::version_major(version);
+ /// let minor = vk::version_minor(version);
+ /// let patch = vk::version_patch(version);
+ /// },
+ /// // Vulkan 1.0
+ /// None => {},
+ /// }
+ /// # Ok(()) }
+ /// ```
+ #[inline]
+ pub fn try_enumerate_instance_version(&self) -> VkResult<Option<u32>> {
+ unsafe {
+ let mut api_version = 0;
+ // Looked up dynamically: the symbol is absent on Vulkan 1.0 loaders,
+ // in which case `Ok(None)` is returned instead of an error.
+ let enumerate_instance_version: Option<vk::PFN_vkEnumerateInstanceVersion> = {
+ let name = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkEnumerateInstanceVersion\0",
+ );
+ mem::transmute((self.static_fn.get_instance_proc_addr)(
+ vk::Instance::null(),
+ name.as_ptr(),
+ ))
+ };
+ if let Some(enumerate_instance_version) = enumerate_instance_version {
+ (enumerate_instance_version)(&mut api_version)
+ .result_with_success(Some(api_version))
+ } else {
+ Ok(None)
+ }
+ }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateInstance.html>
+ ///
+ /// # Safety
+ /// In order for the created [`Instance`] to be valid for the duration of its
+ /// usage, the [`Entry`](Self) this was called on must be dropped later than the
+ /// resulting [`Instance`].
+ #[inline]
+ pub unsafe fn create_instance(
+ &self,
+ create_info: &vk::InstanceCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<Instance> {
+ let mut instance = mem::zeroed();
+ (self.entry_fn_1_0.create_instance)(
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut instance,
+ )
+ .result()?;
+ // Wrap the raw handle, loading the instance-level function tables.
+ Ok(Instance::load(&self.static_fn, instance))
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkEnumerateInstanceLayerProperties.html>
+ ///
+ /// Lists the layers available to a new instance.
+ #[inline]
+ pub fn enumerate_instance_layer_properties(&self) -> VkResult<Vec<vk::LayerProperties>> {
+ unsafe {
+ read_into_uninitialized_vector(|count, data| {
+ (self.entry_fn_1_0.enumerate_instance_layer_properties)(count, data)
+ })
+ }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkEnumerateInstanceExtensionProperties.html>
+ ///
+ /// Lists instance extensions, optionally restricted to those provided by `layer_name`.
+ #[inline]
+ pub fn enumerate_instance_extension_properties(
+ &self,
+ layer_name: Option<&CStr>,
+ ) -> VkResult<Vec<vk::ExtensionProperties>> {
+ unsafe {
+ read_into_uninitialized_vector(|count, data| {
+ (self.entry_fn_1_0.enumerate_instance_extension_properties)(
+ layer_name.map_or(ptr::null(), |str| str.as_ptr()),
+ count,
+ data,
+ )
+ })
+ }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetInstanceProcAddr.html>
+ ///
+ /// Resolves a Vulkan entry point by name for the given instance.
+ #[inline]
+ pub unsafe fn get_instance_proc_addr(
+ &self,
+ instance: vk::Instance,
+ p_name: *const c_char,
+ ) -> vk::PFN_vkVoidFunction {
+ (self.static_fn.get_instance_proc_addr)(instance, p_name)
+ }
+ }
+
+ /// Vulkan core 1.1
+ #[allow(non_camel_case_types)]
+ impl Entry {
+ /// Returns the raw Vulkan 1.1 global function-pointer table.
+ #[inline]
+ pub fn fp_v1_1(&self) -> &vk::EntryFnV1_1 {
+ &self.entry_fn_1_1
+ }
+
+ #[deprecated = "This function is unavailable and therefore panics on Vulkan 1.0, please use `try_enumerate_instance_version()` instead"]
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkEnumerateInstanceVersion.html>
+ ///
+ /// Please use [`try_enumerate_instance_version()`][Self::try_enumerate_instance_version()] instead.
+ #[inline]
+ pub fn enumerate_instance_version(&self) -> VkResult<u32> {
+ unsafe {
+ let mut api_version = 0;
+ // Calls through the 1.1 table unconditionally — see the deprecation
+ // note for the Vulkan 1.0 behavior.
+ (self.entry_fn_1_1.enumerate_instance_version)(&mut api_version)
+ .result_with_success(api_version)
+ }
+ }
+ }
+
+ /// Vulkan core 1.2
+ #[allow(non_camel_case_types)]
+ impl Entry {
+ /// Returns the raw Vulkan 1.2 global function-pointer table.
+ #[inline]
+ pub fn fp_v1_2(&self) -> &vk::EntryFnV1_2 {
+ &self.entry_fn_1_2
+ }
+ }
+
+ /// Vulkan core 1.3
+ #[allow(non_camel_case_types)]
+ impl Entry {
+ /// Returns the raw Vulkan 1.3 global function-pointer table.
+ #[inline]
+ pub fn fp_v1_3(&self) -> &vk::EntryFnV1_3 {
+ &self.entry_fn_1_3
+ }
+ }
+
+ // Only available when statically linked, since that is the only infallible constructor.
+ #[cfg(feature = "linked")]
+ #[cfg_attr(docsrs, doc(cfg(feature = "linked")))]
+ impl Default for Entry {
+ #[inline]
+ fn default() -> Self {
+ Self::linked()
+ }
+ }
+
+ impl vk::StaticFn {
+ /// Like `load`, but fails with [`MissingEntryPoint`] when the resolver
+ /// returns null for `vkGetInstanceProcAddr` instead of storing a null
+ /// function pointer.
+ pub fn load_checked<F>(mut _f: F) -> Result<Self, MissingEntryPoint>
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ // TODO: Make this a &'static CStr once CStr::from_bytes_with_nul_unchecked is const
+ static ENTRY_POINT: &[u8] = b"vkGetInstanceProcAddr\0";
+
+ Ok(Self {
+ get_instance_proc_addr: unsafe {
+ let cname = CStr::from_bytes_with_nul_unchecked(ENTRY_POINT);
+ let val = _f(cname);
+ if val.is_null() {
+ return Err(MissingEntryPoint);
+ } else {
+ // Reinterpret the raw symbol address as the typed fn pointer.
+ ::std::mem::transmute(val)
+ }
+ },
+ })
+ }
+ }
+
+ /// Error returned when a Vulkan library does not export `vkGetInstanceProcAddr`.
+ #[derive(Clone, Debug)]
+ pub struct MissingEntryPoint;
+ impl std::fmt::Display for MissingEntryPoint {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {
+ write!(f, "Cannot load `vkGetInstanceProcAddr` symbol from library")
+ }
+ }
+ impl std::error::Error for MissingEntryPoint {}
+
+ // Resolved at link time against the system Vulkan loader (requires the
+ // `linked` feature); used by `Entry::linked`.
+ #[cfg(feature = "linked")]
+ extern "system" {
+ fn vkGetInstanceProcAddr(instance: vk::Instance, name: *const c_char)
+ -> vk::PFN_vkVoidFunction;
+ }
+
+ #[cfg(feature = "loaded")]
+ mod loaded {
+ use std::error::Error;
+ use std::fmt;
+
+ use super::*;
+
+ /// Errors that can occur while dynamically loading the Vulkan library.
+ #[derive(Debug)]
+ #[cfg_attr(docsrs, doc(cfg(feature = "loaded")))]
+ pub enum LoadingError {
+ /// The shared library could not be opened at all.
+ LibraryLoadFailure(libloading::Error),
+ /// The library opened but lacks `vkGetInstanceProcAddr`.
+ MissingEntryPoint(MissingEntryPoint),
+ }
+
+ impl fmt::Display for LoadingError {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // Delegate to the wrapped error's Display in both cases.
+ match self {
+ Self::LibraryLoadFailure(err) => fmt::Display::fmt(err, f),
+ Self::MissingEntryPoint(err) => fmt::Display::fmt(err, f),
+ }
+ }
+ }
+
+ impl Error for LoadingError {
+ fn source(&self) -> Option<&(dyn Error + 'static)> {
+ Some(match self {
+ Self::LibraryLoadFailure(err) => err,
+ Self::MissingEntryPoint(err) => err,
+ })
+ }
+ }
+
+ impl From<MissingEntryPoint> for LoadingError {
+ fn from(err: MissingEntryPoint) -> Self {
+ Self::MissingEntryPoint(err)
+ }
+ }
+ }
+#[cfg(feature = "loaded")]
+pub use self::loaded::*;
diff --git a/third_party/rust/ash/src/extensions/experimental/amd.rs b/third_party/rust/ash/src/extensions/experimental/amd.rs
new file mode 100644
index 0000000000..35b4289f16
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/experimental/amd.rs
@@ -0,0 +1,722 @@
+#![allow(clippy::unreadable_literal)]
+
+/*
+ ***********************************************************************************************************************
+ *
+ * Copyright (c) 2014-2019 Advanced Micro Devices, Inc. All Rights Reserved.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ *
+ **********************************************************************************************************************/
+
+#[cfg(feature = "debug")]
+use crate::prelude::debug_flags;
+use crate::vk::*;
+
+use std::fmt;
+use std::os::raw::*;
+
+// Extension: `VK_AMD_gpa_interface`
+
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct GpaSqShaderStageFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(GpaSqShaderStageFlags, Flags);
+#[cfg(feature = "debug")]
+impl fmt::Debug for GpaSqShaderStageFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (GpaSqShaderStageFlags::PS.0, "PS"),
+ (GpaSqShaderStageFlags::VS.0, "VS"),
+ (GpaSqShaderStageFlags::GS.0, "GS"),
+ (GpaSqShaderStageFlags::ES.0, "ES"),
+ (GpaSqShaderStageFlags::HS.0, "HS"),
+ (GpaSqShaderStageFlags::LS.0, "LS"),
+ (GpaSqShaderStageFlags::CS.0, "CS"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl GpaSqShaderStageFlags {
+ pub const PS: Self = Self(0b1);
+ pub const VS: Self = Self(0b10);
+ pub const GS: Self = Self(0b100);
+ pub const ES: Self = Self(0b1000);
+ pub const HS: Self = Self(0b10000);
+ pub const LS: Self = Self(0b100000);
+ pub const CS: Self = Self(0b1000000);
+}
+
+impl StructureType {
+ pub const PHYSICAL_DEVICE_GPA_FEATURES_AMD: Self = Self(1000133000);
+ pub const PHYSICAL_DEVICE_GPA_PROPERTIES_AMD: Self = Self(1000133001);
+ pub const GPA_SAMPLE_BEGIN_INFO_AMD: Self = Self(1000133002);
+ pub const GPA_SESSION_CREATE_INFO_AMD: Self = Self(1000133003);
+ pub const GPA_DEVICE_CLOCK_MODE_INFO_AMD: Self = Self(1000133004);
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[repr(transparent)]
+pub struct GpaDeviceClockModeAmd(pub(crate) i32);
+impl GpaDeviceClockModeAmd {
+ pub fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ pub fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl GpaDeviceClockModeAmd {
+ pub const DEFAULT: Self = Self(0);
+ pub const QUERY: Self = Self(1);
+ pub const PROFILING: Self = Self(2);
+ pub const MIN_MEMORY: Self = Self(3);
+ pub const MIN_ENGINE: Self = Self(4);
+ pub const PEAK: Self = Self(5);
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[repr(transparent)]
+pub struct GpaPerfBlockAmd(pub(crate) i32);
+impl GpaPerfBlockAmd {
+ pub fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ pub fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl GpaPerfBlockAmd {
+ pub const CPF: Self = Self(0);
+ pub const IA: Self = Self(1);
+ pub const VGT: Self = Self(2);
+ pub const PA: Self = Self(3);
+ pub const SC: Self = Self(4);
+ pub const SPI: Self = Self(5);
+ pub const SQ: Self = Self(6);
+ pub const SX: Self = Self(7);
+ pub const TA: Self = Self(8);
+ pub const TD: Self = Self(9);
+ pub const TCP: Self = Self(10);
+ pub const TCC: Self = Self(11);
+ pub const TCA: Self = Self(12);
+ pub const DB: Self = Self(13);
+ pub const CB: Self = Self(14);
+ pub const GDS: Self = Self(15);
+ pub const SRBM: Self = Self(16);
+ pub const GRBM: Self = Self(17);
+ pub const GRBM_SE: Self = Self(18);
+ pub const RLC: Self = Self(19);
+ pub const DMA: Self = Self(20);
+ pub const MC: Self = Self(21);
+ pub const CPG: Self = Self(22);
+ pub const CPC: Self = Self(23);
+ pub const WD: Self = Self(24);
+ pub const TCS: Self = Self(25);
+ pub const ATC: Self = Self(26);
+ pub const ATC_L2: Self = Self(27);
+ pub const MC_VM_L2: Self = Self(28);
+ pub const EA: Self = Self(29);
+ pub const RPB: Self = Self(30);
+ pub const RMI: Self = Self(31);
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[repr(transparent)]
+pub struct GpaSampleTypeAmd(pub(crate) i32);
+impl GpaSampleTypeAmd {
+ pub fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ pub fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl GpaSampleTypeAmd {
+ pub const CUMULATIVE: Self = Self(0);
+ pub const TRACE: Self = Self(1);
+ pub const TIMING: Self = Self(2);
+}
+
+handle_nondispatchable!(GpaSessionAmd, UNKNOWN);
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+pub struct GpaSessionCreateInfoAmd {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub secondary_copy_source: GpaSessionAmd,
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+pub struct GpaPerfBlockPropertiesAmd {
+ pub block_type: GpaPerfBlockAmd,
+ pub flags: Flags,
+ pub instance_count: u32,
+ pub max_event_id: u32,
+ pub max_global_only_counters: u32,
+ pub max_global_shared_counters: u32,
+ pub max_streaming_counters: u32,
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+pub struct PhysicalDeviceGpaFeaturesAmd {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub perf_counters: Bool32,
+ pub streaming_perf_counters: Bool32,
+ pub sq_thread_tracing: Bool32,
+ pub clock_modes: Bool32,
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+pub struct PhysicalDeviceGpaPropertiesAmd {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: Flags,
+ pub max_sqtt_se_buffer_size: DeviceSize,
+ pub shader_engine_count: u32,
+ pub perf_block_count: u32,
+ pub p_perf_block_properties: *mut GpaPerfBlockPropertiesAmd,
+}
+
+impl ::std::default::Default for PhysicalDeviceGpaPropertiesAmd {
+ fn default() -> Self {
+ Self {
+ s_type: StructureType::PHYSICAL_DEVICE_GPA_PROPERTIES_AMD,
+ p_next: ::std::ptr::null_mut(),
+ flags: Flags::default(),
+ max_sqtt_se_buffer_size: DeviceSize::default(),
+ shader_engine_count: u32::default(),
+ perf_block_count: u32::default(),
+ p_perf_block_properties: ::std::ptr::null_mut(),
+ }
+ }
+}
+impl PhysicalDeviceGpaPropertiesAmd {
+ pub fn builder<'a>() -> PhysicalDeviceGpaPropertiesAmdBuilder<'a> {
+ PhysicalDeviceGpaPropertiesAmdBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+pub struct PhysicalDeviceGpaPropertiesAmdBuilder<'a> {
+ inner: PhysicalDeviceGpaPropertiesAmd,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsPhysicalDeviceGpaPropertiesAmd {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceGpaPropertiesAmd {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceGpaPropertiesAmdBuilder<'a> {
+ type Target = PhysicalDeviceGpaPropertiesAmd;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> PhysicalDeviceGpaPropertiesAmdBuilder<'a> {
+ pub fn push_next<T>(mut self, next: &'a mut T) -> PhysicalDeviceGpaPropertiesAmdBuilder<'a>
+ where
+ T: ExtendsPhysicalDeviceGpaPropertiesAmd,
+ {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ pub fn build(self) -> PhysicalDeviceGpaPropertiesAmd {
+ self.inner
+ }
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+pub struct GpaPerfCounterAmd {
+ pub block_type: GpaPerfBlockAmd,
+ pub block_instance: u32,
+ pub event_id: u32,
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+pub struct GpaSampleBeginInfoAmd {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub sample_type: GpaSampleTypeAmd,
+ pub sample_internal_operations: Bool32,
+ pub cache_flush_on_counter_collection: Bool32,
+ pub sq_shader_mask_enable: Bool32,
+ pub sq_shader_mask: GpaSqShaderStageFlags,
+ pub perf_counter_count: u32,
+ pub p_perf_counters: *const GpaPerfCounterAmd,
+ pub streaming_perf_trace_sample_interval: u32,
+ pub perf_counter_device_memory_limit: DeviceSize,
+ pub sq_thread_trace_enable: Bool32,
+ pub sq_thread_trace_suppress_instruction_tokens: Bool32,
+ pub sq_thread_trace_device_memory_limit: DeviceSize,
+ pub timing_pre_sample: PipelineStageFlags,
+ pub timing_post_sample: PipelineStageFlags,
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+pub struct GpaDeviceClockModeInfoAmd {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub clock_mode: GpaDeviceClockModeAmd,
+ pub memory_clock_ratio_to_peak: f32,
+ pub engine_clock_ratio_to_peak: f32,
+}
+
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateGpaSessionAMD = extern "system" fn(
+ device: Device,
+ p_create_info: *const GpaSessionCreateInfoAmd,
+ p_allocator: *const AllocationCallbacks,
+ p_gpa_session: *mut GpaSessionAmd,
+) -> Result;
+
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyGpaSessionAMD = extern "system" fn(
+ device: Device,
+ gpa_session: GpaSessionAmd,
+ p_allocator: *const AllocationCallbacks,
+) -> c_void;
+
+#[allow(non_camel_case_types)]
+pub type PFN_vkSetGpaDeviceClockModeAMD =
+ extern "system" fn(device: Device, p_info: *mut GpaDeviceClockModeInfoAmd) -> Result;
+
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBeginGpaSessionAMD =
+ extern "system" fn(commandBuffer: CommandBuffer, gpa_session: GpaSessionAmd) -> Result;
+
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdEndGpaSessionAMD =
+ extern "system" fn(commandBuffer: CommandBuffer, gpa_session: GpaSessionAmd) -> Result;
+
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBeginGpaSampleAMD = extern "system" fn(
+ commandBuffer: CommandBuffer,
+ gpa_session: GpaSessionAmd,
+ p_gpa_sample_begin_info: *const GpaSampleBeginInfoAmd,
+ p_sample_id: *mut u32,
+) -> Result;
+
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdEndGpaSampleAMD = extern "system" fn(
+ commandBuffer: CommandBuffer,
+ gpa_session: GpaSessionAmd,
+ sample_id: u32,
+) -> c_void;
+
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetGpaSessionStatusAMD =
+ extern "system" fn(device: Device, gpaSession: GpaSessionAmd) -> Result;
+
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetGpaSessionResultsAMD = extern "system" fn(
+ device: Device,
+ gpaSession: GpaSessionAmd,
+ sample_id: u32,
+ p_size_in_bytes: *mut usize,
+ p_data: *mut c_void,
+) -> Result;
+
+#[allow(non_camel_case_types)]
+pub type PFN_vkResetGpaSessionAMD =
+ extern "system" fn(device: Device, gpaSession: GpaSessionAmd) -> Result;
+
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyGpaSessionResultsAMD =
+ extern "system" fn(commandBuffer: CommandBuffer, gpaSession: GpaSessionAmd) -> c_void;
+
+pub struct AmdGpaInterfaceFn {
+ pub create_gpa_session: PFN_vkCreateGpaSessionAMD,
+ pub destroy_gpa_session: PFN_vkDestroyGpaSessionAMD,
+ pub set_gpa_device_clock_mode: PFN_vkSetGpaDeviceClockModeAMD,
+ pub cmd_begin_gpa_session: PFN_vkCmdBeginGpaSessionAMD,
+ pub cmd_end_gpa_session: PFN_vkCmdEndGpaSessionAMD,
+ pub cmd_begin_gpa_sample: PFN_vkCmdBeginGpaSampleAMD,
+ pub cmd_end_gpa_sample: PFN_vkCmdEndGpaSampleAMD,
+ pub get_gpa_session_status: PFN_vkGetGpaSessionStatusAMD,
+ pub get_gpa_session_results: PFN_vkGetGpaSessionResultsAMD,
+ pub reset_gpa_session: PFN_vkResetGpaSessionAMD,
+ pub cmd_copy_gpa_session_results: PFN_vkCmdCopyGpaSessionResultsAMD,
+}
+unsafe impl Send for AmdGpaInterfaceFn {}
+unsafe impl Sync for AmdGpaInterfaceFn {}
+
+impl ::std::clone::Clone for AmdGpaInterfaceFn {
+ fn clone(&self) -> Self {
+ Self {
+ create_gpa_session: self.create_gpa_session,
+ destroy_gpa_session: self.destroy_gpa_session,
+ set_gpa_device_clock_mode: self.set_gpa_device_clock_mode,
+ cmd_begin_gpa_session: self.cmd_begin_gpa_session,
+ cmd_end_gpa_session: self.cmd_end_gpa_session,
+ cmd_begin_gpa_sample: self.cmd_begin_gpa_sample,
+ cmd_end_gpa_sample: self.cmd_end_gpa_sample,
+ get_gpa_session_status: self.get_gpa_session_status,
+ get_gpa_session_results: self.get_gpa_session_results,
+ reset_gpa_session: self.reset_gpa_session,
+ cmd_copy_gpa_session_results: self.cmd_copy_gpa_session_results,
+ }
+ }
+}
+
+impl AmdGpaInterfaceFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_gpa_session: unsafe {
+ extern "system" fn create_gpa_session_amd(
+ _device: Device,
+ _p_create_info: *const GpaSessionCreateInfoAmd,
+ _p_allocator: *const AllocationCallbacks,
+ _p_gpa_session: *mut GpaSessionAmd,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_gpa_session_amd)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateGpaSessionAMD\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_gpa_session_amd
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_gpa_session: unsafe {
+ extern "system" fn destroy_gpa_session_amd(
+ _device: Device,
+ _gpa_session: GpaSessionAmd,
+ _p_allocator: *const AllocationCallbacks,
+ ) -> c_void {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_gpa_session_amd)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyGpaSessionAMD\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_gpa_session_amd
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ set_gpa_device_clock_mode: unsafe {
+ extern "system" fn set_gpa_device_clock_mode_amd(
+ _device: Device,
+ _p_info: *mut GpaDeviceClockModeInfoAmd,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(set_gpa_device_clock_mode_amd)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkSetGpaDeviceClockModeAMD\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ set_gpa_device_clock_mode_amd
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_begin_gpa_session: unsafe {
+ extern "system" fn cmd_begin_gpa_session_amd(
+ _command_buffer: CommandBuffer,
+ _gpa_session: GpaSessionAmd,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_begin_gpa_session_amd)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginGpaSessionAMD\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_begin_gpa_session_amd
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_end_gpa_session: unsafe {
+ extern "system" fn cmd_end_gpa_session_amd(
+ _command_buffer: CommandBuffer,
+ _gpa_session: GpaSessionAmd,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_end_gpa_session_amd)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdEndGpaSessionAMD\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_end_gpa_session_amd
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_begin_gpa_sample: unsafe {
+ extern "system" fn cmd_begin_gpa_sample_amd(
+ _command_buffer: CommandBuffer,
+ _gpa_session: GpaSessionAmd,
+ _p_gpa_sample_begin_info: *const GpaSampleBeginInfoAmd,
+ _p_sample_id: *mut u32,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_begin_gpa_sample_amd)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginGpaSampleAMD\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_begin_gpa_sample_amd
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_end_gpa_sample: unsafe {
+ extern "system" fn cmd_end_gpa_sample_amd(
+ _command_buffer: CommandBuffer,
+ _gpa_session: GpaSessionAmd,
+ _sample_id: u32,
+ ) -> c_void {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_end_gpa_sample_amd)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdEndGpaSampleAMD\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_end_gpa_sample_amd
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_gpa_session_status: unsafe {
+ extern "system" fn get_gpa_session_status_amd(
+ _device: Device,
+ _gpa_session: GpaSessionAmd,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_gpa_session_status_amd)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetGpaSessionStatusAMD\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_gpa_session_status_amd
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_gpa_session_results: unsafe {
+ extern "system" fn get_gpa_session_results_amd(
+ _device: Device,
+ _gpa_session: GpaSessionAmd,
+ _sample_id: u32,
+ _p_size_in_bytes: *mut usize,
+ _p_data: *mut c_void,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_gpa_session_results_amd)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetGpaSessionResultsAMD\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_gpa_session_results_amd
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ reset_gpa_session: unsafe {
+ extern "system" fn reset_gpa_session_amd(
+ _device: Device,
+ _gpa_session: GpaSessionAmd,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(reset_gpa_session_amd)
+ ))
+ }
+ let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkResetGpaSessionAMD\0");
+ let val = _f(cname);
+ if val.is_null() {
+ reset_gpa_session_amd
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_copy_gpa_session_results: unsafe {
+ extern "system" fn cmd_copy_gpa_session_results_amd(
+ _command_buffer: CommandBuffer,
+ _gpa_session: GpaSessionAmd,
+ ) -> c_void {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_copy_gpa_session_results_amd)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdCopyGpaSessionResultsAMD\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_copy_gpa_session_results_amd
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+ pub unsafe fn create_gpa_session(
+ &self,
+ device: Device,
+ create_info: *const GpaSessionCreateInfoAmd,
+ allocator: *const AllocationCallbacks,
+ gpa_session: *mut GpaSessionAmd,
+ ) -> Result {
+ (self.create_gpa_session)(device, create_info, allocator, gpa_session)
+ }
+ pub unsafe fn destroy_gpa_session(
+ &self,
+ device: Device,
+ gpa_session: GpaSessionAmd,
+ allocator: *const AllocationCallbacks,
+ ) -> c_void {
+ (self.destroy_gpa_session)(device, gpa_session, allocator)
+ }
+}
+
+// Extension: `VK_AMD_wave_limits`
+
+impl StructureType {
+ pub const WAVE_LIMIT_AMD: Self = Self(1000045000);
+ pub const PHYSICAL_DEVICE_WAVE_LIMIT_PROPERTIES_AMD: Self = Self(1000045001);
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+pub struct PhysicalDeviceWaveLimitPropertiesAmd {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub cu_count: u32,
+ pub max_waves_per_cu: u32,
+}
+
+impl ::std::default::Default for PhysicalDeviceWaveLimitPropertiesAmd {
+ fn default() -> Self {
+ Self {
+ s_type: StructureType::PHYSICAL_DEVICE_WAVE_LIMIT_PROPERTIES_AMD,
+ p_next: ::std::ptr::null_mut(),
+ cu_count: u32::default(),
+ max_waves_per_cu: u32::default(),
+ }
+ }
+}
+impl PhysicalDeviceWaveLimitPropertiesAmd {
+ pub fn builder<'a>() -> PhysicalDeviceWaveLimitPropertiesAmdBuilder<'a> {
+ PhysicalDeviceWaveLimitPropertiesAmdBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+pub struct PhysicalDeviceWaveLimitPropertiesAmdBuilder<'a> {
+ inner: PhysicalDeviceWaveLimitPropertiesAmd,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsPhysicalDeviceWaveLimitPropertiesAmd {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceWaveLimitPropertiesAmd {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceWaveLimitPropertiesAmdBuilder<'a> {
+ type Target = PhysicalDeviceWaveLimitPropertiesAmd;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> PhysicalDeviceWaveLimitPropertiesAmdBuilder<'a> {
+ pub fn push_next<T>(
+ mut self,
+ next: &'a mut T,
+ ) -> PhysicalDeviceWaveLimitPropertiesAmdBuilder<'a>
+ where
+ T: ExtendsPhysicalDeviceWaveLimitPropertiesAmd,
+ {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ pub fn build(self) -> PhysicalDeviceWaveLimitPropertiesAmd {
+ self.inner
+ }
+}
+
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+pub struct PipelineShaderStageCreateInfoWaveLimitAmd {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub waves_per_cu: f32,
+ pub cu_enable_mask: *mut u32,
+}
diff --git a/third_party/rust/ash/src/extensions/experimental/mod.rs b/third_party/rust/ash/src/extensions/experimental/mod.rs
new file mode 100644
index 0000000000..49e51f80c3
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/experimental/mod.rs
@@ -0,0 +1 @@
+pub mod amd;
diff --git a/third_party/rust/ash/src/extensions/ext/acquire_drm_display.rs b/third_party/rust/ash/src/extensions/ext/acquire_drm_display.rs
new file mode 100644
index 0000000000..8c80e506e0
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/acquire_drm_display.rs
@@ -0,0 +1,55 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_EXT_acquire_drm_display.html>
+#[derive(Clone)]
+pub struct AcquireDrmDisplay {
+ fp: vk::ExtAcquireDrmDisplayFn,
+}
+
+impl AcquireDrmDisplay {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let handle = instance.handle();
+ let fp = vk::ExtAcquireDrmDisplayFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+ });
+ Self { fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkAcquireDrmDisplayEXT.html>
+ #[inline]
+ pub unsafe fn acquire_drm_display(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ drm_fd: i32,
+ display: vk::DisplayKHR,
+ ) -> VkResult<()> {
+ (self.fp.acquire_drm_display_ext)(physical_device, drm_fd, display).result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDrmDisplayEXT.html>
+ #[inline]
+ pub unsafe fn get_drm_display(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ drm_fd: i32,
+ connector_id: u32,
+ ) -> VkResult<vk::DisplayKHR> {
+ let mut display = mem::MaybeUninit::uninit();
+ (self.fp.get_drm_display_ext)(physical_device, drm_fd, connector_id, display.as_mut_ptr())
+ .assume_init_on_success(display)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::ExtAcquireDrmDisplayFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::ExtAcquireDrmDisplayFn {
+ &self.fp
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/ext/buffer_device_address.rs b/third_party/rust/ash/src/extensions/ext/buffer_device_address.rs
new file mode 100644
index 0000000000..a1d304e639
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/buffer_device_address.rs
@@ -0,0 +1,44 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct BufferDeviceAddress {
+ handle: vk::Device,
+ fp: vk::ExtBufferDeviceAddressFn,
+}
+
+impl BufferDeviceAddress {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::ExtBufferDeviceAddressFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetBufferDeviceAddressEXT.html>
+ #[inline]
+ pub unsafe fn get_buffer_device_address(
+ &self,
+ info: &vk::BufferDeviceAddressInfoEXT,
+ ) -> vk::DeviceAddress {
+ (self.fp.get_buffer_device_address_ext)(self.handle, info)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::ExtBufferDeviceAddressFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::ExtBufferDeviceAddressFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/ext/calibrated_timestamps.rs b/third_party/rust/ash/src/extensions/ext/calibrated_timestamps.rs
new file mode 100644
index 0000000000..e102fca15c
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/calibrated_timestamps.rs
@@ -0,0 +1,72 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct CalibratedTimestamps {
+ handle: vk::Instance,
+ fp: vk::ExtCalibratedTimestampsFn,
+}
+
+impl CalibratedTimestamps {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let handle = instance.handle();
+ let fp = vk::ExtCalibratedTimestampsFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceCalibrateableTimeDomainsEXT.html>
+ #[inline]
+ pub unsafe fn get_physical_device_calibrateable_time_domains(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ ) -> VkResult<Vec<vk::TimeDomainEXT>> {
+ read_into_uninitialized_vector(|count, data| {
+ (self.fp.get_physical_device_calibrateable_time_domains_ext)(
+ physical_device,
+ count,
+ data,
+ )
+ })
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetCalibratedTimestampsEXT.html>
+ ///
+ /// Returns a tuple containing `(timestamps, max_deviation)`
+ #[inline]
+ pub unsafe fn get_calibrated_timestamps(
+ &self,
+ device: vk::Device,
+ info: &[vk::CalibratedTimestampInfoEXT],
+ ) -> VkResult<(Vec<u64>, Vec<u64>)> {
+ let mut timestamps = vec![0u64; info.len()];
+ let mut max_deviation = vec![0u64; info.len()];
+ (self.fp.get_calibrated_timestamps_ext)(
+ device,
+ info.len() as u32,
+ info.as_ptr(),
+ timestamps.as_mut_ptr(),
+ max_deviation.as_mut_ptr(),
+ )
+ .result_with_success((timestamps, max_deviation))
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::ExtCalibratedTimestampsFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::ExtCalibratedTimestampsFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn instance(&self) -> vk::Instance {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/ext/debug_marker.rs b/third_party/rust/ash/src/extensions/ext/debug_marker.rs
new file mode 100755
index 0000000000..1c21a47fd9
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/debug_marker.rs
@@ -0,0 +1,71 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct DebugMarker {
+ handle: vk::Device,
+ fp: vk::ExtDebugMarkerFn,
+}
+
+impl DebugMarker {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::ExtDebugMarkerFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDebugMarkerSetObjectNameEXT.html>
+ #[inline]
+ pub unsafe fn debug_marker_set_object_name(
+ &self,
+ name_info: &vk::DebugMarkerObjectNameInfoEXT,
+ ) -> VkResult<()> {
+ (self.fp.debug_marker_set_object_name_ext)(self.handle, name_info).result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDebugMarkerBeginEXT.html>
+ #[inline]
+ pub unsafe fn cmd_debug_marker_begin(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ marker_info: &vk::DebugMarkerMarkerInfoEXT,
+ ) {
+ (self.fp.cmd_debug_marker_begin_ext)(command_buffer, marker_info);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDebugMarkerEndEXT.html>
+ #[inline]
+ pub unsafe fn cmd_debug_marker_end(&self, command_buffer: vk::CommandBuffer) {
+ (self.fp.cmd_debug_marker_end_ext)(command_buffer);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDebugMarkerInsertEXT.html>
+ #[inline]
+ pub unsafe fn cmd_debug_marker_insert(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ marker_info: &vk::DebugMarkerMarkerInfoEXT,
+ ) {
+ (self.fp.cmd_debug_marker_insert_ext)(command_buffer, marker_info);
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::ExtDebugMarkerFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::ExtDebugMarkerFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/ext/debug_report.rs b/third_party/rust/ash/src/extensions/ext/debug_report.rs
new file mode 100755
index 0000000000..625ebf479e
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/debug_report.rs
@@ -0,0 +1,68 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct DebugReport {
+ handle: vk::Instance,
+ fp: vk::ExtDebugReportFn,
+}
+
+impl DebugReport {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let handle = instance.handle();
+ let fp = vk::ExtDebugReportFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyDebugReportCallbackEXT.html>
+ #[inline]
+ pub unsafe fn destroy_debug_report_callback(
+ &self,
+ debug: vk::DebugReportCallbackEXT,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) {
+ (self.fp.destroy_debug_report_callback_ext)(
+ self.handle,
+ debug,
+ allocation_callbacks.as_raw_ptr(),
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateDebugReportCallbackEXT.html>
+ #[inline]
+ pub unsafe fn create_debug_report_callback(
+ &self,
+ create_info: &vk::DebugReportCallbackCreateInfoEXT,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::DebugReportCallbackEXT> {
+ let mut debug_cb = mem::zeroed();
+ (self.fp.create_debug_report_callback_ext)(
+ self.handle,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut debug_cb,
+ )
+ .result_with_success(debug_cb)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::ExtDebugReportFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::ExtDebugReportFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn instance(&self) -> vk::Instance {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/ext/debug_utils.rs b/third_party/rust/ash/src/extensions/ext/debug_utils.rs
new file mode 100755
index 0000000000..46d91769ad
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/debug_utils.rs
@@ -0,0 +1,180 @@
+use crate::prelude::*;
+use crate::{vk, RawPtr};
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+/// Wrapper for the `VK_EXT_debug_utils` instance extension: object
+/// naming/tagging, queue and command-buffer debug labels, and messengers.
+#[derive(Clone)]
+pub struct DebugUtils {
+    handle: vk::Instance,
+    fp: vk::ExtDebugUtilsFn,
+}
+
+impl DebugUtils {
+    pub fn new(entry: &Entry, instance: &Instance) -> Self {
+        let handle = instance.handle();
+        // The transmute casts the untyped PFN_vkVoidFunction returned by
+        // `get_instance_proc_addr` to the typed function pointer; the loaded
+        // pointer is null when the extension was not enabled on this instance.
+        let fp = vk::ExtDebugUtilsFn::load(|name| unsafe {
+            mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+        });
+        Self { handle, fp }
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkSetDebugUtilsObjectNameEXT.html>
+    #[deprecated = "Backwards-compatible alias containing a typo, use `set_debug_utils_object_name()` instead"]
+    #[inline]
+    pub unsafe fn debug_utils_set_object_name(
+        &self,
+        device: vk::Device,
+        name_info: &vk::DebugUtilsObjectNameInfoEXT,
+    ) -> VkResult<()> {
+        self.set_debug_utils_object_name(device, name_info)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkSetDebugUtilsObjectNameEXT.html>
+    #[inline]
+    pub unsafe fn set_debug_utils_object_name(
+        &self,
+        device: vk::Device,
+        name_info: &vk::DebugUtilsObjectNameInfoEXT,
+    ) -> VkResult<()> {
+        (self.fp.set_debug_utils_object_name_ext)(device, name_info).result()
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkSetDebugUtilsObjectTagEXT.html>
+    #[deprecated = "Backwards-compatible alias containing a typo, use `set_debug_utils_object_tag()` instead"]
+    #[inline]
+    pub unsafe fn debug_utils_set_object_tag(
+        &self,
+        device: vk::Device,
+        tag_info: &vk::DebugUtilsObjectTagInfoEXT,
+    ) -> VkResult<()> {
+        self.set_debug_utils_object_tag(device, tag_info)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkSetDebugUtilsObjectTagEXT.html>
+    #[inline]
+    pub unsafe fn set_debug_utils_object_tag(
+        &self,
+        device: vk::Device,
+        tag_info: &vk::DebugUtilsObjectTagInfoEXT,
+    ) -> VkResult<()> {
+        (self.fp.set_debug_utils_object_tag_ext)(device, tag_info).result()
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBeginDebugUtilsLabelEXT.html>
+    #[inline]
+    pub unsafe fn cmd_begin_debug_utils_label(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        label: &vk::DebugUtilsLabelEXT,
+    ) {
+        (self.fp.cmd_begin_debug_utils_label_ext)(command_buffer, label);
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdEndDebugUtilsLabelEXT.html>
+    #[inline]
+    pub unsafe fn cmd_end_debug_utils_label(&self, command_buffer: vk::CommandBuffer) {
+        (self.fp.cmd_end_debug_utils_label_ext)(command_buffer);
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdInsertDebugUtilsLabelEXT.html>
+    #[inline]
+    pub unsafe fn cmd_insert_debug_utils_label(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        label: &vk::DebugUtilsLabelEXT,
+    ) {
+        (self.fp.cmd_insert_debug_utils_label_ext)(command_buffer, label);
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkQueueBeginDebugUtilsLabelEXT.html>
+    #[inline]
+    pub unsafe fn queue_begin_debug_utils_label(
+        &self,
+        queue: vk::Queue,
+        label: &vk::DebugUtilsLabelEXT,
+    ) {
+        (self.fp.queue_begin_debug_utils_label_ext)(queue, label);
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkQueueEndDebugUtilsLabelEXT.html>
+    #[inline]
+    pub unsafe fn queue_end_debug_utils_label(&self, queue: vk::Queue) {
+        (self.fp.queue_end_debug_utils_label_ext)(queue);
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkQueueInsertDebugUtilsLabelEXT.html>
+    #[inline]
+    pub unsafe fn queue_insert_debug_utils_label(
+        &self,
+        queue: vk::Queue,
+        label: &vk::DebugUtilsLabelEXT,
+    ) {
+        (self.fp.queue_insert_debug_utils_label_ext)(queue, label);
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateDebugUtilsMessengerEXT.html>
+    #[inline]
+    pub unsafe fn create_debug_utils_messenger(
+        &self,
+        create_info: &vk::DebugUtilsMessengerCreateInfoEXT,
+        allocator: Option<&vk::AllocationCallbacks>,
+    ) -> VkResult<vk::DebugUtilsMessengerEXT> {
+        // Handles are plain integers/pointers, so a zeroed value is a valid
+        // placeholder that the driver overwrites on success.
+        let mut messenger = mem::zeroed();
+        (self.fp.create_debug_utils_messenger_ext)(
+            self.handle,
+            create_info,
+            allocator.as_raw_ptr(),
+            &mut messenger,
+        )
+        .result_with_success(messenger)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyDebugUtilsMessengerEXT.html>
+    #[inline]
+    pub unsafe fn destroy_debug_utils_messenger(
+        &self,
+        messenger: vk::DebugUtilsMessengerEXT,
+        allocator: Option<&vk::AllocationCallbacks>,
+    ) {
+        (self.fp.destroy_debug_utils_messenger_ext)(self.handle, messenger, allocator.as_raw_ptr());
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkSubmitDebugUtilsMessageEXT.html>
+    #[inline]
+    pub unsafe fn submit_debug_utils_message(
+        &self,
+        message_severity: vk::DebugUtilsMessageSeverityFlagsEXT,
+        message_types: vk::DebugUtilsMessageTypeFlagsEXT,
+        callback_data: &vk::DebugUtilsMessengerCallbackDataEXT,
+    ) {
+        (self.fp.submit_debug_utils_message_ext)(
+            self.handle,
+            message_severity,
+            message_types,
+            callback_data,
+        );
+    }
+
+    #[inline]
+    pub const fn name() -> &'static CStr {
+        vk::ExtDebugUtilsFn::name()
+    }
+
+    #[inline]
+    pub fn fp(&self) -> &vk::ExtDebugUtilsFn {
+        &self.fp
+    }
+
+    #[inline]
+    pub fn instance(&self) -> vk::Instance {
+        self.handle
+    }
+}
diff --git a/third_party/rust/ash/src/extensions/ext/descriptor_buffer.rs b/third_party/rust/ash/src/extensions/ext/descriptor_buffer.rs
new file mode 100644
index 0000000000..4503f5bdab
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/descriptor_buffer.rs
@@ -0,0 +1,216 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_EXT_descriptor_buffer.html>
+#[derive(Clone)]
+pub struct DescriptorBuffer {
+    handle: vk::Device,
+    fp: vk::ExtDescriptorBufferFn,
+}
+
+impl DescriptorBuffer {
+    pub fn new(instance: &Instance, device: &Device) -> Self {
+        let handle = device.handle();
+        // Device-level extension: entry points come from vkGetDeviceProcAddr
+        // and are null unless VK_EXT_descriptor_buffer was enabled on `device`.
+        let fp = vk::ExtDescriptorBufferFn::load(|name| unsafe {
+            mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+        });
+        Self { handle, fp }
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDescriptorSetLayoutSizeEXT.html>
+    #[inline]
+    pub unsafe fn get_descriptor_set_layout_size(
+        &self,
+        layout: vk::DescriptorSetLayout,
+    ) -> vk::DeviceSize {
+        let mut count = 0;
+        (self.fp.get_descriptor_set_layout_size_ext)(self.handle, layout, &mut count);
+        count
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDescriptorSetLayoutBindingOffsetEXT.html>
+    #[inline]
+    pub unsafe fn get_descriptor_set_layout_binding_offset(
+        &self,
+        layout: vk::DescriptorSetLayout,
+        binding: u32,
+    ) -> vk::DeviceSize {
+        let mut offset = 0;
+        (self.fp.get_descriptor_set_layout_binding_offset_ext)(
+            self.handle,
+            layout,
+            binding,
+            &mut offset,
+        );
+        offset
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDescriptorEXT.html>
+    #[inline]
+    pub unsafe fn get_descriptor(
+        &self,
+        descriptor_info: &vk::DescriptorGetInfoEXT,
+        descriptor: &mut [u8],
+    ) {
+        // NOTE(review): `descriptor.len()` is passed as dataSize and must match
+        // the size for this descriptor type from the device's descriptor-buffer
+        // properties — confirm at the call site.
+        (self.fp.get_descriptor_ext)(
+            self.handle,
+            descriptor_info,
+            descriptor.len(),
+            descriptor.as_mut_ptr().cast(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBindDescriptorBuffersEXT.html>
+    #[inline]
+    pub unsafe fn cmd_bind_descriptor_buffers(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        binding_info: &[vk::DescriptorBufferBindingInfoEXT],
+    ) {
+        (self.fp.cmd_bind_descriptor_buffers_ext)(
+            command_buffer,
+            binding_info.len() as u32,
+            binding_info.as_ptr(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDescriptorBufferOffsetsEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_descriptor_buffer_offsets(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        pipeline_bind_point: vk::PipelineBindPoint,
+        layout: vk::PipelineLayout,
+        first_set: u32,
+        buffer_indices: &[u32],
+        offsets: &[vk::DeviceSize],
+    ) {
+        assert_eq!(buffer_indices.len(), offsets.len());
+        (self.fp.cmd_set_descriptor_buffer_offsets_ext)(
+            command_buffer,
+            pipeline_bind_point,
+            layout,
+            first_set,
+            buffer_indices.len() as u32,
+            buffer_indices.as_ptr(),
+            offsets.as_ptr(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBindDescriptorBufferEmbeddedSamplersEXT.html>
+    #[inline]
+    pub unsafe fn cmd_bind_descriptor_buffer_embedded_samplers(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        pipeline_bind_point: vk::PipelineBindPoint,
+        layout: vk::PipelineLayout,
+        set: u32,
+    ) {
+        (self.fp.cmd_bind_descriptor_buffer_embedded_samplers_ext)(
+            command_buffer,
+            pipeline_bind_point,
+            layout,
+            set,
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetBufferOpaqueCaptureDescriptorDataEXT.html>
+    #[inline]
+    pub unsafe fn get_buffer_opaque_capture_descriptor_data(
+        &self,
+        info: &vk::BufferCaptureDescriptorDataInfoEXT,
+        data: &mut [u8],
+    ) -> VkResult<()> {
+        (self.fp.get_buffer_opaque_capture_descriptor_data_ext)(
+            self.handle,
+            info,
+            data.as_mut_ptr().cast(),
+        )
+        .result()
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetImageOpaqueCaptureDescriptorDataEXT.html>
+    #[inline]
+    pub unsafe fn get_image_opaque_capture_descriptor_data(
+        &self,
+        info: &vk::ImageCaptureDescriptorDataInfoEXT,
+        data: &mut [u8],
+    ) -> VkResult<()> {
+        (self.fp.get_image_opaque_capture_descriptor_data_ext)(
+            self.handle,
+            info,
+            data.as_mut_ptr().cast(),
+        )
+        .result()
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetImageViewOpaqueCaptureDescriptorDataEXT.html>
+    #[inline]
+    pub unsafe fn get_image_view_opaque_capture_descriptor_data(
+        &self,
+        info: &vk::ImageViewCaptureDescriptorDataInfoEXT,
+        data: &mut [u8],
+    ) -> VkResult<()> {
+        (self.fp.get_image_view_opaque_capture_descriptor_data_ext)(
+            self.handle,
+            info,
+            data.as_mut_ptr().cast(),
+        )
+        .result()
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetSamplerOpaqueCaptureDescriptorDataEXT.html>
+    #[inline]
+    pub unsafe fn get_sampler_opaque_capture_descriptor_data(
+        &self,
+        info: &vk::SamplerCaptureDescriptorDataInfoEXT,
+        data: &mut [u8],
+    ) -> VkResult<()> {
+        (self.fp.get_sampler_opaque_capture_descriptor_data_ext)(
+            self.handle,
+            info,
+            data.as_mut_ptr().cast(),
+        )
+        .result()
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT.html>
+    #[inline]
+    pub unsafe fn get_acceleration_structure_opaque_capture_descriptor_data(
+        &self,
+        info: &vk::AccelerationStructureCaptureDescriptorDataInfoEXT,
+        data: &mut [u8],
+    ) -> VkResult<()> {
+        (self
+            .fp
+            .get_acceleration_structure_opaque_capture_descriptor_data_ext)(
+            self.handle,
+            info,
+            data.as_mut_ptr().cast(),
+        )
+        .result()
+    }
+
+    #[inline]
+    pub const fn name() -> &'static CStr {
+        vk::ExtDescriptorBufferFn::name()
+    }
+
+    #[inline]
+    pub fn fp(&self) -> &vk::ExtDescriptorBufferFn {
+        &self.fp
+    }
+
+    #[inline]
+    pub fn device(&self) -> vk::Device {
+        self.handle
+    }
+}
diff --git a/third_party/rust/ash/src/extensions/ext/extended_dynamic_state.rs b/third_party/rust/ash/src/extensions/ext/extended_dynamic_state.rs
new file mode 100644
index 0000000000..2e6b3eeca3
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/extended_dynamic_state.rs
@@ -0,0 +1,200 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+use std::ptr;
+
+/// Wrapper for `VK_EXT_extended_dynamic_state`: sets pipeline state such as
+/// cull mode, front face, topology and depth/stencil tests dynamically.
+#[derive(Clone)]
+pub struct ExtendedDynamicState {
+    fp: vk::ExtExtendedDynamicStateFn,
+}
+
+impl ExtendedDynamicState {
+    pub fn new(instance: &Instance, device: &Device) -> Self {
+        let fp = vk::ExtExtendedDynamicStateFn::load(|name| unsafe {
+            mem::transmute(instance.get_device_proc_addr(device.handle(), name.as_ptr()))
+        });
+        Self { fp }
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetCullModeEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_cull_mode(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        cull_mode: vk::CullModeFlags,
+    ) {
+        (self.fp.cmd_set_cull_mode_ext)(command_buffer, cull_mode)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetFrontFaceEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_front_face(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        front_face: vk::FrontFace,
+    ) {
+        (self.fp.cmd_set_front_face_ext)(command_buffer, front_face)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetPrimitiveTopologyEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_primitive_topology(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        primitive_topology: vk::PrimitiveTopology,
+    ) {
+        (self.fp.cmd_set_primitive_topology_ext)(command_buffer, primitive_topology)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetViewportWithCountEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_viewport_with_count(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        viewports: &[vk::Viewport],
+    ) {
+        (self.fp.cmd_set_viewport_with_count_ext)(
+            command_buffer,
+            viewports.len() as u32,
+            viewports.as_ptr(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetScissorWithCountEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_scissor_with_count(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        scissors: &[vk::Rect2D],
+    ) {
+        (self.fp.cmd_set_scissor_with_count_ext)(
+            command_buffer,
+            scissors.len() as u32,
+            scissors.as_ptr(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBindVertexBuffers2EXT.html>
+    #[inline]
+    pub unsafe fn cmd_bind_vertex_buffers2(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        first_binding: u32,
+        buffers: &[vk::Buffer],
+        offsets: &[vk::DeviceSize],
+        sizes: Option<&[vk::DeviceSize]>,
+        strides: Option<&[vk::DeviceSize]>,
+    ) {
+        assert_eq!(offsets.len(), buffers.len());
+        // `None` becomes a null pointer: per the spec, null pSizes means "bind
+        // to the end of the buffer" and null pStrides uses pipeline strides.
+        let p_sizes = if let Some(sizes) = sizes {
+            assert_eq!(sizes.len(), buffers.len());
+            sizes.as_ptr()
+        } else {
+            ptr::null()
+        };
+        let p_strides = if let Some(strides) = strides {
+            assert_eq!(strides.len(), buffers.len());
+            strides.as_ptr()
+        } else {
+            ptr::null()
+        };
+        (self.fp.cmd_bind_vertex_buffers2_ext)(
+            command_buffer,
+            first_binding,
+            buffers.len() as u32,
+            buffers.as_ptr(),
+            offsets.as_ptr(),
+            p_sizes,
+            p_strides,
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthTestEnableEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_depth_test_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        depth_test_enable: bool,
+    ) {
+        (self.fp.cmd_set_depth_test_enable_ext)(command_buffer, depth_test_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthWriteEnableEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_depth_write_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        depth_write_enable: bool,
+    ) {
+        (self.fp.cmd_set_depth_write_enable_ext)(command_buffer, depth_write_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthCompareOpEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_depth_compare_op(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        depth_compare_op: vk::CompareOp,
+    ) {
+        (self.fp.cmd_set_depth_compare_op_ext)(command_buffer, depth_compare_op)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthBoundsTestEnableEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_depth_bounds_test_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        depth_bounds_test_enable: bool,
+    ) {
+        (self.fp.cmd_set_depth_bounds_test_enable_ext)(
+            command_buffer,
+            depth_bounds_test_enable.into(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilTestEnableEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_stencil_test_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        stencil_test_enable: bool,
+    ) {
+        (self.fp.cmd_set_stencil_test_enable_ext)(command_buffer, stencil_test_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilOpEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_stencil_op(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        face_mask: vk::StencilFaceFlags,
+        fail_op: vk::StencilOp,
+        pass_op: vk::StencilOp,
+        depth_fail_op: vk::StencilOp,
+        compare_op: vk::CompareOp,
+    ) {
+        (self.fp.cmd_set_stencil_op_ext)(
+            command_buffer,
+            face_mask,
+            fail_op,
+            pass_op,
+            depth_fail_op,
+            compare_op,
+        )
+    }
+
+    #[inline]
+    pub const fn name() -> &'static CStr {
+        vk::ExtExtendedDynamicStateFn::name()
+    }
+
+    #[inline]
+    pub fn fp(&self) -> &vk::ExtExtendedDynamicStateFn {
+        &self.fp
+    }
+}
diff --git a/third_party/rust/ash/src/extensions/ext/extended_dynamic_state2.rs b/third_party/rust/ash/src/extensions/ext/extended_dynamic_state2.rs
new file mode 100644
index 0000000000..b515385930
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/extended_dynamic_state2.rs
@@ -0,0 +1,88 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_EXT_extended_dynamic_state2.html>
+#[derive(Clone)]
+pub struct ExtendedDynamicState2 {
+    fp: vk::ExtExtendedDynamicState2Fn,
+}
+
+impl ExtendedDynamicState2 {
+    pub fn new(instance: &Instance, device: &Device) -> Self {
+        // Device-level extension: the transmute casts PFN_vkVoidFunction to
+        // the typed pointer; entries are null if the extension is not enabled.
+        let fp = vk::ExtExtendedDynamicState2Fn::load(|name| unsafe {
+            mem::transmute(instance.get_device_proc_addr(device.handle(), name.as_ptr()))
+        });
+        Self { fp }
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetPatchControlPointsEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_patch_control_points(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        patch_control_points: u32,
+    ) {
+        (self.fp.cmd_set_patch_control_points_ext)(command_buffer, patch_control_points)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetRasterizerDiscardEnableEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_rasterizer_discard_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        rasterizer_discard_enable: bool,
+    ) {
+        // `.into()` widens the Rust `bool` to the FFI-compatible `vk::Bool32`.
+        (self.fp.cmd_set_rasterizer_discard_enable_ext)(
+            command_buffer,
+            rasterizer_discard_enable.into(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthBiasEnableEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_depth_bias_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        depth_bias_enable: bool,
+    ) {
+        (self.fp.cmd_set_depth_bias_enable_ext)(command_buffer, depth_bias_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetLogicOpEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_logic_op(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        logic_op: vk::LogicOp,
+    ) {
+        (self.fp.cmd_set_logic_op_ext)(command_buffer, logic_op)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetPrimitiveRestartEnableEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_primitive_restart_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        primitive_restart_enable: bool,
+    ) {
+        (self.fp.cmd_set_primitive_restart_enable_ext)(
+            command_buffer,
+            primitive_restart_enable.into(),
+        )
+    }
+
+    #[inline]
+    pub const fn name() -> &'static CStr {
+        vk::ExtExtendedDynamicState2Fn::name()
+    }
+
+    #[inline]
+    pub fn fp(&self) -> &vk::ExtExtendedDynamicState2Fn {
+        &self.fp
+    }
+}
diff --git a/third_party/rust/ash/src/extensions/ext/extended_dynamic_state3.rs b/third_party/rust/ash/src/extensions/ext/extended_dynamic_state3.rs
new file mode 100644
index 0000000000..bb68f75f3e
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/extended_dynamic_state3.rs
@@ -0,0 +1,414 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_EXT_extended_dynamic_state3.html>
+#[derive(Clone)]
+pub struct ExtendedDynamicState3 {
+    fp: vk::ExtExtendedDynamicState3Fn,
+}
+
+impl ExtendedDynamicState3 {
+    pub fn new(instance: &Instance, device: &Device) -> Self {
+        // Device-level extension: the transmute casts PFN_vkVoidFunction to
+        // the typed pointer; entries are null if the extension is not enabled.
+        let fp = vk::ExtExtendedDynamicState3Fn::load(|name| unsafe {
+            mem::transmute(instance.get_device_proc_addr(device.handle(), name.as_ptr()))
+        });
+        Self { fp }
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetTessellationDomainOriginEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_tessellation_domain_origin(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        domain_origin: vk::TessellationDomainOrigin,
+    ) {
+        (self.fp.cmd_set_tessellation_domain_origin_ext)(command_buffer, domain_origin)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthClampEnableEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_depth_clamp_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        depth_clamp_enable: bool,
+    ) {
+        (self.fp.cmd_set_depth_clamp_enable_ext)(command_buffer, depth_clamp_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetPolygonModeEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_polygon_mode(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        polygon_mode: vk::PolygonMode,
+    ) {
+        (self.fp.cmd_set_polygon_mode_ext)(command_buffer, polygon_mode)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetRasterizationSamplesEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_rasterization_samples(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        rasterization_samples: vk::SampleCountFlags,
+    ) {
+        (self.fp.cmd_set_rasterization_samples_ext)(command_buffer, rasterization_samples)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetSampleMaskEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_sample_mask(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        samples: vk::SampleCountFlags,
+        sample_mask: &[vk::SampleMask],
+    ) {
+        assert!(
+            samples.as_raw().is_power_of_two(),
+            "Only one SampleCount bit must be set"
+        );
+        // The mask carries one bit per sample, packed into 32-bit words, so
+        // its length is `ceil(samples / 32)` (plain `samples / 32` would
+        // demand an empty mask for sample counts below 32).
+        assert_eq!((samples.as_raw() as usize + 31) / 32, sample_mask.len());
+        (self.fp.cmd_set_sample_mask_ext)(command_buffer, samples, sample_mask.as_ptr())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetAlphaToCoverageEnableEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_alpha_to_coverage_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        alpha_to_coverage_enable: bool,
+    ) {
+        (self.fp.cmd_set_alpha_to_coverage_enable_ext)(
+            command_buffer,
+            alpha_to_coverage_enable.into(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetAlphaToOneEnableEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_alpha_to_one_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        alpha_to_one_enable: bool,
+    ) {
+        (self.fp.cmd_set_alpha_to_one_enable_ext)(command_buffer, alpha_to_one_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetLogicOpEnableEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_logic_op_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        logic_op_enable: bool,
+    ) {
+        (self.fp.cmd_set_logic_op_enable_ext)(command_buffer, logic_op_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetColorBlendEnableEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_color_blend_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        first_attachment: u32,
+        color_blend_enables: &[vk::Bool32],
+    ) {
+        (self.fp.cmd_set_color_blend_enable_ext)(
+            command_buffer,
+            first_attachment,
+            color_blend_enables.len() as u32,
+            color_blend_enables.as_ptr(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetColorBlendEquationEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_color_blend_equation(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        first_attachment: u32,
+        color_blend_equations: &[vk::ColorBlendEquationEXT],
+    ) {
+        (self.fp.cmd_set_color_blend_equation_ext)(
+            command_buffer,
+            first_attachment,
+            color_blend_equations.len() as u32,
+            color_blend_equations.as_ptr(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetColorWriteMaskEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_color_write_mask(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        first_attachment: u32,
+        color_write_masks: &[vk::ColorComponentFlags],
+    ) {
+        (self.fp.cmd_set_color_write_mask_ext)(
+            command_buffer,
+            first_attachment,
+            color_write_masks.len() as u32,
+            color_write_masks.as_ptr(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetRasterizationStreamEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_rasterization_stream(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        rasterization_stream: u32,
+    ) {
+        (self.fp.cmd_set_rasterization_stream_ext)(command_buffer, rasterization_stream)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetConservativeRasterizationModeEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_conservative_rasterization_mode(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        conservative_rasterization_mode: vk::ConservativeRasterizationModeEXT,
+    ) {
+        (self.fp.cmd_set_conservative_rasterization_mode_ext)(
+            command_buffer,
+            conservative_rasterization_mode,
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetExtraPrimitiveOverestimationSizeEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_extra_primitive_overestimation_size(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        extra_primitive_overestimation_size: f32,
+    ) {
+        (self.fp.cmd_set_extra_primitive_overestimation_size_ext)(
+            command_buffer,
+            extra_primitive_overestimation_size,
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthClipEnableEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_depth_clip_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        depth_clip_enable: bool,
+    ) {
+        (self.fp.cmd_set_depth_clip_enable_ext)(command_buffer, depth_clip_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetSampleLocationsEnableEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_sample_locations_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        sample_locations_enable: bool,
+    ) {
+        (self.fp.cmd_set_sample_locations_enable_ext)(
+            command_buffer,
+            sample_locations_enable.into(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetColorBlendAdvancedEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_color_blend_advanced(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        first_attachment: u32,
+        color_blend_advanced: &[vk::ColorBlendAdvancedEXT],
+    ) {
+        (self.fp.cmd_set_color_blend_advanced_ext)(
+            command_buffer,
+            first_attachment,
+            color_blend_advanced.len() as u32,
+            color_blend_advanced.as_ptr(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetProvokingVertexModeEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_provoking_vertex_mode(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        provoking_vertex_mode: vk::ProvokingVertexModeEXT,
+    ) {
+        (self.fp.cmd_set_provoking_vertex_mode_ext)(command_buffer, provoking_vertex_mode)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetLineRasterizationModeEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_line_rasterization_mode(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        line_rasterization_mode: vk::LineRasterizationModeEXT,
+    ) {
+        (self.fp.cmd_set_line_rasterization_mode_ext)(command_buffer, line_rasterization_mode)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetLineStippleEnableEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_line_stipple_enable(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        stippled_line_enable: bool,
+    ) {
+        (self.fp.cmd_set_line_stipple_enable_ext)(command_buffer, stippled_line_enable.into())
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthClipNegativeOneToOneEXT.html>
+    #[inline]
+    pub unsafe fn cmd_set_depth_clip_negative_one_to_one(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        negative_one_to_one: bool,
+    ) {
+        (self.fp.cmd_set_depth_clip_negative_one_to_one_ext)(
+            command_buffer,
+            negative_one_to_one.into(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetViewportWScalingEnableNV.html>
+    #[inline]
+    pub unsafe fn cmd_set_viewport_w_scaling_enable_nv(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        viewport_w_scaling_enable: bool,
+    ) {
+        (self.fp.cmd_set_viewport_w_scaling_enable_nv)(
+            command_buffer,
+            viewport_w_scaling_enable.into(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetViewportSwizzleNV.html>
+    #[inline]
+    pub unsafe fn cmd_set_viewport_swizzle_nv(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        first_attachment: u32,
+        viewport_swizzles: &[vk::ViewportSwizzleNV],
+    ) {
+        (self.fp.cmd_set_viewport_swizzle_nv)(
+            command_buffer,
+            first_attachment,
+            viewport_swizzles.len() as u32,
+            viewport_swizzles.as_ptr(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetCoverageToColorEnableNV.html>
+    #[inline]
+    pub unsafe fn cmd_set_coverage_to_color_enable_nv(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        coverage_to_color_enable: bool,
+    ) {
+        (self.fp.cmd_set_coverage_to_color_enable_nv)(
+            command_buffer,
+            coverage_to_color_enable.into(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetCoverageToColorLocationNV.html>
+    #[inline]
+    pub unsafe fn cmd_set_coverage_to_color_location_nv(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        coverage_to_color_location: u32,
+    ) {
+        (self.fp.cmd_set_coverage_to_color_location_nv)(command_buffer, coverage_to_color_location)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetCoverageModulationModeNV.html>
+    #[inline]
+    pub unsafe fn cmd_set_coverage_modulation_mode_nv(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        coverage_modulation_mode: vk::CoverageModulationModeNV,
+    ) {
+        (self.fp.cmd_set_coverage_modulation_mode_nv)(command_buffer, coverage_modulation_mode)
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetCoverageModulationTableEnableNV.html>
+    #[inline]
+    pub unsafe fn cmd_set_coverage_modulation_table_enable_nv(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        coverage_modulation_table_enable: bool,
+    ) {
+        (self.fp.cmd_set_coverage_modulation_table_enable_nv)(
+            command_buffer,
+            coverage_modulation_table_enable.into(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetCoverageModulationTableNV.html>
+    #[inline]
+    pub unsafe fn cmd_set_coverage_modulation_table_nv(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        coverage_modulation_table: &[f32],
+    ) {
+        (self.fp.cmd_set_coverage_modulation_table_nv)(
+            command_buffer,
+            coverage_modulation_table.len() as u32,
+            coverage_modulation_table.as_ptr(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetShadingRateImageEnableNV.html>
+    #[inline]
+    pub unsafe fn cmd_set_shading_rate_image_enable_nv(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        shading_rate_image_enable: bool,
+    ) {
+        (self.fp.cmd_set_shading_rate_image_enable_nv)(
+            command_buffer,
+            shading_rate_image_enable.into(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetRepresentativeFragmentTestEnableNV.html>
+    #[inline]
+    pub unsafe fn cmd_set_representative_fragment_test_enable_nv(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        representative_fragment_test_enable: bool,
+    ) {
+        (self.fp.cmd_set_representative_fragment_test_enable_nv)(
+            command_buffer,
+            representative_fragment_test_enable.into(),
+        )
+    }
+
+    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetCoverageReductionModeNV.html>
+    #[inline]
+    pub unsafe fn cmd_set_coverage_reduction_mode_nv(
+        &self,
+        command_buffer: vk::CommandBuffer,
+        coverage_reduction_mode: vk::CoverageReductionModeNV,
+    ) {
+        (self.fp.cmd_set_coverage_reduction_mode_nv)(command_buffer, coverage_reduction_mode)
+    }
+
+    #[inline]
+    pub const fn name() -> &'static CStr {
+        vk::ExtExtendedDynamicState3Fn::name()
+    }
+
+    #[inline]
+    pub fn fp(&self) -> &vk::ExtExtendedDynamicState3Fn {
+        &self.fp
+    }
+}
diff --git a/third_party/rust/ash/src/extensions/ext/full_screen_exclusive.rs b/third_party/rust/ash/src/extensions/ext/full_screen_exclusive.rs
new file mode 100644
index 0000000000..3cb73cdaeb
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/full_screen_exclusive.rs
@@ -0,0 +1,86 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
/// Wrapper for the `VK_EXT_full_screen_exclusive` device extension's entry points.
#[derive(Clone)]
pub struct FullScreenExclusive {
    // Device the function pointers were loaded for; passed as the first
    // argument to the device-level entry points below.
    handle: vk::Device,
    fp: vk::ExtFullScreenExclusiveFn,
}

impl FullScreenExclusive {
    /// Resolves the extension's function pointers for `device` through
    /// `vkGetDeviceProcAddr`.
    pub fn new(instance: &Instance, device: &Device) -> Self {
        let handle = device.handle();
        let fp = vk::ExtFullScreenExclusiveFn::load(|name| unsafe {
            // Transmute the generic void-fn pointer returned by the loader
            // into the concrete typed entry point.
            mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
        });
        Self { handle, fp }
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkAcquireFullScreenExclusiveModeEXT.html>
    #[inline]
    pub unsafe fn acquire_full_screen_exclusive_mode(
        &self,
        swapchain: vk::SwapchainKHR,
    ) -> VkResult<()> {
        (self.fp.acquire_full_screen_exclusive_mode_ext)(self.handle, swapchain).result()
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceSurfacePresentModes2EXT.html>
    ///
    /// Uses the standard two-call enumeration pattern (count query, then
    /// fill), handled by `read_into_uninitialized_vector`.
    #[inline]
    pub unsafe fn get_physical_device_surface_present_modes2(
        &self,
        physical_device: vk::PhysicalDevice,
        surface_info: &vk::PhysicalDeviceSurfaceInfo2KHR,
    ) -> VkResult<Vec<vk::PresentModeKHR>> {
        read_into_uninitialized_vector(|count, data| {
            (self.fp.get_physical_device_surface_present_modes2_ext)(
                physical_device,
                surface_info,
                count,
                data,
            )
        })
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkReleaseFullScreenExclusiveModeEXT.html>
    #[inline]
    pub unsafe fn release_full_screen_exclusive_mode(
        &self,
        swapchain: vk::SwapchainKHR,
    ) -> VkResult<()> {
        (self.fp.release_full_screen_exclusive_mode_ext)(self.handle, swapchain).result()
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceGroupSurfacePresentModes2EXT.html>
    #[inline]
    pub unsafe fn get_device_group_surface_present_modes2(
        &self,
        surface_info: &vk::PhysicalDeviceSurfaceInfo2KHR,
    ) -> VkResult<vk::DeviceGroupPresentModeFlagsKHR> {
        // Output parameter; the zeroed value is a placeholder overwritten by
        // the driver on success.
        let mut present_modes = mem::zeroed();
        (self.fp.get_device_group_surface_present_modes2_ext)(
            self.handle,
            surface_info,
            &mut present_modes,
        )
        .result_with_success(present_modes)
    }

    /// The canonical name of the `VK_EXT_full_screen_exclusive` extension.
    #[inline]
    pub const fn name() -> &'static CStr {
        vk::ExtFullScreenExclusiveFn::name()
    }

    /// Raw access to the loaded function-pointer table.
    #[inline]
    pub fn fp(&self) -> &vk::ExtFullScreenExclusiveFn {
        &self.fp
    }

    /// The [`vk::Device`] these function pointers were loaded for.
    #[inline]
    pub fn device(&self) -> vk::Device {
        self.handle
    }
}
diff --git a/third_party/rust/ash/src/extensions/ext/headless_surface.rs b/third_party/rust/ash/src/extensions/ext/headless_surface.rs
new file mode 100644
index 0000000000..fe04236aaf
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/headless_surface.rs
@@ -0,0 +1,55 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_EXT_headless_surface.html>
+#[derive(Clone)]
+pub struct HeadlessSurface {
+ handle: vk::Instance,
+ fp: vk::ExtHeadlessSurfaceFn,
+}
+
+impl HeadlessSurface {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let handle = instance.handle();
+ let fp = vk::ExtHeadlessSurfaceFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateHeadlessSurfaceEXT.html>
+ #[inline]
+ pub unsafe fn create_headless_surface(
+ &self,
+ create_info: &vk::HeadlessSurfaceCreateInfoEXT,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::SurfaceKHR> {
+ let mut surface = mem::zeroed();
+ (self.fp.create_headless_surface_ext)(
+ self.handle,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut surface,
+ )
+ .result_with_success(surface)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::ExtHeadlessSurfaceFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::ExtHeadlessSurfaceFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn instance(&self) -> vk::Instance {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/ext/image_compression_control.rs b/third_party/rust/ash/src/extensions/ext/image_compression_control.rs
new file mode 100644
index 0000000000..a326116973
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/image_compression_control.rs
@@ -0,0 +1,47 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_EXT_image_compression_control.html>
+#[derive(Clone)]
+pub struct ImageCompressionControl {
+ handle: vk::Device,
+ fp: vk::ExtImageCompressionControlFn,
+}
+
+impl ImageCompressionControl {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::ExtImageCompressionControlFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetImageSubresourceLayout2EXT.html>
+ #[inline]
+ pub unsafe fn get_image_subresource_layout2(
+ &self,
+ image: vk::Image,
+ subresource: &vk::ImageSubresource2EXT,
+ layout: &mut vk::SubresourceLayout2EXT,
+ ) {
+ (self.fp.get_image_subresource_layout2_ext)(self.handle, image, subresource, layout)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::ExtImageCompressionControlFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::ExtImageCompressionControlFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/ext/image_drm_format_modifier.rs b/third_party/rust/ash/src/extensions/ext/image_drm_format_modifier.rs
new file mode 100644
index 0000000000..ccdce2eb5e
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/image_drm_format_modifier.rs
@@ -0,0 +1,48 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_EXT_image_drm_format_modifier.html>
+#[derive(Clone)]
+pub struct ImageDrmFormatModifier {
+ handle: vk::Device,
+ fp: vk::ExtImageDrmFormatModifierFn,
+}
+
+impl ImageDrmFormatModifier {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::ExtImageDrmFormatModifierFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetImageDrmFormatModifierPropertiesEXT.html>
+ #[inline]
+ pub unsafe fn get_image_drm_format_modifier_properties(
+ &self,
+ image: vk::Image,
+ properties: &mut vk::ImageDrmFormatModifierPropertiesEXT,
+ ) -> VkResult<()> {
+ (self.fp.get_image_drm_format_modifier_properties_ext)(self.handle, image, properties)
+ .result()
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::ExtImageDrmFormatModifierFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::ExtImageDrmFormatModifierFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/ext/mesh_shader.rs b/third_party/rust/ash/src/extensions/ext/mesh_shader.rs
new file mode 100644
index 0000000000..7d58f0c135
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/mesh_shader.rs
@@ -0,0 +1,94 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
/// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VK_EXT_mesh_shader.html>
#[derive(Clone)]
pub struct MeshShader {
    // Only the function-pointer table is kept: every entry point below takes
    // a command buffer rather than the device handle.
    fp: vk::ExtMeshShaderFn,
}

impl MeshShader {
    /// Resolves the extension's function pointers for `device` through
    /// `vkGetDeviceProcAddr`.
    pub fn new(instance: &Instance, device: &Device) -> Self {
        let fp = vk::ExtMeshShaderFn::load(|name| unsafe {
            mem::transmute(instance.get_device_proc_addr(device.handle(), name.as_ptr()))
        });
        Self { fp }
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDrawMeshTasksEXT.html>
    #[inline]
    pub unsafe fn cmd_draw_mesh_tasks(
        &self,
        command_buffer: vk::CommandBuffer,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        (self.fp.cmd_draw_mesh_tasks_ext)(
            command_buffer,
            group_count_x,
            group_count_y,
            group_count_z,
        );
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDrawMeshTasksIndirectEXT.html>
    ///
    /// `buffer` contains `draw_count` [`vk::DrawMeshTasksIndirectCommandEXT`] structures starting at `offset` in bytes, holding the draw parameters.
    #[inline]
    pub unsafe fn cmd_draw_mesh_tasks_indirect(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        draw_count: u32,
        stride: u32,
    ) {
        (self.fp.cmd_draw_mesh_tasks_indirect_ext)(
            command_buffer,
            buffer,
            offset,
            draw_count,
            stride,
        );
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDrawMeshTasksIndirectCountEXT.html>
    ///
    /// `buffer` contains a maximum of `max_draw_count` [`vk::DrawMeshTasksIndirectCommandEXT`] structures starting at `offset` in bytes, holding the draw parameters.
    /// `count_buffer` is the buffer containing the draw count, starting at `count_buffer_offset` in bytes.
    /// The actual number of executed draw calls is the minimum of the count specified in `count_buffer` and `max_draw_count`.
    #[inline]
    pub unsafe fn cmd_draw_mesh_tasks_indirect_count(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        count_buffer: vk::Buffer,
        count_buffer_offset: vk::DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        (self.fp.cmd_draw_mesh_tasks_indirect_count_ext)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }

    /// The canonical name of the `VK_EXT_mesh_shader` extension.
    #[inline]
    pub const fn name() -> &'static CStr {
        vk::ExtMeshShaderFn::name()
    }

    /// Raw access to the loaded function-pointer table.
    #[inline]
    pub fn fp(&self) -> &vk::ExtMeshShaderFn {
        &self.fp
    }
}
diff --git a/third_party/rust/ash/src/extensions/ext/metal_surface.rs b/third_party/rust/ash/src/extensions/ext/metal_surface.rs
new file mode 100644
index 0000000000..9627d2f510
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/metal_surface.rs
@@ -0,0 +1,54 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
/// Wrapper for the `VK_EXT_metal_surface` instance extension's entry points.
#[derive(Clone)]
pub struct MetalSurface {
    // Instance the function pointers were loaded for; passed as the first
    // argument to the instance-level entry point below.
    handle: vk::Instance,
    fp: vk::ExtMetalSurfaceFn,
}

impl MetalSurface {
    /// Resolves the extension's function pointers for `instance` through
    /// `vkGetInstanceProcAddr`.
    pub fn new(entry: &Entry, instance: &Instance) -> Self {
        let handle = instance.handle();
        let fp = vk::ExtMetalSurfaceFn::load(|name| unsafe {
            mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
        });
        Self { handle, fp }
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateMetalSurfaceEXT.html>
    #[inline]
    pub unsafe fn create_metal_surface(
        &self,
        create_info: &vk::MetalSurfaceCreateInfoEXT,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::SurfaceKHR> {
        // Output parameter; the zeroed handle is overwritten on success.
        let mut surface = mem::zeroed();
        (self.fp.create_metal_surface_ext)(
            self.handle,
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut surface,
        )
        .result_with_success(surface)
    }

    /// The canonical name of the `VK_EXT_metal_surface` extension.
    #[inline]
    pub const fn name() -> &'static CStr {
        vk::ExtMetalSurfaceFn::name()
    }

    /// Raw access to the loaded function-pointer table.
    #[inline]
    pub fn fp(&self) -> &vk::ExtMetalSurfaceFn {
        &self.fp
    }

    /// The [`vk::Instance`] these function pointers were loaded for.
    #[inline]
    pub fn instance(&self) -> vk::Instance {
        self.handle
    }
}
diff --git a/third_party/rust/ash/src/extensions/ext/mod.rs b/third_party/rust/ash/src/extensions/ext/mod.rs
new file mode 100644
index 0000000000..26a78f77d6
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/mod.rs
@@ -0,0 +1,45 @@
+pub use self::acquire_drm_display::AcquireDrmDisplay;
+pub use self::buffer_device_address::BufferDeviceAddress;
+pub use self::calibrated_timestamps::CalibratedTimestamps;
+#[allow(deprecated)]
+pub use self::debug_marker::DebugMarker;
+#[allow(deprecated)]
+pub use self::debug_report::DebugReport;
+pub use self::debug_utils::DebugUtils;
+pub use self::descriptor_buffer::DescriptorBuffer;
+pub use self::extended_dynamic_state::ExtendedDynamicState;
+pub use self::extended_dynamic_state2::ExtendedDynamicState2;
+pub use self::extended_dynamic_state3::ExtendedDynamicState3;
+pub use self::full_screen_exclusive::FullScreenExclusive;
+pub use self::headless_surface::HeadlessSurface;
+pub use self::image_compression_control::ImageCompressionControl;
+pub use self::image_drm_format_modifier::ImageDrmFormatModifier;
+pub use self::mesh_shader::MeshShader;
+pub use self::metal_surface::MetalSurface;
+pub use self::physical_device_drm::PhysicalDeviceDrm;
+pub use self::private_data::PrivateData;
+pub use self::sample_locations::SampleLocations;
+pub use self::tooling_info::ToolingInfo;
+
+mod acquire_drm_display;
+mod buffer_device_address;
+mod calibrated_timestamps;
+#[deprecated(note = "Please use the [DebugUtils](struct.DebugUtils.html) extension instead.")]
+mod debug_marker;
+#[deprecated(note = "Please use the [DebugUtils](struct.DebugUtils.html) extension instead.")]
+mod debug_report;
+mod debug_utils;
+mod descriptor_buffer;
+mod extended_dynamic_state;
+mod extended_dynamic_state2;
+mod extended_dynamic_state3;
+mod full_screen_exclusive;
+mod headless_surface;
+mod image_compression_control;
+mod image_drm_format_modifier;
+mod mesh_shader;
+mod metal_surface;
+mod physical_device_drm;
+mod private_data;
+mod sample_locations;
+mod tooling_info;
diff --git a/third_party/rust/ash/src/extensions/ext/physical_device_drm.rs b/third_party/rust/ash/src/extensions/ext/physical_device_drm.rs
new file mode 100644
index 0000000000..29b6cd7b32
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/physical_device_drm.rs
@@ -0,0 +1,26 @@
+use crate::vk;
+use crate::Instance;
+use std::ffi::CStr;
+
+#[derive(Clone)]
+pub struct PhysicalDeviceDrm;
+
+impl PhysicalDeviceDrm {
+ #[inline]
+ pub unsafe fn get_properties(
+ instance: &Instance,
+ pdevice: vk::PhysicalDevice,
+ ) -> vk::PhysicalDeviceDrmPropertiesEXT {
+ let mut props_drm = vk::PhysicalDeviceDrmPropertiesEXT::default();
+ {
+ let mut props = vk::PhysicalDeviceProperties2::builder().push_next(&mut props_drm);
+ instance.get_physical_device_properties2(pdevice, &mut props);
+ }
+ props_drm
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::ExtPhysicalDeviceDrmFn::name()
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/ext/private_data.rs b/third_party/rust/ash/src/extensions/ext/private_data.rs
new file mode 100644
index 0000000000..c6cc6aefb1
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/private_data.rs
@@ -0,0 +1,105 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_EXT_private_data.html>
#[derive(Clone)]
pub struct PrivateData {
    // Device the function pointers were loaded for; passed as the first
    // argument to every entry point below.
    handle: vk::Device,
    fp: vk::ExtPrivateDataFn,
}

impl PrivateData {
    /// Resolves the extension's function pointers for `device` through
    /// `vkGetDeviceProcAddr`.
    pub fn new(instance: &Instance, device: &Device) -> Self {
        let handle = device.handle();
        let fp = vk::ExtPrivateDataFn::load(|name| unsafe {
            mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
        });
        Self { handle, fp }
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreatePrivateDataSlotEXT.html>
    #[inline]
    pub unsafe fn create_private_data_slot(
        &self,
        create_info: &vk::PrivateDataSlotCreateInfoEXT,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::PrivateDataSlotEXT> {
        // Output parameter; the zeroed handle is overwritten on success.
        let mut private_data_slot = mem::zeroed();
        (self.fp.create_private_data_slot_ext)(
            self.handle,
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut private_data_slot,
        )
        .result_with_success(private_data_slot)
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyPrivateDataSlotEXT.html>
    #[inline]
    pub unsafe fn destroy_private_data_slot(
        &self,
        private_data_slot: vk::PrivateDataSlotEXT,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.fp.destroy_private_data_slot_ext)(
            self.handle,
            private_data_slot,
            allocation_callbacks.as_raw_ptr(),
        )
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkSetPrivateDataEXT.html>
    ///
    /// The object's Vulkan type is supplied via `T::TYPE` and its raw handle
    /// via `as_raw()`, so any [`vk::Handle`] can be tagged.
    #[inline]
    pub unsafe fn set_private_data<T: vk::Handle>(
        &self,
        object: T,
        private_data_slot: vk::PrivateDataSlotEXT,
        data: u64,
    ) -> VkResult<()> {
        (self.fp.set_private_data_ext)(
            self.handle,
            T::TYPE,
            object.as_raw(),
            private_data_slot,
            data,
        )
        .result()
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPrivateDataEXT.html>
    #[inline]
    pub unsafe fn get_private_data<T: vk::Handle>(
        &self,
        object: T,
        private_data_slot: vk::PrivateDataSlotEXT,
    ) -> u64 {
        // Output parameter; written by the driver before being returned.
        let mut data = mem::zeroed();
        (self.fp.get_private_data_ext)(
            self.handle,
            T::TYPE,
            object.as_raw(),
            private_data_slot,
            &mut data,
        );
        data
    }

    /// The canonical name of the `VK_EXT_private_data` extension.
    #[inline]
    pub const fn name() -> &'static CStr {
        vk::ExtPrivateDataFn::name()
    }

    /// Raw access to the loaded function-pointer table.
    #[inline]
    pub fn fp(&self) -> &vk::ExtPrivateDataFn {
        &self.fp
    }

    /// The [`vk::Device`] these function pointers were loaded for.
    #[inline]
    pub fn device(&self) -> vk::Device {
        self.handle
    }
}
diff --git a/third_party/rust/ash/src/extensions/ext/sample_locations.rs b/third_party/rust/ash/src/extensions/ext/sample_locations.rs
new file mode 100644
index 0000000000..360b83a681
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/sample_locations.rs
@@ -0,0 +1,54 @@
+use crate::vk;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_EXT_sample_locations.html>
+#[derive(Clone)]
+pub struct SampleLocations {
+ fp: vk::ExtSampleLocationsFn,
+}
+
+impl SampleLocations {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let fp = vk::ExtSampleLocationsFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(instance.handle(), name.as_ptr()))
+ });
+ Self { fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceMultisamplePropertiesEXT.html>
+ #[inline]
+ pub unsafe fn get_physical_device_multisample_properties(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ samples: vk::SampleCountFlags,
+ multisample_properties: &mut vk::MultisamplePropertiesEXT,
+ ) {
+ (self.fp.get_physical_device_multisample_properties_ext)(
+ physical_device,
+ samples,
+ multisample_properties,
+ )
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetSampleLocationsEXT.html>
+ #[inline]
+ pub unsafe fn cmd_set_sample_locations(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ sample_locations_info: &vk::SampleLocationsInfoEXT,
+ ) {
+ (self.fp.cmd_set_sample_locations_ext)(command_buffer, sample_locations_info)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::ExtSampleLocationsFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::ExtSampleLocationsFn {
+ &self.fp
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/ext/tooling_info.rs b/third_party/rust/ash/src/extensions/ext/tooling_info.rs
new file mode 100644
index 0000000000..fd4bf994e7
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/ext/tooling_info.rs
@@ -0,0 +1,40 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct ToolingInfo {
+ fp: vk::ExtToolingInfoFn,
+}
+
+impl ToolingInfo {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let fp = vk::ExtToolingInfoFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(instance.handle(), name.as_ptr()))
+ });
+ Self { fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceToolPropertiesEXT.html>
+ #[inline]
+ pub unsafe fn get_physical_device_tool_properties(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ ) -> VkResult<Vec<vk::PhysicalDeviceToolPropertiesEXT>> {
+ read_into_defaulted_vector(|count, data| {
+ (self.fp.get_physical_device_tool_properties_ext)(physical_device, count, data)
+ })
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::ExtToolingInfoFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::ExtToolingInfoFn {
+ &self.fp
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/acceleration_structure.rs b/third_party/rust/ash/src/extensions/khr/acceleration_structure.rs
new file mode 100644
index 0000000000..9772e9baf4
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/acceleration_structure.rs
@@ -0,0 +1,322 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
/// Wrapper for the `VK_KHR_acceleration_structure` device extension's entry points.
#[derive(Clone)]
pub struct AccelerationStructure {
    // Device the function pointers were loaded for; passed as the first
    // argument to the device-level entry points below.
    handle: vk::Device,
    fp: vk::KhrAccelerationStructureFn,
}

impl AccelerationStructure {
    /// Resolves the extension's function pointers for `device` through
    /// `vkGetDeviceProcAddr`.
    pub fn new(instance: &Instance, device: &Device) -> Self {
        let handle = device.handle();
        let fp = vk::KhrAccelerationStructureFn::load(|name| unsafe {
            mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
        });
        Self { handle, fp }
    }

    /// Queries [`vk::PhysicalDeviceAccelerationStructurePropertiesKHR`] by
    /// chaining it onto a `vkGetPhysicalDeviceProperties2` call.
    #[inline]
    pub unsafe fn get_properties(
        instance: &Instance,
        pdevice: vk::PhysicalDevice,
    ) -> vk::PhysicalDeviceAccelerationStructurePropertiesKHR {
        let mut props_rt = vk::PhysicalDeviceAccelerationStructurePropertiesKHR::default();
        {
            // Scope ends the mutable borrow before `props_rt` is returned.
            let mut props = vk::PhysicalDeviceProperties2::builder().push_next(&mut props_rt);
            instance.get_physical_device_properties2(pdevice, &mut props);
        }
        props_rt
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateAccelerationStructureKHR.html>
    #[inline]
    pub unsafe fn create_acceleration_structure(
        &self,
        create_info: &vk::AccelerationStructureCreateInfoKHR,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::AccelerationStructureKHR> {
        // Output parameter; the zeroed handle is overwritten on success.
        let mut accel_struct = mem::zeroed();
        (self.fp.create_acceleration_structure_khr)(
            self.handle,
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut accel_struct,
        )
        .result_with_success(accel_struct)
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyAccelerationStructureKHR.html>
    #[inline]
    pub unsafe fn destroy_acceleration_structure(
        &self,
        accel_struct: vk::AccelerationStructureKHR,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.fp.destroy_acceleration_structure_khr)(
            self.handle,
            accel_struct,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBuildAccelerationStructuresKHR.html>
    ///
    /// # Panics
    /// Panics if `build_range_infos` does not contain exactly one slice per
    /// entry of `infos`, or if any slice's length differs from the matching
    /// info's `geometry_count`.
    #[inline]
    pub unsafe fn cmd_build_acceleration_structures(
        &self,
        command_buffer: vk::CommandBuffer,
        infos: &[vk::AccelerationStructureBuildGeometryInfoKHR],
        build_range_infos: &[&[vk::AccelerationStructureBuildRangeInfoKHR]],
    ) {
        assert_eq!(infos.len(), build_range_infos.len());

        // Flatten each range-info slice to a raw pointer; the Vec must stay
        // alive until after the FFI call below.
        let build_range_infos = build_range_infos
            .iter()
            .zip(infos.iter())
            .map(|(range_info, info)| {
                assert_eq!(range_info.len(), info.geometry_count as usize);
                range_info.as_ptr()
            })
            .collect::<Vec<_>>();

        (self.fp.cmd_build_acceleration_structures_khr)(
            command_buffer,
            infos.len() as _,
            infos.as_ptr(),
            build_range_infos.as_ptr(),
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBuildAccelerationStructuresIndirectKHR.html>
    ///
    /// # Panics
    /// Panics if `indirect_device_addresses`, `indirect_strides` and
    /// `max_primitive_counts` do not all match `infos` in length, or if any
    /// per-info count slice differs from that info's `geometry_count`.
    #[inline]
    pub unsafe fn cmd_build_acceleration_structures_indirect(
        &self,
        command_buffer: vk::CommandBuffer,
        infos: &[vk::AccelerationStructureBuildGeometryInfoKHR],
        indirect_device_addresses: &[vk::DeviceAddress],
        indirect_strides: &[u32],
        max_primitive_counts: &[&[u32]],
    ) {
        assert_eq!(infos.len(), indirect_device_addresses.len());
        assert_eq!(infos.len(), indirect_strides.len());
        assert_eq!(infos.len(), max_primitive_counts.len());

        // Flatten to raw pointers; the Vec must outlive the FFI call below.
        let max_primitive_counts = max_primitive_counts
            .iter()
            .zip(infos.iter())
            .map(|(cnt, info)| {
                assert_eq!(cnt.len(), info.geometry_count as usize);
                cnt.as_ptr()
            })
            .collect::<Vec<_>>();

        (self.fp.cmd_build_acceleration_structures_indirect_khr)(
            command_buffer,
            infos.len() as _,
            infos.as_ptr(),
            indirect_device_addresses.as_ptr(),
            indirect_strides.as_ptr(),
            max_primitive_counts.as_ptr(),
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkBuildAccelerationStructuresKHR.html>
    ///
    /// # Panics
    /// Panics on the same length mismatches as
    /// [`Self::cmd_build_acceleration_structures`].
    #[inline]
    pub unsafe fn build_acceleration_structures(
        &self,
        deferred_operation: vk::DeferredOperationKHR,
        infos: &[vk::AccelerationStructureBuildGeometryInfoKHR],
        build_range_infos: &[&[vk::AccelerationStructureBuildRangeInfoKHR]],
    ) -> VkResult<()> {
        assert_eq!(infos.len(), build_range_infos.len());

        // Flatten to raw pointers; the Vec must outlive the FFI call below.
        let build_range_infos = build_range_infos
            .iter()
            .zip(infos.iter())
            .map(|(range_info, info)| {
                assert_eq!(range_info.len(), info.geometry_count as usize);
                range_info.as_ptr()
            })
            .collect::<Vec<_>>();

        (self.fp.build_acceleration_structures_khr)(
            self.handle,
            deferred_operation,
            infos.len() as _,
            infos.as_ptr(),
            build_range_infos.as_ptr(),
        )
        .result()
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCopyAccelerationStructureKHR.html>
    #[inline]
    pub unsafe fn copy_acceleration_structure(
        &self,
        deferred_operation: vk::DeferredOperationKHR,
        info: &vk::CopyAccelerationStructureInfoKHR,
    ) -> VkResult<()> {
        (self.fp.copy_acceleration_structure_khr)(self.handle, deferred_operation, info).result()
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCopyAccelerationStructureToMemoryKHR.html>
    #[inline]
    pub unsafe fn copy_acceleration_structure_to_memory(
        &self,
        deferred_operation: vk::DeferredOperationKHR,
        info: &vk::CopyAccelerationStructureToMemoryInfoKHR,
    ) -> VkResult<()> {
        (self.fp.copy_acceleration_structure_to_memory_khr)(self.handle, deferred_operation, info)
            .result()
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCopyMemoryToAccelerationStructureKHR.html>
    #[inline]
    pub unsafe fn copy_memory_to_acceleration_structure(
        &self,
        deferred_operation: vk::DeferredOperationKHR,
        info: &vk::CopyMemoryToAccelerationStructureInfoKHR,
    ) -> VkResult<()> {
        (self.fp.copy_memory_to_acceleration_structure_khr)(self.handle, deferred_operation, info)
            .result()
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkWriteAccelerationStructuresPropertiesKHR.html>
    ///
    /// `data.len()` is forwarded as the output buffer size in bytes.
    #[inline]
    pub unsafe fn write_acceleration_structures_properties(
        &self,
        acceleration_structures: &[vk::AccelerationStructureKHR],
        query_type: vk::QueryType,
        data: &mut [u8],
        stride: usize,
    ) -> VkResult<()> {
        (self.fp.write_acceleration_structures_properties_khr)(
            self.handle,
            acceleration_structures.len() as _,
            acceleration_structures.as_ptr(),
            query_type,
            data.len(),
            data.as_mut_ptr().cast(),
            stride,
        )
        .result()
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyAccelerationStructureKHR.html>
    #[inline]
    pub unsafe fn cmd_copy_acceleration_structure(
        &self,
        command_buffer: vk::CommandBuffer,
        info: &vk::CopyAccelerationStructureInfoKHR,
    ) {
        (self.fp.cmd_copy_acceleration_structure_khr)(command_buffer, info);
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyAccelerationStructureToMemoryKHR.html>
    #[inline]
    pub unsafe fn cmd_copy_acceleration_structure_to_memory(
        &self,
        command_buffer: vk::CommandBuffer,
        info: &vk::CopyAccelerationStructureToMemoryInfoKHR,
    ) {
        (self.fp.cmd_copy_acceleration_structure_to_memory_khr)(command_buffer, info);
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyMemoryToAccelerationStructureKHR.html>
    #[inline]
    pub unsafe fn cmd_copy_memory_to_acceleration_structure(
        &self,
        command_buffer: vk::CommandBuffer,
        info: &vk::CopyMemoryToAccelerationStructureInfoKHR,
    ) {
        (self.fp.cmd_copy_memory_to_acceleration_structure_khr)(command_buffer, info);
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetAccelerationStructureHandleKHR.html>
    #[inline]
    pub unsafe fn get_acceleration_structure_device_address(
        &self,
        info: &vk::AccelerationStructureDeviceAddressInfoKHR,
    ) -> vk::DeviceAddress {
        (self.fp.get_acceleration_structure_device_address_khr)(self.handle, info)
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdWriteAccelerationStructuresPropertiesKHR.html>
    #[inline]
    pub unsafe fn cmd_write_acceleration_structures_properties(
        &self,
        command_buffer: vk::CommandBuffer,
        structures: &[vk::AccelerationStructureKHR],
        query_type: vk::QueryType,
        query_pool: vk::QueryPool,
        first_query: u32,
    ) {
        (self.fp.cmd_write_acceleration_structures_properties_khr)(
            command_buffer,
            structures.len() as _,
            structures.as_ptr(),
            query_type,
            query_pool,
            first_query,
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceAccelerationStructureCompatibilityKHR.html>
    #[inline]
    pub unsafe fn get_device_acceleration_structure_compatibility(
        &self,
        version: &vk::AccelerationStructureVersionInfoKHR,
    ) -> vk::AccelerationStructureCompatibilityKHR {
        // Output parameter; written by the driver before being returned.
        let mut compatibility = vk::AccelerationStructureCompatibilityKHR::default();

        (self.fp.get_device_acceleration_structure_compatibility_khr)(
            self.handle,
            version,
            &mut compatibility,
        );

        compatibility
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetAccelerationStructureBuildSizesKHR.html>
    ///
    /// # Panics
    /// Panics if `max_primitive_counts.len()` differs from
    /// `build_info.geometry_count`.
    #[inline]
    pub unsafe fn get_acceleration_structure_build_sizes(
        &self,
        build_type: vk::AccelerationStructureBuildTypeKHR,
        build_info: &vk::AccelerationStructureBuildGeometryInfoKHR,
        max_primitive_counts: &[u32],
    ) -> vk::AccelerationStructureBuildSizesInfoKHR {
        assert_eq!(max_primitive_counts.len(), build_info.geometry_count as _);

        // Output parameter; written by the driver before being returned.
        let mut size_info = vk::AccelerationStructureBuildSizesInfoKHR::default();

        (self.fp.get_acceleration_structure_build_sizes_khr)(
            self.handle,
            build_type,
            build_info,
            max_primitive_counts.as_ptr(),
            &mut size_info,
        );

        size_info
    }

    /// The canonical name of the `VK_KHR_acceleration_structure` extension.
    #[inline]
    pub const fn name() -> &'static CStr {
        vk::KhrAccelerationStructureFn::name()
    }

    /// Raw access to the loaded function-pointer table.
    #[inline]
    pub fn fp(&self) -> &vk::KhrAccelerationStructureFn {
        &self.fp
    }

    /// The [`vk::Device`] these function pointers were loaded for.
    #[inline]
    pub fn device(&self) -> vk::Device {
        self.handle
    }
}
diff --git a/third_party/rust/ash/src/extensions/khr/android_surface.rs b/third_party/rust/ash/src/extensions/khr/android_surface.rs
new file mode 100755
index 0000000000..071d367510
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/android_surface.rs
@@ -0,0 +1,54 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct AndroidSurface {
+ handle: vk::Instance,
+ fp: vk::KhrAndroidSurfaceFn,
+}
+
+impl AndroidSurface {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let handle = instance.handle();
+ let fp = vk::KhrAndroidSurfaceFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateAndroidSurfaceKHR.html>
+ #[inline]
+ pub unsafe fn create_android_surface(
+ &self,
+ create_info: &vk::AndroidSurfaceCreateInfoKHR,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::SurfaceKHR> {
+ let mut surface = mem::zeroed();
+ (self.fp.create_android_surface_khr)(
+ self.handle,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut surface,
+ )
+ .result_with_success(surface)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrAndroidSurfaceFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrAndroidSurfaceFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn instance(&self) -> vk::Instance {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/buffer_device_address.rs b/third_party/rust/ash/src/extensions/khr/buffer_device_address.rs
new file mode 100644
index 0000000000..65eae69681
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/buffer_device_address.rs
@@ -0,0 +1,62 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct BufferDeviceAddress {
+ handle: vk::Device,
+ fp: vk::KhrBufferDeviceAddressFn,
+}
+
+impl BufferDeviceAddress {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrBufferDeviceAddressFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetBufferDeviceAddressKHR.html>
+ #[inline]
+ pub unsafe fn get_buffer_device_address(
+ &self,
+ info: &vk::BufferDeviceAddressInfoKHR,
+ ) -> vk::DeviceAddress {
+ (self.fp.get_buffer_device_address_khr)(self.handle, info)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetBufferOpaqueCaptureAddressKHR.html>
+ #[inline]
+ pub unsafe fn get_buffer_opaque_capture_address(
+ &self,
+ info: &vk::BufferDeviceAddressInfoKHR,
+ ) -> u64 {
+ (self.fp.get_buffer_opaque_capture_address_khr)(self.handle, info)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceMemoryOpaqueCaptureAddressKHR.html>
+ #[inline]
+ pub unsafe fn get_device_memory_opaque_capture_address(
+ &self,
+ info: &vk::DeviceMemoryOpaqueCaptureAddressInfoKHR,
+ ) -> u64 {
+ (self.fp.get_device_memory_opaque_capture_address_khr)(self.handle, info)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrBufferDeviceAddressFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrBufferDeviceAddressFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/copy_commands2.rs b/third_party/rust/ash/src/extensions/khr/copy_commands2.rs
new file mode 100644
index 0000000000..b46593093a
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/copy_commands2.rs
@@ -0,0 +1,84 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_KHR_copy_commands2.html>
+#[derive(Clone)]
+pub struct CopyCommands2 {
+ fp: vk::KhrCopyCommands2Fn,
+}
+
+impl CopyCommands2 {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let fp = vk::KhrCopyCommands2Fn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(device.handle(), name.as_ptr()))
+ });
+ Self { fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyBuffer2KHR.html>
+ #[inline]
+ pub unsafe fn cmd_copy_buffer2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ copy_buffer_info: &vk::CopyBufferInfo2KHR,
+ ) {
+ (self.fp.cmd_copy_buffer2_khr)(command_buffer, copy_buffer_info)
+ }
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyImage2KHR.html>
+ #[inline]
+ pub unsafe fn cmd_copy_image2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ copy_image_info: &vk::CopyImageInfo2KHR,
+ ) {
+ (self.fp.cmd_copy_image2_khr)(command_buffer, copy_image_info)
+ }
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyBufferToImage2KHR.html>
+ #[inline]
+ pub unsafe fn cmd_copy_buffer_to_image2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ copy_buffer_to_image_info: &vk::CopyBufferToImageInfo2KHR,
+ ) {
+ (self.fp.cmd_copy_buffer_to_image2_khr)(command_buffer, copy_buffer_to_image_info)
+ }
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyImageToBuffer2KHR.html>
+ #[inline]
+ pub unsafe fn cmd_copy_image_to_buffer2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ copy_image_to_buffer_info: &vk::CopyImageToBufferInfo2KHR,
+ ) {
+ (self.fp.cmd_copy_image_to_buffer2_khr)(command_buffer, copy_image_to_buffer_info)
+ }
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBlitImage2KHR.html>
+ #[inline]
+ pub unsafe fn cmd_blit_image2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ blit_image_info: &vk::BlitImageInfo2KHR,
+ ) {
+ (self.fp.cmd_blit_image2_khr)(command_buffer, blit_image_info)
+ }
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdResolveImage2KHR.html>
+ #[inline]
+ pub unsafe fn cmd_resolve_image2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ resolve_image_info: &vk::ResolveImageInfo2KHR,
+ ) {
+ (self.fp.cmd_resolve_image2_khr)(command_buffer, resolve_image_info)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrCopyCommands2Fn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrCopyCommands2Fn {
+ &self.fp
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/create_render_pass2.rs b/third_party/rust/ash/src/extensions/khr/create_render_pass2.rs
new file mode 100644
index 0000000000..bf78bc5f54
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/create_render_pass2.rs
@@ -0,0 +1,90 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct CreateRenderPass2 {
+ handle: vk::Device,
+ fp: vk::KhrCreateRenderpass2Fn,
+}
+
+impl CreateRenderPass2 {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrCreateRenderpass2Fn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateRenderPass2.html>
+ #[inline]
+ pub unsafe fn create_render_pass2(
+ &self,
+ create_info: &vk::RenderPassCreateInfo2,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::RenderPass> {
+ let mut renderpass = mem::zeroed();
+ (self.fp.create_render_pass2_khr)(
+ self.handle,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut renderpass,
+ )
+ .result_with_success(renderpass)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBeginRenderPass2.html>
+ #[inline]
+ pub unsafe fn cmd_begin_render_pass2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ render_pass_begin_info: &vk::RenderPassBeginInfo,
+ subpass_begin_info: &vk::SubpassBeginInfo,
+ ) {
+ (self.fp.cmd_begin_render_pass2_khr)(
+ command_buffer,
+ render_pass_begin_info,
+ subpass_begin_info,
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdNextSubpass2.html>
+ #[inline]
+ pub unsafe fn cmd_next_subpass2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ subpass_begin_info: &vk::SubpassBeginInfo,
+ subpass_end_info: &vk::SubpassEndInfo,
+ ) {
+ (self.fp.cmd_next_subpass2_khr)(command_buffer, subpass_begin_info, subpass_end_info);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdEndRenderPass2.html>
+ #[inline]
+ pub unsafe fn cmd_end_render_pass2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ subpass_end_info: &vk::SubpassEndInfo,
+ ) {
+ (self.fp.cmd_end_render_pass2_khr)(command_buffer, subpass_end_info);
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrCreateRenderpass2Fn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrCreateRenderpass2Fn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/deferred_host_operations.rs b/third_party/rust/ash/src/extensions/khr/deferred_host_operations.rs
new file mode 100644
index 0000000000..51aeaad8bb
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/deferred_host_operations.rs
@@ -0,0 +1,93 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct DeferredHostOperations {
+ handle: vk::Device,
+ fp: vk::KhrDeferredHostOperationsFn,
+}
+
+impl DeferredHostOperations {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrDeferredHostOperationsFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateDeferredOperationKHR.html>
+ #[inline]
+ pub unsafe fn create_deferred_operation(
+ &self,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::DeferredOperationKHR> {
+ let mut operation = mem::zeroed();
+ (self.fp.create_deferred_operation_khr)(
+ self.handle,
+ allocation_callbacks.as_raw_ptr(),
+ &mut operation,
+ )
+ .result_with_success(operation)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDeferredOperationJoinKHR.html>
+ #[inline]
+ pub unsafe fn deferred_operation_join(
+ &self,
+ operation: vk::DeferredOperationKHR,
+ ) -> VkResult<()> {
+ (self.fp.deferred_operation_join_khr)(self.handle, operation).result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyDeferredOperationKHR.html>
+ #[inline]
+ pub unsafe fn destroy_deferred_operation(
+ &self,
+ operation: vk::DeferredOperationKHR,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) {
+ (self.fp.destroy_deferred_operation_khr)(
+ self.handle,
+ operation,
+ allocation_callbacks.as_raw_ptr(),
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeferredOperationMaxConcurrencyKHR.html>
+ #[inline]
+ pub unsafe fn get_deferred_operation_max_concurrency(
+ &self,
+ operation: vk::DeferredOperationKHR,
+ ) -> u32 {
+ (self.fp.get_deferred_operation_max_concurrency_khr)(self.handle, operation)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeferredOperationResultKHR.html>
+ #[inline]
+ pub unsafe fn get_deferred_operation_result(
+ &self,
+ operation: vk::DeferredOperationKHR,
+ ) -> VkResult<()> {
+ (self.fp.get_deferred_operation_result_khr)(self.handle, operation).result()
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrDeferredHostOperationsFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrDeferredHostOperationsFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/device_group.rs b/third_party/rust/ash/src/extensions/khr/device_group.rs
new file mode 100644
index 0000000000..356db71e4d
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/device_group.rs
@@ -0,0 +1,169 @@
+#[cfg(doc)]
+use super::Swapchain;
+use crate::prelude::*;
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_KHR_device_group.html>
+#[derive(Clone)]
+pub struct DeviceGroup {
+ handle: vk::Device,
+ fp: vk::KhrDeviceGroupFn,
+}
+
+impl DeviceGroup {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrDeviceGroupFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceGroupPeerMemoryFeaturesKHR.html>
+ #[inline]
+ pub unsafe fn get_device_group_peer_memory_features(
+ &self,
+ heap_index: u32,
+ local_device_index: u32,
+ remote_device_index: u32,
+ ) -> vk::PeerMemoryFeatureFlags {
+ let mut peer_memory_features = mem::zeroed();
+ (self.fp.get_device_group_peer_memory_features_khr)(
+ self.handle,
+ heap_index,
+ local_device_index,
+ remote_device_index,
+ &mut peer_memory_features,
+ );
+ peer_memory_features
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetDeviceMaskKHR.html>
+ #[inline]
+ pub unsafe fn cmd_set_device_mask(&self, command_buffer: vk::CommandBuffer, device_mask: u32) {
+ (self.fp.cmd_set_device_mask_khr)(command_buffer, device_mask)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDispatchBaseKHR.html>
+ #[inline]
+ pub unsafe fn cmd_dispatch_base(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ base_group: (u32, u32, u32),
+ group_count: (u32, u32, u32),
+ ) {
+ (self.fp.cmd_dispatch_base_khr)(
+ command_buffer,
+ base_group.0,
+ base_group.1,
+ base_group.2,
+ group_count.0,
+ group_count.1,
+ group_count.2,
+ )
+ }
+
+ /// Requires [`VK_KHR_surface`] to be enabled.
+ ///
+ /// Also available as [`Swapchain::get_device_group_present_capabilities()`] since [Vulkan 1.1].
+ ///
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceGroupPresentCapabilitiesKHR.html>
+ ///
+ /// [Vulkan 1.1]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html
+ /// [`VK_KHR_surface`]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_KHR_surface.html
+ #[inline]
+ pub unsafe fn get_device_group_present_capabilities(
+ &self,
+ device_group_present_capabilities: &mut vk::DeviceGroupPresentCapabilitiesKHR,
+ ) -> VkResult<()> {
+ (self.fp.get_device_group_present_capabilities_khr)(
+ self.handle,
+ device_group_present_capabilities,
+ )
+ .result()
+ }
+
+ /// Requires [`VK_KHR_surface`] to be enabled.
+ ///
+ /// Also available as [`Swapchain::get_device_group_surface_present_modes()`] since [Vulkan 1.1].
+ ///
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceGroupSurfacePresentModesKHR.html>
+ ///
+ /// [Vulkan 1.1]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html
+ /// [`VK_KHR_surface`]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_KHR_surface.html
+ #[inline]
+ pub unsafe fn get_device_group_surface_present_modes(
+ &self,
+ surface: vk::SurfaceKHR,
+ ) -> VkResult<vk::DeviceGroupPresentModeFlagsKHR> {
+ let mut modes = mem::zeroed();
+ (self.fp.get_device_group_surface_present_modes_khr)(self.handle, surface, &mut modes)
+ .result_with_success(modes)
+ }
+
+ /// Requires [`VK_KHR_surface`] to be enabled.
+ ///
+ /// Also available as [`Swapchain::get_physical_device_present_rectangles()`] since [Vulkan 1.1].
+ ///
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDevicePresentRectanglesKHR.html>
+ ///
+ /// [Vulkan 1.1]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html
+ /// [`VK_KHR_surface`]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_KHR_surface.html
+ #[inline]
+ pub unsafe fn get_physical_device_present_rectangles(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ surface: vk::SurfaceKHR,
+ ) -> VkResult<Vec<vk::Rect2D>> {
+ read_into_uninitialized_vector(|count, data| {
+ (self.fp.get_physical_device_present_rectangles_khr)(
+ physical_device,
+ surface,
+ count,
+ data,
+ )
+ })
+ }
+
+ /// On success, returns the next image's index and whether the swapchain is suboptimal for the surface.
+ ///
+ /// Requires [`VK_KHR_swapchain`] to be enabled.
+ ///
+ /// Also available as [`Swapchain::acquire_next_image2()`] since [Vulkan 1.1].
+ ///
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkAcquireNextImage2KHR.html>
+ ///
+ /// [Vulkan 1.1]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html
+ /// [`VK_KHR_swapchain`]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_KHR_swapchain.html
+ #[inline]
+ pub unsafe fn acquire_next_image2(
+ &self,
+ acquire_info: &vk::AcquireNextImageInfoKHR,
+ ) -> VkResult<(u32, bool)> {
+ let mut index = 0;
+ let err_code = (self.fp.acquire_next_image2_khr)(self.handle, acquire_info, &mut index);
+ match err_code {
+ vk::Result::SUCCESS => Ok((index, false)),
+ vk::Result::SUBOPTIMAL_KHR => Ok((index, true)),
+ _ => Err(err_code),
+ }
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrDeviceGroupFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrDeviceGroupFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/device_group_creation.rs b/third_party/rust/ash/src/extensions/khr/device_group_creation.rs
new file mode 100644
index 0000000000..9a21a714c9
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/device_group_creation.rs
@@ -0,0 +1,66 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+use std::ptr;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_KHR_device_group_creation.html>
+#[derive(Clone)]
+pub struct DeviceGroupCreation {
+ handle: vk::Instance,
+ fp: vk::KhrDeviceGroupCreationFn,
+}
+
+impl DeviceGroupCreation {
+ pub fn new(entry: Entry, instance: &Instance) -> Self {
+ let handle = instance.handle();
+ let fp = vk::KhrDeviceGroupCreationFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// Retrieve the number of elements to pass to [`enumerate_physical_device_groups()`][Self::enumerate_physical_device_groups()]
+ #[inline]
+ pub unsafe fn enumerate_physical_device_groups_len(&self) -> VkResult<usize> {
+ let mut group_count = 0;
+ (self.fp.enumerate_physical_device_groups_khr)(
+ self.handle,
+ &mut group_count,
+ ptr::null_mut(),
+ )
+ .result_with_success(group_count as usize)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkEnumeratePhysicalDeviceGroupsKHR.html>
+ ///
+ /// Call [`enumerate_physical_device_groups_len()`][Self::enumerate_physical_device_groups_len()] to query the number of elements to pass to `out`.
+ /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
+ #[inline]
+ pub unsafe fn enumerate_physical_device_groups(
+ &self,
+ out: &mut [vk::PhysicalDeviceGroupProperties],
+ ) -> VkResult<()> {
+ let mut count = out.len() as u32;
+ (self.fp.enumerate_physical_device_groups_khr)(self.handle, &mut count, out.as_mut_ptr())
+ .result()?;
+ assert_eq!(count as usize, out.len());
+ Ok(())
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrDeviceGroupCreationFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrDeviceGroupCreationFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Instance {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/display.rs b/third_party/rust/ash/src/extensions/khr/display.rs
new file mode 100755
index 0000000000..b71c9c11e1
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/display.rs
@@ -0,0 +1,143 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct Display {
+ handle: vk::Instance,
+ fp: vk::KhrDisplayFn,
+}
+
+impl Display {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let handle = instance.handle();
+ let fp = vk::KhrDisplayFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceDisplayPropertiesKHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_display_properties(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ ) -> VkResult<Vec<vk::DisplayPropertiesKHR>> {
+ read_into_uninitialized_vector(|count, data| {
+ (self.fp.get_physical_device_display_properties_khr)(physical_device, count, data)
+ })
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceDisplayPlanePropertiesKHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_display_plane_properties(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ ) -> VkResult<Vec<vk::DisplayPlanePropertiesKHR>> {
+ read_into_uninitialized_vector(|count, data| {
+ (self.fp.get_physical_device_display_plane_properties_khr)(physical_device, count, data)
+ })
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDisplayPlaneSupportedDisplaysKHR.html>
+ #[inline]
+ pub unsafe fn get_display_plane_supported_displays(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ plane_index: u32,
+ ) -> VkResult<Vec<vk::DisplayKHR>> {
+ read_into_uninitialized_vector(|count, data| {
+ (self.fp.get_display_plane_supported_displays_khr)(
+ physical_device,
+ plane_index,
+ count,
+ data,
+ )
+ })
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDisplayModePropertiesKHR.html>
+ #[inline]
+ pub unsafe fn get_display_mode_properties(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ display: vk::DisplayKHR,
+ ) -> VkResult<Vec<vk::DisplayModePropertiesKHR>> {
+ read_into_uninitialized_vector(|count, data| {
+ (self.fp.get_display_mode_properties_khr)(physical_device, display, count, data)
+ })
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateDisplayModeKHR.html>
+ #[inline]
+ pub unsafe fn create_display_mode(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ display: vk::DisplayKHR,
+ create_info: &vk::DisplayModeCreateInfoKHR,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::DisplayModeKHR> {
+ let mut display_mode = mem::MaybeUninit::zeroed();
+ (self.fp.create_display_mode_khr)(
+ physical_device,
+ display,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ display_mode.as_mut_ptr(),
+ )
+ .assume_init_on_success(display_mode)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDisplayPlaneCapabilitiesKHR.html>
+ #[inline]
+ pub unsafe fn get_display_plane_capabilities(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ mode: vk::DisplayModeKHR,
+ plane_index: u32,
+ ) -> VkResult<vk::DisplayPlaneCapabilitiesKHR> {
+ let mut display_plane_capabilities = mem::MaybeUninit::zeroed();
+ (self.fp.get_display_plane_capabilities_khr)(
+ physical_device,
+ mode,
+ plane_index,
+ display_plane_capabilities.as_mut_ptr(),
+ )
+ .assume_init_on_success(display_plane_capabilities)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateDisplayPlaneSurfaceKHR.html>
+ #[inline]
+ pub unsafe fn create_display_plane_surface(
+ &self,
+ create_info: &vk::DisplaySurfaceCreateInfoKHR,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::SurfaceKHR> {
+ let mut surface = mem::MaybeUninit::zeroed();
+ (self.fp.create_display_plane_surface_khr)(
+ self.handle,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ surface.as_mut_ptr(),
+ )
+ .assume_init_on_success(surface)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrDisplayFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrDisplayFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn instance(&self) -> vk::Instance {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/display_swapchain.rs b/third_party/rust/ash/src/extensions/khr/display_swapchain.rs
new file mode 100755
index 0000000000..e4a33b2bdb
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/display_swapchain.rs
@@ -0,0 +1,57 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct DisplaySwapchain {
+ handle: vk::Device,
+ fp: vk::KhrDisplaySwapchainFn,
+}
+
+impl DisplaySwapchain {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrDisplaySwapchainFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateSharedSwapchainsKHR.html>
+ #[inline]
+ pub unsafe fn create_shared_swapchains(
+ &self,
+ create_infos: &[vk::SwapchainCreateInfoKHR],
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<Vec<vk::SwapchainKHR>> {
+ let mut swapchains = Vec::with_capacity(create_infos.len());
+ (self.fp.create_shared_swapchains_khr)(
+ self.handle,
+ create_infos.len() as u32,
+ create_infos.as_ptr(),
+ allocation_callbacks.as_raw_ptr(),
+ swapchains.as_mut_ptr(),
+ )
+ .result()?;
+ swapchains.set_len(create_infos.len());
+ Ok(swapchains)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrDisplaySwapchainFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrDisplaySwapchainFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/draw_indirect_count.rs b/third_party/rust/ash/src/extensions/khr/draw_indirect_count.rs
new file mode 100644
index 0000000000..f35ecbe9e3
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/draw_indirect_count.rs
@@ -0,0 +1,74 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct DrawIndirectCount {
+ fp: vk::KhrDrawIndirectCountFn,
+}
+
+impl DrawIndirectCount {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let fp = vk::KhrDrawIndirectCountFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(device.handle(), name.as_ptr()))
+ });
+ Self { fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndexedIndirectCountKHR.html>
+ #[inline]
+ pub unsafe fn cmd_draw_indexed_indirect_count(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ buffer: vk::Buffer,
+ offset: vk::DeviceSize,
+ count_buffer: vk::Buffer,
+ count_buffer_offset: vk::DeviceSize,
+ max_draw_count: u32,
+ stride: u32,
+ ) {
+ (self.fp.cmd_draw_indexed_indirect_count_khr)(
+ command_buffer,
+ buffer,
+ offset,
+ count_buffer,
+ count_buffer_offset,
+ max_draw_count,
+ stride,
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndirectCountKHR.html>
+ #[inline]
+ pub unsafe fn cmd_draw_indirect_count(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ buffer: vk::Buffer,
+ offset: vk::DeviceSize,
+ count_buffer: vk::Buffer,
+ count_buffer_offset: vk::DeviceSize,
+ max_draw_count: u32,
+ stride: u32,
+ ) {
+ (self.fp.cmd_draw_indirect_count_khr)(
+ command_buffer,
+ buffer,
+ offset,
+ count_buffer,
+ count_buffer_offset,
+ max_draw_count,
+ stride,
+ );
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrDrawIndirectCountFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrDrawIndirectCountFn {
+ &self.fp
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/dynamic_rendering.rs b/third_party/rust/ash/src/extensions/khr/dynamic_rendering.rs
new file mode 100644
index 0000000000..ea6149a816
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/dynamic_rendering.rs
@@ -0,0 +1,44 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct DynamicRendering {
+ fp: vk::KhrDynamicRenderingFn,
+}
+
+impl DynamicRendering {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let fp = vk::KhrDynamicRenderingFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(device.handle(), name.as_ptr()))
+ });
+ Self { fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBeginRenderingKHR.html>
+ #[inline]
+ pub unsafe fn cmd_begin_rendering(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ rendering_info: &vk::RenderingInfoKHR,
+ ) {
+ (self.fp.cmd_begin_rendering_khr)(command_buffer, rendering_info)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdEndRenderingKHR.html>
+ #[inline]
+ pub unsafe fn cmd_end_rendering(&self, command_buffer: vk::CommandBuffer) {
+ (self.fp.cmd_end_rendering_khr)(command_buffer)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrDynamicRenderingFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrDynamicRenderingFn {
+ &self.fp
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/external_fence_fd.rs b/third_party/rust/ash/src/extensions/khr/external_fence_fd.rs
new file mode 100644
index 0000000000..6a3ec559d4
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/external_fence_fd.rs
@@ -0,0 +1,49 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct ExternalFenceFd {
+ handle: vk::Device,
+ fp: vk::KhrExternalFenceFdFn,
+}
+
+impl ExternalFenceFd {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrExternalFenceFdFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkImportFenceFdKHR.html>
+ #[inline]
+ pub unsafe fn import_fence_fd(&self, import_info: &vk::ImportFenceFdInfoKHR) -> VkResult<()> {
+ (self.fp.import_fence_fd_khr)(self.handle, import_info).result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetFenceFdKHR.html>
+ #[inline]
+ pub unsafe fn get_fence_fd(&self, get_info: &vk::FenceGetFdInfoKHR) -> VkResult<i32> {
+ let mut fd = -1;
+ (self.fp.get_fence_fd_khr)(self.handle, get_info, &mut fd).result_with_success(fd)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrExternalFenceFdFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrExternalFenceFdFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/external_fence_win32.rs b/third_party/rust/ash/src/extensions/khr/external_fence_win32.rs
new file mode 100644
index 0000000000..627bd1cca9
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/external_fence_win32.rs
@@ -0,0 +1,58 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+use std::ptr;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_KHR_external_fence_win32.html>
+#[derive(Clone)]
+pub struct ExternalFenceWin32 {
+ handle: vk::Device,
+ fp: vk::KhrExternalFenceWin32Fn,
+}
+
+impl ExternalFenceWin32 {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrExternalFenceWin32Fn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkImportFenceWin32HandleKHR.html>
+ #[inline]
+ pub unsafe fn import_fence_win32_handle(
+ &self,
+ import_info: &vk::ImportFenceWin32HandleInfoKHR,
+ ) -> VkResult<()> {
+ (self.fp.import_fence_win32_handle_khr)(self.handle, import_info).result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetFenceWin32HandleKHR.html>
+ #[inline]
+ pub unsafe fn get_fence_win32_handle(
+ &self,
+ get_info: &vk::FenceGetWin32HandleInfoKHR,
+ ) -> VkResult<vk::HANDLE> {
+ let mut handle = ptr::null_mut();
+ (self.fp.get_fence_win32_handle_khr)(self.handle, get_info, &mut handle)
+ .result_with_success(handle)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrExternalFenceWin32Fn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrExternalFenceWin32Fn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/external_memory_fd.rs b/third_party/rust/ash/src/extensions/khr/external_memory_fd.rs
new file mode 100644
index 0000000000..902349802f
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/external_memory_fd.rs
@@ -0,0 +1,60 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct ExternalMemoryFd {
+ handle: vk::Device,
+ fp: vk::KhrExternalMemoryFdFn,
+}
+
+impl ExternalMemoryFd {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrExternalMemoryFdFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetMemoryFdKHR.html>
+ #[inline]
+ pub unsafe fn get_memory_fd(&self, create_info: &vk::MemoryGetFdInfoKHR) -> VkResult<i32> {
+ let mut fd = -1;
+ (self.fp.get_memory_fd_khr)(self.handle, create_info, &mut fd).result_with_success(fd)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetMemoryFdPropertiesKHR.html>
+ #[inline]
+ pub unsafe fn get_memory_fd_properties(
+ &self,
+ handle_type: vk::ExternalMemoryHandleTypeFlags,
+ fd: i32,
+ ) -> VkResult<vk::MemoryFdPropertiesKHR> {
+ let mut memory_fd_properties = Default::default();
+ (self.fp.get_memory_fd_properties_khr)(
+ self.handle,
+ handle_type,
+ fd,
+ &mut memory_fd_properties,
+ )
+ .result_with_success(memory_fd_properties)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrExternalMemoryFdFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrExternalMemoryFdFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/external_memory_win32.rs b/third_party/rust/ash/src/extensions/khr/external_memory_win32.rs
new file mode 100644
index 0000000000..8e43a26d48
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/external_memory_win32.rs
@@ -0,0 +1,66 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+use std::ptr;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_KHR_external_memory_win32.html>
+#[derive(Clone)]
+pub struct ExternalMemoryWin32 {
+ handle: vk::Device,
+ fp: vk::KhrExternalMemoryWin32Fn,
+}
+
+impl ExternalMemoryWin32 {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrExternalMemoryWin32Fn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetMemoryWin32HandleKHR.html>
+ #[inline]
+ pub unsafe fn get_memory_win32_handle(
+ &self,
+ create_info: &vk::MemoryGetWin32HandleInfoKHR,
+ ) -> VkResult<vk::HANDLE> {
+ let mut handle = ptr::null_mut();
+ (self.fp.get_memory_win32_handle_khr)(self.handle, create_info, &mut handle)
+ .result_with_success(handle)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetMemoryWin32HandlePropertiesKHR.html>
+ #[inline]
+ pub unsafe fn get_memory_win32_handle_properties(
+ &self,
+ handle_type: vk::ExternalMemoryHandleTypeFlags,
+ handle: vk::HANDLE,
+ ) -> VkResult<vk::MemoryWin32HandlePropertiesKHR> {
+ let mut memory_win32_handle_properties = Default::default();
+ (self.fp.get_memory_win32_handle_properties_khr)(
+ self.handle,
+ handle_type,
+ handle,
+ &mut memory_win32_handle_properties,
+ )
+ .result_with_success(memory_win32_handle_properties)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrExternalMemoryWin32Fn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrExternalMemoryWin32Fn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/external_semaphore_fd.rs b/third_party/rust/ash/src/extensions/khr/external_semaphore_fd.rs
new file mode 100644
index 0000000000..eadebef40d
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/external_semaphore_fd.rs
@@ -0,0 +1,52 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct ExternalSemaphoreFd {
+ handle: vk::Device,
+ fp: vk::KhrExternalSemaphoreFdFn,
+}
+
+impl ExternalSemaphoreFd {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrExternalSemaphoreFdFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkImportSemaphoreFdKHR.html>
+ #[inline]
+ pub unsafe fn import_semaphore_fd(
+ &self,
+ import_info: &vk::ImportSemaphoreFdInfoKHR,
+ ) -> VkResult<()> {
+ (self.fp.import_semaphore_fd_khr)(self.handle, import_info).result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetSemaphoreFdKHR.html>
+ #[inline]
+ pub unsafe fn get_semaphore_fd(&self, get_info: &vk::SemaphoreGetFdInfoKHR) -> VkResult<i32> {
+ let mut fd = -1;
+ (self.fp.get_semaphore_fd_khr)(self.handle, get_info, &mut fd).result_with_success(fd)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrExternalSemaphoreFdFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrExternalSemaphoreFdFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/external_semaphore_win32.rs b/third_party/rust/ash/src/extensions/khr/external_semaphore_win32.rs
new file mode 100644
index 0000000000..102da8c1ee
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/external_semaphore_win32.rs
@@ -0,0 +1,58 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+use std::ptr;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_KHR_external_semaphore_win32.html>
+#[derive(Clone)]
+pub struct ExternalSemaphoreWin32 {
+ handle: vk::Device,
+ fp: vk::KhrExternalSemaphoreWin32Fn,
+}
+
+impl ExternalSemaphoreWin32 {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrExternalSemaphoreWin32Fn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkImportSemaphoreWin32HandleKHR.html>
+ #[inline]
+ pub unsafe fn import_semaphore_win32_handle(
+ &self,
+ import_info: &vk::ImportSemaphoreWin32HandleInfoKHR,
+ ) -> VkResult<()> {
+ (self.fp.import_semaphore_win32_handle_khr)(self.handle, import_info).result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetSemaphoreWin32HandleKHR.html>
+ #[inline]
+ pub unsafe fn get_semaphore_win32_handle(
+ &self,
+ get_info: &vk::SemaphoreGetWin32HandleInfoKHR,
+ ) -> VkResult<vk::HANDLE> {
+ let mut handle = ptr::null_mut();
+ (self.fp.get_semaphore_win32_handle_khr)(self.handle, get_info, &mut handle)
+ .result_with_success(handle)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrExternalSemaphoreWin32Fn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrExternalSemaphoreWin32Fn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/get_memory_requirements2.rs b/third_party/rust/ash/src/extensions/khr/get_memory_requirements2.rs
new file mode 100644
index 0000000000..13e5a1bf61
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/get_memory_requirements2.rs
@@ -0,0 +1,92 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+use std::ptr;
+
+#[derive(Clone)]
+pub struct GetMemoryRequirements2 {
+ handle: vk::Device,
+ fp: vk::KhrGetMemoryRequirements2Fn,
+}
+
+impl GetMemoryRequirements2 {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrGetMemoryRequirements2Fn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetBufferMemoryRequirements2KHR.html>
+ #[inline]
+ pub unsafe fn get_buffer_memory_requirements2(
+ &self,
+ info: &vk::BufferMemoryRequirementsInfo2KHR,
+ memory_requirements: &mut vk::MemoryRequirements2KHR,
+ ) {
+ (self.fp.get_buffer_memory_requirements2_khr)(self.handle, info, memory_requirements);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetImageMemoryRequirements2KHR.html>
+ #[inline]
+ pub unsafe fn get_image_memory_requirements2(
+ &self,
+ info: &vk::ImageMemoryRequirementsInfo2KHR,
+ memory_requirements: &mut vk::MemoryRequirements2KHR,
+ ) {
+ (self.fp.get_image_memory_requirements2_khr)(self.handle, info, memory_requirements);
+ }
+
+ /// Retrieve the number of elements to pass to [`get_image_sparse_memory_requirements2()`][Self::get_image_sparse_memory_requirements2()]
+ #[inline]
+ pub unsafe fn get_image_sparse_memory_requirements2_len(
+ &self,
+ info: &vk::ImageSparseMemoryRequirementsInfo2KHR,
+ ) -> usize {
+ let mut count = 0;
+ (self.fp.get_image_sparse_memory_requirements2_khr)(
+ self.handle,
+ info,
+ &mut count,
+ ptr::null_mut(),
+ );
+ count as usize
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetImageSparseMemoryRequirements2KHR.html>
+ ///
+ /// Call [`get_image_sparse_memory_requirements2_len()`][Self::get_image_sparse_memory_requirements2_len()] to query the number of elements to pass to `out`.
+ /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
+ #[inline]
+ pub unsafe fn get_image_sparse_memory_requirements2(
+ &self,
+ info: &vk::ImageSparseMemoryRequirementsInfo2KHR,
+ out: &mut [vk::SparseImageMemoryRequirements2KHR],
+ ) {
+ let mut count = out.len() as u32;
+ (self.fp.get_image_sparse_memory_requirements2_khr)(
+ self.handle,
+ info,
+ &mut count,
+ out.as_mut_ptr(),
+ );
+ assert_eq!(count as usize, out.len());
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrGetMemoryRequirements2Fn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrGetMemoryRequirements2Fn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/get_physical_device_properties2.rs b/third_party/rust/ash/src/extensions/khr/get_physical_device_properties2.rs
new file mode 100644
index 0000000000..9e610b4831
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/get_physical_device_properties2.rs
@@ -0,0 +1,167 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+use std::ptr;
+
+#[derive(Clone)]
+pub struct GetPhysicalDeviceProperties2 {
+ fp: vk::KhrGetPhysicalDeviceProperties2Fn,
+}
+
+impl GetPhysicalDeviceProperties2 {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let fp = vk::KhrGetPhysicalDeviceProperties2Fn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(instance.handle(), name.as_ptr()))
+ });
+ Self { fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceFeatures2KHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_features2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ features: &mut vk::PhysicalDeviceFeatures2KHR,
+ ) {
+ (self.fp.get_physical_device_features2_khr)(physical_device, features);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceFormatProperties2KHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_format_properties2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ format: vk::Format,
+ format_properties: &mut vk::FormatProperties2KHR,
+ ) {
+ (self.fp.get_physical_device_format_properties2_khr)(
+ physical_device,
+ format,
+ format_properties,
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceImageFormatProperties2KHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_image_format_properties2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ image_format_info: &vk::PhysicalDeviceImageFormatInfo2KHR,
+ image_format_properties: &mut vk::ImageFormatProperties2KHR,
+ ) -> VkResult<()> {
+ (self.fp.get_physical_device_image_format_properties2_khr)(
+ physical_device,
+ image_format_info,
+ image_format_properties,
+ )
+ .result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceMemoryProperties2KHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_memory_properties2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ memory_properties: &mut vk::PhysicalDeviceMemoryProperties2KHR,
+ ) {
+ (self.fp.get_physical_device_memory_properties2_khr)(physical_device, memory_properties);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceProperties2KHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_properties2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ properties: &mut vk::PhysicalDeviceProperties2KHR,
+ ) {
+ (self.fp.get_physical_device_properties2_khr)(physical_device, properties);
+ }
+
+ /// Retrieve the number of elements to pass to [`get_physical_device_queue_family_properties2()`][Self::get_physical_device_queue_family_properties2()]
+ #[inline]
+ pub unsafe fn get_physical_device_queue_family_properties2_len(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ ) -> usize {
+ let mut count = 0;
+ (self.fp.get_physical_device_queue_family_properties2_khr)(
+ physical_device,
+ &mut count,
+ ptr::null_mut(),
+ );
+ count as usize
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceQueueFamilyProperties2KHR.html>
+ ///
+ /// Call [`get_physical_device_queue_family_properties2_len()`][Self::get_physical_device_queue_family_properties2_len()] to query the number of elements to pass to `out`.
+ /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
+ #[inline]
+ pub unsafe fn get_physical_device_queue_family_properties2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ out: &mut [vk::QueueFamilyProperties2KHR],
+ ) {
+ let mut count = out.len() as u32;
+ (self.fp.get_physical_device_queue_family_properties2_khr)(
+ physical_device,
+ &mut count,
+ out.as_mut_ptr(),
+ );
+ assert_eq!(count as usize, out.len());
+ }
+
+ /// Retrieve the number of elements to pass to [`get_physical_device_sparse_image_format_properties2()`][Self::get_physical_device_sparse_image_format_properties2()]
+ #[inline]
+ pub unsafe fn get_physical_device_sparse_image_format_properties2_len(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ format_info: &vk::PhysicalDeviceSparseImageFormatInfo2KHR,
+ ) -> usize {
+ let mut count = 0;
+ (self
+ .fp
+ .get_physical_device_sparse_image_format_properties2_khr)(
+ physical_device,
+ format_info,
+ &mut count,
+ ptr::null_mut(),
+ );
+ count as usize
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2KHR.html>
+ ///
+ /// Call [`get_physical_device_sparse_image_format_properties2_len()`][Self::get_physical_device_sparse_image_format_properties2_len()] to query the number of elements to pass to `out`.
+ /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
+ #[inline]
+ pub unsafe fn get_physical_device_sparse_image_format_properties2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ format_info: &vk::PhysicalDeviceSparseImageFormatInfo2KHR,
+ out: &mut [vk::SparseImageFormatProperties2KHR],
+ ) {
+ let mut count = out.len() as u32;
+ (self
+ .fp
+ .get_physical_device_sparse_image_format_properties2_khr)(
+ physical_device,
+ format_info,
+ &mut count,
+ out.as_mut_ptr(),
+ );
+ assert_eq!(count as usize, out.len());
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrGetPhysicalDeviceProperties2Fn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrGetPhysicalDeviceProperties2Fn {
+ &self.fp
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/get_surface_capabilities2.rs b/third_party/rust/ash/src/extensions/khr/get_surface_capabilities2.rs
new file mode 100644
index 0000000000..0208b7b4a5
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/get_surface_capabilities2.rs
@@ -0,0 +1,84 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct GetSurfaceCapabilities2 {
+ fp: vk::KhrGetSurfaceCapabilities2Fn,
+}
+
+impl GetSurfaceCapabilities2 {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let fp = vk::KhrGetSurfaceCapabilities2Fn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(instance.handle(), name.as_ptr()))
+ });
+ Self { fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceSurfaceCapabilities2KHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_surface_capabilities2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ surface_info: &vk::PhysicalDeviceSurfaceInfo2KHR,
+ ) -> VkResult<vk::SurfaceCapabilities2KHR> {
+ let mut surface_capabilities = Default::default();
+ (self.fp.get_physical_device_surface_capabilities2_khr)(
+ physical_device,
+ surface_info,
+ &mut surface_capabilities,
+ )
+ .result_with_success(surface_capabilities)
+ }
+
+ /// Retrieve the number of elements to pass to [`get_physical_device_surface_formats2()`][Self::get_physical_device_surface_formats2()]
+ #[inline]
+ pub unsafe fn get_physical_device_surface_formats2_len(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ surface_info: &vk::PhysicalDeviceSurfaceInfo2KHR,
+ ) -> VkResult<usize> {
+ let mut count = 0;
+ let err_code = (self.fp.get_physical_device_surface_formats2_khr)(
+ physical_device,
+ surface_info,
+ &mut count,
+ std::ptr::null_mut(),
+ );
+ err_code.result_with_success(count as usize)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceSurfaceFormats2KHR.html>
+ ///
+ /// Call [`get_physical_device_surface_formats2_len()`][Self::get_physical_device_surface_formats2_len()] to query the number of elements to pass to `out`.
+ /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
+ #[inline]
+ pub unsafe fn get_physical_device_surface_formats2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ surface_info: &vk::PhysicalDeviceSurfaceInfo2KHR,
+ out: &mut [vk::SurfaceFormat2KHR],
+ ) -> VkResult<()> {
+ let mut count = out.len() as u32;
+ let err_code = (self.fp.get_physical_device_surface_formats2_khr)(
+ physical_device,
+ surface_info,
+ &mut count,
+ out.as_mut_ptr(),
+ );
+ assert_eq!(count as usize, out.len());
+ err_code.result()
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrGetSurfaceCapabilities2Fn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrGetSurfaceCapabilities2Fn {
+ &self.fp
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/maintenance1.rs b/third_party/rust/ash/src/extensions/khr/maintenance1.rs
new file mode 100644
index 0000000000..1226d31cd5
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/maintenance1.rs
@@ -0,0 +1,45 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct Maintenance1 {
+ handle: vk::Device,
+ fp: vk::KhrMaintenance1Fn,
+}
+
+impl Maintenance1 {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrMaintenance1Fn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkTrimCommandPoolKHR.html>
+ #[inline]
+ pub unsafe fn trim_command_pool(
+ &self,
+ command_pool: vk::CommandPool,
+ flags: vk::CommandPoolTrimFlagsKHR,
+ ) {
+ (self.fp.trim_command_pool_khr)(self.handle, command_pool, flags);
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrMaintenance1Fn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrMaintenance1Fn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/maintenance3.rs b/third_party/rust/ash/src/extensions/khr/maintenance3.rs
new file mode 100644
index 0000000000..3b22959332
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/maintenance3.rs
@@ -0,0 +1,45 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct Maintenance3 {
+ handle: vk::Device,
+ fp: vk::KhrMaintenance3Fn,
+}
+
+impl Maintenance3 {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrMaintenance3Fn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDescriptorSetLayoutSupportKHR.html>
+ #[inline]
+ pub unsafe fn get_descriptor_set_layout_support(
+ &self,
+ create_info: &vk::DescriptorSetLayoutCreateInfo,
+ out: &mut vk::DescriptorSetLayoutSupportKHR,
+ ) {
+ (self.fp.get_descriptor_set_layout_support_khr)(self.handle, create_info, out);
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrMaintenance3Fn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrMaintenance3Fn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/maintenance4.rs b/third_party/rust/ash/src/extensions/khr/maintenance4.rs
new file mode 100644
index 0000000000..003e1a9cf2
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/maintenance4.rs
@@ -0,0 +1,91 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct Maintenance4 {
+ handle: vk::Device,
+ fp: vk::KhrMaintenance4Fn,
+}
+
+impl Maintenance4 {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrMaintenance4Fn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceBufferMemoryRequirementsKHR.html>
+ #[inline]
+ pub unsafe fn get_device_buffer_memory_requirements(
+ &self,
+ create_info: &vk::DeviceBufferMemoryRequirementsKHR,
+ out: &mut vk::MemoryRequirements2,
+ ) {
+ (self.fp.get_device_buffer_memory_requirements_khr)(self.handle, create_info, out)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceImageMemoryRequirementsKHR.html>
+ #[inline]
+ pub unsafe fn get_device_image_memory_requirements(
+ &self,
+ create_info: &vk::DeviceImageMemoryRequirementsKHR,
+ out: &mut vk::MemoryRequirements2,
+ ) {
+ (self.fp.get_device_image_memory_requirements_khr)(self.handle, create_info, out)
+ }
+
+ /// Retrieve the number of elements to pass to [`get_device_image_sparse_memory_requirements()`][Self::get_device_image_sparse_memory_requirements()]
+ #[inline]
+ pub unsafe fn get_device_image_sparse_memory_requirements_len(
+ &self,
+ create_info: &vk::DeviceImageMemoryRequirementsKHR,
+ ) -> usize {
+ let mut count = 0;
+ (self.fp.get_device_image_sparse_memory_requirements_khr)(
+ self.handle,
+ create_info,
+ &mut count,
+ std::ptr::null_mut(),
+ );
+ count as usize
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceImageSparseMemoryRequirementsKHR.html>
+ ///
+ /// Call [`get_device_image_sparse_memory_requirements_len()`][Self::get_device_image_sparse_memory_requirements_len()] to query the number of elements to pass to `out`.
+ /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
+ #[inline]
+ pub unsafe fn get_device_image_sparse_memory_requirements(
+ &self,
+ create_info: &vk::DeviceImageMemoryRequirementsKHR,
+ out: &mut [vk::SparseImageMemoryRequirements2],
+ ) {
+ let mut count = out.len() as u32;
+ (self.fp.get_device_image_sparse_memory_requirements_khr)(
+ self.handle,
+ create_info,
+ &mut count,
+ out.as_mut_ptr(),
+ );
+ assert_eq!(count as usize, out.len());
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrMaintenance4Fn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrMaintenance4Fn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/mod.rs b/third_party/rust/ash/src/extensions/khr/mod.rs
new file mode 100644
index 0000000000..ea809b43da
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/mod.rs
@@ -0,0 +1,75 @@
+pub use self::acceleration_structure::AccelerationStructure;
+pub use self::android_surface::AndroidSurface;
+pub use self::buffer_device_address::BufferDeviceAddress;
+pub use self::copy_commands2::CopyCommands2;
+pub use self::create_render_pass2::CreateRenderPass2;
+pub use self::deferred_host_operations::DeferredHostOperations;
+pub use self::device_group::DeviceGroup;
+pub use self::device_group_creation::DeviceGroupCreation;
+pub use self::display::Display;
+pub use self::display_swapchain::DisplaySwapchain;
+pub use self::draw_indirect_count::DrawIndirectCount;
+pub use self::dynamic_rendering::DynamicRendering;
+pub use self::external_fence_fd::ExternalFenceFd;
+pub use self::external_fence_win32::ExternalFenceWin32;
+pub use self::external_memory_fd::ExternalMemoryFd;
+pub use self::external_memory_win32::ExternalMemoryWin32;
+pub use self::external_semaphore_fd::ExternalSemaphoreFd;
+pub use self::external_semaphore_win32::ExternalSemaphoreWin32;
+pub use self::get_memory_requirements2::GetMemoryRequirements2;
+pub use self::get_physical_device_properties2::GetPhysicalDeviceProperties2;
+pub use self::get_surface_capabilities2::GetSurfaceCapabilities2;
+pub use self::maintenance1::Maintenance1;
+pub use self::maintenance3::Maintenance3;
+pub use self::maintenance4::Maintenance4;
+pub use self::pipeline_executable_properties::PipelineExecutableProperties;
+pub use self::present_wait::PresentWait;
+pub use self::push_descriptor::PushDescriptor;
+pub use self::ray_tracing_maintenance1::RayTracingMaintenance1;
+pub use self::ray_tracing_pipeline::RayTracingPipeline;
+pub use self::surface::Surface;
+pub use self::swapchain::Swapchain;
+pub use self::synchronization2::Synchronization2;
+pub use self::timeline_semaphore::TimelineSemaphore;
+pub use self::wayland_surface::WaylandSurface;
+pub use self::win32_surface::Win32Surface;
+pub use self::xcb_surface::XcbSurface;
+pub use self::xlib_surface::XlibSurface;
+
+mod acceleration_structure;
+mod android_surface;
+mod buffer_device_address;
+mod copy_commands2;
+mod create_render_pass2;
+mod deferred_host_operations;
+mod device_group;
+mod device_group_creation;
+mod display;
+mod display_swapchain;
+mod draw_indirect_count;
+mod dynamic_rendering;
+mod external_fence_fd;
+mod external_fence_win32;
+mod external_memory_fd;
+mod external_memory_win32;
+mod external_semaphore_fd;
+mod external_semaphore_win32;
+mod get_memory_requirements2;
+mod get_physical_device_properties2;
+mod get_surface_capabilities2;
+mod maintenance1;
+mod maintenance3;
+mod maintenance4;
+mod pipeline_executable_properties;
+mod present_wait;
+mod push_descriptor;
+mod ray_tracing_maintenance1;
+mod ray_tracing_pipeline;
+mod surface;
+mod swapchain;
+mod synchronization2;
+mod timeline_semaphore;
+mod wayland_surface;
+mod win32_surface;
+mod xcb_surface;
+mod xlib_surface;
diff --git a/third_party/rust/ash/src/extensions/khr/pipeline_executable_properties.rs b/third_party/rust/ash/src/extensions/khr/pipeline_executable_properties.rs
new file mode 100644
index 0000000000..68e43b4a81
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/pipeline_executable_properties.rs
@@ -0,0 +1,84 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct PipelineExecutableProperties {
+ handle: vk::Device,
+ fp: vk::KhrPipelineExecutablePropertiesFn,
+}
+
+impl PipelineExecutableProperties {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrPipelineExecutablePropertiesFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPipelineExecutableInternalRepresentationsKHR.html>
+ #[inline]
+ pub unsafe fn get_pipeline_executable_internal_representations(
+ &self,
+ executable_info: &vk::PipelineExecutableInfoKHR,
+ ) -> VkResult<Vec<vk::PipelineExecutableInternalRepresentationKHR>> {
+ read_into_defaulted_vector(|count, data| {
+ (self.fp.get_pipeline_executable_internal_representations_khr)(
+ self.handle,
+ executable_info,
+ count,
+ data,
+ )
+ })
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPipelineExecutablePropertiesKHR.html>
+ #[inline]
+ pub unsafe fn get_pipeline_executable_properties(
+ &self,
+ pipeline_info: &vk::PipelineInfoKHR,
+ ) -> VkResult<Vec<vk::PipelineExecutablePropertiesKHR>> {
+ read_into_defaulted_vector(|count, data| {
+ (self.fp.get_pipeline_executable_properties_khr)(
+ self.handle,
+ pipeline_info,
+ count,
+ data,
+ )
+ })
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPipelineExecutableStatisticsKHR.html>
+ #[inline]
+ pub unsafe fn get_pipeline_executable_statistics(
+ &self,
+ executable_info: &vk::PipelineExecutableInfoKHR,
+ ) -> VkResult<Vec<vk::PipelineExecutableStatisticKHR>> {
+ read_into_defaulted_vector(|count, data| {
+ (self.fp.get_pipeline_executable_statistics_khr)(
+ self.handle,
+ executable_info,
+ count,
+ data,
+ )
+ })
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrPipelineExecutablePropertiesFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrPipelineExecutablePropertiesFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/present_wait.rs b/third_party/rust/ash/src/extensions/khr/present_wait.rs
new file mode 100644
index 0000000000..4d8effe48d
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/present_wait.rs
@@ -0,0 +1,47 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct PresentWait {
+ handle: vk::Device,
+ fp: vk::KhrPresentWaitFn,
+}
+
+impl PresentWait {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrPresentWaitFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkWaitForPresentKHR.html>
+ #[inline]
+ pub unsafe fn wait_for_present(
+ &self,
+ swapchain: vk::SwapchainKHR,
+ present_id: u64,
+ timeout: u64,
+ ) -> VkResult<()> {
+ (self.fp.wait_for_present_khr)(self.handle, swapchain, present_id, timeout).result()
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrPresentWaitFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrPresentWaitFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/push_descriptor.rs b/third_party/rust/ash/src/extensions/khr/push_descriptor.rs
new file mode 100644
index 0000000000..22d1572883
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/push_descriptor.rs
@@ -0,0 +1,68 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::c_void;
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct PushDescriptor {
+ fp: vk::KhrPushDescriptorFn,
+}
+
+impl PushDescriptor {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let fp = vk::KhrPushDescriptorFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(device.handle(), name.as_ptr()))
+ });
+ Self { fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdPushDescriptorSetKHR.html>
+ #[inline]
+ pub unsafe fn cmd_push_descriptor_set(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ pipeline_bind_point: vk::PipelineBindPoint,
+ layout: vk::PipelineLayout,
+ set: u32,
+ descriptor_writes: &[vk::WriteDescriptorSet],
+ ) {
+ (self.fp.cmd_push_descriptor_set_khr)(
+ command_buffer,
+ pipeline_bind_point,
+ layout,
+ set,
+ descriptor_writes.len() as u32,
+ descriptor_writes.as_ptr(),
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdPushDescriptorSetWithTemplateKHR.html>
+ #[inline]
+ pub unsafe fn cmd_push_descriptor_set_with_template(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ descriptor_update_template: vk::DescriptorUpdateTemplate,
+ layout: vk::PipelineLayout,
+ set: u32,
+ p_data: *const c_void,
+ ) {
+ (self.fp.cmd_push_descriptor_set_with_template_khr)(
+ command_buffer,
+ descriptor_update_template,
+ layout,
+ set,
+ p_data,
+ );
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrPushDescriptorFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrPushDescriptorFn {
+ &self.fp
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/ray_tracing_maintenance1.rs b/third_party/rust/ash/src/extensions/khr/ray_tracing_maintenance1.rs
new file mode 100644
index 0000000000..a5159ad395
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/ray_tracing_maintenance1.rs
@@ -0,0 +1,42 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_KHR_ray_tracing_maintenance1.html>
+#[derive(Clone)]
+pub struct RayTracingMaintenance1 {
+ fp: vk::KhrRayTracingMaintenance1Fn,
+}
+
+impl RayTracingMaintenance1 {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrRayTracingMaintenance1Fn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdTraceRaysIndirect2KHR.html>
+ ///
+ /// `indirect_device_address` is a buffer device address which is a pointer to a [`vk::TraceRaysIndirectCommand2KHR`] structure containing the trace ray parameters.
+ #[inline]
+ pub unsafe fn cmd_trace_rays_indirect2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ indirect_device_address: vk::DeviceAddress,
+ ) {
+ (self.fp.cmd_trace_rays_indirect2_khr)(command_buffer, indirect_device_address);
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrRayTracingMaintenance1Fn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrRayTracingMaintenance1Fn {
+ &self.fp
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/ray_tracing_pipeline.rs b/third_party/rust/ash/src/extensions/khr/ray_tracing_pipeline.rs
new file mode 100644
index 0000000000..af733430d1
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/ray_tracing_pipeline.rs
@@ -0,0 +1,194 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct RayTracingPipeline {
+ handle: vk::Device,
+ fp: vk::KhrRayTracingPipelineFn,
+}
+
+impl RayTracingPipeline {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrRayTracingPipelineFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ #[inline]
+ pub unsafe fn get_properties(
+ instance: &Instance,
+ pdevice: vk::PhysicalDevice,
+ ) -> vk::PhysicalDeviceRayTracingPipelinePropertiesKHR {
+ let mut props_rt = vk::PhysicalDeviceRayTracingPipelinePropertiesKHR::default();
+ {
+ let mut props = vk::PhysicalDeviceProperties2::builder().push_next(&mut props_rt);
+ instance.get_physical_device_properties2(pdevice, &mut props);
+ }
+ props_rt
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdTraceRaysKHR.html>
+ #[inline]
+ pub unsafe fn cmd_trace_rays(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ raygen_shader_binding_tables: &vk::StridedDeviceAddressRegionKHR,
+ miss_shader_binding_tables: &vk::StridedDeviceAddressRegionKHR,
+ hit_shader_binding_tables: &vk::StridedDeviceAddressRegionKHR,
+ callable_shader_binding_tables: &vk::StridedDeviceAddressRegionKHR,
+ width: u32,
+ height: u32,
+ depth: u32,
+ ) {
+ (self.fp.cmd_trace_rays_khr)(
+ command_buffer,
+ raygen_shader_binding_tables,
+ miss_shader_binding_tables,
+ hit_shader_binding_tables,
+ callable_shader_binding_tables,
+ width,
+ height,
+ depth,
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateRayTracingPipelinesKHR.html>
+ #[inline]
+ pub unsafe fn create_ray_tracing_pipelines(
+ &self,
+ deferred_operation: vk::DeferredOperationKHR,
+ pipeline_cache: vk::PipelineCache,
+ create_info: &[vk::RayTracingPipelineCreateInfoKHR],
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<Vec<vk::Pipeline>> {
+ let mut pipelines = vec![mem::zeroed(); create_info.len()];
+ (self.fp.create_ray_tracing_pipelines_khr)(
+ self.handle,
+ deferred_operation,
+ pipeline_cache,
+ create_info.len() as u32,
+ create_info.as_ptr(),
+ allocation_callbacks.as_raw_ptr(),
+ pipelines.as_mut_ptr(),
+ )
+ .result_with_success(pipelines)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetRayTracingShaderGroupHandlesKHR.html>
+ #[inline]
+ pub unsafe fn get_ray_tracing_shader_group_handles(
+ &self,
+ pipeline: vk::Pipeline,
+ first_group: u32,
+ group_count: u32,
+ data_size: usize,
+ ) -> VkResult<Vec<u8>> {
+ let mut data = Vec::<u8>::with_capacity(data_size);
+ (self.fp.get_ray_tracing_shader_group_handles_khr)(
+ self.handle,
+ pipeline,
+ first_group,
+ group_count,
+ data_size,
+ data.as_mut_ptr().cast(),
+ )
+ .result()?;
+ data.set_len(data_size);
+ Ok(data)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetRayTracingCaptureReplayShaderGroupHandlesKHR.html>
+ #[inline]
+ pub unsafe fn get_ray_tracing_capture_replay_shader_group_handles(
+ &self,
+ pipeline: vk::Pipeline,
+ first_group: u32,
+ group_count: u32,
+ data_size: usize,
+ ) -> VkResult<Vec<u8>> {
+ let mut data = Vec::<u8>::with_capacity(data_size);
+ (self
+ .fp
+ .get_ray_tracing_capture_replay_shader_group_handles_khr)(
+ self.handle,
+ pipeline,
+ first_group,
+ group_count,
+ data_size,
+ data.as_mut_ptr().cast(),
+ )
+ .result()?;
+ data.set_len(data_size);
+ Ok(data)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdTraceRaysIndirectKHR.html>
+ ///
+ /// `indirect_device_address` is a buffer device address which is a pointer to a [`vk::TraceRaysIndirectCommandKHR`] structure containing the trace ray parameters.
+ #[inline]
+ pub unsafe fn cmd_trace_rays_indirect(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ raygen_shader_binding_table: &[vk::StridedDeviceAddressRegionKHR],
+ miss_shader_binding_table: &[vk::StridedDeviceAddressRegionKHR],
+ hit_shader_binding_table: &[vk::StridedDeviceAddressRegionKHR],
+ callable_shader_binding_table: &[vk::StridedDeviceAddressRegionKHR],
+ indirect_device_address: vk::DeviceAddress,
+ ) {
+ (self.fp.cmd_trace_rays_indirect_khr)(
+ command_buffer,
+ raygen_shader_binding_table.as_ptr(),
+ miss_shader_binding_table.as_ptr(),
+ hit_shader_binding_table.as_ptr(),
+ callable_shader_binding_table.as_ptr(),
+ indirect_device_address,
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetRayTracingShaderGroupStackSizeKHR.html>
+ #[inline]
+ pub unsafe fn get_ray_tracing_shader_group_stack_size(
+ &self,
+ pipeline: vk::Pipeline,
+ group: u32,
+ group_shader: vk::ShaderGroupShaderKHR,
+ ) -> vk::DeviceSize {
+ (self.fp.get_ray_tracing_shader_group_stack_size_khr)(
+ self.handle,
+ pipeline,
+ group,
+ group_shader,
+ )
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetRayTracingPipelineStackSizeKHR.html>
+ #[inline]
+ pub unsafe fn cmd_set_ray_tracing_pipeline_stack_size(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ pipeline_stack_size: u32,
+ ) {
+ (self.fp.cmd_set_ray_tracing_pipeline_stack_size_khr)(command_buffer, pipeline_stack_size);
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrRayTracingPipelineFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrRayTracingPipelineFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/surface.rs b/third_party/rust/ash/src/extensions/khr/surface.rs
new file mode 100755
index 0000000000..11508bdcc9
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/surface.rs
@@ -0,0 +1,110 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct Surface {
+ handle: vk::Instance,
+ fp: vk::KhrSurfaceFn,
+}
+
+impl Surface {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let handle = instance.handle();
+ let fp = vk::KhrSurfaceFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceSurfaceSupportKHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_surface_support(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ queue_family_index: u32,
+ surface: vk::SurfaceKHR,
+ ) -> VkResult<bool> {
+ let mut b = 0;
+ (self.fp.get_physical_device_surface_support_khr)(
+ physical_device,
+ queue_family_index,
+ surface,
+ &mut b,
+ )
+ .result_with_success(b > 0)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceSurfacePresentModesKHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_surface_present_modes(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ surface: vk::SurfaceKHR,
+ ) -> VkResult<Vec<vk::PresentModeKHR>> {
+ read_into_uninitialized_vector(|count, data| {
+ (self.fp.get_physical_device_surface_present_modes_khr)(
+ physical_device,
+ surface,
+ count,
+ data,
+ )
+ })
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceSurfaceCapabilitiesKHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_surface_capabilities(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ surface: vk::SurfaceKHR,
+ ) -> VkResult<vk::SurfaceCapabilitiesKHR> {
+ let mut surface_capabilities = mem::zeroed();
+ (self.fp.get_physical_device_surface_capabilities_khr)(
+ physical_device,
+ surface,
+ &mut surface_capabilities,
+ )
+ .result_with_success(surface_capabilities)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceSurfaceFormatsKHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_surface_formats(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ surface: vk::SurfaceKHR,
+ ) -> VkResult<Vec<vk::SurfaceFormatKHR>> {
+ read_into_uninitialized_vector(|count, data| {
+ (self.fp.get_physical_device_surface_formats_khr)(physical_device, surface, count, data)
+ })
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroySurfaceKHR.html>
+ #[inline]
+ pub unsafe fn destroy_surface(
+ &self,
+ surface: vk::SurfaceKHR,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) {
+ (self.fp.destroy_surface_khr)(self.handle, surface, allocation_callbacks.as_raw_ptr());
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrSurfaceFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrSurfaceFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn instance(&self) -> vk::Instance {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/swapchain.rs b/third_party/rust/ash/src/extensions/khr/swapchain.rs
new file mode 100755
index 0000000000..8f57729de7
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/swapchain.rs
@@ -0,0 +1,211 @@
+#[cfg(doc)]
+use super::DeviceGroup;
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct Swapchain {
+ handle: vk::Device,
+ fp: vk::KhrSwapchainFn,
+}
+
+impl Swapchain {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrSwapchainFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateSwapchainKHR.html>
+ #[inline]
+ pub unsafe fn create_swapchain(
+ &self,
+ create_info: &vk::SwapchainCreateInfoKHR,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::SwapchainKHR> {
+ let mut swapchain = mem::zeroed();
+ (self.fp.create_swapchain_khr)(
+ self.handle,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut swapchain,
+ )
+ .result_with_success(swapchain)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroySwapchainKHR.html>
+ #[inline]
+ pub unsafe fn destroy_swapchain(
+ &self,
+ swapchain: vk::SwapchainKHR,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) {
+ (self.fp.destroy_swapchain_khr)(self.handle, swapchain, allocation_callbacks.as_raw_ptr());
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetSwapchainImagesKHR.html>
+ #[inline]
+ pub unsafe fn get_swapchain_images(
+ &self,
+ swapchain: vk::SwapchainKHR,
+ ) -> VkResult<Vec<vk::Image>> {
+ read_into_uninitialized_vector(|count, data| {
+ (self.fp.get_swapchain_images_khr)(self.handle, swapchain, count, data)
+ })
+ }
+
+ /// On success, returns the next image's index and whether the swapchain is suboptimal for the surface.
+ ///
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkAcquireNextImageKHR.html>
+ #[inline]
+ pub unsafe fn acquire_next_image(
+ &self,
+ swapchain: vk::SwapchainKHR,
+ timeout: u64,
+ semaphore: vk::Semaphore,
+ fence: vk::Fence,
+ ) -> VkResult<(u32, bool)> {
+ let mut index = 0;
+ let err_code = (self.fp.acquire_next_image_khr)(
+ self.handle,
+ swapchain,
+ timeout,
+ semaphore,
+ fence,
+ &mut index,
+ );
+ match err_code {
+ vk::Result::SUCCESS => Ok((index, false)),
+ vk::Result::SUBOPTIMAL_KHR => Ok((index, true)),
+ _ => Err(err_code),
+ }
+ }
+
+ /// On success, returns whether the swapchain is suboptimal for the surface.
+ ///
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkQueuePresentKHR.html>
+ #[inline]
+ pub unsafe fn queue_present(
+ &self,
+ queue: vk::Queue,
+ present_info: &vk::PresentInfoKHR,
+ ) -> VkResult<bool> {
+ let err_code = (self.fp.queue_present_khr)(queue, present_info);
+ match err_code {
+ vk::Result::SUCCESS => Ok(false),
+ vk::Result::SUBOPTIMAL_KHR => Ok(true),
+ _ => Err(err_code),
+ }
+ }
+
+ /// Only available since [Vulkan 1.1].
+ ///
+ /// Also available as [`DeviceGroup::get_device_group_present_capabilities()`]
+ /// when [`VK_KHR_surface`] is enabled.
+ ///
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceGroupPresentCapabilitiesKHR.html>
+ ///
+ /// [Vulkan 1.1]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html
+ /// [`VK_KHR_surface`]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_KHR_surface.html
+ #[inline]
+ pub unsafe fn get_device_group_present_capabilities(
+ &self,
+ device_group_present_capabilities: &mut vk::DeviceGroupPresentCapabilitiesKHR,
+ ) -> VkResult<()> {
+ (self.fp.get_device_group_present_capabilities_khr)(
+ self.handle,
+ device_group_present_capabilities,
+ )
+ .result()
+ }
+
+ /// Only available since [Vulkan 1.1].
+ ///
+ /// Also available as [`DeviceGroup::get_device_group_surface_present_modes()`]
+ /// when [`VK_KHR_surface`] is enabled.
+ ///
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceGroupSurfacePresentModesKHR.html>
+ ///
+ /// [Vulkan 1.1]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html
+ /// [`VK_KHR_surface`]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_KHR_surface.html
+ #[inline]
+ pub unsafe fn get_device_group_surface_present_modes(
+ &self,
+ surface: vk::SurfaceKHR,
+ ) -> VkResult<vk::DeviceGroupPresentModeFlagsKHR> {
+ let mut modes = mem::zeroed();
+ (self.fp.get_device_group_surface_present_modes_khr)(self.handle, surface, &mut modes)
+ .result_with_success(modes)
+ }
+
+ /// Only available since [Vulkan 1.1].
+ ///
+ /// Also available as [`DeviceGroup::get_physical_device_present_rectangles()`]
+ /// when [`VK_KHR_surface`] is enabled.
+ ///
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDevicePresentRectanglesKHR.html>
+ ///
+ /// [Vulkan 1.1]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html
+ /// [`VK_KHR_surface`]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_KHR_surface.html
+ #[inline]
+ pub unsafe fn get_physical_device_present_rectangles(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ surface: vk::SurfaceKHR,
+ ) -> VkResult<Vec<vk::Rect2D>> {
+ read_into_uninitialized_vector(|count, data| {
+ (self.fp.get_physical_device_present_rectangles_khr)(
+ physical_device,
+ surface,
+ count,
+ data,
+ )
+ })
+ }
+
+ /// On success, returns the next image's index and whether the swapchain is suboptimal for the surface.
+ ///
+ /// Only available since [Vulkan 1.1].
+ ///
+ /// Also available as [`DeviceGroup::acquire_next_image2()`]
+ /// when [`VK_KHR_swapchain`] is enabled.
+ ///
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkAcquireNextImage2KHR.html>
+ ///
+ /// [Vulkan 1.1]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_VERSION_1_1.html
+ /// [`VK_KHR_swapchain`]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_KHR_swapchain.html
+ #[inline]
+ pub unsafe fn acquire_next_image2(
+ &self,
+ acquire_info: &vk::AcquireNextImageInfoKHR,
+ ) -> VkResult<(u32, bool)> {
+ let mut index = 0;
+ let err_code = (self.fp.acquire_next_image2_khr)(self.handle, acquire_info, &mut index);
+ match err_code {
+ vk::Result::SUCCESS => Ok((index, false)),
+ vk::Result::SUBOPTIMAL_KHR => Ok((index, true)),
+ _ => Err(err_code),
+ }
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrSwapchainFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrSwapchainFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/synchronization2.rs b/third_party/rust/ash/src/extensions/khr/synchronization2.rs
new file mode 100644
index 0000000000..d8baefd602
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/synchronization2.rs
@@ -0,0 +1,101 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct Synchronization2 {
+ fp: vk::KhrSynchronization2Fn,
+}
+
+impl Synchronization2 {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let fp = vk::KhrSynchronization2Fn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(device.handle(), name.as_ptr()))
+ });
+ Self { fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdPipelineBarrier2KHR.html>
+ #[inline]
+ pub unsafe fn cmd_pipeline_barrier2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ dependency_info: &vk::DependencyInfoKHR,
+ ) {
+ (self.fp.cmd_pipeline_barrier2_khr)(command_buffer, dependency_info)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdResetEvent2KHR.html>
+ #[inline]
+ pub unsafe fn cmd_reset_event2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ event: vk::Event,
+ stage_mask: vk::PipelineStageFlags2KHR,
+ ) {
+ (self.fp.cmd_reset_event2_khr)(command_buffer, event, stage_mask)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetEvent2KHR.html>
+ #[inline]
+ pub unsafe fn cmd_set_event2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ event: vk::Event,
+ dependency_info: &vk::DependencyInfoKHR,
+ ) {
+ (self.fp.cmd_set_event2_khr)(command_buffer, event, dependency_info)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdWaitEvents2KHR.html>
+ #[inline]
+ pub unsafe fn cmd_wait_events2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ events: &[vk::Event],
+ dependency_infos: &[vk::DependencyInfoKHR],
+ ) {
+ assert_eq!(events.len(), dependency_infos.len());
+ (self.fp.cmd_wait_events2_khr)(
+ command_buffer,
+ events.len() as u32,
+ events.as_ptr(),
+ dependency_infos.as_ptr(),
+ )
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdWriteTimestamp2KHR.html>
+ #[inline]
+ pub unsafe fn cmd_write_timestamp2(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ stage: vk::PipelineStageFlags2KHR,
+ query_pool: vk::QueryPool,
+ query: u32,
+ ) {
+ (self.fp.cmd_write_timestamp2_khr)(command_buffer, stage, query_pool, query)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkQueueSubmit2KHR.html>
+ #[inline]
+ pub unsafe fn queue_submit2(
+ &self,
+ queue: vk::Queue,
+ submits: &[vk::SubmitInfo2KHR],
+ fence: vk::Fence,
+ ) -> VkResult<()> {
+ (self.fp.queue_submit2_khr)(queue, submits.len() as u32, submits.as_ptr(), fence).result()
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrSynchronization2Fn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrSynchronization2Fn {
+ &self.fp
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/timeline_semaphore.rs b/third_party/rust/ash/src/extensions/khr/timeline_semaphore.rs
new file mode 100644
index 0000000000..de9a492506
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/timeline_semaphore.rs
@@ -0,0 +1,60 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct TimelineSemaphore {
+ handle: vk::Device,
+ fp: vk::KhrTimelineSemaphoreFn,
+}
+
+impl TimelineSemaphore {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let handle = device.handle();
+ let fp = vk::KhrTimelineSemaphoreFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetSemaphoreCounterValue.html>
+ #[inline]
+ pub unsafe fn get_semaphore_counter_value(&self, semaphore: vk::Semaphore) -> VkResult<u64> {
+ let mut value = 0;
+ (self.fp.get_semaphore_counter_value_khr)(self.handle, semaphore, &mut value)
+ .result_with_success(value)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkWaitSemaphores.html>
+ #[inline]
+ pub unsafe fn wait_semaphores(
+ &self,
+ wait_info: &vk::SemaphoreWaitInfo,
+ timeout: u64,
+ ) -> VkResult<()> {
+ (self.fp.wait_semaphores_khr)(self.handle, wait_info, timeout).result()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkSignalSemaphore.html>
+ #[inline]
+ pub unsafe fn signal_semaphore(&self, signal_info: &vk::SemaphoreSignalInfo) -> VkResult<()> {
+ (self.fp.signal_semaphore_khr)(self.handle, signal_info).result()
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrTimelineSemaphoreFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrTimelineSemaphoreFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn device(&self) -> vk::Device {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/wayland_surface.rs b/third_party/rust/ash/src/extensions/khr/wayland_surface.rs
new file mode 100755
index 0000000000..d51fcb9914
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/wayland_surface.rs
@@ -0,0 +1,71 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct WaylandSurface {
+ handle: vk::Instance,
+ fp: vk::KhrWaylandSurfaceFn,
+}
+
+impl WaylandSurface {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let handle = instance.handle();
+ let fp = vk::KhrWaylandSurfaceFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateWaylandSurfaceKHR.html>
+ #[inline]
+ pub unsafe fn create_wayland_surface(
+ &self,
+ create_info: &vk::WaylandSurfaceCreateInfoKHR,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::SurfaceKHR> {
+ let mut surface = mem::zeroed();
+ (self.fp.create_wayland_surface_khr)(
+ self.handle,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut surface,
+ )
+ .result_with_success(surface)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceWaylandPresentationSupportKHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_wayland_presentation_support(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ queue_family_index: u32,
+ wl_display: &mut vk::wl_display,
+ ) -> bool {
+ let b = (self.fp.get_physical_device_wayland_presentation_support_khr)(
+ physical_device,
+ queue_family_index,
+ wl_display,
+ );
+
+ b > 0
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrWaylandSurfaceFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrWaylandSurfaceFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn instance(&self) -> vk::Instance {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/win32_surface.rs b/third_party/rust/ash/src/extensions/khr/win32_surface.rs
new file mode 100755
index 0000000000..be56a02e2e
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/win32_surface.rs
@@ -0,0 +1,69 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct Win32Surface {
+ handle: vk::Instance,
+ fp: vk::KhrWin32SurfaceFn,
+}
+
+impl Win32Surface {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let handle = instance.handle();
+ let fp = vk::KhrWin32SurfaceFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateWin32SurfaceKHR.html>
+ #[inline]
+ pub unsafe fn create_win32_surface(
+ &self,
+ create_info: &vk::Win32SurfaceCreateInfoKHR,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::SurfaceKHR> {
+ let mut surface = mem::zeroed();
+ (self.fp.create_win32_surface_khr)(
+ self.handle,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut surface,
+ )
+ .result_with_success(surface)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceWin32PresentationSupportKHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_win32_presentation_support(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ queue_family_index: u32,
+ ) -> bool {
+ let b = (self.fp.get_physical_device_win32_presentation_support_khr)(
+ physical_device,
+ queue_family_index,
+ );
+
+ b > 0
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrWin32SurfaceFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrWin32SurfaceFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn instance(&self) -> vk::Instance {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/xcb_surface.rs b/third_party/rust/ash/src/extensions/khr/xcb_surface.rs
new file mode 100755
index 0000000000..967376e7c6
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/xcb_surface.rs
@@ -0,0 +1,73 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct XcbSurface {
+ handle: vk::Instance,
+ fp: vk::KhrXcbSurfaceFn,
+}
+
+impl XcbSurface {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let handle = instance.handle();
+ let fp = vk::KhrXcbSurfaceFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateXcbSurfaceKHR.html>
+ #[inline]
+ pub unsafe fn create_xcb_surface(
+ &self,
+ create_info: &vk::XcbSurfaceCreateInfoKHR,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::SurfaceKHR> {
+ let mut surface = mem::zeroed();
+ (self.fp.create_xcb_surface_khr)(
+ self.handle,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut surface,
+ )
+ .result_with_success(surface)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceXcbPresentationSupportKHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_xcb_presentation_support(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ queue_family_index: u32,
+ connection: &mut vk::xcb_connection_t,
+ visual_id: vk::xcb_visualid_t,
+ ) -> bool {
+ let b = (self.fp.get_physical_device_xcb_presentation_support_khr)(
+ physical_device,
+ queue_family_index,
+ connection,
+ visual_id,
+ );
+
+ b > 0
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrXcbSurfaceFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrXcbSurfaceFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn instance(&self) -> vk::Instance {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/khr/xlib_surface.rs b/third_party/rust/ash/src/extensions/khr/xlib_surface.rs
new file mode 100755
index 0000000000..59ad416e47
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/khr/xlib_surface.rs
@@ -0,0 +1,73 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct XlibSurface {
+ handle: vk::Instance,
+ fp: vk::KhrXlibSurfaceFn,
+}
+
+impl XlibSurface {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let handle = instance.handle();
+ let fp = vk::KhrXlibSurfaceFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateXlibSurfaceKHR.html>
+ #[inline]
+ pub unsafe fn create_xlib_surface(
+ &self,
+ create_info: &vk::XlibSurfaceCreateInfoKHR,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::SurfaceKHR> {
+ let mut surface = mem::zeroed();
+ (self.fp.create_xlib_surface_khr)(
+ self.handle,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut surface,
+ )
+ .result_with_success(surface)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceXlibPresentationSupportKHR.html>
+ #[inline]
+ pub unsafe fn get_physical_device_xlib_presentation_support(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ queue_family_index: u32,
+ display: &mut vk::Display,
+ visual_id: vk::VisualID,
+ ) -> bool {
+ let b = (self.fp.get_physical_device_xlib_presentation_support_khr)(
+ physical_device,
+ queue_family_index,
+ display,
+ visual_id,
+ );
+
+ b > 0
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::KhrXlibSurfaceFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::KhrXlibSurfaceFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn instance(&self) -> vk::Instance {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/mod.rs b/third_party/rust/ash/src/extensions/mod.rs
new file mode 100644
index 0000000000..fefba9b688
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/mod.rs
@@ -0,0 +1,6 @@
+pub mod experimental;
+pub mod ext;
+pub mod khr;
+pub mod mvk;
+pub mod nn;
+pub mod nv;
diff --git a/third_party/rust/ash/src/extensions/mvk/ios_surface.rs b/third_party/rust/ash/src/extensions/mvk/ios_surface.rs
new file mode 100755
index 0000000000..8ae83e98d2
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/mvk/ios_surface.rs
@@ -0,0 +1,54 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct IOSSurface {
+ handle: vk::Instance,
+ fp: vk::MvkIosSurfaceFn,
+}
+
+impl IOSSurface {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let handle = instance.handle();
+ let fp = vk::MvkIosSurfaceFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateIOSSurfaceMVK.html>
+ #[inline]
+ pub unsafe fn create_ios_surface(
+ &self,
+ create_info: &vk::IOSSurfaceCreateInfoMVK,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::SurfaceKHR> {
+ let mut surface = mem::zeroed();
+ (self.fp.create_ios_surface_mvk)(
+ self.handle,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut surface,
+ )
+ .result_with_success(surface)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::MvkIosSurfaceFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::MvkIosSurfaceFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn instance(&self) -> vk::Instance {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/mvk/macos_surface.rs b/third_party/rust/ash/src/extensions/mvk/macos_surface.rs
new file mode 100755
index 0000000000..878829e452
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/mvk/macos_surface.rs
@@ -0,0 +1,54 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct MacOSSurface {
+ handle: vk::Instance,
+ fp: vk::MvkMacosSurfaceFn,
+}
+
+impl MacOSSurface {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let handle = instance.handle();
+ let fp = vk::MvkMacosSurfaceFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateMacOSSurfaceMVK.html>
+ #[inline]
+ pub unsafe fn create_mac_os_surface(
+ &self,
+ create_info: &vk::MacOSSurfaceCreateInfoMVK,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::SurfaceKHR> {
+ let mut surface = mem::zeroed();
+ (self.fp.create_mac_os_surface_mvk)(
+ self.handle,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut surface,
+ )
+ .result_with_success(surface)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::MvkMacosSurfaceFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::MvkMacosSurfaceFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn instance(&self) -> vk::Instance {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/mvk/mod.rs b/third_party/rust/ash/src/extensions/mvk/mod.rs
new file mode 100644
index 0000000000..e475d53563
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/mvk/mod.rs
@@ -0,0 +1,5 @@
+pub use self::ios_surface::IOSSurface;
+pub use self::macos_surface::MacOSSurface;
+
+mod ios_surface;
+mod macos_surface;
diff --git a/third_party/rust/ash/src/extensions/nn/mod.rs b/third_party/rust/ash/src/extensions/nn/mod.rs
new file mode 100644
index 0000000000..e429050a6c
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/nn/mod.rs
@@ -0,0 +1,3 @@
+pub use self::vi_surface::ViSurface;
+
+mod vi_surface;
diff --git a/third_party/rust/ash/src/extensions/nn/vi_surface.rs b/third_party/rust/ash/src/extensions/nn/vi_surface.rs
new file mode 100644
index 0000000000..0aa188ac27
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/nn/vi_surface.rs
@@ -0,0 +1,54 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct ViSurface {
+ handle: vk::Instance,
+ fp: vk::NnViSurfaceFn,
+}
+
+impl ViSurface {
+ pub fn new(entry: &Entry, instance: &Instance) -> Self {
+ let handle = instance.handle();
+ let fp = vk::NnViSurfaceFn::load(|name| unsafe {
+ mem::transmute(entry.get_instance_proc_addr(handle, name.as_ptr()))
+ });
+ Self { handle, fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateViSurfaceNN.html>
+ #[inline]
+ pub unsafe fn create_vi_surface(
+ &self,
+ create_info: &vk::ViSurfaceCreateInfoNN,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<vk::SurfaceKHR> {
+ let mut surface = mem::zeroed();
+ (self.fp.create_vi_surface_nn)(
+ self.handle,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut surface,
+ )
+ .result_with_success(surface)
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::NnViSurfaceFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::NnViSurfaceFn {
+ &self.fp
+ }
+
+ #[inline]
+ pub fn instance(&self) -> vk::Instance {
+ self.handle
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/nv/coverage_reduction_mode.rs b/third_party/rust/ash/src/extensions/nv/coverage_reduction_mode.rs
new file mode 100644
index 0000000000..b4304772e3
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/nv/coverage_reduction_mode.rs
@@ -0,0 +1,70 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::{Entry, Instance};
+use std::ffi::CStr;
+use std::mem;
+
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_NV_coverage_reduction_mode.html>
#[derive(Clone)]
pub struct CoverageReductionMode {
    // Loaded function pointers for the VK_NV_coverage_reduction_mode extension.
    fp: vk::NvCoverageReductionModeFn,
}

impl CoverageReductionMode {
    /// Loads the extension's commands through `vkGetInstanceProcAddr`.
    pub fn new(entry: &Entry, instance: &Instance) -> Self {
        let fp = vk::NvCoverageReductionModeFn::load(|name| unsafe {
            mem::transmute(entry.get_instance_proc_addr(instance.handle(), name.as_ptr()))
        });
        Self { fp }
    }

    /// Retrieve the number of elements to pass to [`get_physical_device_supported_framebuffer_mixed_samples_combinations()`][Self::get_physical_device_supported_framebuffer_mixed_samples_combinations()]
    #[inline]
    pub unsafe fn get_physical_device_supported_framebuffer_mixed_samples_combinations_len(
        &self,
        physical_device: vk::PhysicalDevice,
    ) -> VkResult<usize> {
        let mut count = 0;
        // First half of the Vulkan two-call idiom: a null output pointer makes
        // the driver write only the element count.
        (self
            .fp
            .get_physical_device_supported_framebuffer_mixed_samples_combinations_nv)(
            physical_device,
            &mut count,
            std::ptr::null_mut(),
        )
        .result_with_success(count as usize)
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV.html>
    ///
    /// Call [`get_physical_device_supported_framebuffer_mixed_samples_combinations_len()`][Self::get_physical_device_supported_framebuffer_mixed_samples_combinations_len()] to query the number of elements to pass to `out`.
    /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
    #[inline]
    pub unsafe fn get_physical_device_supported_framebuffer_mixed_samples_combinations(
        &self,
        physical_device: vk::PhysicalDevice,
        out: &mut [vk::FramebufferMixedSamplesCombinationNV],
    ) -> VkResult<()> {
        let mut count = out.len() as u32;
        (self
            .fp
            .get_physical_device_supported_framebuffer_mixed_samples_combinations_nv)(
            physical_device,
            &mut count,
            out.as_mut_ptr(),
        )
        .result()?;
        // The driver writes back how many elements it filled; require an exact fill.
        assert_eq!(count as usize, out.len());
        Ok(())
    }

    /// The extension's name string, for use when creating the instance.
    #[inline]
    pub const fn name() -> &'static CStr {
        vk::NvCoverageReductionModeFn::name()
    }

    /// Raw access to the loaded function pointers.
    #[inline]
    pub fn fp(&self) -> &vk::NvCoverageReductionModeFn {
        &self.fp
    }
}
diff --git a/third_party/rust/ash/src/extensions/nv/device_diagnostic_checkpoints.rs b/third_party/rust/ash/src/extensions/nv/device_diagnostic_checkpoints.rs
new file mode 100644
index 0000000000..ce43d046e6
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/nv/device_diagnostic_checkpoints.rs
@@ -0,0 +1,63 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+use std::os::raw::c_void;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_NV_device_diagnostic_checkpoints.html>
+#[derive(Clone)]
+pub struct DeviceDiagnosticCheckpoints {
+ fp: vk::NvDeviceDiagnosticCheckpointsFn,
+}
+
+impl DeviceDiagnosticCheckpoints {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let fp = vk::NvDeviceDiagnosticCheckpointsFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(device.handle(), name.as_ptr()))
+ });
+ Self { fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdSetCheckpointNV.html>
+ #[inline]
+ pub unsafe fn cmd_set_checkpoint(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ p_checkpoint_marker: *const c_void,
+ ) {
+ (self.fp.cmd_set_checkpoint_nv)(command_buffer, p_checkpoint_marker);
+ }
+
+ /// Retrieve the number of elements to pass to [`get_queue_checkpoint_data()`][Self::get_queue_checkpoint_data()]
+ #[inline]
+ pub unsafe fn get_queue_checkpoint_data_len(&self, queue: vk::Queue) -> usize {
+ let mut count = 0;
+ (self.fp.get_queue_checkpoint_data_nv)(queue, &mut count, std::ptr::null_mut());
+ count as usize
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetQueueCheckpointDataNV.html>
+ ///
+ /// Call [`get_queue_checkpoint_data_len()`][Self::get_queue_checkpoint_data_len()] to query the number of elements to pass to `out`.
+ /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
+ #[inline]
+ pub unsafe fn get_queue_checkpoint_data(
+ &self,
+ queue: vk::Queue,
+ out: &mut [vk::CheckpointDataNV],
+ ) {
+ let mut count = out.len() as u32;
+ (self.fp.get_queue_checkpoint_data_nv)(queue, &mut count, out.as_mut_ptr());
+ assert_eq!(count as usize, out.len());
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::NvDeviceDiagnosticCheckpointsFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::NvDeviceDiagnosticCheckpointsFn {
+ &self.fp
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/nv/mesh_shader.rs b/third_party/rust/ash/src/extensions/nv/mesh_shader.rs
new file mode 100755
index 0000000000..14ef8d44ac
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/nv/mesh_shader.rs
@@ -0,0 +1,81 @@
+use crate::vk;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
+#[derive(Clone)]
+pub struct MeshShader {
+ fp: vk::NvMeshShaderFn,
+}
+
+impl MeshShader {
+ pub fn new(instance: &Instance, device: &Device) -> Self {
+ let fp = vk::NvMeshShaderFn::load(|name| unsafe {
+ mem::transmute(instance.get_device_proc_addr(device.handle(), name.as_ptr()))
+ });
+ Self { fp }
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDrawMeshTasksNV.html>
+ #[inline]
+ pub unsafe fn cmd_draw_mesh_tasks(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ task_count: u32,
+ first_task: u32,
+ ) {
+ (self.fp.cmd_draw_mesh_tasks_nv)(command_buffer, task_count, first_task);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDrawMeshTasksIndirectNV.html>
+ #[inline]
+ pub unsafe fn cmd_draw_mesh_tasks_indirect(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ buffer: vk::Buffer,
+ offset: vk::DeviceSize,
+ draw_count: u32,
+ stride: u32,
+ ) {
+ (self.fp.cmd_draw_mesh_tasks_indirect_nv)(
+ command_buffer,
+ buffer,
+ offset,
+ draw_count,
+ stride,
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdDrawMeshTasksIndirectCountNV.html>
+ #[inline]
+ pub unsafe fn cmd_draw_mesh_tasks_indirect_count(
+ &self,
+ command_buffer: vk::CommandBuffer,
+ buffer: vk::Buffer,
+ offset: vk::DeviceSize,
+ count_buffer: vk::Buffer,
+ count_buffer_offset: vk::DeviceSize,
+ max_draw_count: u32,
+ stride: u32,
+ ) {
+ (self.fp.cmd_draw_mesh_tasks_indirect_count_nv)(
+ command_buffer,
+ buffer,
+ offset,
+ count_buffer,
+ count_buffer_offset,
+ max_draw_count,
+ stride,
+ );
+ }
+
+ #[inline]
+ pub const fn name() -> &'static CStr {
+ vk::NvMeshShaderFn::name()
+ }
+
+ #[inline]
+ pub fn fp(&self) -> &vk::NvMeshShaderFn {
+ &self.fp
+ }
+}
diff --git a/third_party/rust/ash/src/extensions/nv/mod.rs b/third_party/rust/ash/src/extensions/nv/mod.rs
new file mode 100644
index 0000000000..bfde37a55c
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/nv/mod.rs
@@ -0,0 +1,9 @@
+pub use self::coverage_reduction_mode::CoverageReductionMode;
+pub use self::device_diagnostic_checkpoints::DeviceDiagnosticCheckpoints;
+pub use self::mesh_shader::MeshShader;
+pub use self::ray_tracing::RayTracing;
+
+mod coverage_reduction_mode;
+mod device_diagnostic_checkpoints;
+mod mesh_shader;
+mod ray_tracing;
diff --git a/third_party/rust/ash/src/extensions/nv/ray_tracing.rs b/third_party/rust/ash/src/extensions/nv/ray_tracing.rs
new file mode 100755
index 0000000000..d97ec85dfe
--- /dev/null
+++ b/third_party/rust/ash/src/extensions/nv/ray_tracing.rs
@@ -0,0 +1,271 @@
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use crate::{Device, Instance};
+use std::ffi::CStr;
+use std::mem;
+
/// Device-level function pointers for the `VK_NV_ray_tracing` extension.
#[derive(Clone)]
pub struct RayTracing {
    handle: vk::Device,
    fp: vk::NvRayTracingFn,
}

impl RayTracing {
    /// Loads the extension's commands through `vkGetDeviceProcAddr`.
    pub fn new(instance: &Instance, device: &Device) -> Self {
        let handle = device.handle();
        let fp = vk::NvRayTracingFn::load(|name| unsafe {
            mem::transmute(instance.get_device_proc_addr(handle, name.as_ptr()))
        });
        Self { handle, fp }
    }

    /// Convenience helper: queries [`vk::PhysicalDeviceRayTracingPropertiesNV`]
    /// by chaining it into a `vkGetPhysicalDeviceProperties2` call.
    #[inline]
    pub unsafe fn get_properties(
        instance: &Instance,
        pdevice: vk::PhysicalDevice,
    ) -> vk::PhysicalDeviceRayTracingPropertiesNV {
        let mut props_rt = vk::PhysicalDeviceRayTracingPropertiesNV::default();
        {
            // The builder borrows props_rt; scope it so props_rt can be returned.
            let mut props = vk::PhysicalDeviceProperties2::builder().push_next(&mut props_rt);
            instance.get_physical_device_properties2(pdevice, &mut props);
        }
        props_rt
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateAccelerationStructureNV.html>
    #[inline]
    pub unsafe fn create_acceleration_structure(
        &self,
        create_info: &vk::AccelerationStructureCreateInfoNV,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<vk::AccelerationStructureNV> {
        let mut accel_struct = mem::zeroed();
        (self.fp.create_acceleration_structure_nv)(
            self.handle,
            create_info,
            allocation_callbacks.as_raw_ptr(),
            &mut accel_struct,
        )
        .result_with_success(accel_struct)
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyAccelerationStructureNV.html>
    #[inline]
    pub unsafe fn destroy_acceleration_structure(
        &self,
        accel_struct: vk::AccelerationStructureNV,
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) {
        (self.fp.destroy_acceleration_structure_nv)(
            self.handle,
            accel_struct,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetAccelerationStructureMemoryRequirementsNV.html>
    #[inline]
    pub unsafe fn get_acceleration_structure_memory_requirements(
        &self,
        info: &vk::AccelerationStructureMemoryRequirementsInfoNV,
    ) -> vk::MemoryRequirements2KHR {
        let mut requirements = mem::zeroed();
        (self.fp.get_acceleration_structure_memory_requirements_nv)(
            self.handle,
            info,
            &mut requirements,
        );
        requirements
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkBindAccelerationStructureMemoryNV.html>
    #[inline]
    pub unsafe fn bind_acceleration_structure_memory(
        &self,
        bind_info: &[vk::BindAccelerationStructureMemoryInfoNV],
    ) -> VkResult<()> {
        (self.fp.bind_acceleration_structure_memory_nv)(
            self.handle,
            bind_info.len() as u32,
            bind_info.as_ptr(),
        )
        .result()
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdBuildAccelerationStructureNV.html>
    #[inline]
    pub unsafe fn cmd_build_acceleration_structure(
        &self,
        command_buffer: vk::CommandBuffer,
        info: &vk::AccelerationStructureInfoNV,
        instance_data: vk::Buffer,
        instance_offset: vk::DeviceSize,
        update: bool,
        dst: vk::AccelerationStructureNV,
        src: vk::AccelerationStructureNV,
        scratch: vk::Buffer,
        scratch_offset: vk::DeviceSize,
    ) {
        (self.fp.cmd_build_acceleration_structure_nv)(
            command_buffer,
            info,
            instance_data,
            instance_offset,
            // Translate the Rust bool into the VkBool32 the C ABI expects.
            if update { vk::TRUE } else { vk::FALSE },
            dst,
            src,
            scratch,
            scratch_offset,
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdCopyAccelerationStructureNV.html>
    #[inline]
    pub unsafe fn cmd_copy_acceleration_structure(
        &self,
        command_buffer: vk::CommandBuffer,
        dst: vk::AccelerationStructureNV,
        src: vk::AccelerationStructureNV,
        mode: vk::CopyAccelerationStructureModeNV,
    ) {
        (self.fp.cmd_copy_acceleration_structure_nv)(command_buffer, dst, src, mode);
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdTraceRaysNV.html>
    #[inline]
    pub unsafe fn cmd_trace_rays(
        &self,
        command_buffer: vk::CommandBuffer,
        raygen_shader_binding_table_buffer: vk::Buffer,
        raygen_shader_binding_offset: vk::DeviceSize,
        miss_shader_binding_table_buffer: vk::Buffer,
        miss_shader_binding_offset: vk::DeviceSize,
        miss_shader_binding_stride: vk::DeviceSize,
        hit_shader_binding_table_buffer: vk::Buffer,
        hit_shader_binding_offset: vk::DeviceSize,
        hit_shader_binding_stride: vk::DeviceSize,
        callable_shader_binding_table_buffer: vk::Buffer,
        callable_shader_binding_offset: vk::DeviceSize,
        callable_shader_binding_stride: vk::DeviceSize,
        width: u32,
        height: u32,
        depth: u32,
    ) {
        (self.fp.cmd_trace_rays_nv)(
            command_buffer,
            raygen_shader_binding_table_buffer,
            raygen_shader_binding_offset,
            miss_shader_binding_table_buffer,
            miss_shader_binding_offset,
            miss_shader_binding_stride,
            hit_shader_binding_table_buffer,
            hit_shader_binding_offset,
            hit_shader_binding_stride,
            callable_shader_binding_table_buffer,
            callable_shader_binding_offset,
            callable_shader_binding_stride,
            width,
            height,
            depth,
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateRayTracingPipelinesNV.html>
    #[inline]
    pub unsafe fn create_ray_tracing_pipelines(
        &self,
        pipeline_cache: vk::PipelineCache,
        create_info: &[vk::RayTracingPipelineCreateInfoNV],
        allocation_callbacks: Option<&vk::AllocationCallbacks>,
    ) -> VkResult<Vec<vk::Pipeline>> {
        // One output pipeline handle per create-info element.
        let mut pipelines = vec![mem::zeroed(); create_info.len()];
        (self.fp.create_ray_tracing_pipelines_nv)(
            self.handle,
            pipeline_cache,
            create_info.len() as u32,
            create_info.as_ptr(),
            allocation_callbacks.as_raw_ptr(),
            pipelines.as_mut_ptr(),
        )
        .result_with_success(pipelines)
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetRayTracingShaderGroupHandlesNV.html>
    #[inline]
    pub unsafe fn get_ray_tracing_shader_group_handles(
        &self,
        pipeline: vk::Pipeline,
        first_group: u32,
        group_count: u32,
        data: &mut [u8],
    ) -> VkResult<()> {
        (self.fp.get_ray_tracing_shader_group_handles_nv)(
            self.handle,
            pipeline,
            first_group,
            group_count,
            data.len(),
            data.as_mut_ptr().cast(),
        )
        .result()
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetAccelerationStructureHandleNV.html>
    #[inline]
    pub unsafe fn get_acceleration_structure_handle(
        &self,
        accel_struct: vk::AccelerationStructureNV,
    ) -> VkResult<u64> {
        // The driver writes the opaque 8-byte handle through this pointer.
        let mut handle: u64 = 0;
        let handle_ptr: *mut u64 = &mut handle;
        (self.fp.get_acceleration_structure_handle_nv)(
            self.handle,
            accel_struct,
            std::mem::size_of::<u64>(),
            handle_ptr.cast(),
        )
        .result_with_success(handle)
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCmdWriteAccelerationStructuresPropertiesNV.html>
    #[inline]
    pub unsafe fn cmd_write_acceleration_structures_properties(
        &self,
        command_buffer: vk::CommandBuffer,
        structures: &[vk::AccelerationStructureNV],
        query_type: vk::QueryType,
        query_pool: vk::QueryPool,
        first_query: u32,
    ) {
        (self.fp.cmd_write_acceleration_structures_properties_nv)(
            command_buffer,
            structures.len() as u32,
            structures.as_ptr(),
            query_type,
            query_pool,
            first_query,
        );
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCompileDeferredNV.html>
    #[inline]
    pub unsafe fn compile_deferred(&self, pipeline: vk::Pipeline, shader: u32) -> VkResult<()> {
        (self.fp.compile_deferred_nv)(self.handle, pipeline, shader).result()
    }

    /// The extension's name string, for use when creating the device.
    #[inline]
    pub const fn name() -> &'static CStr {
        vk::NvRayTracingFn::name()
    }

    /// Raw access to the loaded function pointers.
    #[inline]
    pub fn fp(&self) -> &vk::NvRayTracingFn {
        &self.fp
    }

    /// The [`vk::Device`] these function pointers were loaded against.
    #[inline]
    pub fn device(&self) -> vk::Device {
        self.handle
    }
}
diff --git a/third_party/rust/ash/src/instance.rs b/third_party/rust/ash/src/instance.rs
new file mode 100644
index 0000000000..8e3bd1a8b9
--- /dev/null
+++ b/third_party/rust/ash/src/instance.rs
@@ -0,0 +1,535 @@
+use crate::device::Device;
+use crate::prelude::*;
+use crate::vk;
+use crate::RawPtr;
+use std::mem;
+use std::os::raw::c_char;
+use std::ptr;
+
+/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkInstance.html>
+#[derive(Clone)]
+pub struct Instance {
+ pub(crate) handle: vk::Instance,
+
+ pub(crate) instance_fn_1_0: vk::InstanceFnV1_0,
+ pub(crate) instance_fn_1_1: vk::InstanceFnV1_1,
+ pub(crate) instance_fn_1_2: vk::InstanceFnV1_2,
+ pub(crate) instance_fn_1_3: vk::InstanceFnV1_3,
+}
+
+impl Instance {
+ pub unsafe fn load(static_fn: &vk::StaticFn, instance: vk::Instance) -> Self {
+ let load_fn = |name: &std::ffi::CStr| {
+ mem::transmute((static_fn.get_instance_proc_addr)(instance, name.as_ptr()))
+ };
+
+ Self {
+ handle: instance,
+
+ instance_fn_1_0: vk::InstanceFnV1_0::load(load_fn),
+ instance_fn_1_1: vk::InstanceFnV1_1::load(load_fn),
+ instance_fn_1_2: vk::InstanceFnV1_2::load(load_fn),
+ instance_fn_1_3: vk::InstanceFnV1_3::load(load_fn),
+ }
+ }
+
+ #[inline]
+ pub fn handle(&self) -> vk::Instance {
+ self.handle
+ }
+}
+
/// Vulkan core 1.3
#[allow(non_camel_case_types)]
impl Instance {
    /// Raw access to the loaded Vulkan 1.3 instance-level function pointers.
    #[inline]
    pub fn fp_v1_3(&self) -> &vk::InstanceFnV1_3 {
        &self.instance_fn_1_3
    }

    /// Retrieve the number of elements to pass to [`get_physical_device_tool_properties()`][Self::get_physical_device_tool_properties()]
    #[inline]
    pub unsafe fn get_physical_device_tool_properties_len(
        &self,
        physical_device: vk::PhysicalDevice,
    ) -> VkResult<usize> {
        let mut count = 0;
        // First half of the two-call idiom: a null output pointer makes the
        // driver write only the element count.
        (self.instance_fn_1_3.get_physical_device_tool_properties)(
            physical_device,
            &mut count,
            ptr::null_mut(),
        )
        .result_with_success(count as usize)
    }

    /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceToolProperties.html>
    ///
    /// Call [`get_physical_device_tool_properties_len()`][Self::get_physical_device_tool_properties_len()] to query the number of elements to pass to `out`.
    /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
    #[inline]
    pub unsafe fn get_physical_device_tool_properties(
        &self,
        physical_device: vk::PhysicalDevice,
        out: &mut [vk::PhysicalDeviceToolProperties],
    ) -> VkResult<()> {
        let mut count = out.len() as u32;
        (self.instance_fn_1_3.get_physical_device_tool_properties)(
            physical_device,
            &mut count,
            out.as_mut_ptr(),
        )
        .result()?;
        // Require the driver to have filled the caller's slice exactly.
        assert_eq!(count as usize, out.len());
        Ok(())
    }
}
+
/// Vulkan core 1.2
#[allow(non_camel_case_types)]
impl Instance {
    /// Raw access to the loaded Vulkan 1.2 instance-level function pointers.
    #[inline]
    pub fn fp_v1_2(&self) -> &vk::InstanceFnV1_2 {
        &self.instance_fn_1_2
    }
}
+
+/// Vulkan core 1.1
+#[allow(non_camel_case_types)]
+impl Instance {
+ #[inline]
+ pub fn fp_v1_1(&self) -> &vk::InstanceFnV1_1 {
+ &self.instance_fn_1_1
+ }
+
+ /// Retrieve the number of elements to pass to [`enumerate_physical_device_groups()`][Self::enumerate_physical_device_groups()]
+ #[inline]
+ pub unsafe fn enumerate_physical_device_groups_len(&self) -> VkResult<usize> {
+ let mut group_count = 0;
+ (self.instance_fn_1_1.enumerate_physical_device_groups)(
+ self.handle(),
+ &mut group_count,
+ ptr::null_mut(),
+ )
+ .result_with_success(group_count as usize)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkEnumeratePhysicalDeviceGroups.html>
+ ///
+ /// Call [`enumerate_physical_device_groups_len()`][Self::enumerate_physical_device_groups_len()] to query the number of elements to pass to `out`.
+ /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
+ #[inline]
+ pub unsafe fn enumerate_physical_device_groups(
+ &self,
+ out: &mut [vk::PhysicalDeviceGroupProperties],
+ ) -> VkResult<()> {
+ let mut count = out.len() as u32;
+ (self.instance_fn_1_1.enumerate_physical_device_groups)(
+ self.handle(),
+ &mut count,
+ out.as_mut_ptr(),
+ )
+ .result()?;
+ assert_eq!(count as usize, out.len());
+ Ok(())
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceFeatures2.html>
+ #[inline]
+ pub unsafe fn get_physical_device_features2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ features: &mut vk::PhysicalDeviceFeatures2,
+ ) {
+ (self.instance_fn_1_1.get_physical_device_features2)(physical_device, features);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceProperties2.html>
+ #[inline]
+ pub unsafe fn get_physical_device_properties2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ prop: &mut vk::PhysicalDeviceProperties2,
+ ) {
+ (self.instance_fn_1_1.get_physical_device_properties2)(physical_device, prop);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceFormatProperties2.html>
+ #[inline]
+ pub unsafe fn get_physical_device_format_properties2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ format: vk::Format,
+ out: &mut vk::FormatProperties2,
+ ) {
+ (self.instance_fn_1_1.get_physical_device_format_properties2)(physical_device, format, out);
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceImageFormatProperties2.html>
+ #[inline]
+ pub unsafe fn get_physical_device_image_format_properties2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ format_info: &vk::PhysicalDeviceImageFormatInfo2,
+ image_format_prop: &mut vk::ImageFormatProperties2,
+ ) -> VkResult<()> {
+ (self
+ .instance_fn_1_1
+ .get_physical_device_image_format_properties2)(
+ physical_device,
+ format_info,
+ image_format_prop,
+ )
+ .result()
+ }
+
+ /// Retrieve the number of elements to pass to [`get_physical_device_queue_family_properties2()`][Self::get_physical_device_queue_family_properties2()]
+ #[inline]
+ pub unsafe fn get_physical_device_queue_family_properties2_len(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ ) -> usize {
+ let mut queue_count = 0;
+ (self
+ .instance_fn_1_1
+ .get_physical_device_queue_family_properties2)(
+ physical_device,
+ &mut queue_count,
+ ptr::null_mut(),
+ );
+ queue_count as usize
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceQueueFamilyProperties2.html>
+ ///
+ /// Call [`get_physical_device_queue_family_properties2_len()`][Self::get_physical_device_queue_family_properties2_len()] to query the number of elements to pass to `out`.
+ /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
+ #[inline]
+ pub unsafe fn get_physical_device_queue_family_properties2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ out: &mut [vk::QueueFamilyProperties2],
+ ) {
+ let mut count = out.len() as u32;
+ (self
+ .instance_fn_1_1
+ .get_physical_device_queue_family_properties2)(
+ physical_device,
+ &mut count,
+ out.as_mut_ptr(),
+ );
+ assert_eq!(count as usize, out.len());
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceMemoryProperties2.html>
+ #[inline]
+ pub unsafe fn get_physical_device_memory_properties2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ out: &mut vk::PhysicalDeviceMemoryProperties2,
+ ) {
+ (self.instance_fn_1_1.get_physical_device_memory_properties2)(physical_device, out);
+ }
+
+ /// Retrieve the number of elements to pass to [`get_physical_device_sparse_image_format_properties2()`][Self::get_physical_device_sparse_image_format_properties2()]
+ #[inline]
+ pub unsafe fn get_physical_device_sparse_image_format_properties2_len(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ format_info: &vk::PhysicalDeviceSparseImageFormatInfo2,
+ ) -> usize {
+ let mut format_count = 0;
+ (self
+ .instance_fn_1_1
+ .get_physical_device_sparse_image_format_properties2)(
+ physical_device,
+ format_info,
+ &mut format_count,
+ ptr::null_mut(),
+ );
+ format_count as usize
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceSparseImageFormatProperties2.html>
+ ///
+ /// Call [`get_physical_device_sparse_image_format_properties2_len()`][Self::get_physical_device_sparse_image_format_properties2_len()] to query the number of elements to pass to `out`.
+ /// Be sure to [`Default::default()`]-initialize these elements and optionally set their `p_next` pointer.
+ #[inline]
+ pub unsafe fn get_physical_device_sparse_image_format_properties2(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ format_info: &vk::PhysicalDeviceSparseImageFormatInfo2,
+ out: &mut [vk::SparseImageFormatProperties2],
+ ) {
+ let mut count = out.len() as u32;
+ (self
+ .instance_fn_1_1
+ .get_physical_device_sparse_image_format_properties2)(
+ physical_device,
+ format_info,
+ &mut count,
+ out.as_mut_ptr(),
+ );
+ // The driver writes back how many elements it filled; `out` must have
+ // been sized exactly via the `_len` query above.
+ assert_eq!(count as usize, out.len());
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceExternalBufferProperties.html>
+ ///
+ /// Writes the external-memory capabilities for the given buffer creation
+ /// parameters into the caller-initialized `out`.
+ #[inline]
+ pub unsafe fn get_physical_device_external_buffer_properties(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ external_buffer_info: &vk::PhysicalDeviceExternalBufferInfo,
+ out: &mut vk::ExternalBufferProperties,
+ ) {
+ (self
+ .instance_fn_1_1
+ .get_physical_device_external_buffer_properties)(
+ physical_device,
+ external_buffer_info,
+ out,
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceExternalFenceProperties.html>
+ ///
+ /// Writes the external-fence capabilities for the given fence parameters
+ /// into the caller-initialized `out`.
+ #[inline]
+ pub unsafe fn get_physical_device_external_fence_properties(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ external_fence_info: &vk::PhysicalDeviceExternalFenceInfo,
+ out: &mut vk::ExternalFenceProperties,
+ ) {
+ (self
+ .instance_fn_1_1
+ .get_physical_device_external_fence_properties)(
+ physical_device,
+ external_fence_info,
+ out,
+ );
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceExternalSemaphoreProperties.html>
+ ///
+ /// Writes the external-semaphore capabilities for the given semaphore
+ /// parameters into the caller-initialized `out`.
+ #[inline]
+ pub unsafe fn get_physical_device_external_semaphore_properties(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ external_semaphore_info: &vk::PhysicalDeviceExternalSemaphoreInfo,
+ out: &mut vk::ExternalSemaphoreProperties,
+ ) {
+ (self
+ .instance_fn_1_1
+ .get_physical_device_external_semaphore_properties)(
+ physical_device,
+ external_semaphore_info,
+ out,
+ );
+ }
+}
+
+/// Vulkan core 1.0
+#[allow(non_camel_case_types)]
+impl Instance {
+ /// Returns the raw function-pointer table for Vulkan 1.0 instance commands.
+ #[inline]
+ pub fn fp_v1_0(&self) -> &vk::InstanceFnV1_0 {
+ &self.instance_fn_1_0
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkCreateDevice.html>
+ ///
+ /// # Safety
+ /// In order for the created [`Device`] to be valid for the duration of its
+ /// usage, the [`Instance`] this was called on must be dropped later than the
+ /// resulting [`Device`].
+ #[inline]
+ pub unsafe fn create_device(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ create_info: &vk::DeviceCreateInfo,
+ allocation_callbacks: Option<&vk::AllocationCallbacks>,
+ ) -> VkResult<Device> {
+ // The zeroed handle is overwritten by the driver on success.
+ let mut device = mem::zeroed();
+ (self.instance_fn_1_0.create_device)(
+ physical_device,
+ create_info,
+ allocation_callbacks.as_raw_ptr(),
+ &mut device,
+ )
+ .result()?;
+ // Load the device-level function table for the new handle.
+ Ok(Device::load(&self.instance_fn_1_0, device))
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetDeviceProcAddr.html>
+ ///
+ /// Resolves a device-level command by name; may return a null function
+ /// pointer if the command is not available.
+ #[inline]
+ pub unsafe fn get_device_proc_addr(
+ &self,
+ device: vk::Device,
+ p_name: *const c_char,
+ ) -> vk::PFN_vkVoidFunction {
+ (self.instance_fn_1_0.get_device_proc_addr)(device, p_name)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkDestroyInstance.html>
+ ///
+ /// Destroys the underlying `VkInstance`; this wrapper must not be used
+ /// afterwards.
+ #[inline]
+ pub unsafe fn destroy_instance(&self, allocation_callbacks: Option<&vk::AllocationCallbacks>) {
+ (self.instance_fn_1_0.destroy_instance)(self.handle(), allocation_callbacks.as_raw_ptr());
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceFormatProperties.html>
+ ///
+ /// Returns the capabilities of `format` on the given physical device.
+ #[inline]
+ pub unsafe fn get_physical_device_format_properties(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ format: vk::Format,
+ ) -> vk::FormatProperties {
+ let mut format_prop = mem::zeroed();
+ (self.instance_fn_1_0.get_physical_device_format_properties)(
+ physical_device,
+ format,
+ &mut format_prop,
+ );
+ format_prop
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceImageFormatProperties.html>
+ ///
+ /// Returns the limits for images created with the given combination of
+ /// format, type, tiling, usage and creation flags, or an error if the
+ /// combination is unsupported.
+ #[inline]
+ pub unsafe fn get_physical_device_image_format_properties(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ format: vk::Format,
+ typ: vk::ImageType,
+ tiling: vk::ImageTiling,
+ usage: vk::ImageUsageFlags,
+ flags: vk::ImageCreateFlags,
+ ) -> VkResult<vk::ImageFormatProperties> {
+ let mut image_format_prop = mem::zeroed();
+ (self
+ .instance_fn_1_0
+ .get_physical_device_image_format_properties)(
+ physical_device,
+ format,
+ typ,
+ tiling,
+ usage,
+ flags,
+ &mut image_format_prop,
+ )
+ .result_with_success(image_format_prop)
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceMemoryProperties.html>
+ ///
+ /// Returns the memory heaps and memory types of the physical device.
+ #[inline]
+ pub unsafe fn get_physical_device_memory_properties(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ ) -> vk::PhysicalDeviceMemoryProperties {
+ let mut memory_prop = mem::zeroed();
+ (self.instance_fn_1_0.get_physical_device_memory_properties)(
+ physical_device,
+ &mut memory_prop,
+ );
+ memory_prop
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceProperties.html>
+ ///
+ /// Returns the general properties (limits, name, IDs) of the device.
+ #[inline]
+ pub unsafe fn get_physical_device_properties(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ ) -> vk::PhysicalDeviceProperties {
+ let mut prop = mem::zeroed();
+ (self.instance_fn_1_0.get_physical_device_properties)(physical_device, &mut prop);
+ prop
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceQueueFamilyProperties.html>
+ ///
+ /// Returns the properties of every queue family, using the helper's
+ /// count-then-fill protocol.
+ #[inline]
+ pub unsafe fn get_physical_device_queue_family_properties(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ ) -> Vec<vk::QueueFamilyProperties> {
+ read_into_uninitialized_vector(|count, data| {
+ (self
+ .instance_fn_1_0
+ .get_physical_device_queue_family_properties)(
+ physical_device, count, data
+ );
+ vk::Result::SUCCESS
+ })
+ // The closure always returns SUCCESS
+ .unwrap()
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceFeatures.html>
+ ///
+ /// Returns the set of fine-grained features supported by the device.
+ #[inline]
+ pub unsafe fn get_physical_device_features(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ ) -> vk::PhysicalDeviceFeatures {
+ let mut prop = mem::zeroed();
+ (self.instance_fn_1_0.get_physical_device_features)(physical_device, &mut prop);
+ prop
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkEnumeratePhysicalDevices.html>
+ ///
+ /// Returns every physical device accessible through this instance.
+ #[inline]
+ pub unsafe fn enumerate_physical_devices(&self) -> VkResult<Vec<vk::PhysicalDevice>> {
+ read_into_uninitialized_vector(|count, data| {
+ (self.instance_fn_1_0.enumerate_physical_devices)(self.handle(), count, data)
+ })
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkEnumerateDeviceExtensionProperties.html>
+ ///
+ /// Lists device extensions; the null `pLayerName` argument requests
+ /// extensions provided by the driver rather than by a specific layer.
+ #[inline]
+ pub unsafe fn enumerate_device_extension_properties(
+ &self,
+ device: vk::PhysicalDevice,
+ ) -> VkResult<Vec<vk::ExtensionProperties>> {
+ read_into_uninitialized_vector(|count, data| {
+ (self.instance_fn_1_0.enumerate_device_extension_properties)(
+ device,
+ ptr::null(),
+ count,
+ data,
+ )
+ })
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkEnumerateDeviceLayerProperties.html>
+ ///
+ /// Lists the layers available for the given physical device.
+ #[inline]
+ pub unsafe fn enumerate_device_layer_properties(
+ &self,
+ device: vk::PhysicalDevice,
+ ) -> VkResult<Vec<vk::LayerProperties>> {
+ read_into_uninitialized_vector(|count, data| {
+ (self.instance_fn_1_0.enumerate_device_layer_properties)(device, count, data)
+ })
+ }
+
+ /// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkGetPhysicalDeviceSparseImageFormatProperties.html>
+ ///
+ /// Returns the sparse-image properties for the given image parameters,
+ /// using the helper's count-then-fill protocol.
+ #[inline]
+ pub unsafe fn get_physical_device_sparse_image_format_properties(
+ &self,
+ physical_device: vk::PhysicalDevice,
+ format: vk::Format,
+ typ: vk::ImageType,
+ samples: vk::SampleCountFlags,
+ usage: vk::ImageUsageFlags,
+ tiling: vk::ImageTiling,
+ ) -> Vec<vk::SparseImageFormatProperties> {
+ read_into_uninitialized_vector(|count, data| {
+ (self
+ .instance_fn_1_0
+ .get_physical_device_sparse_image_format_properties)(
+ physical_device,
+ format,
+ typ,
+ samples,
+ usage,
+ tiling,
+ count,
+ data,
+ );
+ vk::Result::SUCCESS
+ })
+ // The closure always returns SUCCESS
+ .unwrap()
+ }
+}
diff --git a/third_party/rust/ash/src/lib.rs b/third_party/rust/ash/src/lib.rs
new file mode 100644
index 0000000000..c900fc6c4d
--- /dev/null
+++ b/third_party/rust/ash/src/lib.rs
@@ -0,0 +1,197 @@
+#![deny(clippy::use_self)]
+#![warn(trivial_casts, trivial_numeric_casts)]
+#![allow(
+ clippy::too_many_arguments,
+ clippy::missing_safety_doc,
+ clippy::upper_case_acronyms
+)]
+#![cfg_attr(docsrs, feature(doc_cfg))]
+//! # Vulkan API
+//!
+//! <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/index.html>
+//!
+//! ## Examples
+//!
+//! ```no_run
+//! use ash::{vk, Entry};
+//! # fn main() -> Result<(), Box<dyn std::error::Error>> {
+//! let entry = Entry::linked();
+//! let app_info = vk::ApplicationInfo {
+//! api_version: vk::make_api_version(0, 1, 0, 0),
+//! ..Default::default()
+//! };
+//! let create_info = vk::InstanceCreateInfo {
+//! p_application_info: &app_info,
+//! ..Default::default()
+//! };
+//! let instance = unsafe { entry.create_instance(&create_info, None)? };
+//! # Ok(()) }
+//! ```
+//!
+//! ## Getting started
+//!
+//! Load the Vulkan library linked at compile time using [`Entry::linked()`], or load it at runtime
+//! using [`Entry::load()`], which uses `libloading`. If you want to perform entry point loading
+//! yourself, call [`Entry::from_static_fn()`].
+//!
+//! ## Crate features
+//!
+//! * **debug** (default): Whether Vulkan structs should implement `Debug`.
+//! * **loaded** (default): Support searching for the Vulkan loader manually at runtime.
+//! * **linked**: Link the Vulkan loader at compile time.
+
+pub use crate::device::Device;
+pub use crate::entry::Entry;
+#[cfg(feature = "loaded")]
+pub use crate::entry::LoadingError;
+pub use crate::instance::Instance;
+
+mod device;
+mod entry;
+mod instance;
+pub mod prelude;
+pub mod util;
+/// Raw Vulkan bindings and types, generated from `vk.xml`
+#[macro_use]
+pub mod vk;
+
+// macros of vk need to be defined beforehand
+/// Wrappers for Vulkan extensions
+pub mod extensions;
+
+/// Conversion of an optional reference into a raw C pointer for FFI calls.
+pub trait RawPtr<T> {
+ fn as_raw_ptr(&self) -> *const T;
+}
+
+impl<'r, T> RawPtr<T> for Option<&'r T> {
+ /// Returns the referenced value as a pointer, or null for `None`.
+ fn as_raw_ptr(&self) -> *const T {
+ match *self {
+ Some(inner) => inner,
+ _ => ::std::ptr::null(),
+ }
+ }
+}
+
+/// Given a mutable raw pointer to a type with an `s_type` member such as [`vk::BaseOutStructure`],
+/// match on a set of Vulkan structures. The struct will be rebound to the given variable of the
+/// type of the given Vulkan structure.
+///
+/// Note that all match bodies have to be enclosed by curly braces due to macro parsing limitations.
+/// It is unfortunately not possible to write `x @ ash::vk::SomeStruct => one_line_expression(),`.
+///
+/// ```
+/// let mut info = ash::vk::DeviceCreateInfo::default();
+/// let info: *mut ash::vk::BaseOutStructure = <*mut _>::cast(&mut info);
+/// unsafe {
+/// ash::match_out_struct!(match info {
+/// info @ ash::vk::DeviceQueueCreateInfo => {
+/// dbg!(&info); // Unreachable
+/// }
+/// info @ ash::vk::DeviceCreateInfo => {
+/// dbg!(&info);
+/// }
+/// })
+/// }
+/// ```
+///
+/// In addition this macro propagates implicit return values just like normal `match` blocks, as
+/// long as a default value or expression is provided in the "any" match arm
+/// (`_ => { some_value() }`). For the time being said arm must be wrapped in curly braces; an
+/// expression like `_ => None` is not yet supported.
+///
+/// ```
+/// # let mut info = ash::vk::DeviceCreateInfo::default();
+/// # let info: *mut ash::vk::BaseOutStructure = <*mut _>::cast(&mut info);
+/// let device_create_flags: Option<ash::vk::DeviceCreateFlags> = unsafe {
+/// ash::match_out_struct!(match info {
+/// info @ ash::vk::DeviceQueueCreateInfo => {
+/// dbg!(&info); // Unreachable
+/// Some(ash::vk::DeviceCreateFlags::empty())
+/// }
+/// info @ ash::vk::DeviceCreateInfo => {
+/// dbg!(&info);
+/// Some(info.flags)
+/// }
+/// _ => {
+/// None
+/// }
+/// })
+/// };
+/// ```
+#[macro_export]
+macro_rules! match_out_struct {
+ (match $p:ident { $($bind:ident @ $ty:path => $body:block $(,)?)+ $(_ => $any:block $(,)?)? }) => {
+ // Dispatch on the struct's `s_type` tag; the field is read through
+ // `addr_of!` so no intermediate reference to the whole struct is made.
+ match std::ptr::addr_of!((*$p).s_type).read() {
+ $(<$ty as $crate::vk::TaggedStructure>::STRUCTURE_TYPE => {
+ // Reinterpret the pointer as the matched type, mutably.
+ let $bind = $p
+ .cast::<$ty>()
+ .as_mut()
+ .unwrap();
+ $body
+ }),+
+ // Fall through to the optional `_ => { ... }` arm (unit otherwise).
+ _ => { $($any)? }
+ }
+ };
+}
+
+/// Given an immutable raw pointer to a type with an `s_type` member such as [`vk::BaseInStructure`],
+/// match on a set of Vulkan structures. The struct will be rebound to the given variable of the
+/// type of the given Vulkan structure.
+///
+/// Note that all match bodies have to be enclosed by curly braces due to macro parsing limitations.
+/// It is unfortunately not possible to write `x @ ash::vk::SomeStruct => one_line_expression(),`.
+///
+/// ```
+/// let info = ash::vk::DeviceCreateInfo::default();
+/// let info: *const ash::vk::BaseInStructure = <*const _>::cast(&info);
+/// unsafe {
+/// ash::match_in_struct!(match info {
+/// info @ ash::vk::DeviceQueueCreateInfo => {
+/// dbg!(&info); // Unreachable
+/// }
+/// info @ ash::vk::DeviceCreateInfo => {
+/// dbg!(&info);
+/// }
+/// })
+/// }
+/// ```
+///
+/// See the [`match_out_struct!`] documentation for an example with implicit return values.
+#[macro_export]
+macro_rules! match_in_struct {
+ (match $p:ident { $($bind:ident @ $ty:path => $body:block $(,)?)+ $(_ => $any:block $(,)?)? }) => {
+ // Dispatch on the struct's `s_type` tag; the field is read through
+ // `addr_of!` so no intermediate reference to the whole struct is made.
+ match std::ptr::addr_of!((*$p).s_type).read() {
+ $(<$ty as $crate::vk::TaggedStructure>::STRUCTURE_TYPE => {
+ // Reinterpret the pointer as the matched type, immutably.
+ let $bind = $p
+ .cast::<$ty>()
+ .as_ref()
+ .unwrap();
+ $body
+ }),+
+ // Fall through to the optional `_ => { ... }` arm (unit otherwise).
+ _ => { $($any)? }
+ }
+ };
+}
+
+#[cfg(test)]
+mod tests {
+ use super::vk;
+ // Verifies that `push_next` wires up the `p_next` chain and that
+ // `ptr_chain_iter` walks it in the expected order.
+ #[test]
+ fn test_ptr_chains() {
+ let mut variable_pointers = vk::PhysicalDeviceVariablePointerFeatures::builder();
+ let mut corner = vk::PhysicalDeviceCornerSampledImageFeaturesNV::builder();
+ // Expected order: the struct pushed last (`variable_pointers`) comes
+ // first, i.e. `push_next` prepends to the chain.
+ let chain = vec![
+ <*mut _>::cast(&mut variable_pointers),
+ <*mut _>::cast(&mut corner),
+ ];
+ let mut device_create_info = vk::DeviceCreateInfo::builder()
+ .push_next(&mut corner)
+ .push_next(&mut variable_pointers);
+ // `skip(1)` drops `device_create_info` itself from the iteration.
+ let chain2: Vec<*mut vk::BaseOutStructure> = unsafe {
+ vk::ptr_chain_iter(&mut device_create_info)
+ .skip(1)
+ .collect()
+ };
+ assert_eq!(chain, chain2);
+ }
+}
diff --git a/third_party/rust/ash/src/prelude.rs b/third_party/rust/ash/src/prelude.rs
new file mode 100644
index 0000000000..321c3aaddd
--- /dev/null
+++ b/third_party/rust/ash/src/prelude.rs
@@ -0,0 +1,122 @@
+use std::convert::TryInto;
+#[cfg(feature = "debug")]
+use std::fmt;
+use std::mem;
+
+use crate::vk;
+pub type VkResult<T> = Result<T, vk::Result>;
+
+impl vk::Result {
+ /// Converts `SUCCESS` into `Ok(())` and any other code into `Err(self)`.
+ #[inline]
+ pub fn result(self) -> VkResult<()> {
+ self.result_with_success(())
+ }
+
+ /// Converts `SUCCESS` into `Ok(v)` and any other code into `Err(self)`.
+ #[inline]
+ pub fn result_with_success<T>(self, v: T) -> VkResult<T> {
+ match self {
+ Self::SUCCESS => Ok(v),
+ _ => Err(self),
+ }
+ }
+
+ /// On `SUCCESS`, assumes `v` was initialized and returns it; otherwise
+ /// returns `Err(self)`.
+ ///
+ /// # Safety
+ /// `v` must actually have been initialized whenever `self` is `SUCCESS`.
+ #[inline]
+ pub unsafe fn assume_init_on_success<T>(self, v: mem::MaybeUninit<T>) -> VkResult<T> {
+ self.result().map(move |()| v.assume_init())
+ }
+}
+
+/// Repeatedly calls `f` until it does not return [`vk::Result::INCOMPLETE`] anymore,
+/// ensuring all available data has been read into the vector.
+///
+/// See for example [`vkEnumerateInstanceExtensionProperties`]: the number of available
+/// items may change between calls; [`vk::Result::INCOMPLETE`] is returned when the count
+/// increased (and the vector is not large enough after querying the initial size),
+/// requiring Ash to try again.
+///
+/// [`vkEnumerateInstanceExtensionProperties`]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkEnumerateInstanceExtensionProperties.html
+pub(crate) unsafe fn read_into_uninitialized_vector<N: Copy + Default + TryInto<usize>, T>(
+ f: impl Fn(&mut N, *mut T) -> vk::Result,
+) -> VkResult<Vec<T>>
+where
+ <N as TryInto<usize>>::Error: std::fmt::Debug,
+{
+ loop {
+ // First call: query only the element count (null data pointer).
+ let mut count = N::default();
+ f(&mut count, std::ptr::null_mut()).result()?;
+ let mut data =
+ Vec::with_capacity(count.try_into().expect("`N` failed to convert to `usize`"));
+
+ // Second call: fill the freshly reserved (uninitialized) buffer.
+ let err_code = f(&mut count, data.as_mut_ptr());
+ if err_code != vk::Result::INCOMPLETE {
+ err_code.result()?;
+ // SAFETY: the call above wrote `count` elements into the buffer.
+ data.set_len(count.try_into().expect("`N` failed to convert to `usize`"));
+ break Ok(data);
+ }
+ // INCOMPLETE: the count grew between the two calls — retry.
+ }
+}
+
+/// Repeatedly calls `f` until it does not return [`vk::Result::INCOMPLETE`] anymore,
+/// ensuring all available data has been read into the vector.
+///
+/// Items in the target vector are [`default()`][`Default::default()`]-initialized which
+/// is required for [`vk::BaseOutStructure`]-like structs where [`vk::BaseOutStructure::s_type`]
+/// needs to be a valid type and [`vk::BaseOutStructure::p_next`] a valid or
+/// [`null`][`std::ptr::null_mut()`] pointer.
+///
+/// See for example [`vkEnumerateInstanceExtensionProperties`]: the number of available
+/// items may change between calls; [`vk::Result::INCOMPLETE`] is returned when the count
+/// increased (and the vector is not large enough after querying the initial size),
+/// requiring Ash to try again.
+///
+/// [`vkEnumerateInstanceExtensionProperties`]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/vkEnumerateInstanceExtensionProperties.html
+pub(crate) unsafe fn read_into_defaulted_vector<
+ N: Copy + Default + TryInto<usize>,
+ T: Default + Clone,
+>(
+ f: impl Fn(&mut N, *mut T) -> vk::Result,
+) -> VkResult<Vec<T>>
+where
+ <N as TryInto<usize>>::Error: std::fmt::Debug,
+{
+ loop {
+ // First call: query only the element count (null data pointer).
+ let mut count = N::default();
+ f(&mut count, std::ptr::null_mut()).result()?;
+ // Default-initialize so `s_type`/`p_next` are valid before the fill.
+ let mut data =
+ vec![Default::default(); count.try_into().expect("`N` failed to convert to `usize`")];
+
+ // Second call: overwrite the defaulted elements in place.
+ let err_code = f(&mut count, data.as_mut_ptr());
+ if err_code != vk::Result::INCOMPLETE {
+ // NOTE(review): if `count` shrank between the two calls, `set_len`
+ // truncates without dropping the trailing elements — fine for the
+ // plain-data structs this is used with, but would leak if `T` ever
+ // owned heap memory; confirm at call sites.
+ data.set_len(count.try_into().expect("`N` failed to convert to `usize`"));
+ break err_code.result_with_success(data);
+ }
+ // INCOMPLETE: the count grew between the two calls — retry.
+ }
+}
+
+#[cfg(feature = "debug")]
+/// Formats a Vulkan bitmask as a ` | `-separated list of the names in
+/// `known`, printing any leftover unrecognized bits in binary.
+pub(crate) fn debug_flags<Value: Into<u64> + Copy>(
+ f: &mut fmt::Formatter,
+ known: &[(Value, &'static str)],
+ value: Value,
+) -> fmt::Result {
+ let mut first = true;
+ let mut accum = value.into();
+ for &(bit, name) in known {
+ // Skip zero-valued entries and bits not fully present in the value.
+ if bit != 0 && accum & bit == bit {
+ if !first {
+ f.write_str(" | ")?;
+ }
+ f.write_str(name)?;
+ first = false;
+ // Clear recognized bits so only unknown ones remain afterwards.
+ accum &= !bit;
+ }
+ }
+ if accum != 0 {
+ if !first {
+ f.write_str(" | ")?;
+ }
+ // Remaining bits have no known name; show them raw.
+ write!(f, "{:b}", accum)?;
+ }
+ Ok(())
+}
diff --git a/third_party/rust/ash/src/util.rs b/third_party/rust/ash/src/util.rs
new file mode 100644
index 0000000000..119fab9c65
--- /dev/null
+++ b/third_party/rust/ash/src/util.rs
@@ -0,0 +1,139 @@
+use crate::vk;
+use std::iter::Iterator;
+use std::marker::PhantomData;
+use std::mem::size_of;
+use std::os::raw::c_void;
+use std::{io, slice};
+
+/// [`Align`] handles dynamic alignment. The is useful for dynamic uniform buffers where
+/// the alignment might be different. For example a 4x4 f32 matrix has a size of 64 bytes
+/// but the min alignment for a dynamic uniform buffer might be 256 bytes. A slice of `&[Mat4x4<f32>]`
+/// has a memory layout of `[[64 bytes], [64 bytes], [64 bytes]]`, but it might need to have a memory
+/// layout of `[[256 bytes], [256 bytes], [256 bytes]]`.
+/// [`Align::copy_from_slice`] will copy a slice of `&[T]` directly into the host memory without
+/// an additional allocation and with the correct alignment.
+#[derive(Debug, Clone)]
+pub struct Align<T> {
+ /// Base pointer to the mapped host memory region.
+ ptr: *mut c_void,
+ /// Size of one element including alignment padding, in bytes.
+ elem_size: vk::DeviceSize,
+ /// Total size of the region, in bytes.
+ size: vk::DeviceSize,
+ /// Records the element type without storing a value of it.
+ _m: PhantomData<T>,
+}
+
+/// Mutable iterator over the padded element slots of an [`Align`] region.
+#[derive(Debug)]
+pub struct AlignIter<'a, T: 'a> {
+ align: &'a mut Align<T>,
+ /// Byte offset of the next slot to yield.
+ current: vk::DeviceSize,
+}
+
+impl<T: Copy> Align<T> {
+ /// Copies `slice` into the region, honoring the padded element stride.
+ pub fn copy_from_slice(&mut self, slice: &[T]) {
+ use std::slice::from_raw_parts_mut;
+ if self.elem_size == size_of::<T>() as u64 {
+ // No padding: the layout matches `&[T]`, so copy in one block.
+ unsafe {
+ let mapped_slice = from_raw_parts_mut(self.ptr.cast(), slice.len());
+ mapped_slice.copy_from_slice(slice);
+ }
+ } else {
+ // Padded layout: write each element at its aligned offset.
+ for (i, val) in self.iter_mut().enumerate().take(slice.len()) {
+ *val = slice[i];
+ }
+ }
+ }
+}
+
+/// Returns the number of padding bytes needed to round `adr` up to the next
+/// multiple of `align`.
+fn calc_padding(adr: vk::DeviceSize, align: vk::DeviceSize) -> vk::DeviceSize {
+ (align - adr % align) % align
+}
+
+impl<T> Align<T> {
+ /// Creates an [`Align`] over a memory region; the stored element stride is
+ /// `size_of::<T>()` rounded up to `alignment`.
+ ///
+ /// # Safety
+ /// `ptr` must be valid for writes of `size` bytes for as long as the
+ /// returned value is used.
+ ///
+ /// # Panics
+ /// Panics if `size` is not a multiple of `alignment`.
+ pub unsafe fn new(ptr: *mut c_void, alignment: vk::DeviceSize, size: vk::DeviceSize) -> Self {
+ let padding = calc_padding(size_of::<T>() as vk::DeviceSize, alignment);
+ let elem_size = size_of::<T>() as vk::DeviceSize + padding;
+ assert!(calc_padding(size, alignment) == 0, "size must be aligned");
+ Self {
+ ptr,
+ elem_size,
+ size,
+ _m: PhantomData,
+ }
+ }
+
+ /// Returns a mutable iterator over the padded element slots.
+ pub fn iter_mut(&mut self) -> AlignIter<T> {
+ AlignIter {
+ current: 0,
+ align: self,
+ }
+ }
+}
+
+impl<'a, T: Copy + 'a> Iterator for AlignIter<'a, T> {
+ type Item = &'a mut T;
+ fn next(&mut self) -> Option<Self::Item> {
+ // Stop once the whole region has been walked.
+ if self.current == self.align.size {
+ return None;
+ }
+ unsafe {
+ // Need to cast to *mut u8 because () has size 0
+ let ptr = (self.align.ptr.cast::<u8>())
+ .offset(self.current as isize)
+ .cast();
+ // Advance by the padded stride, not by `size_of::<T>()`.
+ self.current += self.align.elem_size;
+ Some(&mut *ptr)
+ }
+ }
+}
+
+/// Decode SPIR-V from bytes.
+///
+/// This function handles SPIR-V of arbitrary endianness gracefully, and returns correctly aligned
+/// storage.
+///
+/// # Examples
+/// ```no_run
+/// // Decode SPIR-V from a file
+/// let mut file = std::fs::File::open("/path/to/shader.spv").unwrap();
+/// let words = ash::util::read_spv(&mut file).unwrap();
+/// ```
+/// ```
+/// // Decode SPIR-V from memory
+/// const SPIRV: &[u8] = &[
+/// // ...
+/// # 0x03, 0x02, 0x23, 0x07,
+/// ];
+/// let words = ash::util::read_spv(&mut std::io::Cursor::new(&SPIRV[..])).unwrap();
+/// ```
+pub fn read_spv<R: io::Read + io::Seek>(x: &mut R) -> io::Result<Vec<u32>> {
+ // Determine the stream length by seeking to the end.
+ let size = x.seek(io::SeekFrom::End(0))?;
+ // SPIR-V is a stream of 32-bit words; anything else is malformed.
+ if size % 4 != 0 {
+ return Err(io::Error::new(
+ io::ErrorKind::InvalidData,
+ "input length not divisible by 4",
+ ));
+ }
+ // Guard the `usize` conversion below (relevant on 32-bit targets).
+ if size > usize::max_value() as u64 {
+ return Err(io::Error::new(io::ErrorKind::InvalidData, "input too long"));
+ }
+ let words = (size / 4) as usize;
+ // https://github.com/MaikKlein/ash/issues/354:
+ // Zero-initialize the result to prevent read_exact from possibly
+ // reading uninitialized memory.
+ let mut result = vec![0u32; words];
+ x.seek(io::SeekFrom::Start(0))?;
+ // Read the raw bytes directly into the `u32` buffer.
+ x.read_exact(unsafe {
+ slice::from_raw_parts_mut(result.as_mut_ptr().cast::<u8>(), words * 4)
+ })?;
+ const MAGIC_NUMBER: u32 = 0x0723_0203;
+ // A byte-swapped magic number means the module was written in the
+ // opposite endianness; swap every word to normalize it.
+ if !result.is_empty() && result[0] == MAGIC_NUMBER.swap_bytes() {
+ for word in &mut result {
+ *word = word.swap_bytes();
+ }
+ }
+ // After any swap, the first word must be the SPIR-V magic number.
+ if result.is_empty() || result[0] != MAGIC_NUMBER {
+ return Err(io::Error::new(
+ io::ErrorKind::InvalidData,
+ "input missing SPIR-V magic number",
+ ));
+ }
+ Ok(result)
+}
diff --git a/third_party/rust/ash/src/vk.rs b/third_party/rust/ash/src/vk.rs
new file mode 100644
index 0000000000..02a5e9c627
--- /dev/null
+++ b/third_party/rust/ash/src/vk.rs
@@ -0,0 +1,52 @@
+#![allow(
+ clippy::too_many_arguments,
+ clippy::cognitive_complexity,
+ clippy::wrong_self_convention
+)]
+#[macro_use]
+mod macros;
+pub use macros::*;
+mod aliases;
+pub use aliases::*;
+mod bitflags;
+pub use bitflags::*;
+#[cfg(feature = "debug")]
+mod const_debugs;
+mod constants;
+pub use constants::*;
+mod definitions;
+pub use definitions::*;
+mod enums;
+pub use enums::*;
+mod extensions;
+pub use extensions::*;
+mod feature_extensions;
+pub use feature_extensions::*;
+mod features;
+pub use features::*;
+mod prelude;
+pub use prelude::*;
+/// Native bindings from Vulkan headers, generated by bindgen
+#[allow(clippy::useless_transmute, nonstandard_style)]
+pub mod native;
+mod platform_types;
+pub use platform_types::*;
+/// Iterates through the pointer chain. Includes the item that is passed into the function.
+/// Stops at the last [`BaseOutStructure`] that has a null [`BaseOutStructure::p_next`] field.
+pub(crate) unsafe fn ptr_chain_iter<T>(ptr: &mut T) -> impl Iterator<Item = *mut BaseOutStructure> {
+ // Treat the root struct as a `BaseOutStructure` to reach its `p_next`.
+ let ptr = <*mut T>::cast::<BaseOutStructure>(ptr);
+ (0..).scan(ptr, |p_ptr, _| {
+ // A null pointer marks the end of the chain.
+ if p_ptr.is_null() {
+ return None;
+ }
+ // Yield the current struct and advance the cursor to its `p_next`.
+ let n_ptr = (**p_ptr).p_next;
+ let old = *p_ptr;
+ *p_ptr = n_ptr;
+ Some(old)
+ })
+}
+/// Conversion between a typed Vulkan handle and its raw `u64` representation.
+pub trait Handle {
+ /// The object-type tag corresponding to this handle type.
+ const TYPE: ObjectType;
+ /// Returns the raw numeric value of the handle.
+ fn as_raw(self) -> u64;
+ /// Reconstructs a handle from its raw numeric value.
+ fn from_raw(_: u64) -> Self;
+}
diff --git a/third_party/rust/ash/src/vk/aliases.rs b/third_party/rust/ash/src/vk/aliases.rs
new file mode 100644
index 0000000000..1e5d20fe23
--- /dev/null
+++ b/third_party/rust/ash/src/vk/aliases.rs
@@ -0,0 +1,251 @@
+use crate::vk::bitflags::*;
+use crate::vk::definitions::*;
+use crate::vk::enums::*;
+// Generated alias table: each vendor/extension-suffixed name (`*KHR`, `*EXT`,
+// `*NV`, `*ARM`, `*VALVE`, `*INTEL`, or an older unsuffixed spelling) is a
+// type alias for the equivalent type on the right-hand side, keeping code
+// written against the extension-era names compiling unchanged.
+pub type GeometryFlagsNV = GeometryFlagsKHR;
+pub type GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR;
+pub type BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR;
+pub type PrivateDataSlotCreateFlagsEXT = PrivateDataSlotCreateFlags;
+pub type DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags;
+pub type PipelineCreationFeedbackFlagsEXT = PipelineCreationFeedbackFlags;
+pub type SemaphoreWaitFlagsKHR = SemaphoreWaitFlags;
+pub type AccessFlags2KHR = AccessFlags2;
+pub type PipelineStageFlags2KHR = PipelineStageFlags2;
+pub type FormatFeatureFlags2KHR = FormatFeatureFlags2;
+pub type RenderingFlagsKHR = RenderingFlags;
+pub type PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags;
+pub type MemoryAllocateFlagsKHR = MemoryAllocateFlags;
+pub type CommandPoolTrimFlagsKHR = CommandPoolTrimFlags;
+pub type ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags;
+pub type ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags;
+pub type ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags;
+pub type ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags;
+pub type SemaphoreImportFlagsKHR = SemaphoreImportFlags;
+pub type ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags;
+pub type ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags;
+pub type FenceImportFlagsKHR = FenceImportFlags;
+pub type DescriptorBindingFlagsEXT = DescriptorBindingFlags;
+pub type ResolveModeFlagsKHR = ResolveModeFlags;
+pub type ToolPurposeFlagsEXT = ToolPurposeFlags;
+pub type SubmitFlagsKHR = SubmitFlags;
+// Handle and enum aliases.
+pub type DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
+pub type SamplerYcbcrConversionKHR = SamplerYcbcrConversion;
+pub type PrivateDataSlotEXT = PrivateDataSlot;
+pub type DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType;
+pub type PointClippingBehaviorKHR = PointClippingBehavior;
+pub type QueueGlobalPriorityEXT = QueueGlobalPriorityKHR;
+pub type SemaphoreTypeKHR = SemaphoreType;
+pub type CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR;
+pub type AccelerationStructureTypeNV = AccelerationStructureTypeKHR;
+pub type GeometryTypeNV = GeometryTypeKHR;
+pub type RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR;
+pub type TessellationDomainOriginKHR = TessellationDomainOrigin;
+pub type SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion;
+pub type SamplerYcbcrRangeKHR = SamplerYcbcrRange;
+pub type ChromaLocationKHR = ChromaLocation;
+pub type SamplerReductionModeEXT = SamplerReductionMode;
+pub type ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence;
+pub type DriverIdKHR = DriverId;
+// Struct aliases.
+pub type DevicePrivateDataCreateInfoEXT = DevicePrivateDataCreateInfo;
+pub type PrivateDataSlotCreateInfoEXT = PrivateDataSlotCreateInfo;
+pub type PhysicalDevicePrivateDataFeaturesEXT = PhysicalDevicePrivateDataFeatures;
+pub type PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
+pub type PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
+pub type FormatProperties2KHR = FormatProperties2;
+pub type ImageFormatProperties2KHR = ImageFormatProperties2;
+pub type PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
+pub type QueueFamilyProperties2KHR = QueueFamilyProperties2;
+pub type PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;
+pub type SparseImageFormatProperties2KHR = SparseImageFormatProperties2;
+pub type PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;
+pub type ConformanceVersionKHR = ConformanceVersion;
+pub type PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties;
+pub type PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
+pub type PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
+pub type PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures;
+pub type ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
+pub type PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
+pub type ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
+pub type PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
+pub type ExternalBufferPropertiesKHR = ExternalBufferProperties;
+pub type PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
+pub type ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
+pub type ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
+pub type ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
+pub type PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
+pub type ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
+pub type ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
+pub type PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;
+pub type ExternalFencePropertiesKHR = ExternalFenceProperties;
+pub type ExportFenceCreateInfoKHR = ExportFenceCreateInfo;
+pub type PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
+pub type PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
+pub type RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
+pub type PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
+pub type MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
+pub type BindBufferMemoryInfoKHR = BindBufferMemoryInfo;
+pub type BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;
+pub type BindImageMemoryInfoKHR = BindImageMemoryInfo;
+pub type BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;
+pub type DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
+pub type DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
+pub type DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
+pub type DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
+pub type DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
+pub type DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
+pub type DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
+pub type InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
+pub type RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
+pub type PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
+pub type PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR =
+    PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+pub type BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
+pub type DeviceBufferMemoryRequirementsKHR = DeviceBufferMemoryRequirements;
+pub type ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;
+pub type ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;
+pub type DeviceImageMemoryRequirementsKHR = DeviceImageMemoryRequirements;
+pub type MemoryRequirements2KHR = MemoryRequirements2;
+pub type SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;
+pub type PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
+pub type MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
+pub type MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
+pub type ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
+pub type PipelineTessellationDomainOriginStateCreateInfoKHR =
+    PipelineTessellationDomainOriginStateCreateInfo;
+pub type SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
+pub type SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
+pub type BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;
+pub type ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
+pub type PhysicalDeviceSamplerYcbcrConversionFeaturesKHR =
+    PhysicalDeviceSamplerYcbcrConversionFeatures;
+pub type SamplerYcbcrConversionImageFormatPropertiesKHR =
+    SamplerYcbcrConversionImageFormatProperties;
+pub type PhysicalDeviceSamplerFilterMinmaxPropertiesEXT =
+    PhysicalDeviceSamplerFilterMinmaxProperties;
+pub type SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo;
+pub type PhysicalDeviceInlineUniformBlockFeaturesEXT = PhysicalDeviceInlineUniformBlockFeatures;
+pub type PhysicalDeviceInlineUniformBlockPropertiesEXT = PhysicalDeviceInlineUniformBlockProperties;
+pub type WriteDescriptorSetInlineUniformBlockEXT = WriteDescriptorSetInlineUniformBlock;
+pub type DescriptorPoolInlineUniformBlockCreateInfoEXT = DescriptorPoolInlineUniformBlockCreateInfo;
+pub type ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo;
+pub type PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
+pub type PhysicalDeviceMaintenance4FeaturesKHR = PhysicalDeviceMaintenance4Features;
+pub type PhysicalDeviceMaintenance4PropertiesKHR = PhysicalDeviceMaintenance4Properties;
+pub type DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
+pub type PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;
+pub type PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
+pub type PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
+pub type PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties;
+pub type PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures;
+pub type DeviceQueueGlobalPriorityCreateInfoEXT = DeviceQueueGlobalPriorityCreateInfoKHR;
+pub type PhysicalDeviceGlobalPriorityQueryFeaturesEXT =
+    PhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+pub type QueueFamilyGlobalPriorityPropertiesEXT = QueueFamilyGlobalPriorityPropertiesKHR;
+pub type PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures;
+pub type PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties;
+pub type DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo;
+pub type DescriptorSetVariableDescriptorCountAllocateInfoEXT =
+    DescriptorSetVariableDescriptorCountAllocateInfo;
+pub type DescriptorSetVariableDescriptorCountLayoutSupportEXT =
+    DescriptorSetVariableDescriptorCountLayoutSupport;
+pub type AttachmentDescription2KHR = AttachmentDescription2;
+pub type AttachmentReference2KHR = AttachmentReference2;
+pub type SubpassDescription2KHR = SubpassDescription2;
+pub type SubpassDependency2KHR = SubpassDependency2;
+pub type RenderPassCreateInfo2KHR = RenderPassCreateInfo2;
+pub type SubpassBeginInfoKHR = SubpassBeginInfo;
+pub type SubpassEndInfoKHR = SubpassEndInfo;
+pub type PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures;
+pub type PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties;
+pub type SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo;
+pub type TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo;
+pub type SemaphoreWaitInfoKHR = SemaphoreWaitInfo;
+pub type SemaphoreSignalInfoKHR = SemaphoreSignalInfo;
+pub type PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures;
+pub type PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures;
+pub type PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;
+pub type PhysicalDeviceDepthStencilResolvePropertiesKHR =
+    PhysicalDeviceDepthStencilResolveProperties;
+pub type SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve;
+pub type PhysicalDeviceFragmentShaderBarycentricFeaturesNV =
+    PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+pub type ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
+pub type PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures;
+pub type PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR =
+    PhysicalDeviceUniformBufferStandardLayoutFeatures;
+pub type PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures;
+pub type PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+pub type BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo;
+pub type BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo;
+pub type BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;
+pub type PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures;
+pub type FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo;
+pub type FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo;
+pub type RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo;
+pub type PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT =
+    PhysicalDeviceTextureCompressionASTCHDRFeatures;
+pub type PipelineCreationFeedbackEXT = PipelineCreationFeedback;
+pub type PipelineCreationFeedbackCreateInfoEXT = PipelineCreationFeedbackCreateInfo;
+pub type QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL;
+pub type PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR =
+    PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+pub type AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout;
+pub type AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout;
+pub type PipelineInfoEXT = PipelineInfoKHR;
+pub type PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT =
+    PhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+pub type PhysicalDeviceTexelBufferAlignmentPropertiesEXT =
+    PhysicalDeviceTexelBufferAlignmentProperties;
+pub type PhysicalDeviceSubgroupSizeControlFeaturesEXT = PhysicalDeviceSubgroupSizeControlFeatures;
+pub type PhysicalDeviceSubgroupSizeControlPropertiesEXT =
+    PhysicalDeviceSubgroupSizeControlProperties;
+pub type PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT =
+    PipelineShaderStageRequiredSubgroupSizeCreateInfo;
+pub type MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo;
+pub type DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo;
+pub type PhysicalDevicePipelineCreationCacheControlFeaturesEXT =
+    PhysicalDevicePipelineCreationCacheControlFeatures;
+pub type PhysicalDeviceToolPropertiesEXT = PhysicalDeviceToolProperties;
+pub type AabbPositionsNV = AabbPositionsKHR;
+pub type TransformMatrixNV = TransformMatrixKHR;
+pub type AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR;
+pub type PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR =
+    PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+pub type PhysicalDeviceImageRobustnessFeaturesEXT = PhysicalDeviceImageRobustnessFeatures;
+pub type BufferCopy2KHR = BufferCopy2;
+pub type ImageCopy2KHR = ImageCopy2;
+pub type ImageBlit2KHR = ImageBlit2;
+pub type BufferImageCopy2KHR = BufferImageCopy2;
+pub type ImageResolve2KHR = ImageResolve2;
+pub type CopyBufferInfo2KHR = CopyBufferInfo2;
+pub type CopyImageInfo2KHR = CopyImageInfo2;
+pub type BlitImageInfo2KHR = BlitImageInfo2;
+pub type CopyBufferToImageInfo2KHR = CopyBufferToImageInfo2;
+pub type CopyImageToBufferInfo2KHR = CopyImageToBufferInfo2;
+pub type ResolveImageInfo2KHR = ResolveImageInfo2;
+pub type PhysicalDeviceShaderTerminateInvocationFeaturesKHR =
+    PhysicalDeviceShaderTerminateInvocationFeatures;
+pub type PhysicalDeviceMutableDescriptorTypeFeaturesVALVE =
+    PhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+pub type MutableDescriptorTypeListVALVE = MutableDescriptorTypeListEXT;
+pub type MutableDescriptorTypeCreateInfoVALVE = MutableDescriptorTypeCreateInfoEXT;
+pub type MemoryBarrier2KHR = MemoryBarrier2;
+pub type ImageMemoryBarrier2KHR = ImageMemoryBarrier2;
+pub type BufferMemoryBarrier2KHR = BufferMemoryBarrier2;
+pub type DependencyInfoKHR = DependencyInfo;
+pub type SemaphoreSubmitInfoKHR = SemaphoreSubmitInfo;
+pub type CommandBufferSubmitInfoKHR = CommandBufferSubmitInfo;
+pub type SubmitInfo2KHR = SubmitInfo2;
+pub type PhysicalDeviceSynchronization2FeaturesKHR = PhysicalDeviceSynchronization2Features;
+pub type PhysicalDeviceShaderIntegerDotProductFeaturesKHR =
+    PhysicalDeviceShaderIntegerDotProductFeatures;
+pub type PhysicalDeviceShaderIntegerDotProductPropertiesKHR =
+    PhysicalDeviceShaderIntegerDotProductProperties;
+pub type FormatProperties3KHR = FormatProperties3;
+pub type PipelineRenderingCreateInfoKHR = PipelineRenderingCreateInfo;
+pub type RenderingInfoKHR = RenderingInfo;
+pub type RenderingAttachmentInfoKHR = RenderingAttachmentInfo;
+pub type PhysicalDeviceDynamicRenderingFeaturesKHR = PhysicalDeviceDynamicRenderingFeatures;
+pub type CommandBufferInheritanceRenderingInfoKHR = CommandBufferInheritanceRenderingInfo;
+pub type AttachmentSampleCountInfoNV = AttachmentSampleCountInfoAMD;
+pub type PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM =
+    PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
diff --git a/third_party/rust/ash/src/vk/bitflags.rs b/third_party/rust/ash/src/vk/bitflags.rs
new file mode 100644
index 0000000000..66699d7214
--- /dev/null
+++ b/third_party/rust/ash/src/vk/bitflags.rs
@@ -0,0 +1,1643 @@
+use crate::vk::definitions::*;
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCacheCreateFlagBits.html>"]
+pub struct PipelineCacheCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(PipelineCacheCreateFlags, Flags);
+impl PipelineCacheCreateFlags {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueueFlagBits.html>"]
+pub struct QueueFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(QueueFlags, Flags);
+impl QueueFlags {
+ #[doc = "Queue supports graphics operations"]
+ pub const GRAPHICS: Self = Self(0b1);
+ #[doc = "Queue supports compute operations"]
+ pub const COMPUTE: Self = Self(0b10);
+ #[doc = "Queue supports transfer operations"]
+ pub const TRANSFER: Self = Self(0b100);
+ #[doc = "Queue supports sparse resource memory management operations"]
+ pub const SPARSE_BINDING: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCullModeFlagBits.html>"]
+pub struct CullModeFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(CullModeFlags, Flags);
+impl CullModeFlags {
+ pub const NONE: Self = Self(0);
+ pub const FRONT: Self = Self(0b1);
+ pub const BACK: Self = Self(0b10);
+ pub const FRONT_AND_BACK: Self = Self(0x0000_0003);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPassCreateFlagBits.html>"]
+pub struct RenderPassCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(RenderPassCreateFlags, Flags);
+impl RenderPassCreateFlags {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceQueueCreateFlagBits.html>"]
+pub struct DeviceQueueCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(DeviceQueueCreateFlags, Flags);
+impl DeviceQueueCreateFlags {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryPropertyFlagBits.html>"]
+pub struct MemoryPropertyFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(MemoryPropertyFlags, Flags);
+impl MemoryPropertyFlags {
+ #[doc = "If otherwise stated, then allocate memory on device"]
+ pub const DEVICE_LOCAL: Self = Self(0b1);
+ #[doc = "Memory is mappable by host"]
+ pub const HOST_VISIBLE: Self = Self(0b10);
+ #[doc = "Memory will have i/o coherency. If not set, application may need to use vkFlushMappedMemoryRanges and vkInvalidateMappedMemoryRanges to flush/invalidate host cache"]
+ pub const HOST_COHERENT: Self = Self(0b100);
+ #[doc = "Memory will be cached by the host"]
+ pub const HOST_CACHED: Self = Self(0b1000);
+ #[doc = "Memory may be allocated by the driver when it is required"]
+ pub const LAZILY_ALLOCATED: Self = Self(0b1_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryHeapFlagBits.html>"]
+pub struct MemoryHeapFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(MemoryHeapFlags, Flags);
+impl MemoryHeapFlags {
+ #[doc = "If set, heap represents device memory"]
+ pub const DEVICE_LOCAL: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccessFlagBits.html>"]
+pub struct AccessFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(AccessFlags, Flags);
+impl AccessFlags {
+ #[doc = "Controls coherency of indirect command reads"]
+ pub const INDIRECT_COMMAND_READ: Self = Self(0b1);
+ #[doc = "Controls coherency of index reads"]
+ pub const INDEX_READ: Self = Self(0b10);
+ #[doc = "Controls coherency of vertex attribute reads"]
+ pub const VERTEX_ATTRIBUTE_READ: Self = Self(0b100);
+ #[doc = "Controls coherency of uniform buffer reads"]
+ pub const UNIFORM_READ: Self = Self(0b1000);
+ #[doc = "Controls coherency of input attachment reads"]
+ pub const INPUT_ATTACHMENT_READ: Self = Self(0b1_0000);
+ #[doc = "Controls coherency of shader reads"]
+ pub const SHADER_READ: Self = Self(0b10_0000);
+ #[doc = "Controls coherency of shader writes"]
+ pub const SHADER_WRITE: Self = Self(0b100_0000);
+ #[doc = "Controls coherency of color attachment reads"]
+ pub const COLOR_ATTACHMENT_READ: Self = Self(0b1000_0000);
+ #[doc = "Controls coherency of color attachment writes"]
+ pub const COLOR_ATTACHMENT_WRITE: Self = Self(0b1_0000_0000);
+ #[doc = "Controls coherency of depth/stencil attachment reads"]
+ pub const DEPTH_STENCIL_ATTACHMENT_READ: Self = Self(0b10_0000_0000);
+ #[doc = "Controls coherency of depth/stencil attachment writes"]
+ pub const DEPTH_STENCIL_ATTACHMENT_WRITE: Self = Self(0b100_0000_0000);
+ #[doc = "Controls coherency of transfer reads"]
+ pub const TRANSFER_READ: Self = Self(0b1000_0000_0000);
+ #[doc = "Controls coherency of transfer writes"]
+ pub const TRANSFER_WRITE: Self = Self(0b1_0000_0000_0000);
+ #[doc = "Controls coherency of host reads"]
+ pub const HOST_READ: Self = Self(0b10_0000_0000_0000);
+ #[doc = "Controls coherency of host writes"]
+ pub const HOST_WRITE: Self = Self(0b100_0000_0000_0000);
+ #[doc = "Controls coherency of memory reads"]
+ pub const MEMORY_READ: Self = Self(0b1000_0000_0000_0000);
+ #[doc = "Controls coherency of memory writes"]
+ pub const MEMORY_WRITE: Self = Self(0b1_0000_0000_0000_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferUsageFlagBits.html>"]
+pub struct BufferUsageFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(BufferUsageFlags, Flags);
+impl BufferUsageFlags {
+ #[doc = "Can be used as a source of transfer operations"]
+ pub const TRANSFER_SRC: Self = Self(0b1);
+ #[doc = "Can be used as a destination of transfer operations"]
+ pub const TRANSFER_DST: Self = Self(0b10);
+ #[doc = "Can be used as TBO"]
+ pub const UNIFORM_TEXEL_BUFFER: Self = Self(0b100);
+ #[doc = "Can be used as IBO"]
+ pub const STORAGE_TEXEL_BUFFER: Self = Self(0b1000);
+ #[doc = "Can be used as UBO"]
+ pub const UNIFORM_BUFFER: Self = Self(0b1_0000);
+ #[doc = "Can be used as SSBO"]
+ pub const STORAGE_BUFFER: Self = Self(0b10_0000);
+ #[doc = "Can be used as source of fixed-function index fetch (index buffer)"]
+ pub const INDEX_BUFFER: Self = Self(0b100_0000);
+ #[doc = "Can be used as source of fixed-function vertex fetch (VBO)"]
+ pub const VERTEX_BUFFER: Self = Self(0b1000_0000);
+ #[doc = "Can be the source of indirect parameters (e.g. indirect buffer, parameter buffer)"]
+ pub const INDIRECT_BUFFER: Self = Self(0b1_0000_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferCreateFlagBits.html>"]
+pub struct BufferCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(BufferCreateFlags, Flags);
+impl BufferCreateFlags {
+ #[doc = "Buffer should support sparse backing"]
+ pub const SPARSE_BINDING: Self = Self(0b1);
+ #[doc = "Buffer should support sparse backing with partial residency"]
+ pub const SPARSE_RESIDENCY: Self = Self(0b10);
+ #[doc = "Buffer should support constant data access to physical memory ranges mapped into multiple locations of sparse buffers"]
+ pub const SPARSE_ALIASED: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkShaderStageFlagBits.html>"]
+pub struct ShaderStageFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(ShaderStageFlags, Flags);
+impl ShaderStageFlags {
+ pub const VERTEX: Self = Self(0b1);
+ pub const TESSELLATION_CONTROL: Self = Self(0b10);
+ pub const TESSELLATION_EVALUATION: Self = Self(0b100);
+ pub const GEOMETRY: Self = Self(0b1000);
+ pub const FRAGMENT: Self = Self(0b1_0000);
+ pub const COMPUTE: Self = Self(0b10_0000);
+ pub const ALL_GRAPHICS: Self = Self(0x0000_001F);
+ pub const ALL: Self = Self(0x7FFF_FFFF);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageUsageFlagBits.html>"]
+pub struct ImageUsageFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(ImageUsageFlags, Flags);
+impl ImageUsageFlags {
+ #[doc = "Can be used as a source of transfer operations"]
+ pub const TRANSFER_SRC: Self = Self(0b1);
+ #[doc = "Can be used as a destination of transfer operations"]
+ pub const TRANSFER_DST: Self = Self(0b10);
+ #[doc = "Can be sampled from (SAMPLED_IMAGE and COMBINED_IMAGE_SAMPLER descriptor types)"]
+ pub const SAMPLED: Self = Self(0b100);
+ #[doc = "Can be used as storage image (STORAGE_IMAGE descriptor type)"]
+ pub const STORAGE: Self = Self(0b1000);
+ #[doc = "Can be used as framebuffer color attachment"]
+ pub const COLOR_ATTACHMENT: Self = Self(0b1_0000);
+ #[doc = "Can be used as framebuffer depth/stencil attachment"]
+ pub const DEPTH_STENCIL_ATTACHMENT: Self = Self(0b10_0000);
+ #[doc = "Image data not needed outside of rendering"]
+ pub const TRANSIENT_ATTACHMENT: Self = Self(0b100_0000);
+ #[doc = "Can be used as framebuffer input attachment"]
+ pub const INPUT_ATTACHMENT: Self = Self(0b1000_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageCreateFlagBits.html>"]
+pub struct ImageCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(ImageCreateFlags, Flags);
+impl ImageCreateFlags {
+ #[doc = "Image should support sparse backing"]
+ pub const SPARSE_BINDING: Self = Self(0b1);
+ #[doc = "Image should support sparse backing with partial residency"]
+ pub const SPARSE_RESIDENCY: Self = Self(0b10);
+ #[doc = "Image should support constant data access to physical memory ranges mapped into multiple locations of sparse images"]
+ pub const SPARSE_ALIASED: Self = Self(0b100);
+ #[doc = "Allows image views to have different format than the base image"]
+ pub const MUTABLE_FORMAT: Self = Self(0b1000);
+ #[doc = "Allows creating image views with cube type from the created image"]
+ pub const CUBE_COMPATIBLE: Self = Self(0b1_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageViewCreateFlagBits.html>"]
+pub struct ImageViewCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(ImageViewCreateFlags, Flags);
+impl ImageViewCreateFlags {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSamplerCreateFlagBits.html>"]
+pub struct SamplerCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(SamplerCreateFlags, Flags);
+impl SamplerCreateFlags {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCreateFlagBits.html>"]
+pub struct PipelineCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(PipelineCreateFlags, Flags);
+impl PipelineCreateFlags {
+ pub const DISABLE_OPTIMIZATION: Self = Self(0b1);
+ pub const ALLOW_DERIVATIVES: Self = Self(0b10);
+ pub const DERIVATIVE: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineShaderStageCreateFlagBits.html>"]
+pub struct PipelineShaderStageCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(PipelineShaderStageCreateFlags, Flags);
+impl PipelineShaderStageCreateFlags {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkColorComponentFlagBits.html>"]
+pub struct ColorComponentFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(ColorComponentFlags, Flags);
+impl ColorComponentFlags {
+ pub const R: Self = Self(0b1);
+ pub const G: Self = Self(0b10);
+ pub const B: Self = Self(0b100);
+ pub const A: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFenceCreateFlagBits.html>"]
+pub struct FenceCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(FenceCreateFlags, Flags);
+impl FenceCreateFlags {
+ #[doc = "Fence is created in the signaled state"]
+ pub const SIGNALED: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSemaphoreCreateFlagBits.html>"]
+pub struct SemaphoreCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(SemaphoreCreateFlags, Flags);
+// Core Vulkan defines no semaphore-creation flag bits; the type exists so the
+// API can add bits later without breaking the struct layout.
+impl SemaphoreCreateFlags {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFormatFeatureFlagBits.html>"]
+pub struct FormatFeatureFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(FormatFeatureFlags, Flags);
+impl FormatFeatureFlags {
+ #[doc = "Format can be used for sampled images (SAMPLED_IMAGE and COMBINED_IMAGE_SAMPLER descriptor types)"]
+ pub const SAMPLED_IMAGE: Self = Self(0b1);
+ #[doc = "Format can be used for storage images (STORAGE_IMAGE descriptor type)"]
+ pub const STORAGE_IMAGE: Self = Self(0b10);
+ #[doc = "Format supports atomic operations in case it is used for storage images"]
+ pub const STORAGE_IMAGE_ATOMIC: Self = Self(0b100);
+ #[doc = "Format can be used for uniform texel buffers (TBOs)"]
+ pub const UNIFORM_TEXEL_BUFFER: Self = Self(0b1000);
+ #[doc = "Format can be used for storage texel buffers (IBOs)"]
+ pub const STORAGE_TEXEL_BUFFER: Self = Self(0b1_0000);
+ #[doc = "Format supports atomic operations in case it is used for storage texel buffers"]
+ pub const STORAGE_TEXEL_BUFFER_ATOMIC: Self = Self(0b10_0000);
+ #[doc = "Format can be used for vertex buffers (VBOs)"]
+ pub const VERTEX_BUFFER: Self = Self(0b100_0000);
+ #[doc = "Format can be used for color attachment images"]
+ pub const COLOR_ATTACHMENT: Self = Self(0b1000_0000);
+ #[doc = "Format supports blending in case it is used for color attachment images"]
+ pub const COLOR_ATTACHMENT_BLEND: Self = Self(0b1_0000_0000);
+ #[doc = "Format can be used for depth/stencil attachment images"]
+ pub const DEPTH_STENCIL_ATTACHMENT: Self = Self(0b10_0000_0000);
+ #[doc = "Format can be used as the source image of blits with vkCmdBlitImage"]
+ pub const BLIT_SRC: Self = Self(0b100_0000_0000);
+ #[doc = "Format can be used as the destination image of blits with vkCmdBlitImage"]
+ pub const BLIT_DST: Self = Self(0b1000_0000_0000);
+ #[doc = "Format can be filtered with VK_FILTER_LINEAR when being sampled"]
+ pub const SAMPLED_IMAGE_FILTER_LINEAR: Self = Self(0b1_0000_0000_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueryControlFlagBits.html>"]
+pub struct QueryControlFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(QueryControlFlags, Flags);
+impl QueryControlFlags {
+ #[doc = "Require precise results to be collected by the query"]
+ pub const PRECISE: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueryResultFlagBits.html>"]
+pub struct QueryResultFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(QueryResultFlags, Flags);
+impl QueryResultFlags {
+ #[doc = "Results of the queries are written to the destination buffer as 64-bit values"]
+ pub const TYPE_64: Self = Self(0b1);
+ #[doc = "Results of the queries are waited on before proceeding with the result copy"]
+ pub const WAIT: Self = Self(0b10);
+ #[doc = "Besides the results of the query, the availability of the results is also written"]
+ pub const WITH_AVAILABILITY: Self = Self(0b100);
+ #[doc = "Copy the partial results of the query even if the final results are not available"]
+ pub const PARTIAL: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandBufferUsageFlagBits.html>"]
+pub struct CommandBufferUsageFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(CommandBufferUsageFlags, Flags);
+impl CommandBufferUsageFlags {
+ #[doc = "Each recording of the command buffer will only be submitted once before being reset or freed"]
+ pub const ONE_TIME_SUBMIT: Self = Self(0b1);
+ #[doc = "A secondary command buffer is considered to be entirely inside a render pass"]
+ pub const RENDER_PASS_CONTINUE: Self = Self(0b10);
+ #[doc = "Command buffer may be submitted/executed more than once simultaneously"]
+ pub const SIMULTANEOUS_USE: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueryPipelineStatisticFlagBits.html>"]
+pub struct QueryPipelineStatisticFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(QueryPipelineStatisticFlags, Flags);
+impl QueryPipelineStatisticFlags {
+ #[doc = "Optional"]
+ pub const INPUT_ASSEMBLY_VERTICES: Self = Self(0b1);
+ #[doc = "Optional"]
+ pub const INPUT_ASSEMBLY_PRIMITIVES: Self = Self(0b10);
+ #[doc = "Optional"]
+ pub const VERTEX_SHADER_INVOCATIONS: Self = Self(0b100);
+ #[doc = "Optional"]
+ pub const GEOMETRY_SHADER_INVOCATIONS: Self = Self(0b1000);
+ #[doc = "Optional"]
+ pub const GEOMETRY_SHADER_PRIMITIVES: Self = Self(0b1_0000);
+ #[doc = "Optional"]
+ pub const CLIPPING_INVOCATIONS: Self = Self(0b10_0000);
+ #[doc = "Optional"]
+ pub const CLIPPING_PRIMITIVES: Self = Self(0b100_0000);
+ #[doc = "Optional"]
+ pub const FRAGMENT_SHADER_INVOCATIONS: Self = Self(0b1000_0000);
+ #[doc = "Optional"]
+ pub const TESSELLATION_CONTROL_SHADER_PATCHES: Self = Self(0b1_0000_0000);
+ #[doc = "Optional"]
+ pub const TESSELLATION_EVALUATION_SHADER_INVOCATIONS: Self = Self(0b10_0000_0000);
+ #[doc = "Optional"]
+ pub const COMPUTE_SHADER_INVOCATIONS: Self = Self(0b100_0000_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageAspectFlagBits.html>"]
+pub struct ImageAspectFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(ImageAspectFlags, Flags);
+impl ImageAspectFlags {
+ #[doc = "Color aspect of an image"]
+ pub const COLOR: Self = Self(0b1);
+ #[doc = "Depth aspect of a depth/stencil image"]
+ pub const DEPTH: Self = Self(0b10);
+ #[doc = "Stencil aspect of a depth/stencil image"]
+ pub const STENCIL: Self = Self(0b100);
+ #[doc = "Metadata aspect, used for sparse resource operations"]
+ pub const METADATA: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSparseImageFormatFlagBits.html>"]
+pub struct SparseImageFormatFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(SparseImageFormatFlags, Flags);
+impl SparseImageFormatFlags {
+ #[doc = "Image uses a single mip tail region for all array layers"]
+ pub const SINGLE_MIPTAIL: Self = Self(0b1);
+ #[doc = "Image requires mip level dimensions to be an integer multiple of the sparse image block dimensions for non-tail mip levels."]
+ pub const ALIGNED_MIP_SIZE: Self = Self(0b10);
+ #[doc = "Image uses non-standard sparse image block dimensions"]
+ pub const NONSTANDARD_BLOCK_SIZE: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSparseMemoryBindFlagBits.html>"]
+pub struct SparseMemoryBindFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(SparseMemoryBindFlags, Flags);
+impl SparseMemoryBindFlags {
+ #[doc = "Operation binds resource metadata to memory"]
+ pub const METADATA: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineStageFlagBits.html>"]
+pub struct PipelineStageFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(PipelineStageFlags, Flags);
+impl PipelineStageFlags {
+ #[doc = "Before subsequent commands are processed"]
+ pub const TOP_OF_PIPE: Self = Self(0b1);
+ #[doc = "Draw/DispatchIndirect command fetch"]
+ pub const DRAW_INDIRECT: Self = Self(0b10);
+ #[doc = "Vertex/index fetch"]
+ pub const VERTEX_INPUT: Self = Self(0b100);
+ #[doc = "Vertex shading"]
+ pub const VERTEX_SHADER: Self = Self(0b1000);
+ #[doc = "Tessellation control shading"]
+ pub const TESSELLATION_CONTROL_SHADER: Self = Self(0b1_0000);
+ #[doc = "Tessellation evaluation shading"]
+ pub const TESSELLATION_EVALUATION_SHADER: Self = Self(0b10_0000);
+ #[doc = "Geometry shading"]
+ pub const GEOMETRY_SHADER: Self = Self(0b100_0000);
+ #[doc = "Fragment shading"]
+ pub const FRAGMENT_SHADER: Self = Self(0b1000_0000);
+ #[doc = "Early fragment (depth and stencil) tests"]
+ pub const EARLY_FRAGMENT_TESTS: Self = Self(0b1_0000_0000);
+ #[doc = "Late fragment (depth and stencil) tests"]
+ pub const LATE_FRAGMENT_TESTS: Self = Self(0b10_0000_0000);
+ #[doc = "Color attachment writes"]
+ pub const COLOR_ATTACHMENT_OUTPUT: Self = Self(0b100_0000_0000);
+ #[doc = "Compute shading"]
+ pub const COMPUTE_SHADER: Self = Self(0b1000_0000_0000);
+ #[doc = "Transfer/copy operations"]
+ pub const TRANSFER: Self = Self(0b1_0000_0000_0000);
+ #[doc = "After previous commands have completed"]
+ pub const BOTTOM_OF_PIPE: Self = Self(0b10_0000_0000_0000);
+ #[doc = "Indicates host (CPU) is a source/sink of the dependency"]
+ pub const HOST: Self = Self(0b100_0000_0000_0000);
+ #[doc = "All stages of the graphics pipeline"]
+ pub const ALL_GRAPHICS: Self = Self(0b1000_0000_0000_0000);
+ #[doc = "All stages supported on the queue"]
+ pub const ALL_COMMANDS: Self = Self(0b1_0000_0000_0000_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandPoolCreateFlagBits.html>"]
+pub struct CommandPoolCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(CommandPoolCreateFlags, Flags);
+impl CommandPoolCreateFlags {
+ #[doc = "Command buffers have a short lifetime"]
+ pub const TRANSIENT: Self = Self(0b1);
+ #[doc = "Command buffers may release their memory individually"]
+ pub const RESET_COMMAND_BUFFER: Self = Self(0b10);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandPoolResetFlagBits.html>"]
+pub struct CommandPoolResetFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(CommandPoolResetFlags, Flags);
+impl CommandPoolResetFlags {
+ #[doc = "Release resources owned by the pool"]
+ pub const RELEASE_RESOURCES: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandBufferResetFlagBits.html>"]
+pub struct CommandBufferResetFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(CommandBufferResetFlags, Flags);
+impl CommandBufferResetFlags {
+ #[doc = "Release resources owned by the buffer"]
+ pub const RELEASE_RESOURCES: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSampleCountFlagBits.html>"]
+pub struct SampleCountFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(SampleCountFlags, Flags);
+impl SampleCountFlags {
+ #[doc = "Sample count 1 supported"]
+ pub const TYPE_1: Self = Self(0b1);
+ #[doc = "Sample count 2 supported"]
+ pub const TYPE_2: Self = Self(0b10);
+ #[doc = "Sample count 4 supported"]
+ pub const TYPE_4: Self = Self(0b100);
+ #[doc = "Sample count 8 supported"]
+ pub const TYPE_8: Self = Self(0b1000);
+ #[doc = "Sample count 16 supported"]
+ pub const TYPE_16: Self = Self(0b1_0000);
+ #[doc = "Sample count 32 supported"]
+ pub const TYPE_32: Self = Self(0b10_0000);
+ #[doc = "Sample count 64 supported"]
+ pub const TYPE_64: Self = Self(0b100_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAttachmentDescriptionFlagBits.html>"]
+pub struct AttachmentDescriptionFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(AttachmentDescriptionFlags, Flags);
+impl AttachmentDescriptionFlags {
+ #[doc = "The attachment may alias physical memory of another attachment in the same render pass"]
+ pub const MAY_ALIAS: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkStencilFaceFlagBits.html>"]
+pub struct StencilFaceFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(StencilFaceFlags, Flags);
+impl StencilFaceFlags {
+ #[doc = "Front face"]
+ pub const FRONT: Self = Self(0b1);
+ #[doc = "Back face"]
+ pub const BACK: Self = Self(0b10);
+ #[doc = "Front and back faces"]
+ // Composite value: FRONT | BACK (0b11), written in hex by the generator.
+ pub const FRONT_AND_BACK: Self = Self(0x0000_0003);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorPoolCreateFlagBits.html>"]
+pub struct DescriptorPoolCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(DescriptorPoolCreateFlags, Flags);
+impl DescriptorPoolCreateFlags {
+ #[doc = "Descriptor sets may be freed individually"]
+ pub const FREE_DESCRIPTOR_SET: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDependencyFlagBits.html>"]
+pub struct DependencyFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(DependencyFlags, Flags);
+impl DependencyFlags {
+ #[doc = "Dependency is per pixel region"]
+ pub const BY_REGION: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSemaphoreWaitFlagBits.html>"]
+pub struct SemaphoreWaitFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(SemaphoreWaitFlags, Flags);
+impl SemaphoreWaitFlags {
+ #[doc = "Wait is satisfied when at least one of the semaphores reaches its wait value"]
+ pub const ANY: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayPlaneAlphaFlagBitsKHR.html>"]
+pub struct DisplayPlaneAlphaFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(DisplayPlaneAlphaFlagsKHR, Flags);
+impl DisplayPlaneAlphaFlagsKHR {
+ #[doc = "Source image is treated as opaque"]
+ pub const OPAQUE: Self = Self(0b1);
+ #[doc = "A single, global alpha value is applied to the whole plane"]
+ pub const GLOBAL: Self = Self(0b10);
+ #[doc = "Alpha is taken per pixel from the source image"]
+ pub const PER_PIXEL: Self = Self(0b100);
+ #[doc = "Per-pixel alpha; source color components are expected to be premultiplied by alpha"]
+ pub const PER_PIXEL_PREMULTIPLIED: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCompositeAlphaFlagBitsKHR.html>"]
+pub struct CompositeAlphaFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(CompositeAlphaFlagsKHR, Flags);
+impl CompositeAlphaFlagsKHR {
+ #[doc = "Alpha channel is ignored; the surface is treated as fully opaque"]
+ pub const OPAQUE: Self = Self(0b1);
+ #[doc = "Color components are expected to already be multiplied by alpha"]
+ pub const PRE_MULTIPLIED: Self = Self(0b10);
+ #[doc = "Compositor multiplies color components by alpha during compositing"]
+ pub const POST_MULTIPLIED: Self = Self(0b100);
+ #[doc = "Alpha compositing mode is determined by native platform settings"]
+ pub const INHERIT: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfaceTransformFlagBitsKHR.html>"]
+pub struct SurfaceTransformFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(SurfaceTransformFlagsKHR, Flags);
+impl SurfaceTransformFlagsKHR {
+ pub const IDENTITY: Self = Self(0b1);
+ pub const ROTATE_90: Self = Self(0b10);
+ pub const ROTATE_180: Self = Self(0b100);
+ pub const ROTATE_270: Self = Self(0b1000);
+ pub const HORIZONTAL_MIRROR: Self = Self(0b1_0000);
+ pub const HORIZONTAL_MIRROR_ROTATE_90: Self = Self(0b10_0000);
+ pub const HORIZONTAL_MIRROR_ROTATE_180: Self = Self(0b100_0000);
+ pub const HORIZONTAL_MIRROR_ROTATE_270: Self = Self(0b1000_0000);
+ pub const INHERIT: Self = Self(0b1_0000_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSwapchainImageUsageFlagBitsANDROID.html>"]
+pub struct SwapchainImageUsageFlagsANDROID(pub(crate) Flags);
+vk_bitflags_wrapped!(SwapchainImageUsageFlagsANDROID, Flags);
+// NOTE(review): Android-platform extension type; SHARED presumably marks a
+// swapchain image shared with the presentation engine — confirm against the
+// ANDROID native-buffer extension spec, which is not public like core Vulkan.
+impl SwapchainImageUsageFlagsANDROID {
+ pub const SHARED: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugReportFlagBitsEXT.html>"]
+pub struct DebugReportFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(DebugReportFlagsEXT, Flags);
+impl DebugReportFlagsEXT {
+ pub const INFORMATION: Self = Self(0b1);
+ pub const WARNING: Self = Self(0b10);
+ pub const PERFORMANCE_WARNING: Self = Self(0b100);
+ pub const ERROR: Self = Self(0b1000);
+ pub const DEBUG: Self = Self(0b1_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalMemoryHandleTypeFlagBitsNV.html>"]
+pub struct ExternalMemoryHandleTypeFlagsNV(pub(crate) Flags);
+vk_bitflags_wrapped!(ExternalMemoryHandleTypeFlagsNV, Flags);
+impl ExternalMemoryHandleTypeFlagsNV {
+ pub const OPAQUE_WIN32: Self = Self(0b1);
+ pub const OPAQUE_WIN32_KMT: Self = Self(0b10);
+ pub const D3D11_IMAGE: Self = Self(0b100);
+ pub const D3D11_IMAGE_KMT: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalMemoryFeatureFlagBitsNV.html>"]
+pub struct ExternalMemoryFeatureFlagsNV(pub(crate) Flags);
+vk_bitflags_wrapped!(ExternalMemoryFeatureFlagsNV, Flags);
+impl ExternalMemoryFeatureFlagsNV {
+ #[doc = "External memory of this handle type must use dedicated allocations"]
+ pub const DEDICATED_ONLY: Self = Self(0b1);
+ #[doc = "Handles of this type can be exported"]
+ pub const EXPORTABLE: Self = Self(0b10);
+ #[doc = "Handles of this type can be imported"]
+ pub const IMPORTABLE: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubgroupFeatureFlagBits.html>"]
+pub struct SubgroupFeatureFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(SubgroupFeatureFlags, Flags);
+impl SubgroupFeatureFlags {
+ #[doc = "Basic subgroup operations"]
+ pub const BASIC: Self = Self(0b1);
+ #[doc = "Vote subgroup operations"]
+ pub const VOTE: Self = Self(0b10);
+ #[doc = "Arithmetic subgroup operations"]
+ pub const ARITHMETIC: Self = Self(0b100);
+ #[doc = "Ballot subgroup operations"]
+ pub const BALLOT: Self = Self(0b1000);
+ #[doc = "Shuffle subgroup operations"]
+ pub const SHUFFLE: Self = Self(0b1_0000);
+ #[doc = "Shuffle relative subgroup operations"]
+ pub const SHUFFLE_RELATIVE: Self = Self(0b10_0000);
+ #[doc = "Clustered subgroup operations"]
+ pub const CLUSTERED: Self = Self(0b100_0000);
+ #[doc = "Quad subgroup operations"]
+ pub const QUAD: Self = Self(0b1000_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkIndirectCommandsLayoutUsageFlagBitsNV.html>"]
+pub struct IndirectCommandsLayoutUsageFlagsNV(pub(crate) Flags);
+vk_bitflags_wrapped!(IndirectCommandsLayoutUsageFlagsNV, Flags);
+impl IndirectCommandsLayoutUsageFlagsNV {
+ pub const EXPLICIT_PREPROCESS: Self = Self(0b1);
+ pub const INDEXED_SEQUENCES: Self = Self(0b10);
+ pub const UNORDERED_SEQUENCES: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkIndirectStateFlagBitsNV.html>"]
+pub struct IndirectStateFlagsNV(pub(crate) Flags);
+vk_bitflags_wrapped!(IndirectStateFlagsNV, Flags);
+impl IndirectStateFlagsNV {
+ pub const FLAG_FRONTFACE: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPrivateDataSlotCreateFlagBits.html>"]
+pub struct PrivateDataSlotCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(PrivateDataSlotCreateFlags, Flags);
+// No flag bits defined in this impl; the type is reserved for future use.
+impl PrivateDataSlotCreateFlags {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorSetLayoutCreateFlagBits.html>"]
+pub struct DescriptorSetLayoutCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(DescriptorSetLayoutCreateFlags, Flags);
+// No bits here; extension-provided bits for this type may live in additional
+// impl blocks elsewhere in this generated file.
+impl DescriptorSetLayoutCreateFlags {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalMemoryHandleTypeFlagBits.html>"]
+pub struct ExternalMemoryHandleTypeFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(ExternalMemoryHandleTypeFlags, Flags);
+impl ExternalMemoryHandleTypeFlags {
+ pub const OPAQUE_FD: Self = Self(0b1);
+ pub const OPAQUE_WIN32: Self = Self(0b10);
+ pub const OPAQUE_WIN32_KMT: Self = Self(0b100);
+ pub const D3D11_TEXTURE: Self = Self(0b1000);
+ pub const D3D11_TEXTURE_KMT: Self = Self(0b1_0000);
+ pub const D3D12_HEAP: Self = Self(0b10_0000);
+ pub const D3D12_RESOURCE: Self = Self(0b100_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalMemoryFeatureFlagBits.html>"]
+pub struct ExternalMemoryFeatureFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(ExternalMemoryFeatureFlags, Flags);
+impl ExternalMemoryFeatureFlags {
+ #[doc = "External memory of this handle type must use dedicated allocations"]
+ pub const DEDICATED_ONLY: Self = Self(0b1);
+ #[doc = "Handles of this type can be exported"]
+ pub const EXPORTABLE: Self = Self(0b10);
+ #[doc = "Handles of this type can be imported"]
+ pub const IMPORTABLE: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalSemaphoreHandleTypeFlagBits.html>"]
+pub struct ExternalSemaphoreHandleTypeFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(ExternalSemaphoreHandleTypeFlags, Flags);
+impl ExternalSemaphoreHandleTypeFlags {
+ pub const OPAQUE_FD: Self = Self(0b1);
+ pub const OPAQUE_WIN32: Self = Self(0b10);
+ pub const OPAQUE_WIN32_KMT: Self = Self(0b100);
+ pub const D3D12_FENCE: Self = Self(0b1000);
+ // Alias: D3D11 fences share the same external-handle bit as D3D12 fences.
+ pub const D3D11_FENCE: Self = Self::D3D12_FENCE;
+ pub const SYNC_FD: Self = Self(0b1_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalSemaphoreFeatureFlagBits.html>"]
+pub struct ExternalSemaphoreFeatureFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(ExternalSemaphoreFeatureFlags, Flags);
+impl ExternalSemaphoreFeatureFlags {
+ #[doc = "Handles of this type can be exported"]
+ pub const EXPORTABLE: Self = Self(0b1);
+ #[doc = "Handles of this type can be imported"]
+ pub const IMPORTABLE: Self = Self(0b10);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSemaphoreImportFlagBits.html>"]
+pub struct SemaphoreImportFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(SemaphoreImportFlags, Flags);
+impl SemaphoreImportFlags {
+ #[doc = "Imported payload is temporary: it applies only until the next wait/signal, then the previous state is restored"]
+ pub const TEMPORARY: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalFenceHandleTypeFlagBits.html>"]
+pub struct ExternalFenceHandleTypeFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(ExternalFenceHandleTypeFlags, Flags);
+impl ExternalFenceHandleTypeFlags {
+ pub const OPAQUE_FD: Self = Self(0b1);
+ pub const OPAQUE_WIN32: Self = Self(0b10);
+ pub const OPAQUE_WIN32_KMT: Self = Self(0b100);
+ pub const SYNC_FD: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalFenceFeatureFlagBits.html>"]
+pub struct ExternalFenceFeatureFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(ExternalFenceFeatureFlags, Flags);
+impl ExternalFenceFeatureFlags {
+ #[doc = "Handles of this type can be exported"]
+ pub const EXPORTABLE: Self = Self(0b1);
+ #[doc = "Handles of this type can be imported"]
+ pub const IMPORTABLE: Self = Self(0b10);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFenceImportFlagBits.html>"]
+pub struct FenceImportFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(FenceImportFlags, Flags);
+impl FenceImportFlags {
+ #[doc = "Imported payload is temporary: it applies only until the fence is next reset"]
+ pub const TEMPORARY: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfaceCounterFlagBitsEXT.html>"]
+pub struct SurfaceCounterFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(SurfaceCounterFlagsEXT, Flags);
+impl SurfaceCounterFlagsEXT {
+ #[doc = "Counter increments once every vertical blanking period"]
+ pub const VBLANK: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPeerMemoryFeatureFlagBits.html>"]
+pub struct PeerMemoryFeatureFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(PeerMemoryFeatureFlags, Flags);
+impl PeerMemoryFeatureFlags {
+ #[doc = "Can read with vkCmdCopy commands"]
+ pub const COPY_SRC: Self = Self(0b1);
+ #[doc = "Can write with vkCmdCopy commands"]
+ pub const COPY_DST: Self = Self(0b10);
+ #[doc = "Can read with any access type/command"]
+ pub const GENERIC_SRC: Self = Self(0b100);
+ #[doc = "Can write with any access type/command"]
+ pub const GENERIC_DST: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryAllocateFlagBits.html>"]
+pub struct MemoryAllocateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(MemoryAllocateFlags, Flags);
+impl MemoryAllocateFlags {
+ #[doc = "Force allocation on specific devices"]
+ pub const DEVICE_MASK: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceGroupPresentModeFlagBitsKHR.html>"]
+pub struct DeviceGroupPresentModeFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(DeviceGroupPresentModeFlagsKHR, Flags);
+impl DeviceGroupPresentModeFlagsKHR {
+ #[doc = "Present from local memory"]
+ pub const LOCAL: Self = Self(0b1);
+ #[doc = "Present from remote memory"]
+ pub const REMOTE: Self = Self(0b10);
+ #[doc = "Present sum of local and/or remote memory"]
+ pub const SUM: Self = Self(0b100);
+ #[doc = "Each physical device presents from local memory"]
+ pub const LOCAL_MULTI_DEVICE: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSwapchainCreateFlagBitsKHR.html>"]
+pub struct SwapchainCreateFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(SwapchainCreateFlagsKHR, Flags);
+// No bits here; extension-provided bits for this type may live in additional
+// impl blocks elsewhere in this generated file.
+impl SwapchainCreateFlagsKHR {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubpassDescriptionFlagBits.html>"]
+pub struct SubpassDescriptionFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(SubpassDescriptionFlags, Flags);
+// No bits here; extension-provided bits for this type may live in additional
+// impl blocks elsewhere in this generated file.
+impl SubpassDescriptionFlags {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugUtilsMessageSeverityFlagBitsEXT.html>"]
+pub struct DebugUtilsMessageSeverityFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(DebugUtilsMessageSeverityFlagsEXT, Flags);
+impl DebugUtilsMessageSeverityFlagsEXT {
+ // Severity bits are deliberately spaced four positions apart (bits 0, 4, 8,
+ // 12) so numeric comparison orders severities and leaves room for future
+ // levels in between.
+ pub const VERBOSE: Self = Self(0b1);
+ pub const INFO: Self = Self(0b1_0000);
+ pub const WARNING: Self = Self(0b1_0000_0000);
+ pub const ERROR: Self = Self(0b1_0000_0000_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugUtilsMessageTypeFlagBitsEXT.html>"]
+pub struct DebugUtilsMessageTypeFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(DebugUtilsMessageTypeFlagsEXT, Flags);
+impl DebugUtilsMessageTypeFlagsEXT {
+ pub const GENERAL: Self = Self(0b1);
+ pub const VALIDATION: Self = Self(0b10);
+ pub const PERFORMANCE: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorBindingFlagBits.html>"]
+pub struct DescriptorBindingFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(DescriptorBindingFlags, Flags);
+impl DescriptorBindingFlags {
+ pub const UPDATE_AFTER_BIND: Self = Self(0b1);
+ pub const UPDATE_UNUSED_WHILE_PENDING: Self = Self(0b10);
+ pub const PARTIALLY_BOUND: Self = Self(0b100);
+ pub const VARIABLE_DESCRIPTOR_COUNT: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkConditionalRenderingFlagBitsEXT.html>"]
+pub struct ConditionalRenderingFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(ConditionalRenderingFlagsEXT, Flags);
+impl ConditionalRenderingFlagsEXT {
+ #[doc = "Rendering is discarded when the predicate is non-zero instead of zero"]
+ pub const INVERTED: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkResolveModeFlagBits.html>"]
+pub struct ResolveModeFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(ResolveModeFlags, Flags);
+impl ResolveModeFlags {
+ // NONE is a named zero value ("no resolve"), not a bit; combining it with
+ // other modes is a no-op.
+ pub const NONE: Self = Self(0);
+ pub const SAMPLE_ZERO: Self = Self(0b1);
+ pub const AVERAGE: Self = Self(0b10);
+ pub const MIN: Self = Self(0b100);
+ pub const MAX: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkGeometryInstanceFlagBitsKHR.html>"]
+pub struct GeometryInstanceFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(GeometryInstanceFlagsKHR, Flags);
+impl GeometryInstanceFlagsKHR {
+ pub const TRIANGLE_FACING_CULL_DISABLE: Self = Self(0b1);
+ pub const TRIANGLE_FLIP_FACING: Self = Self(0b10);
+ pub const FORCE_OPAQUE: Self = Self(0b100);
+ pub const FORCE_NO_OPAQUE: Self = Self(0b1000);
+ // Alias for TRIANGLE_FLIP_FACING kept for spec-name compatibility.
+ pub const TRIANGLE_FRONT_COUNTERCLOCKWISE: Self = Self::TRIANGLE_FLIP_FACING;
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkGeometryFlagBitsKHR.html>"]
+pub struct GeometryFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(GeometryFlagsKHR, Flags);
+impl GeometryFlagsKHR {
+ pub const OPAQUE: Self = Self(0b1);
+ pub const NO_DUPLICATE_ANY_HIT_INVOCATION: Self = Self(0b10);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBuildAccelerationStructureFlagBitsKHR.html>"]
+pub struct BuildAccelerationStructureFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(BuildAccelerationStructureFlagsKHR, Flags);
+impl BuildAccelerationStructureFlagsKHR {
+ pub const ALLOW_UPDATE: Self = Self(0b1);
+ pub const ALLOW_COMPACTION: Self = Self(0b10);
+ pub const PREFER_FAST_TRACE: Self = Self(0b100);
+ pub const PREFER_FAST_BUILD: Self = Self(0b1000);
+ pub const LOW_MEMORY: Self = Self(0b1_0000);
+}
+// NOTE(review): the flag wrappers below appear machine-generated from the Vulkan
+// registry (uniform newtype + vk_bitflags_wrapped! pattern, #[doc] links into the
+// 1.3-extensions manual). Bit values must stay in sync with the linked spec pages;
+// do not hand-edit the constants.
+// Creation flags for VK_KHR_acceleration_structure objects.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureCreateFlagBitsKHR.html>"]
+pub struct AccelerationStructureCreateFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(AccelerationStructureCreateFlagsKHR, Flags);
+impl AccelerationStructureCreateFlagsKHR {
+ pub const DEVICE_ADDRESS_CAPTURE_REPLAY: Self = Self(0b1);
+}
+// Framebuffer creation flags; no core bits are visible here (extension bits, if
+// any, are presumably defined elsewhere in the file — TODO confirm).
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFramebufferCreateFlagBits.html>"]
+pub struct FramebufferCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(FramebufferCreateFlags, Flags);
+impl FramebufferCreateFlags {}
+// Diagnostics configuration bits for VK_NV_device_diagnostics_config.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceDiagnosticsConfigFlagBitsNV.html>"]
+pub struct DeviceDiagnosticsConfigFlagsNV(pub(crate) Flags);
+vk_bitflags_wrapped!(DeviceDiagnosticsConfigFlagsNV, Flags);
+impl DeviceDiagnosticsConfigFlagsNV {
+ pub const ENABLE_SHADER_DEBUG_INFO: Self = Self(0b1);
+ pub const ENABLE_RESOURCE_TRACKING: Self = Self(0b10);
+ pub const ENABLE_AUTOMATIC_CHECKPOINTS: Self = Self(0b100);
+ pub const ENABLE_SHADER_ERROR_REPORTING: Self = Self(0b1000);
+}
+// Pipeline-creation feedback bits. The *_EXT constants are aliases of the core
+// names (Self = Self::X), reflecting promotion of the EXT extension to core.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCreationFeedbackFlagBits.html>"]
+pub struct PipelineCreationFeedbackFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(PipelineCreationFeedbackFlags, Flags);
+impl PipelineCreationFeedbackFlags {
+ pub const VALID: Self = Self(0b1);
+ pub const VALID_EXT: Self = Self::VALID;
+ pub const APPLICATION_PIPELINE_CACHE_HIT: Self = Self(0b10);
+ pub const APPLICATION_PIPELINE_CACHE_HIT_EXT: Self = Self::APPLICATION_PIPELINE_CACHE_HIT;
+ pub const BASE_PIPELINE_ACCELERATION: Self = Self(0b100);
+ pub const BASE_PIPELINE_ACCELERATION_EXT: Self = Self::BASE_PIPELINE_ACCELERATION;
+}
+// 64-bit flag type (Flags64) for VK_NV_memory_decompression methods.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryDecompressionMethodFlagBitsNV.html>"]
+pub struct MemoryDecompressionMethodFlagsNV(pub(crate) Flags64);
+vk_bitflags_wrapped!(MemoryDecompressionMethodFlagsNV, Flags64);
+impl MemoryDecompressionMethodFlagsNV {
+ pub const GDEFLATE_1_0: Self = Self(0b1);
+}
+// Performance-counter description bits (VK_KHR_performance_query).
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceCounterDescriptionFlagBitsKHR.html>"]
+pub struct PerformanceCounterDescriptionFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(PerformanceCounterDescriptionFlagsKHR, Flags);
+impl PerformanceCounterDescriptionFlagsKHR {
+ pub const PERFORMANCE_IMPACTING: Self = Self(0b1);
+ pub const CONCURRENTLY_IMPACTED: Self = Self(0b10);
+}
+// Empty impl: no bits are defined for this flag type in the visible registry
+// snapshot; the type exists so the API signature can take a flags parameter.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAcquireProfilingLockFlagBitsKHR.html>"]
+pub struct AcquireProfilingLockFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(AcquireProfilingLockFlagsKHR, Flags);
+impl AcquireProfilingLockFlagsKHR {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkShaderCorePropertiesFlagBitsAMD.html>"]
+pub struct ShaderCorePropertiesFlagsAMD(pub(crate) Flags);
+vk_bitflags_wrapped!(ShaderCorePropertiesFlagsAMD, Flags);
+impl ShaderCorePropertiesFlagsAMD {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkShaderModuleCreateFlagBits.html>"]
+pub struct ShaderModuleCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(ShaderModuleCreateFlags, Flags);
+impl ShaderModuleCreateFlags {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCompilerControlFlagBitsAMD.html>"]
+pub struct PipelineCompilerControlFlagsAMD(pub(crate) Flags);
+vk_bitflags_wrapped!(PipelineCompilerControlFlagsAMD, Flags);
+impl PipelineCompilerControlFlagsAMD {}
+// Tool purpose bits; *_EXT constants alias the core names (EXT promoted to core).
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkToolPurposeFlagBits.html>"]
+pub struct ToolPurposeFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(ToolPurposeFlags, Flags);
+impl ToolPurposeFlags {
+ pub const VALIDATION: Self = Self(0b1);
+ pub const VALIDATION_EXT: Self = Self::VALIDATION;
+ pub const PROFILING: Self = Self(0b10);
+ pub const PROFILING_EXT: Self = Self::PROFILING;
+ pub const TRACING: Self = Self(0b100);
+ pub const TRACING_EXT: Self = Self::TRACING;
+ pub const ADDITIONAL_FEATURES: Self = Self(0b1000);
+ pub const ADDITIONAL_FEATURES_EXT: Self = Self::ADDITIONAL_FEATURES;
+ pub const MODIFYING_FEATURES: Self = Self(0b1_0000);
+ pub const MODIFYING_FEATURES_EXT: Self = Self::MODIFYING_FEATURES;
+}
+// Synchronization2 access masks (64-bit). *_KHR constants alias the core names,
+// reflecting promotion of VK_KHR_synchronization2 to core. Note the gap between
+// bit 16 (MEMORY_WRITE) and bit 32 (SHADER_SAMPLED_READ): the intervening bits
+// are presumably claimed by extensions defined elsewhere — TODO confirm against
+// the registry before assigning new values.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccessFlagBits2.html>"]
+pub struct AccessFlags2(pub(crate) Flags64);
+vk_bitflags_wrapped!(AccessFlags2, Flags64);
+impl AccessFlags2 {
+ pub const NONE: Self = Self(0);
+ pub const NONE_KHR: Self = Self::NONE;
+ pub const INDIRECT_COMMAND_READ: Self = Self(0b1);
+ pub const INDIRECT_COMMAND_READ_KHR: Self = Self::INDIRECT_COMMAND_READ;
+ pub const INDEX_READ: Self = Self(0b10);
+ pub const INDEX_READ_KHR: Self = Self::INDEX_READ;
+ pub const VERTEX_ATTRIBUTE_READ: Self = Self(0b100);
+ pub const VERTEX_ATTRIBUTE_READ_KHR: Self = Self::VERTEX_ATTRIBUTE_READ;
+ pub const UNIFORM_READ: Self = Self(0b1000);
+ pub const UNIFORM_READ_KHR: Self = Self::UNIFORM_READ;
+ pub const INPUT_ATTACHMENT_READ: Self = Self(0b1_0000);
+ pub const INPUT_ATTACHMENT_READ_KHR: Self = Self::INPUT_ATTACHMENT_READ;
+ pub const SHADER_READ: Self = Self(0b10_0000);
+ pub const SHADER_READ_KHR: Self = Self::SHADER_READ;
+ pub const SHADER_WRITE: Self = Self(0b100_0000);
+ pub const SHADER_WRITE_KHR: Self = Self::SHADER_WRITE;
+ pub const COLOR_ATTACHMENT_READ: Self = Self(0b1000_0000);
+ pub const COLOR_ATTACHMENT_READ_KHR: Self = Self::COLOR_ATTACHMENT_READ;
+ pub const COLOR_ATTACHMENT_WRITE: Self = Self(0b1_0000_0000);
+ pub const COLOR_ATTACHMENT_WRITE_KHR: Self = Self::COLOR_ATTACHMENT_WRITE;
+ pub const DEPTH_STENCIL_ATTACHMENT_READ: Self = Self(0b10_0000_0000);
+ pub const DEPTH_STENCIL_ATTACHMENT_READ_KHR: Self = Self::DEPTH_STENCIL_ATTACHMENT_READ;
+ pub const DEPTH_STENCIL_ATTACHMENT_WRITE: Self = Self(0b100_0000_0000);
+ pub const DEPTH_STENCIL_ATTACHMENT_WRITE_KHR: Self = Self::DEPTH_STENCIL_ATTACHMENT_WRITE;
+ pub const TRANSFER_READ: Self = Self(0b1000_0000_0000);
+ pub const TRANSFER_READ_KHR: Self = Self::TRANSFER_READ;
+ pub const TRANSFER_WRITE: Self = Self(0b1_0000_0000_0000);
+ pub const TRANSFER_WRITE_KHR: Self = Self::TRANSFER_WRITE;
+ pub const HOST_READ: Self = Self(0b10_0000_0000_0000);
+ pub const HOST_READ_KHR: Self = Self::HOST_READ;
+ pub const HOST_WRITE: Self = Self(0b100_0000_0000_0000);
+ pub const HOST_WRITE_KHR: Self = Self::HOST_WRITE;
+ pub const MEMORY_READ: Self = Self(0b1000_0000_0000_0000);
+ pub const MEMORY_READ_KHR: Self = Self::MEMORY_READ;
+ pub const MEMORY_WRITE: Self = Self(0b1_0000_0000_0000_0000);
+ pub const MEMORY_WRITE_KHR: Self = Self::MEMORY_WRITE;
+ pub const SHADER_SAMPLED_READ: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const SHADER_SAMPLED_READ_KHR: Self = Self::SHADER_SAMPLED_READ;
+ pub const SHADER_STORAGE_READ: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const SHADER_STORAGE_READ_KHR: Self = Self::SHADER_STORAGE_READ;
+ pub const SHADER_STORAGE_WRITE: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const SHADER_STORAGE_WRITE_KHR: Self = Self::SHADER_STORAGE_WRITE;
+}
+// Synchronization2 pipeline-stage masks (64-bit). *_KHR constants alias core
+// names. TRANSFER/TRANSFER_KHR are additional aliases of ALL_TRANSFER — all four
+// names denote the same bit (0b1_0000_0000_0000). As with AccessFlags2, the jump
+// from bit 16 (ALL_COMMANDS) to bit 32 (COPY) is a registry gap; the missing bits
+// are presumably extension stages defined elsewhere — TODO confirm.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineStageFlagBits2.html>"]
+pub struct PipelineStageFlags2(pub(crate) Flags64);
+vk_bitflags_wrapped!(PipelineStageFlags2, Flags64);
+impl PipelineStageFlags2 {
+ pub const NONE: Self = Self(0);
+ pub const NONE_KHR: Self = Self::NONE;
+ pub const TOP_OF_PIPE: Self = Self(0b1);
+ pub const TOP_OF_PIPE_KHR: Self = Self::TOP_OF_PIPE;
+ pub const DRAW_INDIRECT: Self = Self(0b10);
+ pub const DRAW_INDIRECT_KHR: Self = Self::DRAW_INDIRECT;
+ pub const VERTEX_INPUT: Self = Self(0b100);
+ pub const VERTEX_INPUT_KHR: Self = Self::VERTEX_INPUT;
+ pub const VERTEX_SHADER: Self = Self(0b1000);
+ pub const VERTEX_SHADER_KHR: Self = Self::VERTEX_SHADER;
+ pub const TESSELLATION_CONTROL_SHADER: Self = Self(0b1_0000);
+ pub const TESSELLATION_CONTROL_SHADER_KHR: Self = Self::TESSELLATION_CONTROL_SHADER;
+ pub const TESSELLATION_EVALUATION_SHADER: Self = Self(0b10_0000);
+ pub const TESSELLATION_EVALUATION_SHADER_KHR: Self = Self::TESSELLATION_EVALUATION_SHADER;
+ pub const GEOMETRY_SHADER: Self = Self(0b100_0000);
+ pub const GEOMETRY_SHADER_KHR: Self = Self::GEOMETRY_SHADER;
+ pub const FRAGMENT_SHADER: Self = Self(0b1000_0000);
+ pub const FRAGMENT_SHADER_KHR: Self = Self::FRAGMENT_SHADER;
+ pub const EARLY_FRAGMENT_TESTS: Self = Self(0b1_0000_0000);
+ pub const EARLY_FRAGMENT_TESTS_KHR: Self = Self::EARLY_FRAGMENT_TESTS;
+ pub const LATE_FRAGMENT_TESTS: Self = Self(0b10_0000_0000);
+ pub const LATE_FRAGMENT_TESTS_KHR: Self = Self::LATE_FRAGMENT_TESTS;
+ pub const COLOR_ATTACHMENT_OUTPUT: Self = Self(0b100_0000_0000);
+ pub const COLOR_ATTACHMENT_OUTPUT_KHR: Self = Self::COLOR_ATTACHMENT_OUTPUT;
+ pub const COMPUTE_SHADER: Self = Self(0b1000_0000_0000);
+ pub const COMPUTE_SHADER_KHR: Self = Self::COMPUTE_SHADER;
+ pub const ALL_TRANSFER: Self = Self(0b1_0000_0000_0000);
+ pub const ALL_TRANSFER_KHR: Self = Self::ALL_TRANSFER;
+ pub const TRANSFER: Self = Self::ALL_TRANSFER_KHR;
+ pub const TRANSFER_KHR: Self = Self::ALL_TRANSFER;
+ pub const BOTTOM_OF_PIPE: Self = Self(0b10_0000_0000_0000);
+ pub const BOTTOM_OF_PIPE_KHR: Self = Self::BOTTOM_OF_PIPE;
+ pub const HOST: Self = Self(0b100_0000_0000_0000);
+ pub const HOST_KHR: Self = Self::HOST;
+ pub const ALL_GRAPHICS: Self = Self(0b1000_0000_0000_0000);
+ pub const ALL_GRAPHICS_KHR: Self = Self::ALL_GRAPHICS;
+ pub const ALL_COMMANDS: Self = Self(0b1_0000_0000_0000_0000);
+ pub const ALL_COMMANDS_KHR: Self = Self::ALL_COMMANDS;
+ pub const COPY: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const COPY_KHR: Self = Self::COPY;
+ pub const RESOLVE: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const RESOLVE_KHR: Self = Self::RESOLVE;
+ pub const BLIT: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const BLIT_KHR: Self = Self::BLIT;
+ pub const CLEAR: Self = Self(0b1000_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const CLEAR_KHR: Self = Self::CLEAR;
+ pub const INDEX_INPUT: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const INDEX_INPUT_KHR: Self = Self::INDEX_INPUT;
+ pub const VERTEX_ATTRIBUTE_INPUT: Self =
+ Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const VERTEX_ATTRIBUTE_INPUT_KHR: Self = Self::VERTEX_ATTRIBUTE_INPUT;
+ pub const PRE_RASTERIZATION_SHADERS: Self =
+ Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const PRE_RASTERIZATION_SHADERS_KHR: Self = Self::PRE_RASTERIZATION_SHADERS;
+}
+// Queue-submit flags; PROTECTED_KHR aliases the core PROTECTED bit.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubmitFlagBits.html>"]
+pub struct SubmitFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(SubmitFlags, Flags);
+impl SubmitFlags {
+ pub const PROTECTED: Self = Self(0b1);
+ pub const PROTECTED_KHR: Self = Self::PROTECTED;
+}
+// The next three flag types define no bits in this snapshot (empty impls);
+// the types exist so the corresponding create-info structs can carry flags.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkEventCreateFlagBits.html>"]
+pub struct EventCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(EventCreateFlags, Flags);
+impl EventCreateFlags {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineLayoutCreateFlagBits.html>"]
+pub struct PipelineLayoutCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(PipelineLayoutCreateFlags, Flags);
+impl PipelineLayoutCreateFlags {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineColorBlendStateCreateFlagBits.html>"]
+pub struct PipelineColorBlendStateCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(PipelineColorBlendStateCreateFlags, Flags);
+impl PipelineColorBlendStateCreateFlags {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineDepthStencilStateCreateFlagBits.html>"]
+pub struct PipelineDepthStencilStateCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(PipelineDepthStencilStateCreateFlags, Flags);
+impl PipelineDepthStencilStateCreateFlags {}
+// Pipeline-library subset bits for VK_EXT_graphics_pipeline_library.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkGraphicsPipelineLibraryFlagBitsEXT.html>"]
+pub struct GraphicsPipelineLibraryFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(GraphicsPipelineLibraryFlagsEXT, Flags);
+impl GraphicsPipelineLibraryFlagsEXT {
+ pub const VERTEX_INPUT_INTERFACE: Self = Self(0b1);
+ pub const PRE_RASTERIZATION_SHADERS: Self = Self(0b10);
+ pub const FRAGMENT_SHADER: Self = Self(0b100);
+ pub const FRAGMENT_OUTPUT_INTERFACE: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceAddressBindingFlagBitsEXT.html>"]
+pub struct DeviceAddressBindingFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(DeviceAddressBindingFlagsEXT, Flags);
+impl DeviceAddressBindingFlagsEXT {
+ pub const INTERNAL_OBJECT: Self = Self(0b1);
+}
+// Swapchain scaling / gravity bits for VK_EXT_surface_maintenance1.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPresentScalingFlagBitsEXT.html>"]
+pub struct PresentScalingFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(PresentScalingFlagsEXT, Flags);
+impl PresentScalingFlagsEXT {
+ pub const ONE_TO_ONE: Self = Self(0b1);
+ pub const ASPECT_RATIO_STRETCH: Self = Self(0b10);
+ pub const STRETCH: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPresentGravityFlagBitsEXT.html>"]
+pub struct PresentGravityFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(PresentGravityFlagsEXT, Flags);
+impl PresentGravityFlagsEXT {
+ pub const MIN: Self = Self(0b1);
+ pub const MAX: Self = Self(0b10);
+ pub const CENTERED: Self = Self(0b100);
+}
+// --- Video coding (VK_KHR_video_queue and friends) flag families. ---
+// Several of these use Self(0) as a named "none/invalid/default" value rather
+// than a bit; that mirrors the registry, where 0 is a meaningful query result.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoCodecOperationFlagBitsKHR.html>"]
+pub struct VideoCodecOperationFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoCodecOperationFlagsKHR, Flags);
+impl VideoCodecOperationFlagsKHR {
+ pub const NONE: Self = Self(0);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoChromaSubsamplingFlagBitsKHR.html>"]
+pub struct VideoChromaSubsamplingFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoChromaSubsamplingFlagsKHR, Flags);
+impl VideoChromaSubsamplingFlagsKHR {
+ pub const INVALID: Self = Self(0);
+ pub const MONOCHROME: Self = Self(0b1);
+ pub const TYPE_420: Self = Self(0b10);
+ pub const TYPE_422: Self = Self(0b100);
+ pub const TYPE_444: Self = Self(0b1000);
+}
+// Bit depths: note the registry assigns bits 0, 2, 4 (0b1, 0b100, 0b1_0000) —
+// the odd bits are intentionally skipped, matching the linked spec page.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoComponentBitDepthFlagBitsKHR.html>"]
+pub struct VideoComponentBitDepthFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoComponentBitDepthFlagsKHR, Flags);
+impl VideoComponentBitDepthFlagsKHR {
+ pub const INVALID: Self = Self(0);
+ pub const TYPE_8: Self = Self(0b1);
+ pub const TYPE_10: Self = Self(0b100);
+ pub const TYPE_12: Self = Self(0b1_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoCapabilityFlagBitsKHR.html>"]
+pub struct VideoCapabilityFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoCapabilityFlagsKHR, Flags);
+impl VideoCapabilityFlagsKHR {
+ pub const PROTECTED_CONTENT: Self = Self(0b1);
+ pub const SEPARATE_REFERENCE_IMAGES: Self = Self(0b10);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoSessionCreateFlagBitsKHR.html>"]
+pub struct VideoSessionCreateFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoSessionCreateFlagsKHR, Flags);
+impl VideoSessionCreateFlagsKHR {
+ pub const PROTECTED_CONTENT: Self = Self(0b1);
+}
+// H.264 decode picture layouts; PROGRESSIVE is the zero value (no bit set).
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeH264PictureLayoutFlagBitsKHR.html>"]
+pub struct VideoDecodeH264PictureLayoutFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoDecodeH264PictureLayoutFlagsKHR, Flags);
+impl VideoDecodeH264PictureLayoutFlagsKHR {
+ pub const PROGRESSIVE: Self = Self(0);
+ pub const INTERLACED_INTERLEAVED_LINES: Self = Self(0b1);
+ pub const INTERLACED_SEPARATE_PLANES: Self = Self(0b10);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoCodingControlFlagBitsKHR.html>"]
+pub struct VideoCodingControlFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoCodingControlFlagsKHR, Flags);
+impl VideoCodingControlFlagsKHR {
+ pub const RESET: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeUsageFlagBitsKHR.html>"]
+pub struct VideoDecodeUsageFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoDecodeUsageFlagsKHR, Flags);
+impl VideoDecodeUsageFlagsKHR {
+ pub const DEFAULT: Self = Self(0);
+ pub const TRANSCODING: Self = Self(0b1);
+ pub const OFFLINE: Self = Self(0b10);
+ pub const STREAMING: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeCapabilityFlagBitsKHR.html>"]
+pub struct VideoDecodeCapabilityFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoDecodeCapabilityFlagsKHR, Flags);
+impl VideoDecodeCapabilityFlagsKHR {
+ pub const DPB_AND_OUTPUT_COINCIDE: Self = Self(0b1);
+ pub const DPB_AND_OUTPUT_DISTINCT: Self = Self(0b10);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeUsageFlagBitsKHR.html>"]
+pub struct VideoEncodeUsageFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoEncodeUsageFlagsKHR, Flags);
+impl VideoEncodeUsageFlagsKHR {
+ pub const DEFAULT: Self = Self(0);
+ pub const TRANSCODING: Self = Self(0b1);
+ pub const STREAMING: Self = Self(0b10);
+ pub const RECORDING: Self = Self(0b100);
+ pub const CONFERENCING: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeContentFlagBitsKHR.html>"]
+pub struct VideoEncodeContentFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoEncodeContentFlagsKHR, Flags);
+impl VideoEncodeContentFlagsKHR {
+ pub const DEFAULT: Self = Self(0);
+ pub const CAMERA: Self = Self(0b1);
+ pub const DESKTOP: Self = Self(0b10);
+ pub const RENDERED: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeCapabilityFlagBitsKHR.html>"]
+pub struct VideoEncodeCapabilityFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoEncodeCapabilityFlagsKHR, Flags);
+impl VideoEncodeCapabilityFlagsKHR {
+ pub const PRECEDING_EXTERNALLY_ENCODED_BYTES: Self = Self(0b1);
+}
+// Rate-control modes use decimal literals (1, 2) instead of the binary style
+// used elsewhere; values are identical to 0b1/0b10 — kept as generated.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeRateControlModeFlagBitsKHR.html>"]
+pub struct VideoEncodeRateControlModeFlagsKHR(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoEncodeRateControlModeFlagsKHR, Flags);
+impl VideoEncodeRateControlModeFlagsKHR {
+ pub const NONE: Self = Self(0);
+ pub const CBR: Self = Self(1);
+ pub const VBR: Self = Self(2);
+}
+// H.264 encoder capability bits (VK_EXT_video_encode_h264). Names mirror H.264
+// bitstream syntax elements (e.g. direct_8x8_inference, pic_init_qp_minus26).
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264CapabilityFlagBitsEXT.html>"]
+pub struct VideoEncodeH264CapabilityFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoEncodeH264CapabilityFlagsEXT, Flags);
+impl VideoEncodeH264CapabilityFlagsEXT {
+ pub const DIRECT_8X8_INFERENCE_ENABLED: Self = Self(0b1);
+ pub const DIRECT_8X8_INFERENCE_DISABLED: Self = Self(0b10);
+ pub const SEPARATE_COLOUR_PLANE: Self = Self(0b100);
+ pub const QPPRIME_Y_ZERO_TRANSFORM_BYPASS: Self = Self(0b1000);
+ pub const SCALING_LISTS: Self = Self(0b1_0000);
+ pub const HRD_COMPLIANCE: Self = Self(0b10_0000);
+ pub const CHROMA_QP_OFFSET: Self = Self(0b100_0000);
+ pub const SECOND_CHROMA_QP_OFFSET: Self = Self(0b1000_0000);
+ pub const PIC_INIT_QP_MINUS26: Self = Self(0b1_0000_0000);
+ pub const WEIGHTED_PRED: Self = Self(0b10_0000_0000);
+ pub const WEIGHTED_BIPRED_EXPLICIT: Self = Self(0b100_0000_0000);
+ pub const WEIGHTED_BIPRED_IMPLICIT: Self = Self(0b1000_0000_0000);
+ pub const WEIGHTED_PRED_NO_TABLE: Self = Self(0b1_0000_0000_0000);
+ pub const TRANSFORM_8X8: Self = Self(0b10_0000_0000_0000);
+ pub const CABAC: Self = Self(0b100_0000_0000_0000);
+ pub const CAVLC: Self = Self(0b1000_0000_0000_0000);
+ pub const DEBLOCKING_FILTER_DISABLED: Self = Self(0b1_0000_0000_0000_0000);
+ pub const DEBLOCKING_FILTER_ENABLED: Self = Self(0b10_0000_0000_0000_0000);
+ pub const DEBLOCKING_FILTER_PARTIAL: Self = Self(0b100_0000_0000_0000_0000);
+ pub const DISABLE_DIRECT_SPATIAL_MV_PRED: Self = Self(0b1000_0000_0000_0000_0000);
+ pub const MULTIPLE_SLICE_PER_FRAME: Self = Self(0b1_0000_0000_0000_0000_0000);
+ pub const SLICE_MB_COUNT: Self = Self(0b10_0000_0000_0000_0000_0000);
+ pub const ROW_UNALIGNED_SLICE: Self = Self(0b100_0000_0000_0000_0000_0000);
+ pub const DIFFERENT_SLICE_TYPE: Self = Self(0b1000_0000_0000_0000_0000_0000);
+ pub const B_FRAME_IN_L1_LIST: Self = Self(0b1_0000_0000_0000_0000_0000_0000);
+}
+// Input/output granularity for the H.264 encoder: whole frame, per slice,
+// or non-VCL (parameter-set) units. Input and output types carry the same bits.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264InputModeFlagBitsEXT.html>"]
+pub struct VideoEncodeH264InputModeFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoEncodeH264InputModeFlagsEXT, Flags);
+impl VideoEncodeH264InputModeFlagsEXT {
+ pub const FRAME: Self = Self(0b1);
+ pub const SLICE: Self = Self(0b10);
+ pub const NON_VCL: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264OutputModeFlagBitsEXT.html>"]
+pub struct VideoEncodeH264OutputModeFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoEncodeH264OutputModeFlagsEXT, Flags);
+impl VideoEncodeH264OutputModeFlagsEXT {
+ pub const FRAME: Self = Self(0b1);
+ pub const SLICE: Self = Self(0b10);
+ pub const NON_VCL: Self = Self(0b100);
+}
+// Fuchsia buffer-collection constraint flags (VK_FUCHSIA_buffer_collection).
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageFormatConstraintsFlagBitsFUCHSIA.html>"]
+pub struct ImageFormatConstraintsFlagsFUCHSIA(pub(crate) Flags);
+vk_bitflags_wrapped!(ImageFormatConstraintsFlagsFUCHSIA, Flags);
+impl ImageFormatConstraintsFlagsFUCHSIA {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageConstraintsInfoFlagBitsFUCHSIA.html>"]
+pub struct ImageConstraintsInfoFlagsFUCHSIA(pub(crate) Flags);
+vk_bitflags_wrapped!(ImageConstraintsInfoFlagsFUCHSIA, Flags);
+impl ImageConstraintsInfoFlagsFUCHSIA {
+ pub const CPU_READ_RARELY: Self = Self(0b1);
+ pub const CPU_READ_OFTEN: Self = Self(0b10);
+ pub const CPU_WRITE_RARELY: Self = Self(0b100);
+ pub const CPU_WRITE_OFTEN: Self = Self(0b1000);
+ pub const PROTECTED_OPTIONAL: Self = Self(0b1_0000);
+}
+// 64-bit format-feature masks; *_KHR/*_EXT constants alias core names (promoted
+// extensions). Note the gap between bit 23 (COSITED_CHROMA_SAMPLES) and bit 31
+// (STORAGE_READ_WITHOUT_FORMAT): the intervening bits are presumably claimed by
+// extensions defined elsewhere — TODO confirm against the registry.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFormatFeatureFlagBits2.html>"]
+pub struct FormatFeatureFlags2(pub(crate) Flags64);
+vk_bitflags_wrapped!(FormatFeatureFlags2, Flags64);
+impl FormatFeatureFlags2 {
+ pub const SAMPLED_IMAGE: Self = Self(0b1);
+ pub const SAMPLED_IMAGE_KHR: Self = Self::SAMPLED_IMAGE;
+ pub const STORAGE_IMAGE: Self = Self(0b10);
+ pub const STORAGE_IMAGE_KHR: Self = Self::STORAGE_IMAGE;
+ pub const STORAGE_IMAGE_ATOMIC: Self = Self(0b100);
+ pub const STORAGE_IMAGE_ATOMIC_KHR: Self = Self::STORAGE_IMAGE_ATOMIC;
+ pub const UNIFORM_TEXEL_BUFFER: Self = Self(0b1000);
+ pub const UNIFORM_TEXEL_BUFFER_KHR: Self = Self::UNIFORM_TEXEL_BUFFER;
+ pub const STORAGE_TEXEL_BUFFER: Self = Self(0b1_0000);
+ pub const STORAGE_TEXEL_BUFFER_KHR: Self = Self::STORAGE_TEXEL_BUFFER;
+ pub const STORAGE_TEXEL_BUFFER_ATOMIC: Self = Self(0b10_0000);
+ pub const STORAGE_TEXEL_BUFFER_ATOMIC_KHR: Self = Self::STORAGE_TEXEL_BUFFER_ATOMIC;
+ pub const VERTEX_BUFFER: Self = Self(0b100_0000);
+ pub const VERTEX_BUFFER_KHR: Self = Self::VERTEX_BUFFER;
+ pub const COLOR_ATTACHMENT: Self = Self(0b1000_0000);
+ pub const COLOR_ATTACHMENT_KHR: Self = Self::COLOR_ATTACHMENT;
+ pub const COLOR_ATTACHMENT_BLEND: Self = Self(0b1_0000_0000);
+ pub const COLOR_ATTACHMENT_BLEND_KHR: Self = Self::COLOR_ATTACHMENT_BLEND;
+ pub const DEPTH_STENCIL_ATTACHMENT: Self = Self(0b10_0000_0000);
+ pub const DEPTH_STENCIL_ATTACHMENT_KHR: Self = Self::DEPTH_STENCIL_ATTACHMENT;
+ pub const BLIT_SRC: Self = Self(0b100_0000_0000);
+ pub const BLIT_SRC_KHR: Self = Self::BLIT_SRC;
+ pub const BLIT_DST: Self = Self(0b1000_0000_0000);
+ pub const BLIT_DST_KHR: Self = Self::BLIT_DST;
+ pub const SAMPLED_IMAGE_FILTER_LINEAR: Self = Self(0b1_0000_0000_0000);
+ pub const SAMPLED_IMAGE_FILTER_LINEAR_KHR: Self = Self::SAMPLED_IMAGE_FILTER_LINEAR;
+ pub const SAMPLED_IMAGE_FILTER_CUBIC: Self = Self(0b10_0000_0000_0000);
+ pub const SAMPLED_IMAGE_FILTER_CUBIC_EXT: Self = Self::SAMPLED_IMAGE_FILTER_CUBIC;
+ pub const TRANSFER_SRC: Self = Self(0b100_0000_0000_0000);
+ pub const TRANSFER_SRC_KHR: Self = Self::TRANSFER_SRC;
+ pub const TRANSFER_DST: Self = Self(0b1000_0000_0000_0000);
+ pub const TRANSFER_DST_KHR: Self = Self::TRANSFER_DST;
+ pub const SAMPLED_IMAGE_FILTER_MINMAX: Self = Self(0b1_0000_0000_0000_0000);
+ pub const SAMPLED_IMAGE_FILTER_MINMAX_KHR: Self = Self::SAMPLED_IMAGE_FILTER_MINMAX;
+ pub const MIDPOINT_CHROMA_SAMPLES: Self = Self(0b10_0000_0000_0000_0000);
+ pub const MIDPOINT_CHROMA_SAMPLES_KHR: Self = Self::MIDPOINT_CHROMA_SAMPLES;
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER: Self = Self(0b100_0000_0000_0000_0000);
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_KHR: Self =
+ Self::SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER;
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER: Self =
+ Self(0b1000_0000_0000_0000_0000);
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_KHR: Self =
+ Self::SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER;
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT: Self =
+ Self(0b1_0000_0000_0000_0000_0000);
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_KHR: Self =
+ Self::SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT;
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE: Self =
+ Self(0b10_0000_0000_0000_0000_0000);
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_KHR: Self =
+ Self::SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE;
+ pub const DISJOINT: Self = Self(0b100_0000_0000_0000_0000_0000);
+ pub const DISJOINT_KHR: Self = Self::DISJOINT;
+ pub const COSITED_CHROMA_SAMPLES: Self = Self(0b1000_0000_0000_0000_0000_0000);
+ pub const COSITED_CHROMA_SAMPLES_KHR: Self = Self::COSITED_CHROMA_SAMPLES;
+ pub const STORAGE_READ_WITHOUT_FORMAT: Self = Self(0b1000_0000_0000_0000_0000_0000_0000_0000);
+ pub const STORAGE_READ_WITHOUT_FORMAT_KHR: Self = Self::STORAGE_READ_WITHOUT_FORMAT;
+ pub const STORAGE_WRITE_WITHOUT_FORMAT: Self =
+ Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const STORAGE_WRITE_WITHOUT_FORMAT_KHR: Self = Self::STORAGE_WRITE_WITHOUT_FORMAT;
+ pub const SAMPLED_IMAGE_DEPTH_COMPARISON: Self =
+ Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const SAMPLED_IMAGE_DEPTH_COMPARISON_KHR: Self = Self::SAMPLED_IMAGE_DEPTH_COMPARISON;
+}
+// Dynamic-rendering flags; *_KHR aliases reflect VK_KHR_dynamic_rendering's
+// promotion to core.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderingFlagBits.html>"]
+pub struct RenderingFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(RenderingFlags, Flags);
+impl RenderingFlags {
+ pub const CONTENTS_SECONDARY_COMMAND_BUFFERS: Self = Self(0b1);
+ pub const CONTENTS_SECONDARY_COMMAND_BUFFERS_KHR: Self =
+ Self::CONTENTS_SECONDARY_COMMAND_BUFFERS;
+ pub const SUSPENDING: Self = Self(0b10);
+ pub const SUSPENDING_KHR: Self = Self::SUSPENDING;
+ pub const RESUMING: Self = Self(0b100);
+ pub const RESUMING_KHR: Self = Self::RESUMING;
+}
+// H.265 encoder capability bits (VK_EXT_video_encode_h265). Names mirror HEVC
+// bitstream syntax elements (e.g. sps_temporal_mvp_enabled, transquant_bypass).
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265CapabilityFlagBitsEXT.html>"]
+pub struct VideoEncodeH265CapabilityFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoEncodeH265CapabilityFlagsEXT, Flags);
+impl VideoEncodeH265CapabilityFlagsEXT {
+ pub const SEPARATE_COLOUR_PLANE: Self = Self(0b1);
+ pub const SCALING_LISTS: Self = Self(0b10);
+ pub const SAMPLE_ADAPTIVE_OFFSET_ENABLED: Self = Self(0b100);
+ pub const PCM_ENABLE: Self = Self(0b1000);
+ pub const SPS_TEMPORAL_MVP_ENABLED: Self = Self(0b1_0000);
+ pub const HRD_COMPLIANCE: Self = Self(0b10_0000);
+ pub const INIT_QP_MINUS26: Self = Self(0b100_0000);
+ pub const LOG2_PARALLEL_MERGE_LEVEL_MINUS2: Self = Self(0b1000_0000);
+ pub const SIGN_DATA_HIDING_ENABLED: Self = Self(0b1_0000_0000);
+ pub const TRANSFORM_SKIP_ENABLED: Self = Self(0b10_0000_0000);
+ pub const TRANSFORM_SKIP_DISABLED: Self = Self(0b100_0000_0000);
+ pub const PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT: Self = Self(0b1000_0000_0000);
+ pub const WEIGHTED_PRED: Self = Self(0b1_0000_0000_0000);
+ pub const WEIGHTED_BIPRED: Self = Self(0b10_0000_0000_0000);
+ pub const WEIGHTED_PRED_NO_TABLE: Self = Self(0b100_0000_0000_0000);
+ pub const TRANSQUANT_BYPASS_ENABLED: Self = Self(0b1000_0000_0000_0000);
+ pub const ENTROPY_CODING_SYNC_ENABLED: Self = Self(0b1_0000_0000_0000_0000);
+ pub const DEBLOCKING_FILTER_OVERRIDE_ENABLED: Self = Self(0b10_0000_0000_0000_0000);
+ pub const MULTIPLE_TILE_PER_FRAME: Self = Self(0b100_0000_0000_0000_0000);
+ pub const MULTIPLE_SLICE_PER_TILE: Self = Self(0b1000_0000_0000_0000_0000);
+ pub const MULTIPLE_TILE_PER_SLICE: Self = Self(0b1_0000_0000_0000_0000_0000);
+ pub const SLICE_SEGMENT_CTB_COUNT: Self = Self(0b10_0000_0000_0000_0000_0000);
+ pub const ROW_UNALIGNED_SLICE_SEGMENT: Self = Self(0b100_0000_0000_0000_0000_0000);
+ pub const DEPENDENT_SLICE_SEGMENT: Self = Self(0b1000_0000_0000_0000_0000_0000);
+ pub const DIFFERENT_SLICE_TYPE: Self = Self(0b1_0000_0000_0000_0000_0000_0000);
+ pub const B_FRAME_IN_L1_LIST: Self = Self(0b10_0000_0000_0000_0000_0000_0000);
+}
+// Input/output granularity for the H.265 encoder: frame, slice segment, or
+// non-VCL units. Input and output types carry the same bits.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265InputModeFlagBitsEXT.html>"]
+pub struct VideoEncodeH265InputModeFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoEncodeH265InputModeFlagsEXT, Flags);
+impl VideoEncodeH265InputModeFlagsEXT {
+ pub const FRAME: Self = Self(0b1);
+ pub const SLICE_SEGMENT: Self = Self(0b10);
+ pub const NON_VCL: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265OutputModeFlagBitsEXT.html>"]
+pub struct VideoEncodeH265OutputModeFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoEncodeH265OutputModeFlagsEXT, Flags);
+impl VideoEncodeH265OutputModeFlagsEXT {
+ pub const FRAME: Self = Self(0b1);
+ pub const SLICE_SEGMENT: Self = Self(0b10);
+ pub const NON_VCL: Self = Self(0b100);
+}
+// Supported CTB (coding-tree-block) and transform-block sizes, in pixels.
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265CtbSizeFlagBitsEXT.html>"]
+pub struct VideoEncodeH265CtbSizeFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoEncodeH265CtbSizeFlagsEXT, Flags);
+impl VideoEncodeH265CtbSizeFlagsEXT {
+ pub const TYPE_16: Self = Self(0b1);
+ pub const TYPE_32: Self = Self(0b10);
+ pub const TYPE_64: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265TransformBlockSizeFlagBitsEXT.html>"]
+pub struct VideoEncodeH265TransformBlockSizeFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(VideoEncodeH265TransformBlockSizeFlagsEXT, Flags);
+impl VideoEncodeH265TransformBlockSizeFlagsEXT {
+ pub const TYPE_4: Self = Self(0b1);
+ pub const TYPE_8: Self = Self(0b10);
+ pub const TYPE_16: Self = Self(0b100);
+ pub const TYPE_32: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportMetalObjectTypeFlagBitsEXT.html>"]
+pub struct ExportMetalObjectTypeFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(ExportMetalObjectTypeFlagsEXT, Flags);
+impl ExportMetalObjectTypeFlagsEXT {
+ pub const METAL_DEVICE: Self = Self(0b1);
+ pub const METAL_COMMAND_QUEUE: Self = Self(0b10);
+ pub const METAL_BUFFER: Self = Self(0b100);
+ pub const METAL_TEXTURE: Self = Self(0b1000);
+ pub const METAL_IOSURFACE: Self = Self(0b1_0000);
+ pub const METAL_SHARED_EVENT: Self = Self(0b10_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkInstanceCreateFlagBits.html>"]
+pub struct InstanceCreateFlags(pub(crate) Flags);
+vk_bitflags_wrapped!(InstanceCreateFlags, Flags);
+impl InstanceCreateFlags {}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageCompressionFlagBitsEXT.html>"]
+pub struct ImageCompressionFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(ImageCompressionFlagsEXT, Flags);
+impl ImageCompressionFlagsEXT {
+ pub const DEFAULT: Self = Self(0);
+ pub const FIXED_RATE_DEFAULT: Self = Self(0b1);
+ pub const FIXED_RATE_EXPLICIT: Self = Self(0b10);
+ pub const DISABLED: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageCompressionFixedRateFlagBitsEXT.html>"]
+pub struct ImageCompressionFixedRateFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(ImageCompressionFixedRateFlagsEXT, Flags);
+impl ImageCompressionFixedRateFlagsEXT {
+ pub const NONE: Self = Self(0);
+ pub const TYPE_1BPC: Self = Self(0b1);
+ pub const TYPE_2BPC: Self = Self(0b10);
+ pub const TYPE_3BPC: Self = Self(0b100);
+ pub const TYPE_4BPC: Self = Self(0b1000);
+ pub const TYPE_5BPC: Self = Self(0b1_0000);
+ pub const TYPE_6BPC: Self = Self(0b10_0000);
+ pub const TYPE_7BPC: Self = Self(0b100_0000);
+ pub const TYPE_8BPC: Self = Self(0b1000_0000);
+ pub const TYPE_9BPC: Self = Self(0b1_0000_0000);
+ pub const TYPE_10BPC: Self = Self(0b10_0000_0000);
+ pub const TYPE_11BPC: Self = Self(0b100_0000_0000);
+ pub const TYPE_12BPC: Self = Self(0b1000_0000_0000);
+ pub const TYPE_13BPC: Self = Self(0b1_0000_0000_0000);
+ pub const TYPE_14BPC: Self = Self(0b10_0000_0000_0000);
+ pub const TYPE_15BPC: Self = Self(0b100_0000_0000_0000);
+ pub const TYPE_16BPC: Self = Self(0b1000_0000_0000_0000);
+ pub const TYPE_17BPC: Self = Self(0b1_0000_0000_0000_0000);
+ pub const TYPE_18BPC: Self = Self(0b10_0000_0000_0000_0000);
+ pub const TYPE_19BPC: Self = Self(0b100_0000_0000_0000_0000);
+ pub const TYPE_20BPC: Self = Self(0b1000_0000_0000_0000_0000);
+ pub const TYPE_21BPC: Self = Self(0b1_0000_0000_0000_0000_0000);
+ pub const TYPE_22BPC: Self = Self(0b10_0000_0000_0000_0000_0000);
+ pub const TYPE_23BPC: Self = Self(0b100_0000_0000_0000_0000_0000);
+ pub const TYPE_24BPC: Self = Self(0b1000_0000_0000_0000_0000_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOpticalFlowGridSizeFlagBitsNV.html>"]
+pub struct OpticalFlowGridSizeFlagsNV(pub(crate) Flags);
+vk_bitflags_wrapped!(OpticalFlowGridSizeFlagsNV, Flags);
+impl OpticalFlowGridSizeFlagsNV {
+ pub const UNKNOWN: Self = Self(0);
+ pub const TYPE_1X1: Self = Self(0b1);
+ pub const TYPE_2X2: Self = Self(0b10);
+ pub const TYPE_4X4: Self = Self(0b100);
+ pub const TYPE_8X8: Self = Self(0b1000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOpticalFlowUsageFlagBitsNV.html>"]
+pub struct OpticalFlowUsageFlagsNV(pub(crate) Flags);
+vk_bitflags_wrapped!(OpticalFlowUsageFlagsNV, Flags);
+impl OpticalFlowUsageFlagsNV {
+ pub const UNKNOWN: Self = Self(0);
+ pub const INPUT: Self = Self(0b1);
+ pub const OUTPUT: Self = Self(0b10);
+ pub const HINT: Self = Self(0b100);
+ pub const COST: Self = Self(0b1000);
+ pub const GLOBAL_FLOW: Self = Self(0b1_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOpticalFlowSessionCreateFlagBitsNV.html>"]
+pub struct OpticalFlowSessionCreateFlagsNV(pub(crate) Flags);
+vk_bitflags_wrapped!(OpticalFlowSessionCreateFlagsNV, Flags);
+impl OpticalFlowSessionCreateFlagsNV {
+ pub const ENABLE_HINT: Self = Self(0b1);
+ pub const ENABLE_COST: Self = Self(0b10);
+ pub const ENABLE_GLOBAL_FLOW: Self = Self(0b100);
+ pub const ALLOW_REGIONS: Self = Self(0b1000);
+ pub const BOTH_DIRECTIONS: Self = Self(0b1_0000);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOpticalFlowExecuteFlagBitsNV.html>"]
+pub struct OpticalFlowExecuteFlagsNV(pub(crate) Flags);
+vk_bitflags_wrapped!(OpticalFlowExecuteFlagsNV, Flags);
+impl OpticalFlowExecuteFlagsNV {
+ pub const DISABLE_TEMPORAL_HINTS: Self = Self(0b1);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBuildMicromapFlagBitsEXT.html>"]
+pub struct BuildMicromapFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(BuildMicromapFlagsEXT, Flags);
+impl BuildMicromapFlagsEXT {
+ pub const PREFER_FAST_TRACE: Self = Self(0b1);
+ pub const PREFER_FAST_BUILD: Self = Self(0b10);
+ pub const ALLOW_COMPACTION: Self = Self(0b100);
+}
+#[repr(transparent)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMicromapCreateFlagBitsEXT.html>"]
+pub struct MicromapCreateFlagsEXT(pub(crate) Flags);
+vk_bitflags_wrapped!(MicromapCreateFlagsEXT, Flags);
+impl MicromapCreateFlagsEXT {
+ pub const DEVICE_ADDRESS_CAPTURE_REPLAY: Self = Self(0b1);
+}
diff --git a/third_party/rust/ash/src/vk/const_debugs.rs b/third_party/rust/ash/src/vk/const_debugs.rs
new file mode 100644
index 0000000000..6508306e30
--- /dev/null
+++ b/third_party/rust/ash/src/vk/const_debugs.rs
@@ -0,0 +1,7167 @@
+use crate::prelude::debug_flags;
+use crate::vk::bitflags::*;
+use crate::vk::definitions::*;
+use crate::vk::enums::*;
+use std::fmt;
+impl fmt::Debug for AccelerationStructureBuildTypeKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::HOST => Some("HOST"),
+ Self::DEVICE => Some("DEVICE"),
+ Self::HOST_OR_DEVICE => Some("HOST_OR_DEVICE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for AccelerationStructureCompatibilityKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::COMPATIBLE => Some("COMPATIBLE"),
+ Self::INCOMPATIBLE => Some("INCOMPATIBLE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for AccelerationStructureCreateFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ AccelerationStructureCreateFlagsKHR::DEVICE_ADDRESS_CAPTURE_REPLAY.0,
+ "DEVICE_ADDRESS_CAPTURE_REPLAY",
+ ),
+ (
+ AccelerationStructureCreateFlagsKHR::DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT.0,
+ "DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT",
+ ),
+ (
+ AccelerationStructureCreateFlagsKHR::MOTION_NV.0,
+ "MOTION_NV",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for AccelerationStructureMemoryRequirementsTypeNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::OBJECT => Some("OBJECT"),
+ Self::BUILD_SCRATCH => Some("BUILD_SCRATCH"),
+ Self::UPDATE_SCRATCH => Some("UPDATE_SCRATCH"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for AccelerationStructureMotionInfoFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for AccelerationStructureMotionInstanceFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for AccelerationStructureMotionInstanceTypeNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::STATIC => Some("STATIC"),
+ Self::MATRIX_MOTION => Some("MATRIX_MOTION"),
+ Self::SRT_MOTION => Some("SRT_MOTION"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for AccelerationStructureTypeKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::TOP_LEVEL => Some("TOP_LEVEL"),
+ Self::BOTTOM_LEVEL => Some("BOTTOM_LEVEL"),
+ Self::GENERIC => Some("GENERIC"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for AccessFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ AccessFlags::INDIRECT_COMMAND_READ.0,
+ "INDIRECT_COMMAND_READ",
+ ),
+ (AccessFlags::INDEX_READ.0, "INDEX_READ"),
+ (
+ AccessFlags::VERTEX_ATTRIBUTE_READ.0,
+ "VERTEX_ATTRIBUTE_READ",
+ ),
+ (AccessFlags::UNIFORM_READ.0, "UNIFORM_READ"),
+ (
+ AccessFlags::INPUT_ATTACHMENT_READ.0,
+ "INPUT_ATTACHMENT_READ",
+ ),
+ (AccessFlags::SHADER_READ.0, "SHADER_READ"),
+ (AccessFlags::SHADER_WRITE.0, "SHADER_WRITE"),
+ (
+ AccessFlags::COLOR_ATTACHMENT_READ.0,
+ "COLOR_ATTACHMENT_READ",
+ ),
+ (
+ AccessFlags::COLOR_ATTACHMENT_WRITE.0,
+ "COLOR_ATTACHMENT_WRITE",
+ ),
+ (
+ AccessFlags::DEPTH_STENCIL_ATTACHMENT_READ.0,
+ "DEPTH_STENCIL_ATTACHMENT_READ",
+ ),
+ (
+ AccessFlags::DEPTH_STENCIL_ATTACHMENT_WRITE.0,
+ "DEPTH_STENCIL_ATTACHMENT_WRITE",
+ ),
+ (AccessFlags::TRANSFER_READ.0, "TRANSFER_READ"),
+ (AccessFlags::TRANSFER_WRITE.0, "TRANSFER_WRITE"),
+ (AccessFlags::HOST_READ.0, "HOST_READ"),
+ (AccessFlags::HOST_WRITE.0, "HOST_WRITE"),
+ (AccessFlags::MEMORY_READ.0, "MEMORY_READ"),
+ (AccessFlags::MEMORY_WRITE.0, "MEMORY_WRITE"),
+ (
+ AccessFlags::TRANSFORM_FEEDBACK_WRITE_EXT.0,
+ "TRANSFORM_FEEDBACK_WRITE_EXT",
+ ),
+ (
+ AccessFlags::TRANSFORM_FEEDBACK_COUNTER_READ_EXT.0,
+ "TRANSFORM_FEEDBACK_COUNTER_READ_EXT",
+ ),
+ (
+ AccessFlags::TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT.0,
+ "TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT",
+ ),
+ (
+ AccessFlags::CONDITIONAL_RENDERING_READ_EXT.0,
+ "CONDITIONAL_RENDERING_READ_EXT",
+ ),
+ (
+ AccessFlags::COLOR_ATTACHMENT_READ_NONCOHERENT_EXT.0,
+ "COLOR_ATTACHMENT_READ_NONCOHERENT_EXT",
+ ),
+ (
+ AccessFlags::ACCELERATION_STRUCTURE_READ_KHR.0,
+ "ACCELERATION_STRUCTURE_READ_KHR",
+ ),
+ (
+ AccessFlags::ACCELERATION_STRUCTURE_WRITE_KHR.0,
+ "ACCELERATION_STRUCTURE_WRITE_KHR",
+ ),
+ (
+ AccessFlags::FRAGMENT_DENSITY_MAP_READ_EXT.0,
+ "FRAGMENT_DENSITY_MAP_READ_EXT",
+ ),
+ (
+ AccessFlags::FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR.0,
+ "FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR",
+ ),
+ (
+ AccessFlags::COMMAND_PREPROCESS_READ_NV.0,
+ "COMMAND_PREPROCESS_READ_NV",
+ ),
+ (
+ AccessFlags::COMMAND_PREPROCESS_WRITE_NV.0,
+ "COMMAND_PREPROCESS_WRITE_NV",
+ ),
+ (AccessFlags::NONE.0, "NONE"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for AccessFlags2 {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags64, &str)] = &[
+ (AccessFlags2::NONE.0, "NONE"),
+ (
+ AccessFlags2::INDIRECT_COMMAND_READ.0,
+ "INDIRECT_COMMAND_READ",
+ ),
+ (AccessFlags2::INDEX_READ.0, "INDEX_READ"),
+ (
+ AccessFlags2::VERTEX_ATTRIBUTE_READ.0,
+ "VERTEX_ATTRIBUTE_READ",
+ ),
+ (AccessFlags2::UNIFORM_READ.0, "UNIFORM_READ"),
+ (
+ AccessFlags2::INPUT_ATTACHMENT_READ.0,
+ "INPUT_ATTACHMENT_READ",
+ ),
+ (AccessFlags2::SHADER_READ.0, "SHADER_READ"),
+ (AccessFlags2::SHADER_WRITE.0, "SHADER_WRITE"),
+ (
+ AccessFlags2::COLOR_ATTACHMENT_READ.0,
+ "COLOR_ATTACHMENT_READ",
+ ),
+ (
+ AccessFlags2::COLOR_ATTACHMENT_WRITE.0,
+ "COLOR_ATTACHMENT_WRITE",
+ ),
+ (
+ AccessFlags2::DEPTH_STENCIL_ATTACHMENT_READ.0,
+ "DEPTH_STENCIL_ATTACHMENT_READ",
+ ),
+ (
+ AccessFlags2::DEPTH_STENCIL_ATTACHMENT_WRITE.0,
+ "DEPTH_STENCIL_ATTACHMENT_WRITE",
+ ),
+ (AccessFlags2::TRANSFER_READ.0, "TRANSFER_READ"),
+ (AccessFlags2::TRANSFER_WRITE.0, "TRANSFER_WRITE"),
+ (AccessFlags2::HOST_READ.0, "HOST_READ"),
+ (AccessFlags2::HOST_WRITE.0, "HOST_WRITE"),
+ (AccessFlags2::MEMORY_READ.0, "MEMORY_READ"),
+ (AccessFlags2::MEMORY_WRITE.0, "MEMORY_WRITE"),
+ (AccessFlags2::SHADER_SAMPLED_READ.0, "SHADER_SAMPLED_READ"),
+ (AccessFlags2::SHADER_STORAGE_READ.0, "SHADER_STORAGE_READ"),
+ (AccessFlags2::SHADER_STORAGE_WRITE.0, "SHADER_STORAGE_WRITE"),
+ (
+ AccessFlags2::VIDEO_DECODE_READ_KHR.0,
+ "VIDEO_DECODE_READ_KHR",
+ ),
+ (
+ AccessFlags2::VIDEO_DECODE_WRITE_KHR.0,
+ "VIDEO_DECODE_WRITE_KHR",
+ ),
+ (AccessFlags2::RESERVED_46_EXT.0, "RESERVED_46_EXT"),
+ (
+ AccessFlags2::VIDEO_ENCODE_READ_KHR.0,
+ "VIDEO_ENCODE_READ_KHR",
+ ),
+ (
+ AccessFlags2::VIDEO_ENCODE_WRITE_KHR.0,
+ "VIDEO_ENCODE_WRITE_KHR",
+ ),
+ (
+ AccessFlags2::TRANSFORM_FEEDBACK_WRITE_EXT.0,
+ "TRANSFORM_FEEDBACK_WRITE_EXT",
+ ),
+ (
+ AccessFlags2::TRANSFORM_FEEDBACK_COUNTER_READ_EXT.0,
+ "TRANSFORM_FEEDBACK_COUNTER_READ_EXT",
+ ),
+ (
+ AccessFlags2::TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT.0,
+ "TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT",
+ ),
+ (
+ AccessFlags2::CONDITIONAL_RENDERING_READ_EXT.0,
+ "CONDITIONAL_RENDERING_READ_EXT",
+ ),
+ (
+ AccessFlags2::COMMAND_PREPROCESS_READ_NV.0,
+ "COMMAND_PREPROCESS_READ_NV",
+ ),
+ (
+ AccessFlags2::COMMAND_PREPROCESS_WRITE_NV.0,
+ "COMMAND_PREPROCESS_WRITE_NV",
+ ),
+ (
+ AccessFlags2::FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR.0,
+ "FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR",
+ ),
+ (
+ AccessFlags2::ACCELERATION_STRUCTURE_READ_KHR.0,
+ "ACCELERATION_STRUCTURE_READ_KHR",
+ ),
+ (
+ AccessFlags2::ACCELERATION_STRUCTURE_WRITE_KHR.0,
+ "ACCELERATION_STRUCTURE_WRITE_KHR",
+ ),
+ (
+ AccessFlags2::FRAGMENT_DENSITY_MAP_READ_EXT.0,
+ "FRAGMENT_DENSITY_MAP_READ_EXT",
+ ),
+ (
+ AccessFlags2::COLOR_ATTACHMENT_READ_NONCOHERENT_EXT.0,
+ "COLOR_ATTACHMENT_READ_NONCOHERENT_EXT",
+ ),
+ (
+ AccessFlags2::DESCRIPTOR_BUFFER_READ_EXT.0,
+ "DESCRIPTOR_BUFFER_READ_EXT",
+ ),
+ (
+ AccessFlags2::INVOCATION_MASK_READ_HUAWEI.0,
+ "INVOCATION_MASK_READ_HUAWEI",
+ ),
+ (
+ AccessFlags2::SHADER_BINDING_TABLE_READ_KHR.0,
+ "SHADER_BINDING_TABLE_READ_KHR",
+ ),
+ (AccessFlags2::MICROMAP_READ_EXT.0, "MICROMAP_READ_EXT"),
+ (AccessFlags2::MICROMAP_WRITE_EXT.0, "MICROMAP_WRITE_EXT"),
+ (AccessFlags2::OPTICAL_FLOW_READ_NV.0, "OPTICAL_FLOW_READ_NV"),
+ (
+ AccessFlags2::OPTICAL_FLOW_WRITE_NV.0,
+ "OPTICAL_FLOW_WRITE_NV",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for AcquireProfilingLockFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for AndroidSurfaceCreateFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for AttachmentDescriptionFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(AttachmentDescriptionFlags::MAY_ALIAS.0, "MAY_ALIAS")];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for AttachmentLoadOp {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::LOAD => Some("LOAD"),
+ Self::CLEAR => Some("CLEAR"),
+ Self::DONT_CARE => Some("DONT_CARE"),
+ Self::NONE_EXT => Some("NONE_EXT"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for AttachmentStoreOp {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::STORE => Some("STORE"),
+ Self::DONT_CARE => Some("DONT_CARE"),
+ Self::NONE => Some("NONE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for BlendFactor {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::ZERO => Some("ZERO"),
+ Self::ONE => Some("ONE"),
+ Self::SRC_COLOR => Some("SRC_COLOR"),
+ Self::ONE_MINUS_SRC_COLOR => Some("ONE_MINUS_SRC_COLOR"),
+ Self::DST_COLOR => Some("DST_COLOR"),
+ Self::ONE_MINUS_DST_COLOR => Some("ONE_MINUS_DST_COLOR"),
+ Self::SRC_ALPHA => Some("SRC_ALPHA"),
+ Self::ONE_MINUS_SRC_ALPHA => Some("ONE_MINUS_SRC_ALPHA"),
+ Self::DST_ALPHA => Some("DST_ALPHA"),
+ Self::ONE_MINUS_DST_ALPHA => Some("ONE_MINUS_DST_ALPHA"),
+ Self::CONSTANT_COLOR => Some("CONSTANT_COLOR"),
+ Self::ONE_MINUS_CONSTANT_COLOR => Some("ONE_MINUS_CONSTANT_COLOR"),
+ Self::CONSTANT_ALPHA => Some("CONSTANT_ALPHA"),
+ Self::ONE_MINUS_CONSTANT_ALPHA => Some("ONE_MINUS_CONSTANT_ALPHA"),
+ Self::SRC_ALPHA_SATURATE => Some("SRC_ALPHA_SATURATE"),
+ Self::SRC1_COLOR => Some("SRC1_COLOR"),
+ Self::ONE_MINUS_SRC1_COLOR => Some("ONE_MINUS_SRC1_COLOR"),
+ Self::SRC1_ALPHA => Some("SRC1_ALPHA"),
+ Self::ONE_MINUS_SRC1_ALPHA => Some("ONE_MINUS_SRC1_ALPHA"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for BlendOp {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::ADD => Some("ADD"),
+ Self::SUBTRACT => Some("SUBTRACT"),
+ Self::REVERSE_SUBTRACT => Some("REVERSE_SUBTRACT"),
+ Self::MIN => Some("MIN"),
+ Self::MAX => Some("MAX"),
+ Self::ZERO_EXT => Some("ZERO_EXT"),
+ Self::SRC_EXT => Some("SRC_EXT"),
+ Self::DST_EXT => Some("DST_EXT"),
+ Self::SRC_OVER_EXT => Some("SRC_OVER_EXT"),
+ Self::DST_OVER_EXT => Some("DST_OVER_EXT"),
+ Self::SRC_IN_EXT => Some("SRC_IN_EXT"),
+ Self::DST_IN_EXT => Some("DST_IN_EXT"),
+ Self::SRC_OUT_EXT => Some("SRC_OUT_EXT"),
+ Self::DST_OUT_EXT => Some("DST_OUT_EXT"),
+ Self::SRC_ATOP_EXT => Some("SRC_ATOP_EXT"),
+ Self::DST_ATOP_EXT => Some("DST_ATOP_EXT"),
+ Self::XOR_EXT => Some("XOR_EXT"),
+ Self::MULTIPLY_EXT => Some("MULTIPLY_EXT"),
+ Self::SCREEN_EXT => Some("SCREEN_EXT"),
+ Self::OVERLAY_EXT => Some("OVERLAY_EXT"),
+ Self::DARKEN_EXT => Some("DARKEN_EXT"),
+ Self::LIGHTEN_EXT => Some("LIGHTEN_EXT"),
+ Self::COLORDODGE_EXT => Some("COLORDODGE_EXT"),
+ Self::COLORBURN_EXT => Some("COLORBURN_EXT"),
+ Self::HARDLIGHT_EXT => Some("HARDLIGHT_EXT"),
+ Self::SOFTLIGHT_EXT => Some("SOFTLIGHT_EXT"),
+ Self::DIFFERENCE_EXT => Some("DIFFERENCE_EXT"),
+ Self::EXCLUSION_EXT => Some("EXCLUSION_EXT"),
+ Self::INVERT_EXT => Some("INVERT_EXT"),
+ Self::INVERT_RGB_EXT => Some("INVERT_RGB_EXT"),
+ Self::LINEARDODGE_EXT => Some("LINEARDODGE_EXT"),
+ Self::LINEARBURN_EXT => Some("LINEARBURN_EXT"),
+ Self::VIVIDLIGHT_EXT => Some("VIVIDLIGHT_EXT"),
+ Self::LINEARLIGHT_EXT => Some("LINEARLIGHT_EXT"),
+ Self::PINLIGHT_EXT => Some("PINLIGHT_EXT"),
+ Self::HARDMIX_EXT => Some("HARDMIX_EXT"),
+ Self::HSL_HUE_EXT => Some("HSL_HUE_EXT"),
+ Self::HSL_SATURATION_EXT => Some("HSL_SATURATION_EXT"),
+ Self::HSL_COLOR_EXT => Some("HSL_COLOR_EXT"),
+ Self::HSL_LUMINOSITY_EXT => Some("HSL_LUMINOSITY_EXT"),
+ Self::PLUS_EXT => Some("PLUS_EXT"),
+ Self::PLUS_CLAMPED_EXT => Some("PLUS_CLAMPED_EXT"),
+ Self::PLUS_CLAMPED_ALPHA_EXT => Some("PLUS_CLAMPED_ALPHA_EXT"),
+ Self::PLUS_DARKER_EXT => Some("PLUS_DARKER_EXT"),
+ Self::MINUS_EXT => Some("MINUS_EXT"),
+ Self::MINUS_CLAMPED_EXT => Some("MINUS_CLAMPED_EXT"),
+ Self::CONTRAST_EXT => Some("CONTRAST_EXT"),
+ Self::INVERT_OVG_EXT => Some("INVERT_OVG_EXT"),
+ Self::RED_EXT => Some("RED_EXT"),
+ Self::GREEN_EXT => Some("GREEN_EXT"),
+ Self::BLUE_EXT => Some("BLUE_EXT"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for BlendOverlapEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::UNCORRELATED => Some("UNCORRELATED"),
+ Self::DISJOINT => Some("DISJOINT"),
+ Self::CONJOINT => Some("CONJOINT"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for BorderColor {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::FLOAT_TRANSPARENT_BLACK => Some("FLOAT_TRANSPARENT_BLACK"),
+ Self::INT_TRANSPARENT_BLACK => Some("INT_TRANSPARENT_BLACK"),
+ Self::FLOAT_OPAQUE_BLACK => Some("FLOAT_OPAQUE_BLACK"),
+ Self::INT_OPAQUE_BLACK => Some("INT_OPAQUE_BLACK"),
+ Self::FLOAT_OPAQUE_WHITE => Some("FLOAT_OPAQUE_WHITE"),
+ Self::INT_OPAQUE_WHITE => Some("INT_OPAQUE_WHITE"),
+ Self::FLOAT_CUSTOM_EXT => Some("FLOAT_CUSTOM_EXT"),
+ Self::INT_CUSTOM_EXT => Some("INT_CUSTOM_EXT"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for BufferCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (BufferCreateFlags::SPARSE_BINDING.0, "SPARSE_BINDING"),
+ (BufferCreateFlags::SPARSE_RESIDENCY.0, "SPARSE_RESIDENCY"),
+ (BufferCreateFlags::SPARSE_ALIASED.0, "SPARSE_ALIASED"),
+ (
+ BufferCreateFlags::DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT.0,
+ "DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT",
+ ),
+ (BufferCreateFlags::PROTECTED.0, "PROTECTED"),
+ (
+ BufferCreateFlags::DEVICE_ADDRESS_CAPTURE_REPLAY.0,
+ "DEVICE_ADDRESS_CAPTURE_REPLAY",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for BufferUsageFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (BufferUsageFlags::TRANSFER_SRC.0, "TRANSFER_SRC"),
+ (BufferUsageFlags::TRANSFER_DST.0, "TRANSFER_DST"),
+ (
+ BufferUsageFlags::UNIFORM_TEXEL_BUFFER.0,
+ "UNIFORM_TEXEL_BUFFER",
+ ),
+ (
+ BufferUsageFlags::STORAGE_TEXEL_BUFFER.0,
+ "STORAGE_TEXEL_BUFFER",
+ ),
+ (BufferUsageFlags::UNIFORM_BUFFER.0, "UNIFORM_BUFFER"),
+ (BufferUsageFlags::STORAGE_BUFFER.0, "STORAGE_BUFFER"),
+ (BufferUsageFlags::INDEX_BUFFER.0, "INDEX_BUFFER"),
+ (BufferUsageFlags::VERTEX_BUFFER.0, "VERTEX_BUFFER"),
+ (BufferUsageFlags::INDIRECT_BUFFER.0, "INDIRECT_BUFFER"),
+ (
+ BufferUsageFlags::VIDEO_DECODE_SRC_KHR.0,
+ "VIDEO_DECODE_SRC_KHR",
+ ),
+ (
+ BufferUsageFlags::VIDEO_DECODE_DST_KHR.0,
+ "VIDEO_DECODE_DST_KHR",
+ ),
+ (
+ BufferUsageFlags::TRANSFORM_FEEDBACK_BUFFER_EXT.0,
+ "TRANSFORM_FEEDBACK_BUFFER_EXT",
+ ),
+ (
+ BufferUsageFlags::TRANSFORM_FEEDBACK_COUNTER_BUFFER_EXT.0,
+ "TRANSFORM_FEEDBACK_COUNTER_BUFFER_EXT",
+ ),
+ (
+ BufferUsageFlags::CONDITIONAL_RENDERING_EXT.0,
+ "CONDITIONAL_RENDERING_EXT",
+ ),
+ (BufferUsageFlags::RESERVED_25_AMD.0, "RESERVED_25_AMD"),
+ (
+ BufferUsageFlags::ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_KHR.0,
+ "ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_KHR",
+ ),
+ (
+ BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR.0,
+ "ACCELERATION_STRUCTURE_STORAGE_KHR",
+ ),
+ (
+ BufferUsageFlags::SHADER_BINDING_TABLE_KHR.0,
+ "SHADER_BINDING_TABLE_KHR",
+ ),
+ (BufferUsageFlags::RESERVED_18_QCOM.0, "RESERVED_18_QCOM"),
+ (
+ BufferUsageFlags::VIDEO_ENCODE_DST_KHR.0,
+ "VIDEO_ENCODE_DST_KHR",
+ ),
+ (
+ BufferUsageFlags::VIDEO_ENCODE_SRC_KHR.0,
+ "VIDEO_ENCODE_SRC_KHR",
+ ),
+ (
+ BufferUsageFlags::SAMPLER_DESCRIPTOR_BUFFER_EXT.0,
+ "SAMPLER_DESCRIPTOR_BUFFER_EXT",
+ ),
+ (
+ BufferUsageFlags::RESOURCE_DESCRIPTOR_BUFFER_EXT.0,
+ "RESOURCE_DESCRIPTOR_BUFFER_EXT",
+ ),
+ (
+ BufferUsageFlags::PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_EXT.0,
+ "PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_EXT",
+ ),
+ (
+ BufferUsageFlags::MICROMAP_BUILD_INPUT_READ_ONLY_EXT.0,
+ "MICROMAP_BUILD_INPUT_READ_ONLY_EXT",
+ ),
+ (
+ BufferUsageFlags::MICROMAP_STORAGE_EXT.0,
+ "MICROMAP_STORAGE_EXT",
+ ),
+ (
+ BufferUsageFlags::SHADER_DEVICE_ADDRESS.0,
+ "SHADER_DEVICE_ADDRESS",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for BufferViewCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for BuildAccelerationStructureFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ BuildAccelerationStructureFlagsKHR::ALLOW_UPDATE.0,
+ "ALLOW_UPDATE",
+ ),
+ (
+ BuildAccelerationStructureFlagsKHR::ALLOW_COMPACTION.0,
+ "ALLOW_COMPACTION",
+ ),
+ (
+ BuildAccelerationStructureFlagsKHR::PREFER_FAST_TRACE.0,
+ "PREFER_FAST_TRACE",
+ ),
+ (
+ BuildAccelerationStructureFlagsKHR::PREFER_FAST_BUILD.0,
+ "PREFER_FAST_BUILD",
+ ),
+ (
+ BuildAccelerationStructureFlagsKHR::LOW_MEMORY.0,
+ "LOW_MEMORY",
+ ),
+ (BuildAccelerationStructureFlagsKHR::MOTION_NV.0, "MOTION_NV"),
+ (
+ BuildAccelerationStructureFlagsKHR::ALLOW_OPACITY_MICROMAP_UPDATE_EXT.0,
+ "ALLOW_OPACITY_MICROMAP_UPDATE_EXT",
+ ),
+ (
+ BuildAccelerationStructureFlagsKHR::ALLOW_DISABLE_OPACITY_MICROMAPS_EXT.0,
+ "ALLOW_DISABLE_OPACITY_MICROMAPS_EXT",
+ ),
+ (
+ BuildAccelerationStructureFlagsKHR::ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT.0,
+ "ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT",
+ ),
+ (
+ BuildAccelerationStructureFlagsKHR::RESERVED_9_NV.0,
+ "RESERVED_9_NV",
+ ),
+ (
+ BuildAccelerationStructureFlagsKHR::RESERVED_10_NV.0,
+ "RESERVED_10_NV",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for BuildAccelerationStructureModeKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::BUILD => Some("BUILD"),
+ Self::UPDATE => Some("UPDATE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for BuildMicromapFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ BuildMicromapFlagsEXT::PREFER_FAST_TRACE.0,
+ "PREFER_FAST_TRACE",
+ ),
+ (
+ BuildMicromapFlagsEXT::PREFER_FAST_BUILD.0,
+ "PREFER_FAST_BUILD",
+ ),
+ (
+ BuildMicromapFlagsEXT::ALLOW_COMPACTION.0,
+ "ALLOW_COMPACTION",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for BuildMicromapModeEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::BUILD => Some("BUILD"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for ChromaLocation {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::COSITED_EVEN => Some("COSITED_EVEN"),
+ Self::MIDPOINT => Some("MIDPOINT"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for CoarseSampleOrderTypeNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::DEFAULT => Some("DEFAULT"),
+ Self::CUSTOM => Some("CUSTOM"),
+ Self::PIXEL_MAJOR => Some("PIXEL_MAJOR"),
+ Self::SAMPLE_MAJOR => Some("SAMPLE_MAJOR"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for ColorComponentFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (ColorComponentFlags::R.0, "R"),
+ (ColorComponentFlags::G.0, "G"),
+ (ColorComponentFlags::B.0, "B"),
+ (ColorComponentFlags::A.0, "A"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ColorSpaceKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::SRGB_NONLINEAR => Some("SRGB_NONLINEAR"),
+ Self::DISPLAY_P3_NONLINEAR_EXT => Some("DISPLAY_P3_NONLINEAR_EXT"),
+ Self::EXTENDED_SRGB_LINEAR_EXT => Some("EXTENDED_SRGB_LINEAR_EXT"),
+ Self::DISPLAY_P3_LINEAR_EXT => Some("DISPLAY_P3_LINEAR_EXT"),
+ Self::DCI_P3_NONLINEAR_EXT => Some("DCI_P3_NONLINEAR_EXT"),
+ Self::BT709_LINEAR_EXT => Some("BT709_LINEAR_EXT"),
+ Self::BT709_NONLINEAR_EXT => Some("BT709_NONLINEAR_EXT"),
+ Self::BT2020_LINEAR_EXT => Some("BT2020_LINEAR_EXT"),
+ Self::HDR10_ST2084_EXT => Some("HDR10_ST2084_EXT"),
+ Self::DOLBYVISION_EXT => Some("DOLBYVISION_EXT"),
+ Self::HDR10_HLG_EXT => Some("HDR10_HLG_EXT"),
+ Self::ADOBERGB_LINEAR_EXT => Some("ADOBERGB_LINEAR_EXT"),
+ Self::ADOBERGB_NONLINEAR_EXT => Some("ADOBERGB_NONLINEAR_EXT"),
+ Self::PASS_THROUGH_EXT => Some("PASS_THROUGH_EXT"),
+ Self::EXTENDED_SRGB_NONLINEAR_EXT => Some("EXTENDED_SRGB_NONLINEAR_EXT"),
+ Self::DISPLAY_NATIVE_AMD => Some("DISPLAY_NATIVE_AMD"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for CommandBufferLevel {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::PRIMARY => Some("PRIMARY"),
+ Self::SECONDARY => Some("SECONDARY"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for CommandBufferResetFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(
+ CommandBufferResetFlags::RELEASE_RESOURCES.0,
+ "RELEASE_RESOURCES",
+ )];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for CommandBufferUsageFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ CommandBufferUsageFlags::ONE_TIME_SUBMIT.0,
+ "ONE_TIME_SUBMIT",
+ ),
+ (
+ CommandBufferUsageFlags::RENDER_PASS_CONTINUE.0,
+ "RENDER_PASS_CONTINUE",
+ ),
+ (
+ CommandBufferUsageFlags::SIMULTANEOUS_USE.0,
+ "SIMULTANEOUS_USE",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for CommandPoolCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (CommandPoolCreateFlags::TRANSIENT.0, "TRANSIENT"),
+ (
+ CommandPoolCreateFlags::RESET_COMMAND_BUFFER.0,
+ "RESET_COMMAND_BUFFER",
+ ),
+ (CommandPoolCreateFlags::PROTECTED.0, "PROTECTED"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for CommandPoolResetFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ CommandPoolResetFlags::RELEASE_RESOURCES.0,
+ "RELEASE_RESOURCES",
+ ),
+ (
+ CommandPoolResetFlags::RESERVED_1_COREAVI.0,
+ "RESERVED_1_COREAVI",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for CommandPoolTrimFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for CompareOp {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::NEVER => Some("NEVER"),
+ Self::LESS => Some("LESS"),
+ Self::EQUAL => Some("EQUAL"),
+ Self::LESS_OR_EQUAL => Some("LESS_OR_EQUAL"),
+ Self::GREATER => Some("GREATER"),
+ Self::NOT_EQUAL => Some("NOT_EQUAL"),
+ Self::GREATER_OR_EQUAL => Some("GREATER_OR_EQUAL"),
+ Self::ALWAYS => Some("ALWAYS"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for ComponentSwizzle {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::IDENTITY => Some("IDENTITY"),
+ Self::ZERO => Some("ZERO"),
+ Self::ONE => Some("ONE"),
+ Self::R => Some("R"),
+ Self::G => Some("G"),
+ Self::B => Some("B"),
+ Self::A => Some("A"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for ComponentTypeNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::FLOAT16 => Some("FLOAT16"),
+ Self::FLOAT32 => Some("FLOAT32"),
+ Self::FLOAT64 => Some("FLOAT64"),
+ Self::SINT8 => Some("SINT8"),
+ Self::SINT16 => Some("SINT16"),
+ Self::SINT32 => Some("SINT32"),
+ Self::SINT64 => Some("SINT64"),
+ Self::UINT8 => Some("UINT8"),
+ Self::UINT16 => Some("UINT16"),
+ Self::UINT32 => Some("UINT32"),
+ Self::UINT64 => Some("UINT64"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for CompositeAlphaFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (CompositeAlphaFlagsKHR::OPAQUE.0, "OPAQUE"),
+ (CompositeAlphaFlagsKHR::PRE_MULTIPLIED.0, "PRE_MULTIPLIED"),
+ (CompositeAlphaFlagsKHR::POST_MULTIPLIED.0, "POST_MULTIPLIED"),
+ (CompositeAlphaFlagsKHR::INHERIT.0, "INHERIT"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ConditionalRenderingFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(ConditionalRenderingFlagsEXT::INVERTED.0, "INVERTED")];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ConservativeRasterizationModeEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::DISABLED => Some("DISABLED"),
+ Self::OVERESTIMATE => Some("OVERESTIMATE"),
+ Self::UNDERESTIMATE => Some("UNDERESTIMATE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for CopyAccelerationStructureModeKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::CLONE => Some("CLONE"),
+ Self::COMPACT => Some("COMPACT"),
+ Self::SERIALIZE => Some("SERIALIZE"),
+ Self::DESERIALIZE => Some("DESERIALIZE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for CopyMicromapModeEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::CLONE => Some("CLONE"),
+ Self::SERIALIZE => Some("SERIALIZE"),
+ Self::DESERIALIZE => Some("DESERIALIZE"),
+ Self::COMPACT => Some("COMPACT"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for CoverageModulationModeNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::NONE => Some("NONE"),
+ Self::RGB => Some("RGB"),
+ Self::ALPHA => Some("ALPHA"),
+ Self::RGBA => Some("RGBA"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for CoverageReductionModeNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::MERGE => Some("MERGE"),
+ Self::TRUNCATE => Some("TRUNCATE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for CullModeFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (CullModeFlags::NONE.0, "NONE"),
+ (CullModeFlags::FRONT.0, "FRONT"),
+ (CullModeFlags::BACK.0, "BACK"),
+ (CullModeFlags::FRONT_AND_BACK.0, "FRONT_AND_BACK"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DebugReportFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (DebugReportFlagsEXT::INFORMATION.0, "INFORMATION"),
+ (DebugReportFlagsEXT::WARNING.0, "WARNING"),
+ (
+ DebugReportFlagsEXT::PERFORMANCE_WARNING.0,
+ "PERFORMANCE_WARNING",
+ ),
+ (DebugReportFlagsEXT::ERROR.0, "ERROR"),
+ (DebugReportFlagsEXT::DEBUG.0, "DEBUG"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DebugReportObjectTypeEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::UNKNOWN => Some("UNKNOWN"),
+ Self::INSTANCE => Some("INSTANCE"),
+ Self::PHYSICAL_DEVICE => Some("PHYSICAL_DEVICE"),
+ Self::DEVICE => Some("DEVICE"),
+ Self::QUEUE => Some("QUEUE"),
+ Self::SEMAPHORE => Some("SEMAPHORE"),
+ Self::COMMAND_BUFFER => Some("COMMAND_BUFFER"),
+ Self::FENCE => Some("FENCE"),
+ Self::DEVICE_MEMORY => Some("DEVICE_MEMORY"),
+ Self::BUFFER => Some("BUFFER"),
+ Self::IMAGE => Some("IMAGE"),
+ Self::EVENT => Some("EVENT"),
+ Self::QUERY_POOL => Some("QUERY_POOL"),
+ Self::BUFFER_VIEW => Some("BUFFER_VIEW"),
+ Self::IMAGE_VIEW => Some("IMAGE_VIEW"),
+ Self::SHADER_MODULE => Some("SHADER_MODULE"),
+ Self::PIPELINE_CACHE => Some("PIPELINE_CACHE"),
+ Self::PIPELINE_LAYOUT => Some("PIPELINE_LAYOUT"),
+ Self::RENDER_PASS => Some("RENDER_PASS"),
+ Self::PIPELINE => Some("PIPELINE"),
+ Self::DESCRIPTOR_SET_LAYOUT => Some("DESCRIPTOR_SET_LAYOUT"),
+ Self::SAMPLER => Some("SAMPLER"),
+ Self::DESCRIPTOR_POOL => Some("DESCRIPTOR_POOL"),
+ Self::DESCRIPTOR_SET => Some("DESCRIPTOR_SET"),
+ Self::FRAMEBUFFER => Some("FRAMEBUFFER"),
+ Self::COMMAND_POOL => Some("COMMAND_POOL"),
+ Self::SURFACE_KHR => Some("SURFACE_KHR"),
+ Self::SWAPCHAIN_KHR => Some("SWAPCHAIN_KHR"),
+ Self::DEBUG_REPORT_CALLBACK_EXT => Some("DEBUG_REPORT_CALLBACK_EXT"),
+ Self::DISPLAY_KHR => Some("DISPLAY_KHR"),
+ Self::DISPLAY_MODE_KHR => Some("DISPLAY_MODE_KHR"),
+ Self::VALIDATION_CACHE_EXT => Some("VALIDATION_CACHE_EXT"),
+ Self::SAMPLER_YCBCR_CONVERSION => Some("SAMPLER_YCBCR_CONVERSION"),
+ Self::DESCRIPTOR_UPDATE_TEMPLATE => Some("DESCRIPTOR_UPDATE_TEMPLATE"),
+ Self::CU_MODULE_NVX => Some("CU_MODULE_NVX"),
+ Self::CU_FUNCTION_NVX => Some("CU_FUNCTION_NVX"),
+ Self::ACCELERATION_STRUCTURE_KHR => Some("ACCELERATION_STRUCTURE_KHR"),
+ Self::ACCELERATION_STRUCTURE_NV => Some("ACCELERATION_STRUCTURE_NV"),
+ Self::BUFFER_COLLECTION_FUCHSIA => Some("BUFFER_COLLECTION_FUCHSIA"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for DebugUtilsMessageSeverityFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (DebugUtilsMessageSeverityFlagsEXT::VERBOSE.0, "VERBOSE"),
+ (DebugUtilsMessageSeverityFlagsEXT::INFO.0, "INFO"),
+ (DebugUtilsMessageSeverityFlagsEXT::WARNING.0, "WARNING"),
+ (DebugUtilsMessageSeverityFlagsEXT::ERROR.0, "ERROR"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DebugUtilsMessageTypeFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (DebugUtilsMessageTypeFlagsEXT::GENERAL.0, "GENERAL"),
+ (DebugUtilsMessageTypeFlagsEXT::VALIDATION.0, "VALIDATION"),
+ (DebugUtilsMessageTypeFlagsEXT::PERFORMANCE.0, "PERFORMANCE"),
+ (
+ DebugUtilsMessageTypeFlagsEXT::DEVICE_ADDRESS_BINDING.0,
+ "DEVICE_ADDRESS_BINDING",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DebugUtilsMessengerCallbackDataFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DebugUtilsMessengerCreateFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DependencyFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (DependencyFlags::BY_REGION.0, "BY_REGION"),
+ (DependencyFlags::FEEDBACK_LOOP_EXT.0, "FEEDBACK_LOOP_EXT"),
+ (DependencyFlags::DEVICE_GROUP.0, "DEVICE_GROUP"),
+ (DependencyFlags::VIEW_LOCAL.0, "VIEW_LOCAL"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DescriptorBindingFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ DescriptorBindingFlags::UPDATE_AFTER_BIND.0,
+ "UPDATE_AFTER_BIND",
+ ),
+ (
+ DescriptorBindingFlags::UPDATE_UNUSED_WHILE_PENDING.0,
+ "UPDATE_UNUSED_WHILE_PENDING",
+ ),
+ (DescriptorBindingFlags::PARTIALLY_BOUND.0, "PARTIALLY_BOUND"),
+ (
+ DescriptorBindingFlags::VARIABLE_DESCRIPTOR_COUNT.0,
+ "VARIABLE_DESCRIPTOR_COUNT",
+ ),
+ (DescriptorBindingFlags::RESERVED_4_QCOM.0, "RESERVED_4_QCOM"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DescriptorPoolCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET.0,
+ "FREE_DESCRIPTOR_SET",
+ ),
+ (DescriptorPoolCreateFlags::HOST_ONLY_EXT.0, "HOST_ONLY_EXT"),
+ (
+ DescriptorPoolCreateFlags::UPDATE_AFTER_BIND.0,
+ "UPDATE_AFTER_BIND",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DescriptorPoolResetFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DescriptorSetLayoutCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ DescriptorSetLayoutCreateFlags::PUSH_DESCRIPTOR_KHR.0,
+ "PUSH_DESCRIPTOR_KHR",
+ ),
+ (
+ DescriptorSetLayoutCreateFlags::DESCRIPTOR_BUFFER_EXT.0,
+ "DESCRIPTOR_BUFFER_EXT",
+ ),
+ (
+ DescriptorSetLayoutCreateFlags::EMBEDDED_IMMUTABLE_SAMPLERS_EXT.0,
+ "EMBEDDED_IMMUTABLE_SAMPLERS_EXT",
+ ),
+ (
+ DescriptorSetLayoutCreateFlags::RESERVED_3_AMD.0,
+ "RESERVED_3_AMD",
+ ),
+ (
+ DescriptorSetLayoutCreateFlags::HOST_ONLY_POOL_EXT.0,
+ "HOST_ONLY_POOL_EXT",
+ ),
+ (
+ DescriptorSetLayoutCreateFlags::UPDATE_AFTER_BIND_POOL.0,
+ "UPDATE_AFTER_BIND_POOL",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DescriptorType {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::SAMPLER => Some("SAMPLER"),
+ Self::COMBINED_IMAGE_SAMPLER => Some("COMBINED_IMAGE_SAMPLER"),
+ Self::SAMPLED_IMAGE => Some("SAMPLED_IMAGE"),
+ Self::STORAGE_IMAGE => Some("STORAGE_IMAGE"),
+ Self::UNIFORM_TEXEL_BUFFER => Some("UNIFORM_TEXEL_BUFFER"),
+ Self::STORAGE_TEXEL_BUFFER => Some("STORAGE_TEXEL_BUFFER"),
+ Self::UNIFORM_BUFFER => Some("UNIFORM_BUFFER"),
+ Self::STORAGE_BUFFER => Some("STORAGE_BUFFER"),
+ Self::UNIFORM_BUFFER_DYNAMIC => Some("UNIFORM_BUFFER_DYNAMIC"),
+ Self::STORAGE_BUFFER_DYNAMIC => Some("STORAGE_BUFFER_DYNAMIC"),
+ Self::INPUT_ATTACHMENT => Some("INPUT_ATTACHMENT"),
+ Self::ACCELERATION_STRUCTURE_KHR => Some("ACCELERATION_STRUCTURE_KHR"),
+ Self::ACCELERATION_STRUCTURE_NV => Some("ACCELERATION_STRUCTURE_NV"),
+ Self::SAMPLE_WEIGHT_IMAGE_QCOM => Some("SAMPLE_WEIGHT_IMAGE_QCOM"),
+ Self::BLOCK_MATCH_IMAGE_QCOM => Some("BLOCK_MATCH_IMAGE_QCOM"),
+ Self::MUTABLE_EXT => Some("MUTABLE_EXT"),
+ Self::INLINE_UNIFORM_BLOCK => Some("INLINE_UNIFORM_BLOCK"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for DescriptorUpdateTemplateCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DescriptorUpdateTemplateType {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::DESCRIPTOR_SET => Some("DESCRIPTOR_SET"),
+ Self::PUSH_DESCRIPTORS_KHR => Some("PUSH_DESCRIPTORS_KHR"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for DeviceAddressBindingFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(
+ DeviceAddressBindingFlagsEXT::INTERNAL_OBJECT.0,
+ "INTERNAL_OBJECT",
+ )];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DeviceAddressBindingTypeEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::BIND => Some("BIND"),
+ Self::UNBIND => Some("UNBIND"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for DeviceCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DeviceDiagnosticsConfigFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ DeviceDiagnosticsConfigFlagsNV::ENABLE_SHADER_DEBUG_INFO.0,
+ "ENABLE_SHADER_DEBUG_INFO",
+ ),
+ (
+ DeviceDiagnosticsConfigFlagsNV::ENABLE_RESOURCE_TRACKING.0,
+ "ENABLE_RESOURCE_TRACKING",
+ ),
+ (
+ DeviceDiagnosticsConfigFlagsNV::ENABLE_AUTOMATIC_CHECKPOINTS.0,
+ "ENABLE_AUTOMATIC_CHECKPOINTS",
+ ),
+ (
+ DeviceDiagnosticsConfigFlagsNV::ENABLE_SHADER_ERROR_REPORTING.0,
+ "ENABLE_SHADER_ERROR_REPORTING",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DeviceEventTypeEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::DISPLAY_HOTPLUG => Some("DISPLAY_HOTPLUG"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for DeviceFaultAddressTypeEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::NONE => Some("NONE"),
+ Self::READ_INVALID => Some("READ_INVALID"),
+ Self::WRITE_INVALID => Some("WRITE_INVALID"),
+ Self::EXECUTE_INVALID => Some("EXECUTE_INVALID"),
+ Self::INSTRUCTION_POINTER_UNKNOWN => Some("INSTRUCTION_POINTER_UNKNOWN"),
+ Self::INSTRUCTION_POINTER_INVALID => Some("INSTRUCTION_POINTER_INVALID"),
+ Self::INSTRUCTION_POINTER_FAULT => Some("INSTRUCTION_POINTER_FAULT"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for DeviceFaultVendorBinaryHeaderVersionEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::ONE => Some("ONE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for DeviceGroupPresentModeFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (DeviceGroupPresentModeFlagsKHR::LOCAL.0, "LOCAL"),
+ (DeviceGroupPresentModeFlagsKHR::REMOTE.0, "REMOTE"),
+ (DeviceGroupPresentModeFlagsKHR::SUM.0, "SUM"),
+ (
+ DeviceGroupPresentModeFlagsKHR::LOCAL_MULTI_DEVICE.0,
+ "LOCAL_MULTI_DEVICE",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DeviceMemoryReportEventTypeEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::ALLOCATE => Some("ALLOCATE"),
+ Self::FREE => Some("FREE"),
+ Self::IMPORT => Some("IMPORT"),
+ Self::UNIMPORT => Some("UNIMPORT"),
+ Self::ALLOCATION_FAILED => Some("ALLOCATION_FAILED"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for DeviceMemoryReportFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DeviceQueueCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (DeviceQueueCreateFlags::RESERVED_1_QCOM.0, "RESERVED_1_QCOM"),
+ (DeviceQueueCreateFlags::PROTECTED.0, "PROTECTED"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DirectDriverLoadingFlagsLUNARG {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DirectDriverLoadingModeLUNARG {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::EXCLUSIVE => Some("EXCLUSIVE"),
+ Self::INCLUSIVE => Some("INCLUSIVE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for DirectFBSurfaceCreateFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DiscardRectangleModeEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::INCLUSIVE => Some("INCLUSIVE"),
+ Self::EXCLUSIVE => Some("EXCLUSIVE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for DisplayEventTypeEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::FIRST_PIXEL_OUT => Some("FIRST_PIXEL_OUT"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for DisplayModeCreateFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DisplayPlaneAlphaFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (DisplayPlaneAlphaFlagsKHR::OPAQUE.0, "OPAQUE"),
+ (DisplayPlaneAlphaFlagsKHR::GLOBAL.0, "GLOBAL"),
+ (DisplayPlaneAlphaFlagsKHR::PER_PIXEL.0, "PER_PIXEL"),
+ (
+ DisplayPlaneAlphaFlagsKHR::PER_PIXEL_PREMULTIPLIED.0,
+ "PER_PIXEL_PREMULTIPLIED",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DisplayPowerStateEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::OFF => Some("OFF"),
+ Self::SUSPEND => Some("SUSPEND"),
+ Self::ON => Some("ON"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for DisplaySurfaceCreateFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for DriverId {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::AMD_PROPRIETARY => Some("AMD_PROPRIETARY"),
+ Self::AMD_OPEN_SOURCE => Some("AMD_OPEN_SOURCE"),
+ Self::MESA_RADV => Some("MESA_RADV"),
+ Self::NVIDIA_PROPRIETARY => Some("NVIDIA_PROPRIETARY"),
+ Self::INTEL_PROPRIETARY_WINDOWS => Some("INTEL_PROPRIETARY_WINDOWS"),
+ Self::INTEL_OPEN_SOURCE_MESA => Some("INTEL_OPEN_SOURCE_MESA"),
+ Self::IMAGINATION_PROPRIETARY => Some("IMAGINATION_PROPRIETARY"),
+ Self::QUALCOMM_PROPRIETARY => Some("QUALCOMM_PROPRIETARY"),
+ Self::ARM_PROPRIETARY => Some("ARM_PROPRIETARY"),
+ Self::GOOGLE_SWIFTSHADER => Some("GOOGLE_SWIFTSHADER"),
+ Self::GGP_PROPRIETARY => Some("GGP_PROPRIETARY"),
+ Self::BROADCOM_PROPRIETARY => Some("BROADCOM_PROPRIETARY"),
+ Self::MESA_LLVMPIPE => Some("MESA_LLVMPIPE"),
+ Self::MOLTENVK => Some("MOLTENVK"),
+ Self::COREAVI_PROPRIETARY => Some("COREAVI_PROPRIETARY"),
+ Self::JUICE_PROPRIETARY => Some("JUICE_PROPRIETARY"),
+ Self::VERISILICON_PROPRIETARY => Some("VERISILICON_PROPRIETARY"),
+ Self::MESA_TURNIP => Some("MESA_TURNIP"),
+ Self::MESA_V3DV => Some("MESA_V3DV"),
+ Self::MESA_PANVK => Some("MESA_PANVK"),
+ Self::SAMSUNG_PROPRIETARY => Some("SAMSUNG_PROPRIETARY"),
+ Self::MESA_VENUS => Some("MESA_VENUS"),
+ Self::MESA_DOZEN => Some("MESA_DOZEN"),
+ Self::MESA_NVK => Some("MESA_NVK"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for DynamicState {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::VIEWPORT => Some("VIEWPORT"),
+ Self::SCISSOR => Some("SCISSOR"),
+ Self::LINE_WIDTH => Some("LINE_WIDTH"),
+ Self::DEPTH_BIAS => Some("DEPTH_BIAS"),
+ Self::BLEND_CONSTANTS => Some("BLEND_CONSTANTS"),
+ Self::DEPTH_BOUNDS => Some("DEPTH_BOUNDS"),
+ Self::STENCIL_COMPARE_MASK => Some("STENCIL_COMPARE_MASK"),
+ Self::STENCIL_WRITE_MASK => Some("STENCIL_WRITE_MASK"),
+ Self::STENCIL_REFERENCE => Some("STENCIL_REFERENCE"),
+ Self::VIEWPORT_W_SCALING_NV => Some("VIEWPORT_W_SCALING_NV"),
+ Self::DISCARD_RECTANGLE_EXT => Some("DISCARD_RECTANGLE_EXT"),
+ Self::SAMPLE_LOCATIONS_EXT => Some("SAMPLE_LOCATIONS_EXT"),
+ Self::RAY_TRACING_PIPELINE_STACK_SIZE_KHR => {
+ Some("RAY_TRACING_PIPELINE_STACK_SIZE_KHR")
+ }
+ Self::VIEWPORT_SHADING_RATE_PALETTE_NV => Some("VIEWPORT_SHADING_RATE_PALETTE_NV"),
+ Self::VIEWPORT_COARSE_SAMPLE_ORDER_NV => Some("VIEWPORT_COARSE_SAMPLE_ORDER_NV"),
+ Self::EXCLUSIVE_SCISSOR_NV => Some("EXCLUSIVE_SCISSOR_NV"),
+ Self::FRAGMENT_SHADING_RATE_KHR => Some("FRAGMENT_SHADING_RATE_KHR"),
+ Self::LINE_STIPPLE_EXT => Some("LINE_STIPPLE_EXT"),
+ Self::VERTEX_INPUT_EXT => Some("VERTEX_INPUT_EXT"),
+ Self::PATCH_CONTROL_POINTS_EXT => Some("PATCH_CONTROL_POINTS_EXT"),
+ Self::LOGIC_OP_EXT => Some("LOGIC_OP_EXT"),
+ Self::COLOR_WRITE_ENABLE_EXT => Some("COLOR_WRITE_ENABLE_EXT"),
+ Self::TESSELLATION_DOMAIN_ORIGIN_EXT => Some("TESSELLATION_DOMAIN_ORIGIN_EXT"),
+ Self::DEPTH_CLAMP_ENABLE_EXT => Some("DEPTH_CLAMP_ENABLE_EXT"),
+ Self::POLYGON_MODE_EXT => Some("POLYGON_MODE_EXT"),
+ Self::RASTERIZATION_SAMPLES_EXT => Some("RASTERIZATION_SAMPLES_EXT"),
+ Self::SAMPLE_MASK_EXT => Some("SAMPLE_MASK_EXT"),
+ Self::ALPHA_TO_COVERAGE_ENABLE_EXT => Some("ALPHA_TO_COVERAGE_ENABLE_EXT"),
+ Self::ALPHA_TO_ONE_ENABLE_EXT => Some("ALPHA_TO_ONE_ENABLE_EXT"),
+ Self::LOGIC_OP_ENABLE_EXT => Some("LOGIC_OP_ENABLE_EXT"),
+ Self::COLOR_BLEND_ENABLE_EXT => Some("COLOR_BLEND_ENABLE_EXT"),
+ Self::COLOR_BLEND_EQUATION_EXT => Some("COLOR_BLEND_EQUATION_EXT"),
+ Self::COLOR_WRITE_MASK_EXT => Some("COLOR_WRITE_MASK_EXT"),
+ Self::RASTERIZATION_STREAM_EXT => Some("RASTERIZATION_STREAM_EXT"),
+ Self::CONSERVATIVE_RASTERIZATION_MODE_EXT => {
+ Some("CONSERVATIVE_RASTERIZATION_MODE_EXT")
+ }
+ Self::EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT => {
+ Some("EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT")
+ }
+ Self::DEPTH_CLIP_ENABLE_EXT => Some("DEPTH_CLIP_ENABLE_EXT"),
+ Self::SAMPLE_LOCATIONS_ENABLE_EXT => Some("SAMPLE_LOCATIONS_ENABLE_EXT"),
+ Self::COLOR_BLEND_ADVANCED_EXT => Some("COLOR_BLEND_ADVANCED_EXT"),
+ Self::PROVOKING_VERTEX_MODE_EXT => Some("PROVOKING_VERTEX_MODE_EXT"),
+ Self::LINE_RASTERIZATION_MODE_EXT => Some("LINE_RASTERIZATION_MODE_EXT"),
+ Self::LINE_STIPPLE_ENABLE_EXT => Some("LINE_STIPPLE_ENABLE_EXT"),
+ Self::DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT => Some("DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT"),
+ Self::VIEWPORT_W_SCALING_ENABLE_NV => Some("VIEWPORT_W_SCALING_ENABLE_NV"),
+ Self::VIEWPORT_SWIZZLE_NV => Some("VIEWPORT_SWIZZLE_NV"),
+ Self::COVERAGE_TO_COLOR_ENABLE_NV => Some("COVERAGE_TO_COLOR_ENABLE_NV"),
+ Self::COVERAGE_TO_COLOR_LOCATION_NV => Some("COVERAGE_TO_COLOR_LOCATION_NV"),
+ Self::COVERAGE_MODULATION_MODE_NV => Some("COVERAGE_MODULATION_MODE_NV"),
+ Self::COVERAGE_MODULATION_TABLE_ENABLE_NV => {
+ Some("COVERAGE_MODULATION_TABLE_ENABLE_NV")
+ }
+ Self::COVERAGE_MODULATION_TABLE_NV => Some("COVERAGE_MODULATION_TABLE_NV"),
+ Self::SHADING_RATE_IMAGE_ENABLE_NV => Some("SHADING_RATE_IMAGE_ENABLE_NV"),
+ Self::REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV => {
+ Some("REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV")
+ }
+ Self::COVERAGE_REDUCTION_MODE_NV => Some("COVERAGE_REDUCTION_MODE_NV"),
+ Self::CULL_MODE => Some("CULL_MODE"),
+ Self::FRONT_FACE => Some("FRONT_FACE"),
+ Self::PRIMITIVE_TOPOLOGY => Some("PRIMITIVE_TOPOLOGY"),
+ Self::VIEWPORT_WITH_COUNT => Some("VIEWPORT_WITH_COUNT"),
+ Self::SCISSOR_WITH_COUNT => Some("SCISSOR_WITH_COUNT"),
+ Self::VERTEX_INPUT_BINDING_STRIDE => Some("VERTEX_INPUT_BINDING_STRIDE"),
+ Self::DEPTH_TEST_ENABLE => Some("DEPTH_TEST_ENABLE"),
+ Self::DEPTH_WRITE_ENABLE => Some("DEPTH_WRITE_ENABLE"),
+ Self::DEPTH_COMPARE_OP => Some("DEPTH_COMPARE_OP"),
+ Self::DEPTH_BOUNDS_TEST_ENABLE => Some("DEPTH_BOUNDS_TEST_ENABLE"),
+ Self::STENCIL_TEST_ENABLE => Some("STENCIL_TEST_ENABLE"),
+ Self::STENCIL_OP => Some("STENCIL_OP"),
+ Self::RASTERIZER_DISCARD_ENABLE => Some("RASTERIZER_DISCARD_ENABLE"),
+ Self::DEPTH_BIAS_ENABLE => Some("DEPTH_BIAS_ENABLE"),
+ Self::PRIMITIVE_RESTART_ENABLE => Some("PRIMITIVE_RESTART_ENABLE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for EventCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(EventCreateFlags::DEVICE_ONLY.0, "DEVICE_ONLY")];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ExportMetalObjectTypeFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ ExportMetalObjectTypeFlagsEXT::METAL_DEVICE.0,
+ "METAL_DEVICE",
+ ),
+ (
+ ExportMetalObjectTypeFlagsEXT::METAL_COMMAND_QUEUE.0,
+ "METAL_COMMAND_QUEUE",
+ ),
+ (
+ ExportMetalObjectTypeFlagsEXT::METAL_BUFFER.0,
+ "METAL_BUFFER",
+ ),
+ (
+ ExportMetalObjectTypeFlagsEXT::METAL_TEXTURE.0,
+ "METAL_TEXTURE",
+ ),
+ (
+ ExportMetalObjectTypeFlagsEXT::METAL_IOSURFACE.0,
+ "METAL_IOSURFACE",
+ ),
+ (
+ ExportMetalObjectTypeFlagsEXT::METAL_SHARED_EVENT.0,
+ "METAL_SHARED_EVENT",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ExternalFenceFeatureFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (ExternalFenceFeatureFlags::EXPORTABLE.0, "EXPORTABLE"),
+ (ExternalFenceFeatureFlags::IMPORTABLE.0, "IMPORTABLE"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ExternalFenceHandleTypeFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (ExternalFenceHandleTypeFlags::OPAQUE_FD.0, "OPAQUE_FD"),
+ (ExternalFenceHandleTypeFlags::OPAQUE_WIN32.0, "OPAQUE_WIN32"),
+ (
+ ExternalFenceHandleTypeFlags::OPAQUE_WIN32_KMT.0,
+ "OPAQUE_WIN32_KMT",
+ ),
+ (ExternalFenceHandleTypeFlags::SYNC_FD.0, "SYNC_FD"),
+ (
+ ExternalFenceHandleTypeFlags::RESERVED_4_NV.0,
+ "RESERVED_4_NV",
+ ),
+ (
+ ExternalFenceHandleTypeFlags::RESERVED_5_NV.0,
+ "RESERVED_5_NV",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ExternalMemoryFeatureFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ ExternalMemoryFeatureFlags::DEDICATED_ONLY.0,
+ "DEDICATED_ONLY",
+ ),
+ (ExternalMemoryFeatureFlags::EXPORTABLE.0, "EXPORTABLE"),
+ (ExternalMemoryFeatureFlags::IMPORTABLE.0, "IMPORTABLE"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ExternalMemoryFeatureFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ ExternalMemoryFeatureFlagsNV::DEDICATED_ONLY.0,
+ "DEDICATED_ONLY",
+ ),
+ (ExternalMemoryFeatureFlagsNV::EXPORTABLE.0, "EXPORTABLE"),
+ (ExternalMemoryFeatureFlagsNV::IMPORTABLE.0, "IMPORTABLE"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ExternalMemoryHandleTypeFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (ExternalMemoryHandleTypeFlags::OPAQUE_FD.0, "OPAQUE_FD"),
+ (
+ ExternalMemoryHandleTypeFlags::OPAQUE_WIN32.0,
+ "OPAQUE_WIN32",
+ ),
+ (
+ ExternalMemoryHandleTypeFlags::OPAQUE_WIN32_KMT.0,
+ "OPAQUE_WIN32_KMT",
+ ),
+ (
+ ExternalMemoryHandleTypeFlags::D3D11_TEXTURE.0,
+ "D3D11_TEXTURE",
+ ),
+ (
+ ExternalMemoryHandleTypeFlags::D3D11_TEXTURE_KMT.0,
+ "D3D11_TEXTURE_KMT",
+ ),
+ (ExternalMemoryHandleTypeFlags::D3D12_HEAP.0, "D3D12_HEAP"),
+ (
+ ExternalMemoryHandleTypeFlags::D3D12_RESOURCE.0,
+ "D3D12_RESOURCE",
+ ),
+ (ExternalMemoryHandleTypeFlags::DMA_BUF_EXT.0, "DMA_BUF_EXT"),
+ (
+ ExternalMemoryHandleTypeFlags::ANDROID_HARDWARE_BUFFER_ANDROID.0,
+ "ANDROID_HARDWARE_BUFFER_ANDROID",
+ ),
+ (
+ ExternalMemoryHandleTypeFlags::HOST_ALLOCATION_EXT.0,
+ "HOST_ALLOCATION_EXT",
+ ),
+ (
+ ExternalMemoryHandleTypeFlags::HOST_MAPPED_FOREIGN_MEMORY_EXT.0,
+ "HOST_MAPPED_FOREIGN_MEMORY_EXT",
+ ),
+ (
+ ExternalMemoryHandleTypeFlags::ZIRCON_VMO_FUCHSIA.0,
+ "ZIRCON_VMO_FUCHSIA",
+ ),
+ (
+ ExternalMemoryHandleTypeFlags::RDMA_ADDRESS_NV.0,
+ "RDMA_ADDRESS_NV",
+ ),
+ (
+ ExternalMemoryHandleTypeFlags::RESERVED_13_NV.0,
+ "RESERVED_13_NV",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ExternalMemoryHandleTypeFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ ExternalMemoryHandleTypeFlagsNV::OPAQUE_WIN32.0,
+ "OPAQUE_WIN32",
+ ),
+ (
+ ExternalMemoryHandleTypeFlagsNV::OPAQUE_WIN32_KMT.0,
+ "OPAQUE_WIN32_KMT",
+ ),
+ (
+ ExternalMemoryHandleTypeFlagsNV::D3D11_IMAGE.0,
+ "D3D11_IMAGE",
+ ),
+ (
+ ExternalMemoryHandleTypeFlagsNV::D3D11_IMAGE_KMT.0,
+ "D3D11_IMAGE_KMT",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ExternalSemaphoreFeatureFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (ExternalSemaphoreFeatureFlags::EXPORTABLE.0, "EXPORTABLE"),
+ (ExternalSemaphoreFeatureFlags::IMPORTABLE.0, "IMPORTABLE"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ExternalSemaphoreHandleTypeFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (ExternalSemaphoreHandleTypeFlags::OPAQUE_FD.0, "OPAQUE_FD"),
+ (
+ ExternalSemaphoreHandleTypeFlags::OPAQUE_WIN32.0,
+ "OPAQUE_WIN32",
+ ),
+ (
+ ExternalSemaphoreHandleTypeFlags::OPAQUE_WIN32_KMT.0,
+ "OPAQUE_WIN32_KMT",
+ ),
+ (
+ ExternalSemaphoreHandleTypeFlags::D3D12_FENCE.0,
+ "D3D12_FENCE",
+ ),
+ (ExternalSemaphoreHandleTypeFlags::SYNC_FD.0, "SYNC_FD"),
+ (
+ ExternalSemaphoreHandleTypeFlags::ZIRCON_EVENT_FUCHSIA.0,
+ "ZIRCON_EVENT_FUCHSIA",
+ ),
+ (
+ ExternalSemaphoreHandleTypeFlags::RESERVED_5_NV.0,
+ "RESERVED_5_NV",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for FenceCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(FenceCreateFlags::SIGNALED.0, "SIGNALED")];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for FenceImportFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(FenceImportFlags::TEMPORARY.0, "TEMPORARY")];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for Filter {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::NEAREST => Some("NEAREST"),
+ Self::LINEAR => Some("LINEAR"),
+ Self::CUBIC_EXT => Some("CUBIC_EXT"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for Format {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::UNDEFINED => Some("UNDEFINED"),
+ Self::R4G4_UNORM_PACK8 => Some("R4G4_UNORM_PACK8"),
+ Self::R4G4B4A4_UNORM_PACK16 => Some("R4G4B4A4_UNORM_PACK16"),
+ Self::B4G4R4A4_UNORM_PACK16 => Some("B4G4R4A4_UNORM_PACK16"),
+ Self::R5G6B5_UNORM_PACK16 => Some("R5G6B5_UNORM_PACK16"),
+ Self::B5G6R5_UNORM_PACK16 => Some("B5G6R5_UNORM_PACK16"),
+ Self::R5G5B5A1_UNORM_PACK16 => Some("R5G5B5A1_UNORM_PACK16"),
+ Self::B5G5R5A1_UNORM_PACK16 => Some("B5G5R5A1_UNORM_PACK16"),
+ Self::A1R5G5B5_UNORM_PACK16 => Some("A1R5G5B5_UNORM_PACK16"),
+ Self::R8_UNORM => Some("R8_UNORM"),
+ Self::R8_SNORM => Some("R8_SNORM"),
+ Self::R8_USCALED => Some("R8_USCALED"),
+ Self::R8_SSCALED => Some("R8_SSCALED"),
+ Self::R8_UINT => Some("R8_UINT"),
+ Self::R8_SINT => Some("R8_SINT"),
+ Self::R8_SRGB => Some("R8_SRGB"),
+ Self::R8G8_UNORM => Some("R8G8_UNORM"),
+ Self::R8G8_SNORM => Some("R8G8_SNORM"),
+ Self::R8G8_USCALED => Some("R8G8_USCALED"),
+ Self::R8G8_SSCALED => Some("R8G8_SSCALED"),
+ Self::R8G8_UINT => Some("R8G8_UINT"),
+ Self::R8G8_SINT => Some("R8G8_SINT"),
+ Self::R8G8_SRGB => Some("R8G8_SRGB"),
+ Self::R8G8B8_UNORM => Some("R8G8B8_UNORM"),
+ Self::R8G8B8_SNORM => Some("R8G8B8_SNORM"),
+ Self::R8G8B8_USCALED => Some("R8G8B8_USCALED"),
+ Self::R8G8B8_SSCALED => Some("R8G8B8_SSCALED"),
+ Self::R8G8B8_UINT => Some("R8G8B8_UINT"),
+ Self::R8G8B8_SINT => Some("R8G8B8_SINT"),
+ Self::R8G8B8_SRGB => Some("R8G8B8_SRGB"),
+ Self::B8G8R8_UNORM => Some("B8G8R8_UNORM"),
+ Self::B8G8R8_SNORM => Some("B8G8R8_SNORM"),
+ Self::B8G8R8_USCALED => Some("B8G8R8_USCALED"),
+ Self::B8G8R8_SSCALED => Some("B8G8R8_SSCALED"),
+ Self::B8G8R8_UINT => Some("B8G8R8_UINT"),
+ Self::B8G8R8_SINT => Some("B8G8R8_SINT"),
+ Self::B8G8R8_SRGB => Some("B8G8R8_SRGB"),
+ Self::R8G8B8A8_UNORM => Some("R8G8B8A8_UNORM"),
+ Self::R8G8B8A8_SNORM => Some("R8G8B8A8_SNORM"),
+ Self::R8G8B8A8_USCALED => Some("R8G8B8A8_USCALED"),
+ Self::R8G8B8A8_SSCALED => Some("R8G8B8A8_SSCALED"),
+ Self::R8G8B8A8_UINT => Some("R8G8B8A8_UINT"),
+ Self::R8G8B8A8_SINT => Some("R8G8B8A8_SINT"),
+ Self::R8G8B8A8_SRGB => Some("R8G8B8A8_SRGB"),
+ Self::B8G8R8A8_UNORM => Some("B8G8R8A8_UNORM"),
+ Self::B8G8R8A8_SNORM => Some("B8G8R8A8_SNORM"),
+ Self::B8G8R8A8_USCALED => Some("B8G8R8A8_USCALED"),
+ Self::B8G8R8A8_SSCALED => Some("B8G8R8A8_SSCALED"),
+ Self::B8G8R8A8_UINT => Some("B8G8R8A8_UINT"),
+ Self::B8G8R8A8_SINT => Some("B8G8R8A8_SINT"),
+ Self::B8G8R8A8_SRGB => Some("B8G8R8A8_SRGB"),
+ Self::A8B8G8R8_UNORM_PACK32 => Some("A8B8G8R8_UNORM_PACK32"),
+ Self::A8B8G8R8_SNORM_PACK32 => Some("A8B8G8R8_SNORM_PACK32"),
+ Self::A8B8G8R8_USCALED_PACK32 => Some("A8B8G8R8_USCALED_PACK32"),
+ Self::A8B8G8R8_SSCALED_PACK32 => Some("A8B8G8R8_SSCALED_PACK32"),
+ Self::A8B8G8R8_UINT_PACK32 => Some("A8B8G8R8_UINT_PACK32"),
+ Self::A8B8G8R8_SINT_PACK32 => Some("A8B8G8R8_SINT_PACK32"),
+ Self::A8B8G8R8_SRGB_PACK32 => Some("A8B8G8R8_SRGB_PACK32"),
+ Self::A2R10G10B10_UNORM_PACK32 => Some("A2R10G10B10_UNORM_PACK32"),
+ Self::A2R10G10B10_SNORM_PACK32 => Some("A2R10G10B10_SNORM_PACK32"),
+ Self::A2R10G10B10_USCALED_PACK32 => Some("A2R10G10B10_USCALED_PACK32"),
+ Self::A2R10G10B10_SSCALED_PACK32 => Some("A2R10G10B10_SSCALED_PACK32"),
+ Self::A2R10G10B10_UINT_PACK32 => Some("A2R10G10B10_UINT_PACK32"),
+ Self::A2R10G10B10_SINT_PACK32 => Some("A2R10G10B10_SINT_PACK32"),
+ Self::A2B10G10R10_UNORM_PACK32 => Some("A2B10G10R10_UNORM_PACK32"),
+ Self::A2B10G10R10_SNORM_PACK32 => Some("A2B10G10R10_SNORM_PACK32"),
+ Self::A2B10G10R10_USCALED_PACK32 => Some("A2B10G10R10_USCALED_PACK32"),
+ Self::A2B10G10R10_SSCALED_PACK32 => Some("A2B10G10R10_SSCALED_PACK32"),
+ Self::A2B10G10R10_UINT_PACK32 => Some("A2B10G10R10_UINT_PACK32"),
+ Self::A2B10G10R10_SINT_PACK32 => Some("A2B10G10R10_SINT_PACK32"),
+ Self::R16_UNORM => Some("R16_UNORM"),
+ Self::R16_SNORM => Some("R16_SNORM"),
+ Self::R16_USCALED => Some("R16_USCALED"),
+ Self::R16_SSCALED => Some("R16_SSCALED"),
+ Self::R16_UINT => Some("R16_UINT"),
+ Self::R16_SINT => Some("R16_SINT"),
+ Self::R16_SFLOAT => Some("R16_SFLOAT"),
+ Self::R16G16_UNORM => Some("R16G16_UNORM"),
+ Self::R16G16_SNORM => Some("R16G16_SNORM"),
+ Self::R16G16_USCALED => Some("R16G16_USCALED"),
+ Self::R16G16_SSCALED => Some("R16G16_SSCALED"),
+ Self::R16G16_UINT => Some("R16G16_UINT"),
+ Self::R16G16_SINT => Some("R16G16_SINT"),
+ Self::R16G16_SFLOAT => Some("R16G16_SFLOAT"),
+ Self::R16G16B16_UNORM => Some("R16G16B16_UNORM"),
+ Self::R16G16B16_SNORM => Some("R16G16B16_SNORM"),
+ Self::R16G16B16_USCALED => Some("R16G16B16_USCALED"),
+ Self::R16G16B16_SSCALED => Some("R16G16B16_SSCALED"),
+ Self::R16G16B16_UINT => Some("R16G16B16_UINT"),
+ Self::R16G16B16_SINT => Some("R16G16B16_SINT"),
+ Self::R16G16B16_SFLOAT => Some("R16G16B16_SFLOAT"),
+ Self::R16G16B16A16_UNORM => Some("R16G16B16A16_UNORM"),
+ Self::R16G16B16A16_SNORM => Some("R16G16B16A16_SNORM"),
+ Self::R16G16B16A16_USCALED => Some("R16G16B16A16_USCALED"),
+ Self::R16G16B16A16_SSCALED => Some("R16G16B16A16_SSCALED"),
+ Self::R16G16B16A16_UINT => Some("R16G16B16A16_UINT"),
+ Self::R16G16B16A16_SINT => Some("R16G16B16A16_SINT"),
+ Self::R16G16B16A16_SFLOAT => Some("R16G16B16A16_SFLOAT"),
+ Self::R32_UINT => Some("R32_UINT"),
+ Self::R32_SINT => Some("R32_SINT"),
+ Self::R32_SFLOAT => Some("R32_SFLOAT"),
+ Self::R32G32_UINT => Some("R32G32_UINT"),
+ Self::R32G32_SINT => Some("R32G32_SINT"),
+ Self::R32G32_SFLOAT => Some("R32G32_SFLOAT"),
+ Self::R32G32B32_UINT => Some("R32G32B32_UINT"),
+ Self::R32G32B32_SINT => Some("R32G32B32_SINT"),
+ Self::R32G32B32_SFLOAT => Some("R32G32B32_SFLOAT"),
+ Self::R32G32B32A32_UINT => Some("R32G32B32A32_UINT"),
+ Self::R32G32B32A32_SINT => Some("R32G32B32A32_SINT"),
+ Self::R32G32B32A32_SFLOAT => Some("R32G32B32A32_SFLOAT"),
+ Self::R64_UINT => Some("R64_UINT"),
+ Self::R64_SINT => Some("R64_SINT"),
+ Self::R64_SFLOAT => Some("R64_SFLOAT"),
+ Self::R64G64_UINT => Some("R64G64_UINT"),
+ Self::R64G64_SINT => Some("R64G64_SINT"),
+ Self::R64G64_SFLOAT => Some("R64G64_SFLOAT"),
+ Self::R64G64B64_UINT => Some("R64G64B64_UINT"),
+ Self::R64G64B64_SINT => Some("R64G64B64_SINT"),
+ Self::R64G64B64_SFLOAT => Some("R64G64B64_SFLOAT"),
+ Self::R64G64B64A64_UINT => Some("R64G64B64A64_UINT"),
+ Self::R64G64B64A64_SINT => Some("R64G64B64A64_SINT"),
+ Self::R64G64B64A64_SFLOAT => Some("R64G64B64A64_SFLOAT"),
+ Self::B10G11R11_UFLOAT_PACK32 => Some("B10G11R11_UFLOAT_PACK32"),
+ Self::E5B9G9R9_UFLOAT_PACK32 => Some("E5B9G9R9_UFLOAT_PACK32"),
+ Self::D16_UNORM => Some("D16_UNORM"),
+ Self::X8_D24_UNORM_PACK32 => Some("X8_D24_UNORM_PACK32"),
+ Self::D32_SFLOAT => Some("D32_SFLOAT"),
+ Self::S8_UINT => Some("S8_UINT"),
+ Self::D16_UNORM_S8_UINT => Some("D16_UNORM_S8_UINT"),
+ Self::D24_UNORM_S8_UINT => Some("D24_UNORM_S8_UINT"),
+ Self::D32_SFLOAT_S8_UINT => Some("D32_SFLOAT_S8_UINT"),
+ Self::BC1_RGB_UNORM_BLOCK => Some("BC1_RGB_UNORM_BLOCK"),
+ Self::BC1_RGB_SRGB_BLOCK => Some("BC1_RGB_SRGB_BLOCK"),
+ Self::BC1_RGBA_UNORM_BLOCK => Some("BC1_RGBA_UNORM_BLOCK"),
+ Self::BC1_RGBA_SRGB_BLOCK => Some("BC1_RGBA_SRGB_BLOCK"),
+ Self::BC2_UNORM_BLOCK => Some("BC2_UNORM_BLOCK"),
+ Self::BC2_SRGB_BLOCK => Some("BC2_SRGB_BLOCK"),
+ Self::BC3_UNORM_BLOCK => Some("BC3_UNORM_BLOCK"),
+ Self::BC3_SRGB_BLOCK => Some("BC3_SRGB_BLOCK"),
+ Self::BC4_UNORM_BLOCK => Some("BC4_UNORM_BLOCK"),
+ Self::BC4_SNORM_BLOCK => Some("BC4_SNORM_BLOCK"),
+ Self::BC5_UNORM_BLOCK => Some("BC5_UNORM_BLOCK"),
+ Self::BC5_SNORM_BLOCK => Some("BC5_SNORM_BLOCK"),
+ Self::BC6H_UFLOAT_BLOCK => Some("BC6H_UFLOAT_BLOCK"),
+ Self::BC6H_SFLOAT_BLOCK => Some("BC6H_SFLOAT_BLOCK"),
+ Self::BC7_UNORM_BLOCK => Some("BC7_UNORM_BLOCK"),
+ Self::BC7_SRGB_BLOCK => Some("BC7_SRGB_BLOCK"),
+ Self::ETC2_R8G8B8_UNORM_BLOCK => Some("ETC2_R8G8B8_UNORM_BLOCK"),
+ Self::ETC2_R8G8B8_SRGB_BLOCK => Some("ETC2_R8G8B8_SRGB_BLOCK"),
+ Self::ETC2_R8G8B8A1_UNORM_BLOCK => Some("ETC2_R8G8B8A1_UNORM_BLOCK"),
+ Self::ETC2_R8G8B8A1_SRGB_BLOCK => Some("ETC2_R8G8B8A1_SRGB_BLOCK"),
+ Self::ETC2_R8G8B8A8_UNORM_BLOCK => Some("ETC2_R8G8B8A8_UNORM_BLOCK"),
+ Self::ETC2_R8G8B8A8_SRGB_BLOCK => Some("ETC2_R8G8B8A8_SRGB_BLOCK"),
+ Self::EAC_R11_UNORM_BLOCK => Some("EAC_R11_UNORM_BLOCK"),
+ Self::EAC_R11_SNORM_BLOCK => Some("EAC_R11_SNORM_BLOCK"),
+ Self::EAC_R11G11_UNORM_BLOCK => Some("EAC_R11G11_UNORM_BLOCK"),
+ Self::EAC_R11G11_SNORM_BLOCK => Some("EAC_R11G11_SNORM_BLOCK"),
+ Self::ASTC_4X4_UNORM_BLOCK => Some("ASTC_4X4_UNORM_BLOCK"),
+ Self::ASTC_4X4_SRGB_BLOCK => Some("ASTC_4X4_SRGB_BLOCK"),
+ Self::ASTC_5X4_UNORM_BLOCK => Some("ASTC_5X4_UNORM_BLOCK"),
+ Self::ASTC_5X4_SRGB_BLOCK => Some("ASTC_5X4_SRGB_BLOCK"),
+ Self::ASTC_5X5_UNORM_BLOCK => Some("ASTC_5X5_UNORM_BLOCK"),
+ Self::ASTC_5X5_SRGB_BLOCK => Some("ASTC_5X5_SRGB_BLOCK"),
+ Self::ASTC_6X5_UNORM_BLOCK => Some("ASTC_6X5_UNORM_BLOCK"),
+ Self::ASTC_6X5_SRGB_BLOCK => Some("ASTC_6X5_SRGB_BLOCK"),
+ Self::ASTC_6X6_UNORM_BLOCK => Some("ASTC_6X6_UNORM_BLOCK"),
+ Self::ASTC_6X6_SRGB_BLOCK => Some("ASTC_6X6_SRGB_BLOCK"),
+ Self::ASTC_8X5_UNORM_BLOCK => Some("ASTC_8X5_UNORM_BLOCK"),
+ Self::ASTC_8X5_SRGB_BLOCK => Some("ASTC_8X5_SRGB_BLOCK"),
+ Self::ASTC_8X6_UNORM_BLOCK => Some("ASTC_8X6_UNORM_BLOCK"),
+ Self::ASTC_8X6_SRGB_BLOCK => Some("ASTC_8X6_SRGB_BLOCK"),
+ Self::ASTC_8X8_UNORM_BLOCK => Some("ASTC_8X8_UNORM_BLOCK"),
+ Self::ASTC_8X8_SRGB_BLOCK => Some("ASTC_8X8_SRGB_BLOCK"),
+ Self::ASTC_10X5_UNORM_BLOCK => Some("ASTC_10X5_UNORM_BLOCK"),
+ Self::ASTC_10X5_SRGB_BLOCK => Some("ASTC_10X5_SRGB_BLOCK"),
+ Self::ASTC_10X6_UNORM_BLOCK => Some("ASTC_10X6_UNORM_BLOCK"),
+ Self::ASTC_10X6_SRGB_BLOCK => Some("ASTC_10X6_SRGB_BLOCK"),
+ Self::ASTC_10X8_UNORM_BLOCK => Some("ASTC_10X8_UNORM_BLOCK"),
+ Self::ASTC_10X8_SRGB_BLOCK => Some("ASTC_10X8_SRGB_BLOCK"),
+ Self::ASTC_10X10_UNORM_BLOCK => Some("ASTC_10X10_UNORM_BLOCK"),
+ Self::ASTC_10X10_SRGB_BLOCK => Some("ASTC_10X10_SRGB_BLOCK"),
+ Self::ASTC_12X10_UNORM_BLOCK => Some("ASTC_12X10_UNORM_BLOCK"),
+ Self::ASTC_12X10_SRGB_BLOCK => Some("ASTC_12X10_SRGB_BLOCK"),
+ Self::ASTC_12X12_UNORM_BLOCK => Some("ASTC_12X12_UNORM_BLOCK"),
+ Self::ASTC_12X12_SRGB_BLOCK => Some("ASTC_12X12_SRGB_BLOCK"),
+ Self::PVRTC1_2BPP_UNORM_BLOCK_IMG => Some("PVRTC1_2BPP_UNORM_BLOCK_IMG"),
+ Self::PVRTC1_4BPP_UNORM_BLOCK_IMG => Some("PVRTC1_4BPP_UNORM_BLOCK_IMG"),
+ Self::PVRTC2_2BPP_UNORM_BLOCK_IMG => Some("PVRTC2_2BPP_UNORM_BLOCK_IMG"),
+ Self::PVRTC2_4BPP_UNORM_BLOCK_IMG => Some("PVRTC2_4BPP_UNORM_BLOCK_IMG"),
+ Self::PVRTC1_2BPP_SRGB_BLOCK_IMG => Some("PVRTC1_2BPP_SRGB_BLOCK_IMG"),
+ Self::PVRTC1_4BPP_SRGB_BLOCK_IMG => Some("PVRTC1_4BPP_SRGB_BLOCK_IMG"),
+ Self::PVRTC2_2BPP_SRGB_BLOCK_IMG => Some("PVRTC2_2BPP_SRGB_BLOCK_IMG"),
+ Self::PVRTC2_4BPP_SRGB_BLOCK_IMG => Some("PVRTC2_4BPP_SRGB_BLOCK_IMG"),
+ Self::ASTC_3X3X3_UNORM_BLOCK_EXT => Some("ASTC_3X3X3_UNORM_BLOCK_EXT"),
+ Self::ASTC_3X3X3_SRGB_BLOCK_EXT => Some("ASTC_3X3X3_SRGB_BLOCK_EXT"),
+ Self::ASTC_3X3X3_SFLOAT_BLOCK_EXT => Some("ASTC_3X3X3_SFLOAT_BLOCK_EXT"),
+ Self::ASTC_4X3X3_UNORM_BLOCK_EXT => Some("ASTC_4X3X3_UNORM_BLOCK_EXT"),
+ Self::ASTC_4X3X3_SRGB_BLOCK_EXT => Some("ASTC_4X3X3_SRGB_BLOCK_EXT"),
+ Self::ASTC_4X3X3_SFLOAT_BLOCK_EXT => Some("ASTC_4X3X3_SFLOAT_BLOCK_EXT"),
+ Self::ASTC_4X4X3_UNORM_BLOCK_EXT => Some("ASTC_4X4X3_UNORM_BLOCK_EXT"),
+ Self::ASTC_4X4X3_SRGB_BLOCK_EXT => Some("ASTC_4X4X3_SRGB_BLOCK_EXT"),
+ Self::ASTC_4X4X3_SFLOAT_BLOCK_EXT => Some("ASTC_4X4X3_SFLOAT_BLOCK_EXT"),
+ Self::ASTC_4X4X4_UNORM_BLOCK_EXT => Some("ASTC_4X4X4_UNORM_BLOCK_EXT"),
+ Self::ASTC_4X4X4_SRGB_BLOCK_EXT => Some("ASTC_4X4X4_SRGB_BLOCK_EXT"),
+ Self::ASTC_4X4X4_SFLOAT_BLOCK_EXT => Some("ASTC_4X4X4_SFLOAT_BLOCK_EXT"),
+ Self::ASTC_5X4X4_UNORM_BLOCK_EXT => Some("ASTC_5X4X4_UNORM_BLOCK_EXT"),
+ Self::ASTC_5X4X4_SRGB_BLOCK_EXT => Some("ASTC_5X4X4_SRGB_BLOCK_EXT"),
+ Self::ASTC_5X4X4_SFLOAT_BLOCK_EXT => Some("ASTC_5X4X4_SFLOAT_BLOCK_EXT"),
+ Self::ASTC_5X5X4_UNORM_BLOCK_EXT => Some("ASTC_5X5X4_UNORM_BLOCK_EXT"),
+ Self::ASTC_5X5X4_SRGB_BLOCK_EXT => Some("ASTC_5X5X4_SRGB_BLOCK_EXT"),
+ Self::ASTC_5X5X4_SFLOAT_BLOCK_EXT => Some("ASTC_5X5X4_SFLOAT_BLOCK_EXT"),
+ Self::ASTC_5X5X5_UNORM_BLOCK_EXT => Some("ASTC_5X5X5_UNORM_BLOCK_EXT"),
+ Self::ASTC_5X5X5_SRGB_BLOCK_EXT => Some("ASTC_5X5X5_SRGB_BLOCK_EXT"),
+ Self::ASTC_5X5X5_SFLOAT_BLOCK_EXT => Some("ASTC_5X5X5_SFLOAT_BLOCK_EXT"),
+ Self::ASTC_6X5X5_UNORM_BLOCK_EXT => Some("ASTC_6X5X5_UNORM_BLOCK_EXT"),
+ Self::ASTC_6X5X5_SRGB_BLOCK_EXT => Some("ASTC_6X5X5_SRGB_BLOCK_EXT"),
+ Self::ASTC_6X5X5_SFLOAT_BLOCK_EXT => Some("ASTC_6X5X5_SFLOAT_BLOCK_EXT"),
+ Self::ASTC_6X6X5_UNORM_BLOCK_EXT => Some("ASTC_6X6X5_UNORM_BLOCK_EXT"),
+ Self::ASTC_6X6X5_SRGB_BLOCK_EXT => Some("ASTC_6X6X5_SRGB_BLOCK_EXT"),
+ Self::ASTC_6X6X5_SFLOAT_BLOCK_EXT => Some("ASTC_6X6X5_SFLOAT_BLOCK_EXT"),
+ Self::ASTC_6X6X6_UNORM_BLOCK_EXT => Some("ASTC_6X6X6_UNORM_BLOCK_EXT"),
+ Self::ASTC_6X6X6_SRGB_BLOCK_EXT => Some("ASTC_6X6X6_SRGB_BLOCK_EXT"),
+ Self::ASTC_6X6X6_SFLOAT_BLOCK_EXT => Some("ASTC_6X6X6_SFLOAT_BLOCK_EXT"),
+ Self::R16G16_S10_5_NV => Some("R16G16_S10_5_NV"),
+ Self::G8B8G8R8_422_UNORM => Some("G8B8G8R8_422_UNORM"),
+ Self::B8G8R8G8_422_UNORM => Some("B8G8R8G8_422_UNORM"),
+ Self::G8_B8_R8_3PLANE_420_UNORM => Some("G8_B8_R8_3PLANE_420_UNORM"),
+ Self::G8_B8R8_2PLANE_420_UNORM => Some("G8_B8R8_2PLANE_420_UNORM"),
+ Self::G8_B8_R8_3PLANE_422_UNORM => Some("G8_B8_R8_3PLANE_422_UNORM"),
+ Self::G8_B8R8_2PLANE_422_UNORM => Some("G8_B8R8_2PLANE_422_UNORM"),
+ Self::G8_B8_R8_3PLANE_444_UNORM => Some("G8_B8_R8_3PLANE_444_UNORM"),
+ Self::R10X6_UNORM_PACK16 => Some("R10X6_UNORM_PACK16"),
+ Self::R10X6G10X6_UNORM_2PACK16 => Some("R10X6G10X6_UNORM_2PACK16"),
+ Self::R10X6G10X6B10X6A10X6_UNORM_4PACK16 => Some("R10X6G10X6B10X6A10X6_UNORM_4PACK16"),
+ Self::G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 => {
+ Some("G10X6B10X6G10X6R10X6_422_UNORM_4PACK16")
+ }
+ Self::B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 => {
+ Some("B10X6G10X6R10X6G10X6_422_UNORM_4PACK16")
+ }
+ Self::G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 => {
+ Some("G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16")
+ }
+ Self::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 => {
+ Some("G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16")
+ }
+ Self::G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 => {
+ Some("G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16")
+ }
+ Self::G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 => {
+ Some("G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16")
+ }
+ Self::G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 => {
+ Some("G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16")
+ }
+ Self::R12X4_UNORM_PACK16 => Some("R12X4_UNORM_PACK16"),
+ Self::R12X4G12X4_UNORM_2PACK16 => Some("R12X4G12X4_UNORM_2PACK16"),
+ Self::R12X4G12X4B12X4A12X4_UNORM_4PACK16 => Some("R12X4G12X4B12X4A12X4_UNORM_4PACK16"),
+ Self::G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 => {
+ Some("G12X4B12X4G12X4R12X4_422_UNORM_4PACK16")
+ }
+ Self::B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 => {
+ Some("B12X4G12X4R12X4G12X4_422_UNORM_4PACK16")
+ }
+ Self::G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 => {
+ Some("G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16")
+ }
+ Self::G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 => {
+ Some("G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16")
+ }
+ Self::G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 => {
+ Some("G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16")
+ }
+ Self::G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 => {
+ Some("G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16")
+ }
+ Self::G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 => {
+ Some("G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16")
+ }
+ Self::G16B16G16R16_422_UNORM => Some("G16B16G16R16_422_UNORM"),
+ Self::B16G16R16G16_422_UNORM => Some("B16G16R16G16_422_UNORM"),
+ Self::G16_B16_R16_3PLANE_420_UNORM => Some("G16_B16_R16_3PLANE_420_UNORM"),
+ Self::G16_B16R16_2PLANE_420_UNORM => Some("G16_B16R16_2PLANE_420_UNORM"),
+ Self::G16_B16_R16_3PLANE_422_UNORM => Some("G16_B16_R16_3PLANE_422_UNORM"),
+ Self::G16_B16R16_2PLANE_422_UNORM => Some("G16_B16R16_2PLANE_422_UNORM"),
+ Self::G16_B16_R16_3PLANE_444_UNORM => Some("G16_B16_R16_3PLANE_444_UNORM"),
+ Self::G8_B8R8_2PLANE_444_UNORM => Some("G8_B8R8_2PLANE_444_UNORM"),
+ Self::G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16 => {
+ Some("G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16")
+ }
+ Self::G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16 => {
+ Some("G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16")
+ }
+ Self::G16_B16R16_2PLANE_444_UNORM => Some("G16_B16R16_2PLANE_444_UNORM"),
+ Self::A4R4G4B4_UNORM_PACK16 => Some("A4R4G4B4_UNORM_PACK16"),
+ Self::A4B4G4R4_UNORM_PACK16 => Some("A4B4G4R4_UNORM_PACK16"),
+ Self::ASTC_4X4_SFLOAT_BLOCK => Some("ASTC_4X4_SFLOAT_BLOCK"),
+ Self::ASTC_5X4_SFLOAT_BLOCK => Some("ASTC_5X4_SFLOAT_BLOCK"),
+ Self::ASTC_5X5_SFLOAT_BLOCK => Some("ASTC_5X5_SFLOAT_BLOCK"),
+ Self::ASTC_6X5_SFLOAT_BLOCK => Some("ASTC_6X5_SFLOAT_BLOCK"),
+ Self::ASTC_6X6_SFLOAT_BLOCK => Some("ASTC_6X6_SFLOAT_BLOCK"),
+ Self::ASTC_8X5_SFLOAT_BLOCK => Some("ASTC_8X5_SFLOAT_BLOCK"),
+ Self::ASTC_8X6_SFLOAT_BLOCK => Some("ASTC_8X6_SFLOAT_BLOCK"),
+ Self::ASTC_8X8_SFLOAT_BLOCK => Some("ASTC_8X8_SFLOAT_BLOCK"),
+ Self::ASTC_10X5_SFLOAT_BLOCK => Some("ASTC_10X5_SFLOAT_BLOCK"),
+ Self::ASTC_10X6_SFLOAT_BLOCK => Some("ASTC_10X6_SFLOAT_BLOCK"),
+ Self::ASTC_10X8_SFLOAT_BLOCK => Some("ASTC_10X8_SFLOAT_BLOCK"),
+ Self::ASTC_10X10_SFLOAT_BLOCK => Some("ASTC_10X10_SFLOAT_BLOCK"),
+ Self::ASTC_12X10_SFLOAT_BLOCK => Some("ASTC_12X10_SFLOAT_BLOCK"),
+ Self::ASTC_12X12_SFLOAT_BLOCK => Some("ASTC_12X12_SFLOAT_BLOCK"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for FormatFeatureFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN : & [(Flags , & str)] = & [(FormatFeatureFlags :: SAMPLED_IMAGE . 0 , "SAMPLED_IMAGE") , (FormatFeatureFlags :: STORAGE_IMAGE . 0 , "STORAGE_IMAGE") , (FormatFeatureFlags :: STORAGE_IMAGE_ATOMIC . 0 , "STORAGE_IMAGE_ATOMIC") , (FormatFeatureFlags :: UNIFORM_TEXEL_BUFFER . 0 , "UNIFORM_TEXEL_BUFFER") , (FormatFeatureFlags :: STORAGE_TEXEL_BUFFER . 0 , "STORAGE_TEXEL_BUFFER") , (FormatFeatureFlags :: STORAGE_TEXEL_BUFFER_ATOMIC . 0 , "STORAGE_TEXEL_BUFFER_ATOMIC") , (FormatFeatureFlags :: VERTEX_BUFFER . 0 , "VERTEX_BUFFER") , (FormatFeatureFlags :: COLOR_ATTACHMENT . 0 , "COLOR_ATTACHMENT") , (FormatFeatureFlags :: COLOR_ATTACHMENT_BLEND . 0 , "COLOR_ATTACHMENT_BLEND") , (FormatFeatureFlags :: DEPTH_STENCIL_ATTACHMENT . 0 , "DEPTH_STENCIL_ATTACHMENT") , (FormatFeatureFlags :: BLIT_SRC . 0 , "BLIT_SRC") , (FormatFeatureFlags :: BLIT_DST . 0 , "BLIT_DST") , (FormatFeatureFlags :: SAMPLED_IMAGE_FILTER_LINEAR . 0 , "SAMPLED_IMAGE_FILTER_LINEAR") , (FormatFeatureFlags :: VIDEO_DECODE_OUTPUT_KHR . 0 , "VIDEO_DECODE_OUTPUT_KHR") , (FormatFeatureFlags :: VIDEO_DECODE_DPB_KHR . 0 , "VIDEO_DECODE_DPB_KHR") , (FormatFeatureFlags :: ACCELERATION_STRUCTURE_VERTEX_BUFFER_KHR . 0 , "ACCELERATION_STRUCTURE_VERTEX_BUFFER_KHR") , (FormatFeatureFlags :: SAMPLED_IMAGE_FILTER_CUBIC_EXT . 0 , "SAMPLED_IMAGE_FILTER_CUBIC_EXT") , (FormatFeatureFlags :: FRAGMENT_DENSITY_MAP_EXT . 0 , "FRAGMENT_DENSITY_MAP_EXT") , (FormatFeatureFlags :: FRAGMENT_SHADING_RATE_ATTACHMENT_KHR . 0 , "FRAGMENT_SHADING_RATE_ATTACHMENT_KHR") , (FormatFeatureFlags :: VIDEO_ENCODE_INPUT_KHR . 0 , "VIDEO_ENCODE_INPUT_KHR") , (FormatFeatureFlags :: VIDEO_ENCODE_DPB_KHR . 0 , "VIDEO_ENCODE_DPB_KHR") , (FormatFeatureFlags :: TRANSFER_SRC . 0 , "TRANSFER_SRC") , (FormatFeatureFlags :: TRANSFER_DST . 0 , "TRANSFER_DST") , (FormatFeatureFlags :: MIDPOINT_CHROMA_SAMPLES . 0 , "MIDPOINT_CHROMA_SAMPLES") , (FormatFeatureFlags :: SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER . 
0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER") , (FormatFeatureFlags :: SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER . 0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER") , (FormatFeatureFlags :: SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT . 0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT") , (FormatFeatureFlags :: SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE . 0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE") , (FormatFeatureFlags :: DISJOINT . 0 , "DISJOINT") , (FormatFeatureFlags :: COSITED_CHROMA_SAMPLES . 0 , "COSITED_CHROMA_SAMPLES") , (FormatFeatureFlags :: SAMPLED_IMAGE_FILTER_MINMAX . 0 , "SAMPLED_IMAGE_FILTER_MINMAX")] ;
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for FormatFeatureFlags2 {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN : & [(Flags64 , & str)] = & [(FormatFeatureFlags2 :: SAMPLED_IMAGE . 0 , "SAMPLED_IMAGE") , (FormatFeatureFlags2 :: STORAGE_IMAGE . 0 , "STORAGE_IMAGE") , (FormatFeatureFlags2 :: STORAGE_IMAGE_ATOMIC . 0 , "STORAGE_IMAGE_ATOMIC") , (FormatFeatureFlags2 :: UNIFORM_TEXEL_BUFFER . 0 , "UNIFORM_TEXEL_BUFFER") , (FormatFeatureFlags2 :: STORAGE_TEXEL_BUFFER . 0 , "STORAGE_TEXEL_BUFFER") , (FormatFeatureFlags2 :: STORAGE_TEXEL_BUFFER_ATOMIC . 0 , "STORAGE_TEXEL_BUFFER_ATOMIC") , (FormatFeatureFlags2 :: VERTEX_BUFFER . 0 , "VERTEX_BUFFER") , (FormatFeatureFlags2 :: COLOR_ATTACHMENT . 0 , "COLOR_ATTACHMENT") , (FormatFeatureFlags2 :: COLOR_ATTACHMENT_BLEND . 0 , "COLOR_ATTACHMENT_BLEND") , (FormatFeatureFlags2 :: DEPTH_STENCIL_ATTACHMENT . 0 , "DEPTH_STENCIL_ATTACHMENT") , (FormatFeatureFlags2 :: BLIT_SRC . 0 , "BLIT_SRC") , (FormatFeatureFlags2 :: BLIT_DST . 0 , "BLIT_DST") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_FILTER_LINEAR . 0 , "SAMPLED_IMAGE_FILTER_LINEAR") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_FILTER_CUBIC . 0 , "SAMPLED_IMAGE_FILTER_CUBIC") , (FormatFeatureFlags2 :: TRANSFER_SRC . 0 , "TRANSFER_SRC") , (FormatFeatureFlags2 :: TRANSFER_DST . 0 , "TRANSFER_DST") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_FILTER_MINMAX . 0 , "SAMPLED_IMAGE_FILTER_MINMAX") , (FormatFeatureFlags2 :: MIDPOINT_CHROMA_SAMPLES . 0 , "MIDPOINT_CHROMA_SAMPLES") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER . 0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER . 0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT . 0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE . 
0 , "SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE") , (FormatFeatureFlags2 :: DISJOINT . 0 , "DISJOINT") , (FormatFeatureFlags2 :: COSITED_CHROMA_SAMPLES . 0 , "COSITED_CHROMA_SAMPLES") , (FormatFeatureFlags2 :: STORAGE_READ_WITHOUT_FORMAT . 0 , "STORAGE_READ_WITHOUT_FORMAT") , (FormatFeatureFlags2 :: STORAGE_WRITE_WITHOUT_FORMAT . 0 , "STORAGE_WRITE_WITHOUT_FORMAT") , (FormatFeatureFlags2 :: SAMPLED_IMAGE_DEPTH_COMPARISON . 0 , "SAMPLED_IMAGE_DEPTH_COMPARISON") , (FormatFeatureFlags2 :: VIDEO_DECODE_OUTPUT_KHR . 0 , "VIDEO_DECODE_OUTPUT_KHR") , (FormatFeatureFlags2 :: VIDEO_DECODE_DPB_KHR . 0 , "VIDEO_DECODE_DPB_KHR") , (FormatFeatureFlags2 :: ACCELERATION_STRUCTURE_VERTEX_BUFFER_KHR . 0 , "ACCELERATION_STRUCTURE_VERTEX_BUFFER_KHR") , (FormatFeatureFlags2 :: FRAGMENT_DENSITY_MAP_EXT . 0 , "FRAGMENT_DENSITY_MAP_EXT") , (FormatFeatureFlags2 :: FRAGMENT_SHADING_RATE_ATTACHMENT_KHR . 0 , "FRAGMENT_SHADING_RATE_ATTACHMENT_KHR") , (FormatFeatureFlags2 :: RESERVED_44_EXT . 0 , "RESERVED_44_EXT") , (FormatFeatureFlags2 :: RESERVED_45_EXT . 0 , "RESERVED_45_EXT") , (FormatFeatureFlags2 :: VIDEO_ENCODE_INPUT_KHR . 0 , "VIDEO_ENCODE_INPUT_KHR") , (FormatFeatureFlags2 :: VIDEO_ENCODE_DPB_KHR . 0 , "VIDEO_ENCODE_DPB_KHR") , (FormatFeatureFlags2 :: LINEAR_COLOR_ATTACHMENT_NV . 0 , "LINEAR_COLOR_ATTACHMENT_NV") , (FormatFeatureFlags2 :: WEIGHT_IMAGE_QCOM . 0 , "WEIGHT_IMAGE_QCOM") , (FormatFeatureFlags2 :: WEIGHT_SAMPLED_IMAGE_QCOM . 0 , "WEIGHT_SAMPLED_IMAGE_QCOM") , (FormatFeatureFlags2 :: BLOCK_MATCHING_QCOM . 0 , "BLOCK_MATCHING_QCOM") , (FormatFeatureFlags2 :: BOX_FILTER_SAMPLED_QCOM . 0 , "BOX_FILTER_SAMPLED_QCOM") , (FormatFeatureFlags2 :: RESERVED_39_EXT . 0 , "RESERVED_39_EXT") , (FormatFeatureFlags2 :: OPTICAL_FLOW_IMAGE_NV . 0 , "OPTICAL_FLOW_IMAGE_NV") , (FormatFeatureFlags2 :: OPTICAL_FLOW_VECTOR_NV . 0 , "OPTICAL_FLOW_VECTOR_NV") , (FormatFeatureFlags2 :: OPTICAL_FLOW_COST_NV . 0 , "OPTICAL_FLOW_COST_NV")] ;
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for FragmentShadingRateCombinerOpKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::KEEP => Some("KEEP"),
+ Self::REPLACE => Some("REPLACE"),
+ Self::MIN => Some("MIN"),
+ Self::MAX => Some("MAX"),
+ Self::MUL => Some("MUL"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for FragmentShadingRateNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::TYPE_1_INVOCATION_PER_PIXEL => Some("TYPE_1_INVOCATION_PER_PIXEL"),
+ Self::TYPE_1_INVOCATION_PER_1X2_PIXELS => Some("TYPE_1_INVOCATION_PER_1X2_PIXELS"),
+ Self::TYPE_1_INVOCATION_PER_2X1_PIXELS => Some("TYPE_1_INVOCATION_PER_2X1_PIXELS"),
+ Self::TYPE_1_INVOCATION_PER_2X2_PIXELS => Some("TYPE_1_INVOCATION_PER_2X2_PIXELS"),
+ Self::TYPE_1_INVOCATION_PER_2X4_PIXELS => Some("TYPE_1_INVOCATION_PER_2X4_PIXELS"),
+ Self::TYPE_1_INVOCATION_PER_4X2_PIXELS => Some("TYPE_1_INVOCATION_PER_4X2_PIXELS"),
+ Self::TYPE_1_INVOCATION_PER_4X4_PIXELS => Some("TYPE_1_INVOCATION_PER_4X4_PIXELS"),
+ Self::TYPE_2_INVOCATIONS_PER_PIXEL => Some("TYPE_2_INVOCATIONS_PER_PIXEL"),
+ Self::TYPE_4_INVOCATIONS_PER_PIXEL => Some("TYPE_4_INVOCATIONS_PER_PIXEL"),
+ Self::TYPE_8_INVOCATIONS_PER_PIXEL => Some("TYPE_8_INVOCATIONS_PER_PIXEL"),
+ Self::TYPE_16_INVOCATIONS_PER_PIXEL => Some("TYPE_16_INVOCATIONS_PER_PIXEL"),
+ Self::NO_INVOCATIONS => Some("NO_INVOCATIONS"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for FragmentShadingRateTypeNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::FRAGMENT_SIZE => Some("FRAGMENT_SIZE"),
+ Self::ENUMS => Some("ENUMS"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for FramebufferCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(FramebufferCreateFlags::IMAGELESS.0, "IMAGELESS")];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for FrontFace {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::COUNTER_CLOCKWISE => Some("COUNTER_CLOCKWISE"),
+ Self::CLOCKWISE => Some("CLOCKWISE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for FullScreenExclusiveEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::DEFAULT => Some("DEFAULT"),
+ Self::ALLOWED => Some("ALLOWED"),
+ Self::DISALLOWED => Some("DISALLOWED"),
+ Self::APPLICATION_CONTROLLED => Some("APPLICATION_CONTROLLED"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for GeometryFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (GeometryFlagsKHR::OPAQUE.0, "OPAQUE"),
+ (
+ GeometryFlagsKHR::NO_DUPLICATE_ANY_HIT_INVOCATION.0,
+ "NO_DUPLICATE_ANY_HIT_INVOCATION",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for GeometryInstanceFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ GeometryInstanceFlagsKHR::TRIANGLE_FACING_CULL_DISABLE.0,
+ "TRIANGLE_FACING_CULL_DISABLE",
+ ),
+ (
+ GeometryInstanceFlagsKHR::TRIANGLE_FLIP_FACING.0,
+ "TRIANGLE_FLIP_FACING",
+ ),
+ (GeometryInstanceFlagsKHR::FORCE_OPAQUE.0, "FORCE_OPAQUE"),
+ (
+ GeometryInstanceFlagsKHR::FORCE_NO_OPAQUE.0,
+ "FORCE_NO_OPAQUE",
+ ),
+ (
+ GeometryInstanceFlagsKHR::FORCE_OPACITY_MICROMAP_2_STATE_EXT.0,
+ "FORCE_OPACITY_MICROMAP_2_STATE_EXT",
+ ),
+ (
+ GeometryInstanceFlagsKHR::DISABLE_OPACITY_MICROMAPS_EXT.0,
+ "DISABLE_OPACITY_MICROMAPS_EXT",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for GeometryTypeKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::TRIANGLES => Some("TRIANGLES"),
+ Self::AABBS => Some("AABBS"),
+ Self::INSTANCES => Some("INSTANCES"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for GraphicsPipelineLibraryFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ GraphicsPipelineLibraryFlagsEXT::VERTEX_INPUT_INTERFACE.0,
+ "VERTEX_INPUT_INTERFACE",
+ ),
+ (
+ GraphicsPipelineLibraryFlagsEXT::PRE_RASTERIZATION_SHADERS.0,
+ "PRE_RASTERIZATION_SHADERS",
+ ),
+ (
+ GraphicsPipelineLibraryFlagsEXT::FRAGMENT_SHADER.0,
+ "FRAGMENT_SHADER",
+ ),
+ (
+ GraphicsPipelineLibraryFlagsEXT::FRAGMENT_OUTPUT_INTERFACE.0,
+ "FRAGMENT_OUTPUT_INTERFACE",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for HeadlessSurfaceCreateFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for IOSSurfaceCreateFlagsMVK {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ImageAspectFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (ImageAspectFlags::COLOR.0, "COLOR"),
+ (ImageAspectFlags::DEPTH.0, "DEPTH"),
+ (ImageAspectFlags::STENCIL.0, "STENCIL"),
+ (ImageAspectFlags::METADATA.0, "METADATA"),
+ (ImageAspectFlags::MEMORY_PLANE_0_EXT.0, "MEMORY_PLANE_0_EXT"),
+ (ImageAspectFlags::MEMORY_PLANE_1_EXT.0, "MEMORY_PLANE_1_EXT"),
+ (ImageAspectFlags::MEMORY_PLANE_2_EXT.0, "MEMORY_PLANE_2_EXT"),
+ (ImageAspectFlags::MEMORY_PLANE_3_EXT.0, "MEMORY_PLANE_3_EXT"),
+ (ImageAspectFlags::PLANE_0.0, "PLANE_0"),
+ (ImageAspectFlags::PLANE_1.0, "PLANE_1"),
+ (ImageAspectFlags::PLANE_2.0, "PLANE_2"),
+ (ImageAspectFlags::NONE.0, "NONE"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ImageCompressionFixedRateFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (ImageCompressionFixedRateFlagsEXT::NONE.0, "NONE"),
+ (ImageCompressionFixedRateFlagsEXT::TYPE_1BPC.0, "TYPE_1BPC"),
+ (ImageCompressionFixedRateFlagsEXT::TYPE_2BPC.0, "TYPE_2BPC"),
+ (ImageCompressionFixedRateFlagsEXT::TYPE_3BPC.0, "TYPE_3BPC"),
+ (ImageCompressionFixedRateFlagsEXT::TYPE_4BPC.0, "TYPE_4BPC"),
+ (ImageCompressionFixedRateFlagsEXT::TYPE_5BPC.0, "TYPE_5BPC"),
+ (ImageCompressionFixedRateFlagsEXT::TYPE_6BPC.0, "TYPE_6BPC"),
+ (ImageCompressionFixedRateFlagsEXT::TYPE_7BPC.0, "TYPE_7BPC"),
+ (ImageCompressionFixedRateFlagsEXT::TYPE_8BPC.0, "TYPE_8BPC"),
+ (ImageCompressionFixedRateFlagsEXT::TYPE_9BPC.0, "TYPE_9BPC"),
+ (
+ ImageCompressionFixedRateFlagsEXT::TYPE_10BPC.0,
+ "TYPE_10BPC",
+ ),
+ (
+ ImageCompressionFixedRateFlagsEXT::TYPE_11BPC.0,
+ "TYPE_11BPC",
+ ),
+ (
+ ImageCompressionFixedRateFlagsEXT::TYPE_12BPC.0,
+ "TYPE_12BPC",
+ ),
+ (
+ ImageCompressionFixedRateFlagsEXT::TYPE_13BPC.0,
+ "TYPE_13BPC",
+ ),
+ (
+ ImageCompressionFixedRateFlagsEXT::TYPE_14BPC.0,
+ "TYPE_14BPC",
+ ),
+ (
+ ImageCompressionFixedRateFlagsEXT::TYPE_15BPC.0,
+ "TYPE_15BPC",
+ ),
+ (
+ ImageCompressionFixedRateFlagsEXT::TYPE_16BPC.0,
+ "TYPE_16BPC",
+ ),
+ (
+ ImageCompressionFixedRateFlagsEXT::TYPE_17BPC.0,
+ "TYPE_17BPC",
+ ),
+ (
+ ImageCompressionFixedRateFlagsEXT::TYPE_18BPC.0,
+ "TYPE_18BPC",
+ ),
+ (
+ ImageCompressionFixedRateFlagsEXT::TYPE_19BPC.0,
+ "TYPE_19BPC",
+ ),
+ (
+ ImageCompressionFixedRateFlagsEXT::TYPE_20BPC.0,
+ "TYPE_20BPC",
+ ),
+ (
+ ImageCompressionFixedRateFlagsEXT::TYPE_21BPC.0,
+ "TYPE_21BPC",
+ ),
+ (
+ ImageCompressionFixedRateFlagsEXT::TYPE_22BPC.0,
+ "TYPE_22BPC",
+ ),
+ (
+ ImageCompressionFixedRateFlagsEXT::TYPE_23BPC.0,
+ "TYPE_23BPC",
+ ),
+ (
+ ImageCompressionFixedRateFlagsEXT::TYPE_24BPC.0,
+ "TYPE_24BPC",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ImageCompressionFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (ImageCompressionFlagsEXT::DEFAULT.0, "DEFAULT"),
+ (
+ ImageCompressionFlagsEXT::FIXED_RATE_DEFAULT.0,
+ "FIXED_RATE_DEFAULT",
+ ),
+ (
+ ImageCompressionFlagsEXT::FIXED_RATE_EXPLICIT.0,
+ "FIXED_RATE_EXPLICIT",
+ ),
+ (ImageCompressionFlagsEXT::DISABLED.0, "DISABLED"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ImageConstraintsInfoFlagsFUCHSIA {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ ImageConstraintsInfoFlagsFUCHSIA::CPU_READ_RARELY.0,
+ "CPU_READ_RARELY",
+ ),
+ (
+ ImageConstraintsInfoFlagsFUCHSIA::CPU_READ_OFTEN.0,
+ "CPU_READ_OFTEN",
+ ),
+ (
+ ImageConstraintsInfoFlagsFUCHSIA::CPU_WRITE_RARELY.0,
+ "CPU_WRITE_RARELY",
+ ),
+ (
+ ImageConstraintsInfoFlagsFUCHSIA::CPU_WRITE_OFTEN.0,
+ "CPU_WRITE_OFTEN",
+ ),
+ (
+ ImageConstraintsInfoFlagsFUCHSIA::PROTECTED_OPTIONAL.0,
+ "PROTECTED_OPTIONAL",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ImageCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (ImageCreateFlags::SPARSE_BINDING.0, "SPARSE_BINDING"),
+ (ImageCreateFlags::SPARSE_RESIDENCY.0, "SPARSE_RESIDENCY"),
+ (ImageCreateFlags::SPARSE_ALIASED.0, "SPARSE_ALIASED"),
+ (ImageCreateFlags::MUTABLE_FORMAT.0, "MUTABLE_FORMAT"),
+ (ImageCreateFlags::CUBE_COMPATIBLE.0, "CUBE_COMPATIBLE"),
+ (ImageCreateFlags::CORNER_SAMPLED_NV.0, "CORNER_SAMPLED_NV"),
+ (
+ ImageCreateFlags::SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_EXT.0,
+ "SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_EXT",
+ ),
+ (ImageCreateFlags::SUBSAMPLED_EXT.0, "SUBSAMPLED_EXT"),
+ (ImageCreateFlags::RESERVED_19_EXT.0, "RESERVED_19_EXT"),
+ (
+ ImageCreateFlags::DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT.0,
+ "DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT",
+ ),
+ (
+ ImageCreateFlags::MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXT.0,
+ "MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXT",
+ ),
+ (
+ ImageCreateFlags::TYPE_2D_VIEW_COMPATIBLE_EXT.0,
+ "TYPE_2D_VIEW_COMPATIBLE_EXT",
+ ),
+ (
+ ImageCreateFlags::FRAGMENT_DENSITY_MAP_OFFSET_QCOM.0,
+ "FRAGMENT_DENSITY_MAP_OFFSET_QCOM",
+ ),
+ (ImageCreateFlags::ALIAS.0, "ALIAS"),
+ (
+ ImageCreateFlags::SPLIT_INSTANCE_BIND_REGIONS.0,
+ "SPLIT_INSTANCE_BIND_REGIONS",
+ ),
+ (
+ ImageCreateFlags::TYPE_2D_ARRAY_COMPATIBLE.0,
+ "TYPE_2D_ARRAY_COMPATIBLE",
+ ),
+ (
+ ImageCreateFlags::BLOCK_TEXEL_VIEW_COMPATIBLE.0,
+ "BLOCK_TEXEL_VIEW_COMPATIBLE",
+ ),
+ (ImageCreateFlags::EXTENDED_USAGE.0, "EXTENDED_USAGE"),
+ (ImageCreateFlags::PROTECTED.0, "PROTECTED"),
+ (ImageCreateFlags::DISJOINT.0, "DISJOINT"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ImageFormatConstraintsFlagsFUCHSIA {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ImageLayout {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::UNDEFINED => Some("UNDEFINED"),
+ Self::GENERAL => Some("GENERAL"),
+ Self::COLOR_ATTACHMENT_OPTIMAL => Some("COLOR_ATTACHMENT_OPTIMAL"),
+ Self::DEPTH_STENCIL_ATTACHMENT_OPTIMAL => Some("DEPTH_STENCIL_ATTACHMENT_OPTIMAL"),
+ Self::DEPTH_STENCIL_READ_ONLY_OPTIMAL => Some("DEPTH_STENCIL_READ_ONLY_OPTIMAL"),
+ Self::SHADER_READ_ONLY_OPTIMAL => Some("SHADER_READ_ONLY_OPTIMAL"),
+ Self::TRANSFER_SRC_OPTIMAL => Some("TRANSFER_SRC_OPTIMAL"),
+ Self::TRANSFER_DST_OPTIMAL => Some("TRANSFER_DST_OPTIMAL"),
+ Self::PREINITIALIZED => Some("PREINITIALIZED"),
+ Self::PRESENT_SRC_KHR => Some("PRESENT_SRC_KHR"),
+ Self::VIDEO_DECODE_DST_KHR => Some("VIDEO_DECODE_DST_KHR"),
+ Self::VIDEO_DECODE_SRC_KHR => Some("VIDEO_DECODE_SRC_KHR"),
+ Self::VIDEO_DECODE_DPB_KHR => Some("VIDEO_DECODE_DPB_KHR"),
+ Self::SHARED_PRESENT_KHR => Some("SHARED_PRESENT_KHR"),
+ Self::FRAGMENT_DENSITY_MAP_OPTIMAL_EXT => Some("FRAGMENT_DENSITY_MAP_OPTIMAL_EXT"),
+ Self::FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR => {
+ Some("FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR")
+ }
+ Self::VIDEO_ENCODE_DST_KHR => Some("VIDEO_ENCODE_DST_KHR"),
+ Self::VIDEO_ENCODE_SRC_KHR => Some("VIDEO_ENCODE_SRC_KHR"),
+ Self::VIDEO_ENCODE_DPB_KHR => Some("VIDEO_ENCODE_DPB_KHR"),
+ Self::ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT => {
+ Some("ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT")
+ }
+ Self::DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL => {
+ Some("DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL")
+ }
+ Self::DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL => {
+ Some("DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL")
+ }
+ Self::DEPTH_ATTACHMENT_OPTIMAL => Some("DEPTH_ATTACHMENT_OPTIMAL"),
+ Self::DEPTH_READ_ONLY_OPTIMAL => Some("DEPTH_READ_ONLY_OPTIMAL"),
+ Self::STENCIL_ATTACHMENT_OPTIMAL => Some("STENCIL_ATTACHMENT_OPTIMAL"),
+ Self::STENCIL_READ_ONLY_OPTIMAL => Some("STENCIL_READ_ONLY_OPTIMAL"),
+ Self::READ_ONLY_OPTIMAL => Some("READ_ONLY_OPTIMAL"),
+ Self::ATTACHMENT_OPTIMAL => Some("ATTACHMENT_OPTIMAL"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for ImagePipeSurfaceCreateFlagsFUCHSIA {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ImageTiling {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::OPTIMAL => Some("OPTIMAL"),
+ Self::LINEAR => Some("LINEAR"),
+ Self::DRM_FORMAT_MODIFIER_EXT => Some("DRM_FORMAT_MODIFIER_EXT"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for ImageType {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::TYPE_1D => Some("TYPE_1D"),
+ Self::TYPE_2D => Some("TYPE_2D"),
+ Self::TYPE_3D => Some("TYPE_3D"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for ImageUsageFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (ImageUsageFlags::TRANSFER_SRC.0, "TRANSFER_SRC"),
+ (ImageUsageFlags::TRANSFER_DST.0, "TRANSFER_DST"),
+ (ImageUsageFlags::SAMPLED.0, "SAMPLED"),
+ (ImageUsageFlags::STORAGE.0, "STORAGE"),
+ (ImageUsageFlags::COLOR_ATTACHMENT.0, "COLOR_ATTACHMENT"),
+ (
+ ImageUsageFlags::DEPTH_STENCIL_ATTACHMENT.0,
+ "DEPTH_STENCIL_ATTACHMENT",
+ ),
+ (
+ ImageUsageFlags::TRANSIENT_ATTACHMENT.0,
+ "TRANSIENT_ATTACHMENT",
+ ),
+ (ImageUsageFlags::INPUT_ATTACHMENT.0, "INPUT_ATTACHMENT"),
+ (
+ ImageUsageFlags::VIDEO_DECODE_DST_KHR.0,
+ "VIDEO_DECODE_DST_KHR",
+ ),
+ (
+ ImageUsageFlags::VIDEO_DECODE_SRC_KHR.0,
+ "VIDEO_DECODE_SRC_KHR",
+ ),
+ (
+ ImageUsageFlags::VIDEO_DECODE_DPB_KHR.0,
+ "VIDEO_DECODE_DPB_KHR",
+ ),
+ (ImageUsageFlags::RESERVED_16_QCOM.0, "RESERVED_16_QCOM"),
+ (ImageUsageFlags::RESERVED_17_QCOM.0, "RESERVED_17_QCOM"),
+ (
+ ImageUsageFlags::FRAGMENT_DENSITY_MAP_EXT.0,
+ "FRAGMENT_DENSITY_MAP_EXT",
+ ),
+ (
+ ImageUsageFlags::FRAGMENT_SHADING_RATE_ATTACHMENT_KHR.0,
+ "FRAGMENT_SHADING_RATE_ATTACHMENT_KHR",
+ ),
+ (ImageUsageFlags::RESERVED_22_EXT.0, "RESERVED_22_EXT"),
+ (
+ ImageUsageFlags::VIDEO_ENCODE_DST_KHR.0,
+ "VIDEO_ENCODE_DST_KHR",
+ ),
+ (
+ ImageUsageFlags::VIDEO_ENCODE_SRC_KHR.0,
+ "VIDEO_ENCODE_SRC_KHR",
+ ),
+ (
+ ImageUsageFlags::VIDEO_ENCODE_DPB_KHR.0,
+ "VIDEO_ENCODE_DPB_KHR",
+ ),
+ (
+ ImageUsageFlags::ATTACHMENT_FEEDBACK_LOOP_EXT.0,
+ "ATTACHMENT_FEEDBACK_LOOP_EXT",
+ ),
+ (
+ ImageUsageFlags::INVOCATION_MASK_HUAWEI.0,
+ "INVOCATION_MASK_HUAWEI",
+ ),
+ (ImageUsageFlags::SAMPLE_WEIGHT_QCOM.0, "SAMPLE_WEIGHT_QCOM"),
+ (
+ ImageUsageFlags::SAMPLE_BLOCK_MATCH_QCOM.0,
+ "SAMPLE_BLOCK_MATCH_QCOM",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ImageViewCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ ImageViewCreateFlags::FRAGMENT_DENSITY_MAP_DYNAMIC_EXT.0,
+ "FRAGMENT_DENSITY_MAP_DYNAMIC_EXT",
+ ),
+ (
+ ImageViewCreateFlags::DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT.0,
+ "DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT",
+ ),
+ (
+ ImageViewCreateFlags::FRAGMENT_DENSITY_MAP_DEFERRED_EXT.0,
+ "FRAGMENT_DENSITY_MAP_DEFERRED_EXT",
+ ),
+ (ImageViewCreateFlags::RESERVED_3_EXT.0, "RESERVED_3_EXT"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ImageViewType {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::TYPE_1D => Some("TYPE_1D"),
+ Self::TYPE_2D => Some("TYPE_2D"),
+ Self::TYPE_3D => Some("TYPE_3D"),
+ Self::CUBE => Some("CUBE"),
+ Self::TYPE_1D_ARRAY => Some("TYPE_1D_ARRAY"),
+ Self::TYPE_2D_ARRAY => Some("TYPE_2D_ARRAY"),
+ Self::CUBE_ARRAY => Some("CUBE_ARRAY"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for IndexType {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::UINT16 => Some("UINT16"),
+ Self::UINT32 => Some("UINT32"),
+ Self::NONE_KHR => Some("NONE_KHR"),
+ Self::UINT8_EXT => Some("UINT8_EXT"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for IndirectCommandsLayoutUsageFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ IndirectCommandsLayoutUsageFlagsNV::EXPLICIT_PREPROCESS.0,
+ "EXPLICIT_PREPROCESS",
+ ),
+ (
+ IndirectCommandsLayoutUsageFlagsNV::INDEXED_SEQUENCES.0,
+ "INDEXED_SEQUENCES",
+ ),
+ (
+ IndirectCommandsLayoutUsageFlagsNV::UNORDERED_SEQUENCES.0,
+ "UNORDERED_SEQUENCES",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for IndirectCommandsTokenTypeNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::SHADER_GROUP => Some("SHADER_GROUP"),
+ Self::STATE_FLAGS => Some("STATE_FLAGS"),
+ Self::INDEX_BUFFER => Some("INDEX_BUFFER"),
+ Self::VERTEX_BUFFER => Some("VERTEX_BUFFER"),
+ Self::PUSH_CONSTANT => Some("PUSH_CONSTANT"),
+ Self::DRAW_INDEXED => Some("DRAW_INDEXED"),
+ Self::DRAW => Some("DRAW"),
+ Self::DRAW_TASKS => Some("DRAW_TASKS"),
+ Self::DRAW_MESH_TASKS => Some("DRAW_MESH_TASKS"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for IndirectStateFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] =
+ &[(IndirectStateFlagsNV::FLAG_FRONTFACE.0, "FLAG_FRONTFACE")];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for InstanceCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(
+ InstanceCreateFlags::ENUMERATE_PORTABILITY_KHR.0,
+ "ENUMERATE_PORTABILITY_KHR",
+ )];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for InternalAllocationType {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::EXECUTABLE => Some("EXECUTABLE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for LineRasterizationModeEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::DEFAULT => Some("DEFAULT"),
+ Self::RECTANGULAR => Some("RECTANGULAR"),
+ Self::BRESENHAM => Some("BRESENHAM"),
+ Self::RECTANGULAR_SMOOTH => Some("RECTANGULAR_SMOOTH"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for LogicOp {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::CLEAR => Some("CLEAR"),
+ Self::AND => Some("AND"),
+ Self::AND_REVERSE => Some("AND_REVERSE"),
+ Self::COPY => Some("COPY"),
+ Self::AND_INVERTED => Some("AND_INVERTED"),
+ Self::NO_OP => Some("NO_OP"),
+ Self::XOR => Some("XOR"),
+ Self::OR => Some("OR"),
+ Self::NOR => Some("NOR"),
+ Self::EQUIVALENT => Some("EQUIVALENT"),
+ Self::INVERT => Some("INVERT"),
+ Self::OR_REVERSE => Some("OR_REVERSE"),
+ Self::COPY_INVERTED => Some("COPY_INVERTED"),
+ Self::OR_INVERTED => Some("OR_INVERTED"),
+ Self::NAND => Some("NAND"),
+ Self::SET => Some("SET"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for MacOSSurfaceCreateFlagsMVK {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for MemoryAllocateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (MemoryAllocateFlags::DEVICE_MASK.0, "DEVICE_MASK"),
+ (MemoryAllocateFlags::DEVICE_ADDRESS.0, "DEVICE_ADDRESS"),
+ (
+ MemoryAllocateFlags::DEVICE_ADDRESS_CAPTURE_REPLAY.0,
+ "DEVICE_ADDRESS_CAPTURE_REPLAY",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for MemoryDecompressionMethodFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags64, &str)] = &[(
+ MemoryDecompressionMethodFlagsNV::GDEFLATE_1_0.0,
+ "GDEFLATE_1_0",
+ )];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for MemoryHeapFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (MemoryHeapFlags::DEVICE_LOCAL.0, "DEVICE_LOCAL"),
+ (MemoryHeapFlags::RESERVED_2_KHR.0, "RESERVED_2_KHR"),
+ (MemoryHeapFlags::MULTI_INSTANCE.0, "MULTI_INSTANCE"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for MemoryMapFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for MemoryOverallocationBehaviorAMD {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::DEFAULT => Some("DEFAULT"),
+ Self::ALLOWED => Some("ALLOWED"),
+ Self::DISALLOWED => Some("DISALLOWED"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for MemoryPropertyFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (MemoryPropertyFlags::DEVICE_LOCAL.0, "DEVICE_LOCAL"),
+ (MemoryPropertyFlags::HOST_VISIBLE.0, "HOST_VISIBLE"),
+ (MemoryPropertyFlags::HOST_COHERENT.0, "HOST_COHERENT"),
+ (MemoryPropertyFlags::HOST_CACHED.0, "HOST_CACHED"),
+ (MemoryPropertyFlags::LAZILY_ALLOCATED.0, "LAZILY_ALLOCATED"),
+ (
+ MemoryPropertyFlags::DEVICE_COHERENT_AMD.0,
+ "DEVICE_COHERENT_AMD",
+ ),
+ (
+ MemoryPropertyFlags::DEVICE_UNCACHED_AMD.0,
+ "DEVICE_UNCACHED_AMD",
+ ),
+ (MemoryPropertyFlags::RDMA_CAPABLE_NV.0, "RDMA_CAPABLE_NV"),
+ (MemoryPropertyFlags::PROTECTED.0, "PROTECTED"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for MetalSurfaceCreateFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for MicromapCreateFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(
+ MicromapCreateFlagsEXT::DEVICE_ADDRESS_CAPTURE_REPLAY.0,
+ "DEVICE_ADDRESS_CAPTURE_REPLAY",
+ )];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for MicromapTypeEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::OPACITY_MICROMAP => Some("OPACITY_MICROMAP"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for OpacityMicromapFormatEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::TYPE_2_STATE => Some("TYPE_2_STATE"),
+ Self::TYPE_4_STATE => Some("TYPE_4_STATE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for OpacityMicromapSpecialIndexEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::FULLY_TRANSPARENT => Some("FULLY_TRANSPARENT"),
+ Self::FULLY_OPAQUE => Some("FULLY_OPAQUE"),
+ Self::FULLY_UNKNOWN_TRANSPARENT => Some("FULLY_UNKNOWN_TRANSPARENT"),
+ Self::FULLY_UNKNOWN_OPAQUE => Some("FULLY_UNKNOWN_OPAQUE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for OpticalFlowExecuteFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(
+ OpticalFlowExecuteFlagsNV::DISABLE_TEMPORAL_HINTS.0,
+ "DISABLE_TEMPORAL_HINTS",
+ )];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for OpticalFlowGridSizeFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (OpticalFlowGridSizeFlagsNV::UNKNOWN.0, "UNKNOWN"),
+ (OpticalFlowGridSizeFlagsNV::TYPE_1X1.0, "TYPE_1X1"),
+ (OpticalFlowGridSizeFlagsNV::TYPE_2X2.0, "TYPE_2X2"),
+ (OpticalFlowGridSizeFlagsNV::TYPE_4X4.0, "TYPE_4X4"),
+ (OpticalFlowGridSizeFlagsNV::TYPE_8X8.0, "TYPE_8X8"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for OpticalFlowPerformanceLevelNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::UNKNOWN => Some("UNKNOWN"),
+ Self::SLOW => Some("SLOW"),
+ Self::MEDIUM => Some("MEDIUM"),
+ Self::FAST => Some("FAST"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for OpticalFlowSessionBindingPointNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::UNKNOWN => Some("UNKNOWN"),
+ Self::INPUT => Some("INPUT"),
+ Self::REFERENCE => Some("REFERENCE"),
+ Self::HINT => Some("HINT"),
+ Self::FLOW_VECTOR => Some("FLOW_VECTOR"),
+ Self::BACKWARD_FLOW_VECTOR => Some("BACKWARD_FLOW_VECTOR"),
+ Self::COST => Some("COST"),
+ Self::BACKWARD_COST => Some("BACKWARD_COST"),
+ Self::GLOBAL_FLOW => Some("GLOBAL_FLOW"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for OpticalFlowSessionCreateFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ OpticalFlowSessionCreateFlagsNV::ENABLE_HINT.0,
+ "ENABLE_HINT",
+ ),
+ (
+ OpticalFlowSessionCreateFlagsNV::ENABLE_COST.0,
+ "ENABLE_COST",
+ ),
+ (
+ OpticalFlowSessionCreateFlagsNV::ENABLE_GLOBAL_FLOW.0,
+ "ENABLE_GLOBAL_FLOW",
+ ),
+ (
+ OpticalFlowSessionCreateFlagsNV::ALLOW_REGIONS.0,
+ "ALLOW_REGIONS",
+ ),
+ (
+ OpticalFlowSessionCreateFlagsNV::BOTH_DIRECTIONS.0,
+ "BOTH_DIRECTIONS",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for OpticalFlowUsageFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (OpticalFlowUsageFlagsNV::UNKNOWN.0, "UNKNOWN"),
+ (OpticalFlowUsageFlagsNV::INPUT.0, "INPUT"),
+ (OpticalFlowUsageFlagsNV::OUTPUT.0, "OUTPUT"),
+ (OpticalFlowUsageFlagsNV::HINT.0, "HINT"),
+ (OpticalFlowUsageFlagsNV::COST.0, "COST"),
+ (OpticalFlowUsageFlagsNV::GLOBAL_FLOW.0, "GLOBAL_FLOW"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PeerMemoryFeatureFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (PeerMemoryFeatureFlags::COPY_SRC.0, "COPY_SRC"),
+ (PeerMemoryFeatureFlags::COPY_DST.0, "COPY_DST"),
+ (PeerMemoryFeatureFlags::GENERIC_SRC.0, "GENERIC_SRC"),
+ (PeerMemoryFeatureFlags::GENERIC_DST.0, "GENERIC_DST"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PerformanceConfigurationTypeINTEL {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED => {
+ Some("COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED")
+ }
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PerformanceCounterDescriptionFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ PerformanceCounterDescriptionFlagsKHR::PERFORMANCE_IMPACTING.0,
+ "PERFORMANCE_IMPACTING",
+ ),
+ (
+ PerformanceCounterDescriptionFlagsKHR::CONCURRENTLY_IMPACTED.0,
+ "CONCURRENTLY_IMPACTED",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PerformanceCounterScopeKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::COMMAND_BUFFER => Some("COMMAND_BUFFER"),
+ Self::RENDER_PASS => Some("RENDER_PASS"),
+ Self::COMMAND => Some("COMMAND"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PerformanceCounterStorageKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::INT32 => Some("INT32"),
+ Self::INT64 => Some("INT64"),
+ Self::UINT32 => Some("UINT32"),
+ Self::UINT64 => Some("UINT64"),
+ Self::FLOAT32 => Some("FLOAT32"),
+ Self::FLOAT64 => Some("FLOAT64"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PerformanceCounterUnitKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::GENERIC => Some("GENERIC"),
+ Self::PERCENTAGE => Some("PERCENTAGE"),
+ Self::NANOSECONDS => Some("NANOSECONDS"),
+ Self::BYTES => Some("BYTES"),
+ Self::BYTES_PER_SECOND => Some("BYTES_PER_SECOND"),
+ Self::KELVIN => Some("KELVIN"),
+ Self::WATTS => Some("WATTS"),
+ Self::VOLTS => Some("VOLTS"),
+ Self::AMPS => Some("AMPS"),
+ Self::HERTZ => Some("HERTZ"),
+ Self::CYCLES => Some("CYCLES"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PerformanceOverrideTypeINTEL {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::NULL_HARDWARE => Some("NULL_HARDWARE"),
+ Self::FLUSH_GPU_CACHES => Some("FLUSH_GPU_CACHES"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PerformanceParameterTypeINTEL {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::HW_COUNTERS_SUPPORTED => Some("HW_COUNTERS_SUPPORTED"),
+ Self::STREAM_MARKER_VALIDS => Some("STREAM_MARKER_VALIDS"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PerformanceValueTypeINTEL {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::UINT32 => Some("UINT32"),
+ Self::UINT64 => Some("UINT64"),
+ Self::FLOAT => Some("FLOAT"),
+ Self::BOOL => Some("BOOL"),
+ Self::STRING => Some("STRING"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PhysicalDeviceType {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::OTHER => Some("OTHER"),
+ Self::INTEGRATED_GPU => Some("INTEGRATED_GPU"),
+ Self::DISCRETE_GPU => Some("DISCRETE_GPU"),
+ Self::VIRTUAL_GPU => Some("VIRTUAL_GPU"),
+ Self::CPU => Some("CPU"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PipelineBindPoint {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::GRAPHICS => Some("GRAPHICS"),
+ Self::COMPUTE => Some("COMPUTE"),
+ Self::RAY_TRACING_KHR => Some("RAY_TRACING_KHR"),
+ Self::SUBPASS_SHADING_HUAWEI => Some("SUBPASS_SHADING_HUAWEI"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PipelineCacheCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (PipelineCacheCreateFlags::RESERVED_1_EXT.0, "RESERVED_1_EXT"),
+ (PipelineCacheCreateFlags::RESERVED_2_KHR.0, "RESERVED_2_KHR"),
+ (
+ PipelineCacheCreateFlags::EXTERNALLY_SYNCHRONIZED.0,
+ "EXTERNALLY_SYNCHRONIZED",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineCacheHeaderVersion {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::ONE => Some("ONE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PipelineColorBlendStateCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(
+ PipelineColorBlendStateCreateFlags::RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXT.0,
+ "RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXT",
+ )];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineCompilerControlFlagsAMD {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineCoverageModulationStateCreateFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineCoverageReductionStateCreateFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineCoverageToColorStateCreateFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ PipelineCreateFlags::DISABLE_OPTIMIZATION.0,
+ "DISABLE_OPTIMIZATION",
+ ),
+ (
+ PipelineCreateFlags::ALLOW_DERIVATIVES.0,
+ "ALLOW_DERIVATIVES",
+ ),
+ (PipelineCreateFlags::DERIVATIVE.0, "DERIVATIVE"),
+ (
+ PipelineCreateFlags::RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_KHR.0,
+ "RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_KHR",
+ ),
+ (
+ PipelineCreateFlags::RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_EXT.0,
+ "RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_EXT",
+ ),
+ (
+ PipelineCreateFlags::RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_KHR.0,
+ "RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_KHR",
+ ),
+ (
+ PipelineCreateFlags::RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_KHR.0,
+ "RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_KHR",
+ ),
+ (
+ PipelineCreateFlags::RAY_TRACING_NO_NULL_MISS_SHADERS_KHR.0,
+ "RAY_TRACING_NO_NULL_MISS_SHADERS_KHR",
+ ),
+ (
+ PipelineCreateFlags::RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_KHR.0,
+ "RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_KHR",
+ ),
+ (
+ PipelineCreateFlags::RAY_TRACING_SKIP_TRIANGLES_KHR.0,
+ "RAY_TRACING_SKIP_TRIANGLES_KHR",
+ ),
+ (
+ PipelineCreateFlags::RAY_TRACING_SKIP_AABBS_KHR.0,
+ "RAY_TRACING_SKIP_AABBS_KHR",
+ ),
+ (
+ PipelineCreateFlags::RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_KHR.0,
+ "RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_KHR",
+ ),
+ (PipelineCreateFlags::DEFER_COMPILE_NV.0, "DEFER_COMPILE_NV"),
+ (
+ PipelineCreateFlags::CAPTURE_STATISTICS_KHR.0,
+ "CAPTURE_STATISTICS_KHR",
+ ),
+ (
+ PipelineCreateFlags::CAPTURE_INTERNAL_REPRESENTATIONS_KHR.0,
+ "CAPTURE_INTERNAL_REPRESENTATIONS_KHR",
+ ),
+ (
+ PipelineCreateFlags::INDIRECT_BINDABLE_NV.0,
+ "INDIRECT_BINDABLE_NV",
+ ),
+ (PipelineCreateFlags::LIBRARY_KHR.0, "LIBRARY_KHR"),
+ (
+ PipelineCreateFlags::DESCRIPTOR_BUFFER_EXT.0,
+ "DESCRIPTOR_BUFFER_EXT",
+ ),
+ (
+ PipelineCreateFlags::RETAIN_LINK_TIME_OPTIMIZATION_INFO_EXT.0,
+ "RETAIN_LINK_TIME_OPTIMIZATION_INFO_EXT",
+ ),
+ (
+ PipelineCreateFlags::LINK_TIME_OPTIMIZATION_EXT.0,
+ "LINK_TIME_OPTIMIZATION_EXT",
+ ),
+ (
+ PipelineCreateFlags::RAY_TRACING_ALLOW_MOTION_NV.0,
+ "RAY_TRACING_ALLOW_MOTION_NV",
+ ),
+ (
+ PipelineCreateFlags::COLOR_ATTACHMENT_FEEDBACK_LOOP_EXT.0,
+ "COLOR_ATTACHMENT_FEEDBACK_LOOP_EXT",
+ ),
+ (
+ PipelineCreateFlags::DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_EXT.0,
+ "DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_EXT",
+ ),
+ (
+ PipelineCreateFlags::RAY_TRACING_OPACITY_MICROMAP_EXT.0,
+ "RAY_TRACING_OPACITY_MICROMAP_EXT",
+ ),
+ (PipelineCreateFlags::RESERVED_28_NV.0, "RESERVED_28_NV"),
+ (
+ PipelineCreateFlags::NO_PROTECTED_ACCESS_EXT.0,
+ "NO_PROTECTED_ACCESS_EXT",
+ ),
+ (
+ PipelineCreateFlags::PROTECTED_ACCESS_ONLY_EXT.0,
+ "PROTECTED_ACCESS_ONLY_EXT",
+ ),
+ (
+ PipelineCreateFlags::VIEW_INDEX_FROM_DEVICE_INDEX.0,
+ "VIEW_INDEX_FROM_DEVICE_INDEX",
+ ),
+ (PipelineCreateFlags::DISPATCH_BASE.0, "DISPATCH_BASE"),
+ (
+ PipelineCreateFlags::FAIL_ON_PIPELINE_COMPILE_REQUIRED.0,
+ "FAIL_ON_PIPELINE_COMPILE_REQUIRED",
+ ),
+ (
+ PipelineCreateFlags::EARLY_RETURN_ON_FAILURE.0,
+ "EARLY_RETURN_ON_FAILURE",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineCreationFeedbackFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (PipelineCreationFeedbackFlags::VALID.0, "VALID"),
+ (
+ PipelineCreationFeedbackFlags::APPLICATION_PIPELINE_CACHE_HIT.0,
+ "APPLICATION_PIPELINE_CACHE_HIT",
+ ),
+ (
+ PipelineCreationFeedbackFlags::BASE_PIPELINE_ACCELERATION.0,
+ "BASE_PIPELINE_ACCELERATION",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineDepthStencilStateCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN : & [(Flags , & str)] = & [(PipelineDepthStencilStateCreateFlags :: RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT . 0 , "RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT") , (PipelineDepthStencilStateCreateFlags :: RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT . 0 , "RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT")] ;
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineDiscardRectangleStateCreateFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineDynamicStateCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineExecutableStatisticFormatKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::BOOL32 => Some("BOOL32"),
+ Self::INT64 => Some("INT64"),
+ Self::UINT64 => Some("UINT64"),
+ Self::FLOAT64 => Some("FLOAT64"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PipelineInputAssemblyStateCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineLayoutCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ PipelineLayoutCreateFlags::RESERVED_0_AMD.0,
+ "RESERVED_0_AMD",
+ ),
+ (
+ PipelineLayoutCreateFlags::INDEPENDENT_SETS_EXT.0,
+ "INDEPENDENT_SETS_EXT",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineMultisampleStateCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineRasterizationConservativeStateCreateFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineRasterizationDepthClipStateCreateFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineRasterizationStateCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineRasterizationStateStreamCreateFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineRobustnessBufferBehaviorEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::DEVICE_DEFAULT => Some("DEVICE_DEFAULT"),
+ Self::DISABLED => Some("DISABLED"),
+ Self::ROBUST_BUFFER_ACCESS => Some("ROBUST_BUFFER_ACCESS"),
+ Self::ROBUST_BUFFER_ACCESS_2 => Some("ROBUST_BUFFER_ACCESS_2"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PipelineRobustnessImageBehaviorEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::DEVICE_DEFAULT => Some("DEVICE_DEFAULT"),
+ Self::DISABLED => Some("DISABLED"),
+ Self::ROBUST_IMAGE_ACCESS => Some("ROBUST_IMAGE_ACCESS"),
+ Self::ROBUST_IMAGE_ACCESS_2 => Some("ROBUST_IMAGE_ACCESS_2"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PipelineShaderStageCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ PipelineShaderStageCreateFlags::RESERVED_3_KHR.0,
+ "RESERVED_3_KHR",
+ ),
+ (
+ PipelineShaderStageCreateFlags::ALLOW_VARYING_SUBGROUP_SIZE.0,
+ "ALLOW_VARYING_SUBGROUP_SIZE",
+ ),
+ (
+ PipelineShaderStageCreateFlags::REQUIRE_FULL_SUBGROUPS.0,
+ "REQUIRE_FULL_SUBGROUPS",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineStageFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (PipelineStageFlags::TOP_OF_PIPE.0, "TOP_OF_PIPE"),
+ (PipelineStageFlags::DRAW_INDIRECT.0, "DRAW_INDIRECT"),
+ (PipelineStageFlags::VERTEX_INPUT.0, "VERTEX_INPUT"),
+ (PipelineStageFlags::VERTEX_SHADER.0, "VERTEX_SHADER"),
+ (
+ PipelineStageFlags::TESSELLATION_CONTROL_SHADER.0,
+ "TESSELLATION_CONTROL_SHADER",
+ ),
+ (
+ PipelineStageFlags::TESSELLATION_EVALUATION_SHADER.0,
+ "TESSELLATION_EVALUATION_SHADER",
+ ),
+ (PipelineStageFlags::GEOMETRY_SHADER.0, "GEOMETRY_SHADER"),
+ (PipelineStageFlags::FRAGMENT_SHADER.0, "FRAGMENT_SHADER"),
+ (
+ PipelineStageFlags::EARLY_FRAGMENT_TESTS.0,
+ "EARLY_FRAGMENT_TESTS",
+ ),
+ (
+ PipelineStageFlags::LATE_FRAGMENT_TESTS.0,
+ "LATE_FRAGMENT_TESTS",
+ ),
+ (
+ PipelineStageFlags::COLOR_ATTACHMENT_OUTPUT.0,
+ "COLOR_ATTACHMENT_OUTPUT",
+ ),
+ (PipelineStageFlags::COMPUTE_SHADER.0, "COMPUTE_SHADER"),
+ (PipelineStageFlags::TRANSFER.0, "TRANSFER"),
+ (PipelineStageFlags::BOTTOM_OF_PIPE.0, "BOTTOM_OF_PIPE"),
+ (PipelineStageFlags::HOST.0, "HOST"),
+ (PipelineStageFlags::ALL_GRAPHICS.0, "ALL_GRAPHICS"),
+ (PipelineStageFlags::ALL_COMMANDS.0, "ALL_COMMANDS"),
+ (
+ PipelineStageFlags::TRANSFORM_FEEDBACK_EXT.0,
+ "TRANSFORM_FEEDBACK_EXT",
+ ),
+ (
+ PipelineStageFlags::CONDITIONAL_RENDERING_EXT.0,
+ "CONDITIONAL_RENDERING_EXT",
+ ),
+ (
+ PipelineStageFlags::ACCELERATION_STRUCTURE_BUILD_KHR.0,
+ "ACCELERATION_STRUCTURE_BUILD_KHR",
+ ),
+ (
+ PipelineStageFlags::RAY_TRACING_SHADER_KHR.0,
+ "RAY_TRACING_SHADER_KHR",
+ ),
+ (
+ PipelineStageFlags::FRAGMENT_DENSITY_PROCESS_EXT.0,
+ "FRAGMENT_DENSITY_PROCESS_EXT",
+ ),
+ (
+ PipelineStageFlags::FRAGMENT_SHADING_RATE_ATTACHMENT_KHR.0,
+ "FRAGMENT_SHADING_RATE_ATTACHMENT_KHR",
+ ),
+ (
+ PipelineStageFlags::COMMAND_PREPROCESS_NV.0,
+ "COMMAND_PREPROCESS_NV",
+ ),
+ (PipelineStageFlags::TASK_SHADER_EXT.0, "TASK_SHADER_EXT"),
+ (PipelineStageFlags::MESH_SHADER_EXT.0, "MESH_SHADER_EXT"),
+ (PipelineStageFlags::NONE.0, "NONE"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineStageFlags2 {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags64, &str)] = &[
+ (PipelineStageFlags2::NONE.0, "NONE"),
+ (PipelineStageFlags2::TOP_OF_PIPE.0, "TOP_OF_PIPE"),
+ (PipelineStageFlags2::DRAW_INDIRECT.0, "DRAW_INDIRECT"),
+ (PipelineStageFlags2::VERTEX_INPUT.0, "VERTEX_INPUT"),
+ (PipelineStageFlags2::VERTEX_SHADER.0, "VERTEX_SHADER"),
+ (
+ PipelineStageFlags2::TESSELLATION_CONTROL_SHADER.0,
+ "TESSELLATION_CONTROL_SHADER",
+ ),
+ (
+ PipelineStageFlags2::TESSELLATION_EVALUATION_SHADER.0,
+ "TESSELLATION_EVALUATION_SHADER",
+ ),
+ (PipelineStageFlags2::GEOMETRY_SHADER.0, "GEOMETRY_SHADER"),
+ (PipelineStageFlags2::FRAGMENT_SHADER.0, "FRAGMENT_SHADER"),
+ (
+ PipelineStageFlags2::EARLY_FRAGMENT_TESTS.0,
+ "EARLY_FRAGMENT_TESTS",
+ ),
+ (
+ PipelineStageFlags2::LATE_FRAGMENT_TESTS.0,
+ "LATE_FRAGMENT_TESTS",
+ ),
+ (
+ PipelineStageFlags2::COLOR_ATTACHMENT_OUTPUT.0,
+ "COLOR_ATTACHMENT_OUTPUT",
+ ),
+ (PipelineStageFlags2::COMPUTE_SHADER.0, "COMPUTE_SHADER"),
+ (PipelineStageFlags2::ALL_TRANSFER.0, "ALL_TRANSFER"),
+ (PipelineStageFlags2::BOTTOM_OF_PIPE.0, "BOTTOM_OF_PIPE"),
+ (PipelineStageFlags2::HOST.0, "HOST"),
+ (PipelineStageFlags2::ALL_GRAPHICS.0, "ALL_GRAPHICS"),
+ (PipelineStageFlags2::ALL_COMMANDS.0, "ALL_COMMANDS"),
+ (PipelineStageFlags2::COPY.0, "COPY"),
+ (PipelineStageFlags2::RESOLVE.0, "RESOLVE"),
+ (PipelineStageFlags2::BLIT.0, "BLIT"),
+ (PipelineStageFlags2::CLEAR.0, "CLEAR"),
+ (PipelineStageFlags2::INDEX_INPUT.0, "INDEX_INPUT"),
+ (
+ PipelineStageFlags2::VERTEX_ATTRIBUTE_INPUT.0,
+ "VERTEX_ATTRIBUTE_INPUT",
+ ),
+ (
+ PipelineStageFlags2::PRE_RASTERIZATION_SHADERS.0,
+ "PRE_RASTERIZATION_SHADERS",
+ ),
+ (PipelineStageFlags2::VIDEO_DECODE_KHR.0, "VIDEO_DECODE_KHR"),
+ (PipelineStageFlags2::VIDEO_ENCODE_KHR.0, "VIDEO_ENCODE_KHR"),
+ (
+ PipelineStageFlags2::TRANSFORM_FEEDBACK_EXT.0,
+ "TRANSFORM_FEEDBACK_EXT",
+ ),
+ (
+ PipelineStageFlags2::CONDITIONAL_RENDERING_EXT.0,
+ "CONDITIONAL_RENDERING_EXT",
+ ),
+ (
+ PipelineStageFlags2::COMMAND_PREPROCESS_NV.0,
+ "COMMAND_PREPROCESS_NV",
+ ),
+ (
+ PipelineStageFlags2::FRAGMENT_SHADING_RATE_ATTACHMENT_KHR.0,
+ "FRAGMENT_SHADING_RATE_ATTACHMENT_KHR",
+ ),
+ (
+ PipelineStageFlags2::ACCELERATION_STRUCTURE_BUILD_KHR.0,
+ "ACCELERATION_STRUCTURE_BUILD_KHR",
+ ),
+ (
+ PipelineStageFlags2::RAY_TRACING_SHADER_KHR.0,
+ "RAY_TRACING_SHADER_KHR",
+ ),
+ (
+ PipelineStageFlags2::FRAGMENT_DENSITY_PROCESS_EXT.0,
+ "FRAGMENT_DENSITY_PROCESS_EXT",
+ ),
+ (PipelineStageFlags2::TASK_SHADER_EXT.0, "TASK_SHADER_EXT"),
+ (PipelineStageFlags2::MESH_SHADER_EXT.0, "MESH_SHADER_EXT"),
+ (
+ PipelineStageFlags2::SUBPASS_SHADING_HUAWEI.0,
+ "SUBPASS_SHADING_HUAWEI",
+ ),
+ (
+ PipelineStageFlags2::INVOCATION_MASK_HUAWEI.0,
+ "INVOCATION_MASK_HUAWEI",
+ ),
+ (
+ PipelineStageFlags2::ACCELERATION_STRUCTURE_COPY_KHR.0,
+ "ACCELERATION_STRUCTURE_COPY_KHR",
+ ),
+ (
+ PipelineStageFlags2::MICROMAP_BUILD_EXT.0,
+ "MICROMAP_BUILD_EXT",
+ ),
+ (
+ PipelineStageFlags2::RESEVED_41_HUAWEI.0,
+ "RESEVED_41_HUAWEI",
+ ),
+ (PipelineStageFlags2::OPTICAL_FLOW_NV.0, "OPTICAL_FLOW_NV"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineTessellationStateCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineVertexInputStateCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineViewportStateCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PipelineViewportSwizzleStateCreateFlagsNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PointClippingBehavior {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::ALL_CLIP_PLANES => Some("ALL_CLIP_PLANES"),
+ Self::USER_CLIP_PLANES_ONLY => Some("USER_CLIP_PLANES_ONLY"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PolygonMode {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::FILL => Some("FILL"),
+ Self::LINE => Some("LINE"),
+ Self::POINT => Some("POINT"),
+ Self::FILL_RECTANGLE_NV => Some("FILL_RECTANGLE_NV"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PresentGravityFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (PresentGravityFlagsEXT::MIN.0, "MIN"),
+ (PresentGravityFlagsEXT::MAX.0, "MAX"),
+ (PresentGravityFlagsEXT::CENTERED.0, "CENTERED"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PresentModeKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::IMMEDIATE => Some("IMMEDIATE"),
+ Self::MAILBOX => Some("MAILBOX"),
+ Self::FIFO => Some("FIFO"),
+ Self::FIFO_RELAXED => Some("FIFO_RELAXED"),
+ Self::SHARED_DEMAND_REFRESH => Some("SHARED_DEMAND_REFRESH"),
+ Self::SHARED_CONTINUOUS_REFRESH => Some("SHARED_CONTINUOUS_REFRESH"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PresentScalingFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (PresentScalingFlagsEXT::ONE_TO_ONE.0, "ONE_TO_ONE"),
+ (
+ PresentScalingFlagsEXT::ASPECT_RATIO_STRETCH.0,
+ "ASPECT_RATIO_STRETCH",
+ ),
+ (PresentScalingFlagsEXT::STRETCH.0, "STRETCH"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for PrimitiveTopology {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::POINT_LIST => Some("POINT_LIST"),
+ Self::LINE_LIST => Some("LINE_LIST"),
+ Self::LINE_STRIP => Some("LINE_STRIP"),
+ Self::TRIANGLE_LIST => Some("TRIANGLE_LIST"),
+ Self::TRIANGLE_STRIP => Some("TRIANGLE_STRIP"),
+ Self::TRIANGLE_FAN => Some("TRIANGLE_FAN"),
+ Self::LINE_LIST_WITH_ADJACENCY => Some("LINE_LIST_WITH_ADJACENCY"),
+ Self::LINE_STRIP_WITH_ADJACENCY => Some("LINE_STRIP_WITH_ADJACENCY"),
+ Self::TRIANGLE_LIST_WITH_ADJACENCY => Some("TRIANGLE_LIST_WITH_ADJACENCY"),
+ Self::TRIANGLE_STRIP_WITH_ADJACENCY => Some("TRIANGLE_STRIP_WITH_ADJACENCY"),
+ Self::PATCH_LIST => Some("PATCH_LIST"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for PrivateDataSlotCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ProvokingVertexModeEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::FIRST_VERTEX => Some("FIRST_VERTEX"),
+ Self::LAST_VERTEX => Some("LAST_VERTEX"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for QueryControlFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(QueryControlFlags::PRECISE.0, "PRECISE")];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for QueryPipelineStatisticFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ QueryPipelineStatisticFlags::INPUT_ASSEMBLY_VERTICES.0,
+ "INPUT_ASSEMBLY_VERTICES",
+ ),
+ (
+ QueryPipelineStatisticFlags::INPUT_ASSEMBLY_PRIMITIVES.0,
+ "INPUT_ASSEMBLY_PRIMITIVES",
+ ),
+ (
+ QueryPipelineStatisticFlags::VERTEX_SHADER_INVOCATIONS.0,
+ "VERTEX_SHADER_INVOCATIONS",
+ ),
+ (
+ QueryPipelineStatisticFlags::GEOMETRY_SHADER_INVOCATIONS.0,
+ "GEOMETRY_SHADER_INVOCATIONS",
+ ),
+ (
+ QueryPipelineStatisticFlags::GEOMETRY_SHADER_PRIMITIVES.0,
+ "GEOMETRY_SHADER_PRIMITIVES",
+ ),
+ (
+ QueryPipelineStatisticFlags::CLIPPING_INVOCATIONS.0,
+ "CLIPPING_INVOCATIONS",
+ ),
+ (
+ QueryPipelineStatisticFlags::CLIPPING_PRIMITIVES.0,
+ "CLIPPING_PRIMITIVES",
+ ),
+ (
+ QueryPipelineStatisticFlags::FRAGMENT_SHADER_INVOCATIONS.0,
+ "FRAGMENT_SHADER_INVOCATIONS",
+ ),
+ (
+ QueryPipelineStatisticFlags::TESSELLATION_CONTROL_SHADER_PATCHES.0,
+ "TESSELLATION_CONTROL_SHADER_PATCHES",
+ ),
+ (
+ QueryPipelineStatisticFlags::TESSELLATION_EVALUATION_SHADER_INVOCATIONS.0,
+ "TESSELLATION_EVALUATION_SHADER_INVOCATIONS",
+ ),
+ (
+ QueryPipelineStatisticFlags::COMPUTE_SHADER_INVOCATIONS.0,
+ "COMPUTE_SHADER_INVOCATIONS",
+ ),
+ (
+ QueryPipelineStatisticFlags::TASK_SHADER_INVOCATIONS_EXT.0,
+ "TASK_SHADER_INVOCATIONS_EXT",
+ ),
+ (
+ QueryPipelineStatisticFlags::MESH_SHADER_INVOCATIONS_EXT.0,
+ "MESH_SHADER_INVOCATIONS_EXT",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for QueryPoolCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for QueryPoolSamplingModeINTEL {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::MANUAL => Some("MANUAL"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for QueryResultFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (QueryResultFlags::TYPE_64.0, "TYPE_64"),
+ (QueryResultFlags::WAIT.0, "WAIT"),
+ (QueryResultFlags::WITH_AVAILABILITY.0, "WITH_AVAILABILITY"),
+ (QueryResultFlags::PARTIAL.0, "PARTIAL"),
+ (QueryResultFlags::WITH_STATUS_KHR.0, "WITH_STATUS_KHR"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for QueryResultStatusKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::ERROR => Some("ERROR"),
+ Self::NOT_READY => Some("NOT_READY"),
+ Self::COMPLETE => Some("COMPLETE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for QueryType {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::OCCLUSION => Some("OCCLUSION"),
+ Self::PIPELINE_STATISTICS => Some("PIPELINE_STATISTICS"),
+ Self::TIMESTAMP => Some("TIMESTAMP"),
+ Self::RESULT_STATUS_ONLY_KHR => Some("RESULT_STATUS_ONLY_KHR"),
+ Self::TRANSFORM_FEEDBACK_STREAM_EXT => Some("TRANSFORM_FEEDBACK_STREAM_EXT"),
+ Self::PERFORMANCE_QUERY_KHR => Some("PERFORMANCE_QUERY_KHR"),
+ Self::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR => {
+ Some("ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR")
+ }
+ Self::ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR => {
+ Some("ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR")
+ }
+ Self::ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV => {
+ Some("ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV")
+ }
+ Self::PERFORMANCE_QUERY_INTEL => Some("PERFORMANCE_QUERY_INTEL"),
+ Self::VIDEO_ENCODESTREAM_BUFFER_RANGE_KHR => {
+ Some("VIDEO_ENCODESTREAM_BUFFER_RANGE_KHR")
+ }
+ Self::MESH_PRIMITIVES_GENERATED_EXT => Some("MESH_PRIMITIVES_GENERATED_EXT"),
+ Self::PRIMITIVES_GENERATED_EXT => Some("PRIMITIVES_GENERATED_EXT"),
+ Self::ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR => {
+ Some("ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR")
+ }
+ Self::ACCELERATION_STRUCTURE_SIZE_KHR => Some("ACCELERATION_STRUCTURE_SIZE_KHR"),
+ Self::MICROMAP_SERIALIZATION_SIZE_EXT => Some("MICROMAP_SERIALIZATION_SIZE_EXT"),
+ Self::MICROMAP_COMPACTED_SIZE_EXT => Some("MICROMAP_COMPACTED_SIZE_EXT"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for QueueFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (QueueFlags::GRAPHICS.0, "GRAPHICS"),
+ (QueueFlags::COMPUTE.0, "COMPUTE"),
+ (QueueFlags::TRANSFER.0, "TRANSFER"),
+ (QueueFlags::SPARSE_BINDING.0, "SPARSE_BINDING"),
+ (QueueFlags::VIDEO_DECODE_KHR.0, "VIDEO_DECODE_KHR"),
+ (QueueFlags::RESERVED_9_EXT.0, "RESERVED_9_EXT"),
+ (QueueFlags::VIDEO_ENCODE_KHR.0, "VIDEO_ENCODE_KHR"),
+ (QueueFlags::RESERVED_7_QCOM.0, "RESERVED_7_QCOM"),
+ (QueueFlags::OPTICAL_FLOW_NV.0, "OPTICAL_FLOW_NV"),
+ (QueueFlags::PROTECTED.0, "PROTECTED"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for QueueGlobalPriorityKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::LOW => Some("LOW"),
+ Self::MEDIUM => Some("MEDIUM"),
+ Self::HIGH => Some("HIGH"),
+ Self::REALTIME => Some("REALTIME"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for RasterizationOrderAMD {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::STRICT => Some("STRICT"),
+ Self::RELAXED => Some("RELAXED"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for RayTracingInvocationReorderModeNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::NONE => Some("NONE"),
+ Self::REORDER => Some("REORDER"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for RayTracingShaderGroupTypeKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::GENERAL => Some("GENERAL"),
+ Self::TRIANGLES_HIT_GROUP => Some("TRIANGLES_HIT_GROUP"),
+ Self::PROCEDURAL_HIT_GROUP => Some("PROCEDURAL_HIT_GROUP"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for RenderPassCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (RenderPassCreateFlags::RESERVED_0_KHR.0, "RESERVED_0_KHR"),
+ (RenderPassCreateFlags::TRANSFORM_QCOM.0, "TRANSFORM_QCOM"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for RenderingFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ RenderingFlags::CONTENTS_SECONDARY_COMMAND_BUFFERS.0,
+ "CONTENTS_SECONDARY_COMMAND_BUFFERS",
+ ),
+ (RenderingFlags::SUSPENDING.0, "SUSPENDING"),
+ (RenderingFlags::RESUMING.0, "RESUMING"),
+ (
+ RenderingFlags::ENABLE_LEGACY_DITHERING_EXT.0,
+ "ENABLE_LEGACY_DITHERING_EXT",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ResolveModeFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (ResolveModeFlags::NONE.0, "NONE"),
+ (ResolveModeFlags::SAMPLE_ZERO.0, "SAMPLE_ZERO"),
+ (ResolveModeFlags::AVERAGE.0, "AVERAGE"),
+ (ResolveModeFlags::MIN.0, "MIN"),
+ (ResolveModeFlags::MAX.0, "MAX"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for SampleCountFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (SampleCountFlags::TYPE_1.0, "TYPE_1"),
+ (SampleCountFlags::TYPE_2.0, "TYPE_2"),
+ (SampleCountFlags::TYPE_4.0, "TYPE_4"),
+ (SampleCountFlags::TYPE_8.0, "TYPE_8"),
+ (SampleCountFlags::TYPE_16.0, "TYPE_16"),
+ (SampleCountFlags::TYPE_32.0, "TYPE_32"),
+ (SampleCountFlags::TYPE_64.0, "TYPE_64"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for SamplerAddressMode {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::REPEAT => Some("REPEAT"),
+ Self::MIRRORED_REPEAT => Some("MIRRORED_REPEAT"),
+ Self::CLAMP_TO_EDGE => Some("CLAMP_TO_EDGE"),
+ Self::CLAMP_TO_BORDER => Some("CLAMP_TO_BORDER"),
+ Self::MIRROR_CLAMP_TO_EDGE => Some("MIRROR_CLAMP_TO_EDGE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for SamplerCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (SamplerCreateFlags::SUBSAMPLED_EXT.0, "SUBSAMPLED_EXT"),
+ (
+ SamplerCreateFlags::SUBSAMPLED_COARSE_RECONSTRUCTION_EXT.0,
+ "SUBSAMPLED_COARSE_RECONSTRUCTION_EXT",
+ ),
+ (
+ SamplerCreateFlags::DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT.0,
+ "DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT",
+ ),
+ (
+ SamplerCreateFlags::NON_SEAMLESS_CUBE_MAP_EXT.0,
+ "NON_SEAMLESS_CUBE_MAP_EXT",
+ ),
+ (
+ SamplerCreateFlags::IMAGE_PROCESSING_QCOM.0,
+ "IMAGE_PROCESSING_QCOM",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for SamplerMipmapMode {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::NEAREST => Some("NEAREST"),
+ Self::LINEAR => Some("LINEAR"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for SamplerReductionMode {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::WEIGHTED_AVERAGE => Some("WEIGHTED_AVERAGE"),
+ Self::MIN => Some("MIN"),
+ Self::MAX => Some("MAX"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for SamplerYcbcrModelConversion {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::RGB_IDENTITY => Some("RGB_IDENTITY"),
+ Self::YCBCR_IDENTITY => Some("YCBCR_IDENTITY"),
+ Self::YCBCR_709 => Some("YCBCR_709"),
+ Self::YCBCR_601 => Some("YCBCR_601"),
+ Self::YCBCR_2020 => Some("YCBCR_2020"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for SamplerYcbcrRange {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::ITU_FULL => Some("ITU_FULL"),
+ Self::ITU_NARROW => Some("ITU_NARROW"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for ScopeNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::DEVICE => Some("DEVICE"),
+ Self::WORKGROUP => Some("WORKGROUP"),
+ Self::SUBGROUP => Some("SUBGROUP"),
+ Self::QUEUE_FAMILY => Some("QUEUE_FAMILY"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for ScreenSurfaceCreateFlagsQNX {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for SemaphoreCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for SemaphoreImportFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(SemaphoreImportFlags::TEMPORARY.0, "TEMPORARY")];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for SemaphoreType {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::BINARY => Some("BINARY"),
+ Self::TIMELINE => Some("TIMELINE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for SemaphoreWaitFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(SemaphoreWaitFlags::ANY.0, "ANY")];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ShaderCorePropertiesFlagsAMD {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ShaderFloatControlsIndependence {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::TYPE_32_ONLY => Some("TYPE_32_ONLY"),
+ Self::ALL => Some("ALL"),
+ Self::NONE => Some("NONE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for ShaderGroupShaderKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::GENERAL => Some("GENERAL"),
+ Self::CLOSEST_HIT => Some("CLOSEST_HIT"),
+ Self::ANY_HIT => Some("ANY_HIT"),
+ Self::INTERSECTION => Some("INTERSECTION"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for ShaderInfoTypeAMD {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::STATISTICS => Some("STATISTICS"),
+ Self::BINARY => Some("BINARY"),
+ Self::DISASSEMBLY => Some("DISASSEMBLY"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for ShaderModuleCreateFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ShaderStageFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (ShaderStageFlags::VERTEX.0, "VERTEX"),
+ (
+ ShaderStageFlags::TESSELLATION_CONTROL.0,
+ "TESSELLATION_CONTROL",
+ ),
+ (
+ ShaderStageFlags::TESSELLATION_EVALUATION.0,
+ "TESSELLATION_EVALUATION",
+ ),
+ (ShaderStageFlags::GEOMETRY.0, "GEOMETRY"),
+ (ShaderStageFlags::FRAGMENT.0, "FRAGMENT"),
+ (ShaderStageFlags::COMPUTE.0, "COMPUTE"),
+ (ShaderStageFlags::ALL_GRAPHICS.0, "ALL_GRAPHICS"),
+ (ShaderStageFlags::ALL.0, "ALL"),
+ (ShaderStageFlags::RAYGEN_KHR.0, "RAYGEN_KHR"),
+ (ShaderStageFlags::ANY_HIT_KHR.0, "ANY_HIT_KHR"),
+ (ShaderStageFlags::CLOSEST_HIT_KHR.0, "CLOSEST_HIT_KHR"),
+ (ShaderStageFlags::MISS_KHR.0, "MISS_KHR"),
+ (ShaderStageFlags::INTERSECTION_KHR.0, "INTERSECTION_KHR"),
+ (ShaderStageFlags::CALLABLE_KHR.0, "CALLABLE_KHR"),
+ (ShaderStageFlags::TASK_EXT.0, "TASK_EXT"),
+ (ShaderStageFlags::MESH_EXT.0, "MESH_EXT"),
+ (
+ ShaderStageFlags::SUBPASS_SHADING_HUAWEI.0,
+ "SUBPASS_SHADING_HUAWEI",
+ ),
+ (ShaderStageFlags::RESERVED_19_HUAWEI.0, "RESERVED_19_HUAWEI"),
+ (ShaderStageFlags::EXT_483_RESERVE_15.0, "EXT_483_RESERVE_15"),
+ (ShaderStageFlags::EXT_483_RESERVE_16.0, "EXT_483_RESERVE_16"),
+ (ShaderStageFlags::EXT_483_RESERVE_17.0, "EXT_483_RESERVE_17"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ShadingRatePaletteEntryNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::NO_INVOCATIONS => Some("NO_INVOCATIONS"),
+ Self::TYPE_16_INVOCATIONS_PER_PIXEL => Some("TYPE_16_INVOCATIONS_PER_PIXEL"),
+ Self::TYPE_8_INVOCATIONS_PER_PIXEL => Some("TYPE_8_INVOCATIONS_PER_PIXEL"),
+ Self::TYPE_4_INVOCATIONS_PER_PIXEL => Some("TYPE_4_INVOCATIONS_PER_PIXEL"),
+ Self::TYPE_2_INVOCATIONS_PER_PIXEL => Some("TYPE_2_INVOCATIONS_PER_PIXEL"),
+ Self::TYPE_1_INVOCATION_PER_PIXEL => Some("TYPE_1_INVOCATION_PER_PIXEL"),
+ Self::TYPE_1_INVOCATION_PER_2X1_PIXELS => Some("TYPE_1_INVOCATION_PER_2X1_PIXELS"),
+ Self::TYPE_1_INVOCATION_PER_1X2_PIXELS => Some("TYPE_1_INVOCATION_PER_1X2_PIXELS"),
+ Self::TYPE_1_INVOCATION_PER_2X2_PIXELS => Some("TYPE_1_INVOCATION_PER_2X2_PIXELS"),
+ Self::TYPE_1_INVOCATION_PER_4X2_PIXELS => Some("TYPE_1_INVOCATION_PER_4X2_PIXELS"),
+ Self::TYPE_1_INVOCATION_PER_2X4_PIXELS => Some("TYPE_1_INVOCATION_PER_2X4_PIXELS"),
+ Self::TYPE_1_INVOCATION_PER_4X4_PIXELS => Some("TYPE_1_INVOCATION_PER_4X4_PIXELS"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for SharingMode {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::EXCLUSIVE => Some("EXCLUSIVE"),
+ Self::CONCURRENT => Some("CONCURRENT"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for SparseImageFormatFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (SparseImageFormatFlags::SINGLE_MIPTAIL.0, "SINGLE_MIPTAIL"),
+ (
+ SparseImageFormatFlags::ALIGNED_MIP_SIZE.0,
+ "ALIGNED_MIP_SIZE",
+ ),
+ (
+ SparseImageFormatFlags::NONSTANDARD_BLOCK_SIZE.0,
+ "NONSTANDARD_BLOCK_SIZE",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for SparseMemoryBindFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(SparseMemoryBindFlags::METADATA.0, "METADATA")];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for StencilFaceFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (StencilFaceFlags::FRONT.0, "FRONT"),
+ (StencilFaceFlags::BACK.0, "BACK"),
+ (StencilFaceFlags::FRONT_AND_BACK.0, "FRONT_AND_BACK"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for StencilOp {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::KEEP => Some("KEEP"),
+ Self::ZERO => Some("ZERO"),
+ Self::REPLACE => Some("REPLACE"),
+ Self::INCREMENT_AND_CLAMP => Some("INCREMENT_AND_CLAMP"),
+ Self::DECREMENT_AND_CLAMP => Some("DECREMENT_AND_CLAMP"),
+ Self::INVERT => Some("INVERT"),
+ Self::INCREMENT_AND_WRAP => Some("INCREMENT_AND_WRAP"),
+ Self::DECREMENT_AND_WRAP => Some("DECREMENT_AND_WRAP"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for StreamDescriptorSurfaceCreateFlagsGGP {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for StructureType {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::APPLICATION_INFO => Some("APPLICATION_INFO"),
+ Self::INSTANCE_CREATE_INFO => Some("INSTANCE_CREATE_INFO"),
+ Self::DEVICE_QUEUE_CREATE_INFO => Some("DEVICE_QUEUE_CREATE_INFO"),
+ Self::DEVICE_CREATE_INFO => Some("DEVICE_CREATE_INFO"),
+ Self::SUBMIT_INFO => Some("SUBMIT_INFO"),
+ Self::MEMORY_ALLOCATE_INFO => Some("MEMORY_ALLOCATE_INFO"),
+ Self::MAPPED_MEMORY_RANGE => Some("MAPPED_MEMORY_RANGE"),
+ Self::BIND_SPARSE_INFO => Some("BIND_SPARSE_INFO"),
+ Self::FENCE_CREATE_INFO => Some("FENCE_CREATE_INFO"),
+ Self::SEMAPHORE_CREATE_INFO => Some("SEMAPHORE_CREATE_INFO"),
+ Self::EVENT_CREATE_INFO => Some("EVENT_CREATE_INFO"),
+ Self::QUERY_POOL_CREATE_INFO => Some("QUERY_POOL_CREATE_INFO"),
+ Self::BUFFER_CREATE_INFO => Some("BUFFER_CREATE_INFO"),
+ Self::BUFFER_VIEW_CREATE_INFO => Some("BUFFER_VIEW_CREATE_INFO"),
+ Self::IMAGE_CREATE_INFO => Some("IMAGE_CREATE_INFO"),
+ Self::IMAGE_VIEW_CREATE_INFO => Some("IMAGE_VIEW_CREATE_INFO"),
+ Self::SHADER_MODULE_CREATE_INFO => Some("SHADER_MODULE_CREATE_INFO"),
+ Self::PIPELINE_CACHE_CREATE_INFO => Some("PIPELINE_CACHE_CREATE_INFO"),
+ Self::PIPELINE_SHADER_STAGE_CREATE_INFO => Some("PIPELINE_SHADER_STAGE_CREATE_INFO"),
+ Self::PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO => {
+ Some("PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO")
+ }
+ Self::PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO => {
+ Some("PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO")
+ }
+ Self::PIPELINE_TESSELLATION_STATE_CREATE_INFO => {
+ Some("PIPELINE_TESSELLATION_STATE_CREATE_INFO")
+ }
+ Self::PIPELINE_VIEWPORT_STATE_CREATE_INFO => {
+ Some("PIPELINE_VIEWPORT_STATE_CREATE_INFO")
+ }
+ Self::PIPELINE_RASTERIZATION_STATE_CREATE_INFO => {
+ Some("PIPELINE_RASTERIZATION_STATE_CREATE_INFO")
+ }
+ Self::PIPELINE_MULTISAMPLE_STATE_CREATE_INFO => {
+ Some("PIPELINE_MULTISAMPLE_STATE_CREATE_INFO")
+ }
+ Self::PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO => {
+ Some("PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO")
+ }
+ Self::PIPELINE_COLOR_BLEND_STATE_CREATE_INFO => {
+ Some("PIPELINE_COLOR_BLEND_STATE_CREATE_INFO")
+ }
+ Self::PIPELINE_DYNAMIC_STATE_CREATE_INFO => Some("PIPELINE_DYNAMIC_STATE_CREATE_INFO"),
+ Self::GRAPHICS_PIPELINE_CREATE_INFO => Some("GRAPHICS_PIPELINE_CREATE_INFO"),
+ Self::COMPUTE_PIPELINE_CREATE_INFO => Some("COMPUTE_PIPELINE_CREATE_INFO"),
+ Self::PIPELINE_LAYOUT_CREATE_INFO => Some("PIPELINE_LAYOUT_CREATE_INFO"),
+ Self::SAMPLER_CREATE_INFO => Some("SAMPLER_CREATE_INFO"),
+ Self::DESCRIPTOR_SET_LAYOUT_CREATE_INFO => Some("DESCRIPTOR_SET_LAYOUT_CREATE_INFO"),
+ Self::DESCRIPTOR_POOL_CREATE_INFO => Some("DESCRIPTOR_POOL_CREATE_INFO"),
+ Self::DESCRIPTOR_SET_ALLOCATE_INFO => Some("DESCRIPTOR_SET_ALLOCATE_INFO"),
+ Self::WRITE_DESCRIPTOR_SET => Some("WRITE_DESCRIPTOR_SET"),
+ Self::COPY_DESCRIPTOR_SET => Some("COPY_DESCRIPTOR_SET"),
+ Self::FRAMEBUFFER_CREATE_INFO => Some("FRAMEBUFFER_CREATE_INFO"),
+ Self::RENDER_PASS_CREATE_INFO => Some("RENDER_PASS_CREATE_INFO"),
+ Self::COMMAND_POOL_CREATE_INFO => Some("COMMAND_POOL_CREATE_INFO"),
+ Self::COMMAND_BUFFER_ALLOCATE_INFO => Some("COMMAND_BUFFER_ALLOCATE_INFO"),
+ Self::COMMAND_BUFFER_INHERITANCE_INFO => Some("COMMAND_BUFFER_INHERITANCE_INFO"),
+ Self::COMMAND_BUFFER_BEGIN_INFO => Some("COMMAND_BUFFER_BEGIN_INFO"),
+ Self::RENDER_PASS_BEGIN_INFO => Some("RENDER_PASS_BEGIN_INFO"),
+ Self::BUFFER_MEMORY_BARRIER => Some("BUFFER_MEMORY_BARRIER"),
+ Self::IMAGE_MEMORY_BARRIER => Some("IMAGE_MEMORY_BARRIER"),
+ Self::MEMORY_BARRIER => Some("MEMORY_BARRIER"),
+ Self::LOADER_INSTANCE_CREATE_INFO => Some("LOADER_INSTANCE_CREATE_INFO"),
+ Self::LOADER_DEVICE_CREATE_INFO => Some("LOADER_DEVICE_CREATE_INFO"),
+ Self::SWAPCHAIN_CREATE_INFO_KHR => Some("SWAPCHAIN_CREATE_INFO_KHR"),
+ Self::PRESENT_INFO_KHR => Some("PRESENT_INFO_KHR"),
+ Self::DEVICE_GROUP_PRESENT_CAPABILITIES_KHR => {
+ Some("DEVICE_GROUP_PRESENT_CAPABILITIES_KHR")
+ }
+ Self::IMAGE_SWAPCHAIN_CREATE_INFO_KHR => Some("IMAGE_SWAPCHAIN_CREATE_INFO_KHR"),
+ Self::BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR => {
+ Some("BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR")
+ }
+ Self::ACQUIRE_NEXT_IMAGE_INFO_KHR => Some("ACQUIRE_NEXT_IMAGE_INFO_KHR"),
+ Self::DEVICE_GROUP_PRESENT_INFO_KHR => Some("DEVICE_GROUP_PRESENT_INFO_KHR"),
+ Self::DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR => {
+ Some("DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR")
+ }
+ Self::DISPLAY_MODE_CREATE_INFO_KHR => Some("DISPLAY_MODE_CREATE_INFO_KHR"),
+ Self::DISPLAY_SURFACE_CREATE_INFO_KHR => Some("DISPLAY_SURFACE_CREATE_INFO_KHR"),
+ Self::DISPLAY_PRESENT_INFO_KHR => Some("DISPLAY_PRESENT_INFO_KHR"),
+ Self::XLIB_SURFACE_CREATE_INFO_KHR => Some("XLIB_SURFACE_CREATE_INFO_KHR"),
+ Self::XCB_SURFACE_CREATE_INFO_KHR => Some("XCB_SURFACE_CREATE_INFO_KHR"),
+ Self::WAYLAND_SURFACE_CREATE_INFO_KHR => Some("WAYLAND_SURFACE_CREATE_INFO_KHR"),
+ Self::ANDROID_SURFACE_CREATE_INFO_KHR => Some("ANDROID_SURFACE_CREATE_INFO_KHR"),
+ Self::WIN32_SURFACE_CREATE_INFO_KHR => Some("WIN32_SURFACE_CREATE_INFO_KHR"),
+ Self::NATIVE_BUFFER_ANDROID => Some("NATIVE_BUFFER_ANDROID"),
+ Self::SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID => {
+ Some("SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID")
+ }
+ Self::PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID => {
+ Some("PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID")
+ }
+ Self::DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT => {
+ Some("DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT")
+ }
+ Self::PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD => {
+ Some("PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD")
+ }
+ Self::DEBUG_MARKER_OBJECT_NAME_INFO_EXT => Some("DEBUG_MARKER_OBJECT_NAME_INFO_EXT"),
+ Self::DEBUG_MARKER_OBJECT_TAG_INFO_EXT => Some("DEBUG_MARKER_OBJECT_TAG_INFO_EXT"),
+ Self::DEBUG_MARKER_MARKER_INFO_EXT => Some("DEBUG_MARKER_MARKER_INFO_EXT"),
+ Self::VIDEO_PROFILE_INFO_KHR => Some("VIDEO_PROFILE_INFO_KHR"),
+ Self::VIDEO_CAPABILITIES_KHR => Some("VIDEO_CAPABILITIES_KHR"),
+ Self::VIDEO_PICTURE_RESOURCE_INFO_KHR => Some("VIDEO_PICTURE_RESOURCE_INFO_KHR"),
+ Self::VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR => {
+ Some("VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR")
+ }
+ Self::BIND_VIDEO_SESSION_MEMORY_INFO_KHR => Some("BIND_VIDEO_SESSION_MEMORY_INFO_KHR"),
+ Self::VIDEO_SESSION_CREATE_INFO_KHR => Some("VIDEO_SESSION_CREATE_INFO_KHR"),
+ Self::VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR => {
+ Some("VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR")
+ }
+ Self::VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR => {
+ Some("VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR")
+ }
+ Self::VIDEO_BEGIN_CODING_INFO_KHR => Some("VIDEO_BEGIN_CODING_INFO_KHR"),
+ Self::VIDEO_END_CODING_INFO_KHR => Some("VIDEO_END_CODING_INFO_KHR"),
+ Self::VIDEO_CODING_CONTROL_INFO_KHR => Some("VIDEO_CODING_CONTROL_INFO_KHR"),
+ Self::VIDEO_REFERENCE_SLOT_INFO_KHR => Some("VIDEO_REFERENCE_SLOT_INFO_KHR"),
+ Self::QUEUE_FAMILY_VIDEO_PROPERTIES_KHR => Some("QUEUE_FAMILY_VIDEO_PROPERTIES_KHR"),
+ Self::VIDEO_PROFILE_LIST_INFO_KHR => Some("VIDEO_PROFILE_LIST_INFO_KHR"),
+ Self::PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR => {
+ Some("PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR")
+ }
+ Self::VIDEO_FORMAT_PROPERTIES_KHR => Some("VIDEO_FORMAT_PROPERTIES_KHR"),
+ Self::QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR => {
+ Some("QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR")
+ }
+ Self::VIDEO_DECODE_INFO_KHR => Some("VIDEO_DECODE_INFO_KHR"),
+ Self::VIDEO_DECODE_CAPABILITIES_KHR => Some("VIDEO_DECODE_CAPABILITIES_KHR"),
+ Self::VIDEO_DECODE_USAGE_INFO_KHR => Some("VIDEO_DECODE_USAGE_INFO_KHR"),
+ Self::DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV => {
+ Some("DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV")
+ }
+ Self::DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV => {
+ Some("DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV")
+ }
+ Self::DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV => {
+ Some("DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV")
+ }
+ Self::PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT")
+ }
+ Self::PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT => {
+ Some("PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT")
+ }
+ Self::CU_MODULE_CREATE_INFO_NVX => Some("CU_MODULE_CREATE_INFO_NVX"),
+ Self::CU_FUNCTION_CREATE_INFO_NVX => Some("CU_FUNCTION_CREATE_INFO_NVX"),
+ Self::CU_LAUNCH_INFO_NVX => Some("CU_LAUNCH_INFO_NVX"),
+ Self::IMAGE_VIEW_HANDLE_INFO_NVX => Some("IMAGE_VIEW_HANDLE_INFO_NVX"),
+ Self::IMAGE_VIEW_ADDRESS_PROPERTIES_NVX => Some("IMAGE_VIEW_ADDRESS_PROPERTIES_NVX"),
+ Self::VIDEO_ENCODE_H264_CAPABILITIES_EXT => Some("VIDEO_ENCODE_H264_CAPABILITIES_EXT"),
+ Self::VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT => {
+ Some("VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT => {
+ Some("VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT => {
+ Some("VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT => {
+ Some("VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H264_NALU_SLICE_INFO_EXT => {
+ Some("VIDEO_ENCODE_H264_NALU_SLICE_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_INFO_EXT => {
+ Some("VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H264_PROFILE_INFO_EXT => Some("VIDEO_ENCODE_H264_PROFILE_INFO_EXT"),
+ Self::VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT => {
+ Some("VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT => {
+ Some("VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H264_REFERENCE_LISTS_INFO_EXT => {
+ Some("VIDEO_ENCODE_H264_REFERENCE_LISTS_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H265_CAPABILITIES_EXT => Some("VIDEO_ENCODE_H265_CAPABILITIES_EXT"),
+ Self::VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT => {
+ Some("VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT => {
+ Some("VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT => {
+ Some("VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT => {
+ Some("VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_EXT => {
+ Some("VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_INFO_EXT => {
+ Some("VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H265_PROFILE_INFO_EXT => Some("VIDEO_ENCODE_H265_PROFILE_INFO_EXT"),
+ Self::VIDEO_ENCODE_H265_REFERENCE_LISTS_INFO_EXT => {
+ Some("VIDEO_ENCODE_H265_REFERENCE_LISTS_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT => {
+ Some("VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT")
+ }
+ Self::VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT => {
+ Some("VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT")
+ }
+ Self::VIDEO_DECODE_H264_CAPABILITIES_KHR => Some("VIDEO_DECODE_H264_CAPABILITIES_KHR"),
+ Self::VIDEO_DECODE_H264_PICTURE_INFO_KHR => Some("VIDEO_DECODE_H264_PICTURE_INFO_KHR"),
+ Self::VIDEO_DECODE_H264_PROFILE_INFO_KHR => Some("VIDEO_DECODE_H264_PROFILE_INFO_KHR"),
+ Self::VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR => {
+ Some("VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR")
+ }
+ Self::VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR => {
+ Some("VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR")
+ }
+ Self::VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR => {
+ Some("VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR")
+ }
+ Self::TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD => {
+ Some("TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD")
+ }
+ Self::RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR => {
+ Some("RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR")
+ }
+ Self::RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT => {
+ Some("RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT")
+ }
+ Self::ATTACHMENT_SAMPLE_COUNT_INFO_AMD => Some("ATTACHMENT_SAMPLE_COUNT_INFO_AMD"),
+ Self::MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX => {
+ Some("MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX")
+ }
+ Self::STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP => {
+ Some("STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP")
+ }
+ Self::PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV")
+ }
+ Self::EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV => {
+ Some("EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV")
+ }
+ Self::EXPORT_MEMORY_ALLOCATE_INFO_NV => Some("EXPORT_MEMORY_ALLOCATE_INFO_NV"),
+ Self::IMPORT_MEMORY_WIN32_HANDLE_INFO_NV => Some("IMPORT_MEMORY_WIN32_HANDLE_INFO_NV"),
+ Self::EXPORT_MEMORY_WIN32_HANDLE_INFO_NV => Some("EXPORT_MEMORY_WIN32_HANDLE_INFO_NV"),
+ Self::WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV => {
+ Some("WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV")
+ }
+ Self::VALIDATION_FLAGS_EXT => Some("VALIDATION_FLAGS_EXT"),
+ Self::VI_SURFACE_CREATE_INFO_NN => Some("VI_SURFACE_CREATE_INFO_NN"),
+ Self::IMAGE_VIEW_ASTC_DECODE_MODE_EXT => Some("IMAGE_VIEW_ASTC_DECODE_MODE_EXT"),
+ Self::PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT")
+ }
+ Self::PIPELINE_ROBUSTNESS_CREATE_INFO_EXT => {
+ Some("PIPELINE_ROBUSTNESS_CREATE_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT")
+ }
+ Self::IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR => {
+ Some("IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR")
+ }
+ Self::EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR => {
+ Some("EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR")
+ }
+ Self::MEMORY_WIN32_HANDLE_PROPERTIES_KHR => Some("MEMORY_WIN32_HANDLE_PROPERTIES_KHR"),
+ Self::MEMORY_GET_WIN32_HANDLE_INFO_KHR => Some("MEMORY_GET_WIN32_HANDLE_INFO_KHR"),
+ Self::IMPORT_MEMORY_FD_INFO_KHR => Some("IMPORT_MEMORY_FD_INFO_KHR"),
+ Self::MEMORY_FD_PROPERTIES_KHR => Some("MEMORY_FD_PROPERTIES_KHR"),
+ Self::MEMORY_GET_FD_INFO_KHR => Some("MEMORY_GET_FD_INFO_KHR"),
+ Self::WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR => {
+ Some("WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR")
+ }
+ Self::IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR => {
+ Some("IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR")
+ }
+ Self::EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR => {
+ Some("EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR")
+ }
+ Self::D3D12_FENCE_SUBMIT_INFO_KHR => Some("D3D12_FENCE_SUBMIT_INFO_KHR"),
+ Self::SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR => {
+ Some("SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR")
+ }
+ Self::IMPORT_SEMAPHORE_FD_INFO_KHR => Some("IMPORT_SEMAPHORE_FD_INFO_KHR"),
+ Self::SEMAPHORE_GET_FD_INFO_KHR => Some("SEMAPHORE_GET_FD_INFO_KHR"),
+ Self::PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR => {
+ Some("PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR")
+ }
+ Self::COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT => {
+ Some("COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT")
+ }
+ Self::CONDITIONAL_RENDERING_BEGIN_INFO_EXT => {
+ Some("CONDITIONAL_RENDERING_BEGIN_INFO_EXT")
+ }
+ Self::PRESENT_REGIONS_KHR => Some("PRESENT_REGIONS_KHR"),
+ Self::PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV => {
+ Some("PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV")
+ }
+ Self::SURFACE_CAPABILITIES_2_EXT => Some("SURFACE_CAPABILITIES_2_EXT"),
+ Self::DISPLAY_POWER_INFO_EXT => Some("DISPLAY_POWER_INFO_EXT"),
+ Self::DEVICE_EVENT_INFO_EXT => Some("DEVICE_EVENT_INFO_EXT"),
+ Self::DISPLAY_EVENT_INFO_EXT => Some("DISPLAY_EVENT_INFO_EXT"),
+ Self::SWAPCHAIN_COUNTER_CREATE_INFO_EXT => Some("SWAPCHAIN_COUNTER_CREATE_INFO_EXT"),
+ Self::PRESENT_TIMES_INFO_GOOGLE => Some("PRESENT_TIMES_INFO_GOOGLE"),
+ Self::PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX => {
+ Some("PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX")
+ }
+ Self::PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV => {
+ Some("PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV")
+ }
+ Self::PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT")
+ }
+ Self::PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT => {
+ Some("PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT")
+ }
+ Self::PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT => {
+ Some("PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT")
+ }
+ Self::PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT => {
+ Some("PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT")
+ }
+ Self::HDR_METADATA_EXT => Some("HDR_METADATA_EXT"),
+ Self::SHARED_PRESENT_SURFACE_CAPABILITIES_KHR => {
+ Some("SHARED_PRESENT_SURFACE_CAPABILITIES_KHR")
+ }
+ Self::IMPORT_FENCE_WIN32_HANDLE_INFO_KHR => Some("IMPORT_FENCE_WIN32_HANDLE_INFO_KHR"),
+ Self::EXPORT_FENCE_WIN32_HANDLE_INFO_KHR => Some("EXPORT_FENCE_WIN32_HANDLE_INFO_KHR"),
+ Self::FENCE_GET_WIN32_HANDLE_INFO_KHR => Some("FENCE_GET_WIN32_HANDLE_INFO_KHR"),
+ Self::IMPORT_FENCE_FD_INFO_KHR => Some("IMPORT_FENCE_FD_INFO_KHR"),
+ Self::FENCE_GET_FD_INFO_KHR => Some("FENCE_GET_FD_INFO_KHR"),
+ Self::PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR => {
+ Some("PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR")
+ }
+ Self::PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR => {
+ Some("PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR")
+ }
+ Self::QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR => {
+ Some("QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR")
+ }
+ Self::PERFORMANCE_QUERY_SUBMIT_INFO_KHR => Some("PERFORMANCE_QUERY_SUBMIT_INFO_KHR"),
+ Self::ACQUIRE_PROFILING_LOCK_INFO_KHR => Some("ACQUIRE_PROFILING_LOCK_INFO_KHR"),
+ Self::PERFORMANCE_COUNTER_KHR => Some("PERFORMANCE_COUNTER_KHR"),
+ Self::PERFORMANCE_COUNTER_DESCRIPTION_KHR => {
+ Some("PERFORMANCE_COUNTER_DESCRIPTION_KHR")
+ }
+ Self::PHYSICAL_DEVICE_SURFACE_INFO_2_KHR => Some("PHYSICAL_DEVICE_SURFACE_INFO_2_KHR"),
+ Self::SURFACE_CAPABILITIES_2_KHR => Some("SURFACE_CAPABILITIES_2_KHR"),
+ Self::SURFACE_FORMAT_2_KHR => Some("SURFACE_FORMAT_2_KHR"),
+ Self::DISPLAY_PROPERTIES_2_KHR => Some("DISPLAY_PROPERTIES_2_KHR"),
+ Self::DISPLAY_PLANE_PROPERTIES_2_KHR => Some("DISPLAY_PLANE_PROPERTIES_2_KHR"),
+ Self::DISPLAY_MODE_PROPERTIES_2_KHR => Some("DISPLAY_MODE_PROPERTIES_2_KHR"),
+ Self::DISPLAY_PLANE_INFO_2_KHR => Some("DISPLAY_PLANE_INFO_2_KHR"),
+ Self::DISPLAY_PLANE_CAPABILITIES_2_KHR => Some("DISPLAY_PLANE_CAPABILITIES_2_KHR"),
+ Self::IOS_SURFACE_CREATE_INFO_MVK => Some("IOS_SURFACE_CREATE_INFO_MVK"),
+ Self::MACOS_SURFACE_CREATE_INFO_MVK => Some("MACOS_SURFACE_CREATE_INFO_MVK"),
+ Self::DEBUG_UTILS_OBJECT_NAME_INFO_EXT => Some("DEBUG_UTILS_OBJECT_NAME_INFO_EXT"),
+ Self::DEBUG_UTILS_OBJECT_TAG_INFO_EXT => Some("DEBUG_UTILS_OBJECT_TAG_INFO_EXT"),
+ Self::DEBUG_UTILS_LABEL_EXT => Some("DEBUG_UTILS_LABEL_EXT"),
+ Self::DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT => {
+ Some("DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT")
+ }
+ Self::DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT => {
+ Some("DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT")
+ }
+ Self::ANDROID_HARDWARE_BUFFER_USAGE_ANDROID => {
+ Some("ANDROID_HARDWARE_BUFFER_USAGE_ANDROID")
+ }
+ Self::ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID => {
+ Some("ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID")
+ }
+ Self::ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID => {
+ Some("ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID")
+ }
+ Self::IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID => {
+ Some("IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID")
+ }
+ Self::MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID => {
+ Some("MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID")
+ }
+ Self::EXTERNAL_FORMAT_ANDROID => Some("EXTERNAL_FORMAT_ANDROID"),
+ Self::ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID => {
+ Some("ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID")
+ }
+ Self::SAMPLE_LOCATIONS_INFO_EXT => Some("SAMPLE_LOCATIONS_INFO_EXT"),
+ Self::RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT => {
+ Some("RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT")
+ }
+ Self::PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT => {
+ Some("PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT")
+ }
+ Self::MULTISAMPLE_PROPERTIES_EXT => Some("MULTISAMPLE_PROPERTIES_EXT"),
+ Self::PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT")
+ }
+ Self::PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT => {
+ Some("PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT")
+ }
+ Self::PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV => {
+ Some("PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV")
+ }
+ Self::WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR => {
+ Some("WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR")
+ }
+ Self::ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR => {
+ Some("ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR")
+ }
+ Self::ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR => {
+ Some("ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR")
+ }
+ Self::ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR => {
+ Some("ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR")
+ }
+ Self::ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR => {
+ Some("ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR")
+ }
+ Self::ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR => {
+ Some("ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR")
+ }
+ Self::ACCELERATION_STRUCTURE_GEOMETRY_KHR => {
+ Some("ACCELERATION_STRUCTURE_GEOMETRY_KHR")
+ }
+ Self::ACCELERATION_STRUCTURE_VERSION_INFO_KHR => {
+ Some("ACCELERATION_STRUCTURE_VERSION_INFO_KHR")
+ }
+ Self::COPY_ACCELERATION_STRUCTURE_INFO_KHR => {
+ Some("COPY_ACCELERATION_STRUCTURE_INFO_KHR")
+ }
+ Self::COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR => {
+ Some("COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR")
+ }
+ Self::COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR => {
+ Some("COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR")
+ }
+ Self::PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR => {
+ Some("PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR")
+ }
+ Self::PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR => {
+ Some("PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR")
+ }
+ Self::ACCELERATION_STRUCTURE_CREATE_INFO_KHR => {
+ Some("ACCELERATION_STRUCTURE_CREATE_INFO_KHR")
+ }
+ Self::ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR => {
+ Some("ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR")
+ }
+ Self::PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR => {
+ Some("PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR")
+ }
+ Self::PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR => {
+ Some("PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR")
+ }
+ Self::RAY_TRACING_PIPELINE_CREATE_INFO_KHR => {
+ Some("RAY_TRACING_PIPELINE_CREATE_INFO_KHR")
+ }
+ Self::RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR => {
+ Some("RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR")
+ }
+ Self::RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR => {
+ Some("RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR")
+ }
+ Self::PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR => {
+ Some("PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR")
+ }
+ Self::PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV => {
+ Some("PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV => {
+ Some("PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV")
+ }
+ Self::DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT => {
+ Some("DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT")
+ }
+ Self::PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT => {
+ Some("PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT")
+ }
+ Self::IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT => {
+ Some("IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT")
+ }
+ Self::IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT => {
+ Some("IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT")
+ }
+ Self::IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT => {
+ Some("IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT")
+ }
+ Self::DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT => {
+ Some("DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT")
+ }
+ Self::VALIDATION_CACHE_CREATE_INFO_EXT => Some("VALIDATION_CACHE_CREATE_INFO_EXT"),
+ Self::SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT => {
+ Some("SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR => {
+ Some("PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR")
+ }
+ Self::PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR => {
+ Some("PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR")
+ }
+ Self::PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV => {
+ Some("PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV")
+ }
+ Self::PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV")
+ }
+ Self::PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV => {
+ Some("PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV")
+ }
+ Self::PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV => {
+ Some("PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV")
+ }
+ Self::RAY_TRACING_PIPELINE_CREATE_INFO_NV => {
+ Some("RAY_TRACING_PIPELINE_CREATE_INFO_NV")
+ }
+ Self::ACCELERATION_STRUCTURE_CREATE_INFO_NV => {
+ Some("ACCELERATION_STRUCTURE_CREATE_INFO_NV")
+ }
+ Self::GEOMETRY_NV => Some("GEOMETRY_NV"),
+ Self::GEOMETRY_TRIANGLES_NV => Some("GEOMETRY_TRIANGLES_NV"),
+ Self::GEOMETRY_AABB_NV => Some("GEOMETRY_AABB_NV"),
+ Self::BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV => {
+ Some("BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV")
+ }
+ Self::WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV => {
+ Some("WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV")
+ }
+ Self::ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV => {
+ Some("ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV")
+ }
+ Self::PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV => {
+ Some("PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV")
+ }
+ Self::RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV => {
+ Some("RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV")
+ }
+ Self::ACCELERATION_STRUCTURE_INFO_NV => Some("ACCELERATION_STRUCTURE_INFO_NV"),
+ Self::PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV")
+ }
+ Self::PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV => {
+ Some("PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV")
+ }
+ Self::PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT => {
+ Some("PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT")
+ }
+ Self::FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT => {
+ Some("FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT")
+ }
+ Self::IMPORT_MEMORY_HOST_POINTER_INFO_EXT => {
+ Some("IMPORT_MEMORY_HOST_POINTER_INFO_EXT")
+ }
+ Self::MEMORY_HOST_POINTER_PROPERTIES_EXT => Some("MEMORY_HOST_POINTER_PROPERTIES_EXT"),
+ Self::PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR => {
+ Some("PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR")
+ }
+ Self::PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD => {
+ Some("PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD")
+ }
+ Self::CALIBRATED_TIMESTAMP_INFO_EXT => Some("CALIBRATED_TIMESTAMP_INFO_EXT"),
+ Self::PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD => {
+ Some("PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD")
+ }
+ Self::VIDEO_DECODE_H265_CAPABILITIES_KHR => Some("VIDEO_DECODE_H265_CAPABILITIES_KHR"),
+ Self::VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR => {
+ Some("VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR")
+ }
+ Self::VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR => {
+ Some("VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR")
+ }
+ Self::VIDEO_DECODE_H265_PROFILE_INFO_KHR => Some("VIDEO_DECODE_H265_PROFILE_INFO_KHR"),
+ Self::VIDEO_DECODE_H265_PICTURE_INFO_KHR => Some("VIDEO_DECODE_H265_PICTURE_INFO_KHR"),
+ Self::VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR => {
+ Some("VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR")
+ }
+ Self::DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR => {
+ Some("DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR")
+ }
+ Self::PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR => {
+ Some("PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR")
+ }
+ Self::QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR => {
+ Some("QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR")
+ }
+ Self::DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD => {
+ Some("DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD")
+ }
+ Self::PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT")
+ }
+ Self::PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT => {
+ Some("PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT")
+ }
+ Self::PRESENT_FRAME_TOKEN_GGP => Some("PRESENT_FRAME_TOKEN_GGP"),
+ Self::PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV")
+ }
+ Self::PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV")
+ }
+ Self::PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV => {
+ Some("PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV")
+ }
+ Self::PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV => {
+ Some("PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV")
+ }
+ Self::PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV")
+ }
+ Self::CHECKPOINT_DATA_NV => Some("CHECKPOINT_DATA_NV"),
+ Self::QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV => {
+ Some("QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL => {
+ Some("PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL")
+ }
+ Self::QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL => {
+ Some("QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL")
+ }
+ Self::INITIALIZE_PERFORMANCE_API_INFO_INTEL => {
+ Some("INITIALIZE_PERFORMANCE_API_INFO_INTEL")
+ }
+ Self::PERFORMANCE_MARKER_INFO_INTEL => Some("PERFORMANCE_MARKER_INFO_INTEL"),
+ Self::PERFORMANCE_STREAM_MARKER_INFO_INTEL => {
+ Some("PERFORMANCE_STREAM_MARKER_INFO_INTEL")
+ }
+ Self::PERFORMANCE_OVERRIDE_INFO_INTEL => Some("PERFORMANCE_OVERRIDE_INFO_INTEL"),
+ Self::PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL => {
+ Some("PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL")
+ }
+ Self::PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT")
+ }
+ Self::DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD => {
+ Some("DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD")
+ }
+ Self::SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD => {
+ Some("SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD")
+ }
+ Self::IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA => {
+ Some("IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA")
+ }
+ Self::METAL_SURFACE_CREATE_INFO_EXT => Some("METAL_SURFACE_CREATE_INFO_EXT"),
+ Self::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT")
+ }
+ Self::RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT => {
+ Some("RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT")
+ }
+ Self::FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR => {
+ Some("FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR")
+ }
+ Self::PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR => {
+ Some("PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR")
+ }
+ Self::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR => {
+ Some("PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR")
+ }
+ Self::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR => {
+ Some("PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR")
+ }
+ Self::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR => {
+ Some("PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD => {
+ Some("PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD")
+ }
+ Self::PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD => {
+ Some("PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT")
+ }
+ Self::MEMORY_PRIORITY_ALLOCATE_INFO_EXT => Some("MEMORY_PRIORITY_ALLOCATE_INFO_EXT"),
+ Self::SURFACE_PROTECTED_CAPABILITIES_KHR => Some("SURFACE_PROTECTED_CAPABILITIES_KHR"),
+ Self::PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV")
+ }
+ Self::PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT")
+ }
+ Self::BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT => {
+ Some("BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT")
+ }
+ Self::VALIDATION_FEATURES_EXT => Some("VALIDATION_FEATURES_EXT"),
+ Self::PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR => {
+ Some("PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR")
+ }
+ Self::PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV")
+ }
+ Self::COOPERATIVE_MATRIX_PROPERTIES_NV => Some("COOPERATIVE_MATRIX_PROPERTIES_NV"),
+ Self::PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV => {
+ Some("PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV")
+ }
+ Self::PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV")
+ }
+ Self::PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV => {
+ Some("PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV")
+ }
+ Self::FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV => {
+ Some("FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV")
+ }
+ Self::PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT")
+ }
+ Self::PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT => {
+ Some("PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT")
+ }
+ Self::SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT => {
+ Some("SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT")
+ }
+ Self::SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT => {
+ Some("SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT")
+ }
+ Self::SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT => {
+ Some("SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT")
+ }
+ Self::HEADLESS_SURFACE_CREATE_INFO_EXT => Some("HEADLESS_SURFACE_CREATE_INFO_EXT"),
+ Self::PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT")
+ }
+ Self::PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT => {
+ Some("PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR => {
+ Some("PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR")
+ }
+ Self::PIPELINE_INFO_KHR => Some("PIPELINE_INFO_KHR"),
+ Self::PIPELINE_EXECUTABLE_PROPERTIES_KHR => Some("PIPELINE_EXECUTABLE_PROPERTIES_KHR"),
+ Self::PIPELINE_EXECUTABLE_INFO_KHR => Some("PIPELINE_EXECUTABLE_INFO_KHR"),
+ Self::PIPELINE_EXECUTABLE_STATISTIC_KHR => Some("PIPELINE_EXECUTABLE_STATISTIC_KHR"),
+ Self::PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR => {
+ Some("PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT")
+ }
+ Self::SURFACE_PRESENT_MODE_EXT => Some("SURFACE_PRESENT_MODE_EXT"),
+ Self::SURFACE_PRESENT_SCALING_CAPABILITIES_EXT => {
+ Some("SURFACE_PRESENT_SCALING_CAPABILITIES_EXT")
+ }
+ Self::SURFACE_PRESENT_MODE_COMPATIBILITY_EXT => {
+ Some("SURFACE_PRESENT_MODE_COMPATIBILITY_EXT")
+ }
+ Self::PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT")
+ }
+ Self::SWAPCHAIN_PRESENT_FENCE_INFO_EXT => Some("SWAPCHAIN_PRESENT_FENCE_INFO_EXT"),
+ Self::SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT => {
+ Some("SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT")
+ }
+ Self::SWAPCHAIN_PRESENT_MODE_INFO_EXT => Some("SWAPCHAIN_PRESENT_MODE_INFO_EXT"),
+ Self::SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT => {
+ Some("SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT")
+ }
+ Self::RELEASE_SWAPCHAIN_IMAGES_INFO_EXT => Some("RELEASE_SWAPCHAIN_IMAGES_INFO_EXT"),
+ Self::PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV => {
+ Some("PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV")
+ }
+ Self::GRAPHICS_SHADER_GROUP_CREATE_INFO_NV => {
+ Some("GRAPHICS_SHADER_GROUP_CREATE_INFO_NV")
+ }
+ Self::GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV => {
+ Some("GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV")
+ }
+ Self::INDIRECT_COMMANDS_LAYOUT_TOKEN_NV => Some("INDIRECT_COMMANDS_LAYOUT_TOKEN_NV"),
+ Self::INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV => {
+ Some("INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV")
+ }
+ Self::GENERATED_COMMANDS_INFO_NV => Some("GENERATED_COMMANDS_INFO_NV"),
+ Self::GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV => {
+ Some("GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV")
+ }
+ Self::PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV")
+ }
+ Self::PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV")
+ }
+ Self::COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV => {
+ Some("COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV")
+ }
+ Self::PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT")
+ }
+ Self::COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM => {
+ Some("COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM")
+ }
+ Self::RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM => {
+ Some("RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM")
+ }
+ Self::PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT")
+ }
+ Self::DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT => {
+ Some("DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT")
+ }
+ Self::DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT => {
+ Some("DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT")
+ }
+ Self::PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT")
+ }
+ Self::SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT => {
+ Some("SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT")
+ }
+ Self::PIPELINE_LIBRARY_CREATE_INFO_KHR => Some("PIPELINE_LIBRARY_CREATE_INFO_KHR"),
+ Self::PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV")
+ }
+ Self::SURFACE_CAPABILITIES_PRESENT_BARRIER_NV => {
+ Some("SURFACE_CAPABILITIES_PRESENT_BARRIER_NV")
+ }
+ Self::SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV => {
+ Some("SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV")
+ }
+ Self::PRESENT_ID_KHR => Some("PRESENT_ID_KHR"),
+ Self::PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR => {
+ Some("PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR")
+ }
+ Self::VIDEO_ENCODE_INFO_KHR => Some("VIDEO_ENCODE_INFO_KHR"),
+ Self::VIDEO_ENCODE_RATE_CONTROL_INFO_KHR => Some("VIDEO_ENCODE_RATE_CONTROL_INFO_KHR"),
+ Self::VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR => {
+ Some("VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR")
+ }
+ Self::VIDEO_ENCODE_CAPABILITIES_KHR => Some("VIDEO_ENCODE_CAPABILITIES_KHR"),
+ Self::VIDEO_ENCODE_USAGE_INFO_KHR => Some("VIDEO_ENCODE_USAGE_INFO_KHR"),
+ Self::PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV")
+ }
+ Self::DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV => {
+ Some("DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV")
+ }
+ Self::RESERVED_QCOM => Some("RESERVED_QCOM"),
+ Self::EXPORT_METAL_OBJECT_CREATE_INFO_EXT => {
+ Some("EXPORT_METAL_OBJECT_CREATE_INFO_EXT")
+ }
+ Self::EXPORT_METAL_OBJECTS_INFO_EXT => Some("EXPORT_METAL_OBJECTS_INFO_EXT"),
+ Self::EXPORT_METAL_DEVICE_INFO_EXT => Some("EXPORT_METAL_DEVICE_INFO_EXT"),
+ Self::EXPORT_METAL_COMMAND_QUEUE_INFO_EXT => {
+ Some("EXPORT_METAL_COMMAND_QUEUE_INFO_EXT")
+ }
+ Self::EXPORT_METAL_BUFFER_INFO_EXT => Some("EXPORT_METAL_BUFFER_INFO_EXT"),
+ Self::IMPORT_METAL_BUFFER_INFO_EXT => Some("IMPORT_METAL_BUFFER_INFO_EXT"),
+ Self::EXPORT_METAL_TEXTURE_INFO_EXT => Some("EXPORT_METAL_TEXTURE_INFO_EXT"),
+ Self::IMPORT_METAL_TEXTURE_INFO_EXT => Some("IMPORT_METAL_TEXTURE_INFO_EXT"),
+ Self::EXPORT_METAL_IO_SURFACE_INFO_EXT => Some("EXPORT_METAL_IO_SURFACE_INFO_EXT"),
+ Self::IMPORT_METAL_IO_SURFACE_INFO_EXT => Some("IMPORT_METAL_IO_SURFACE_INFO_EXT"),
+ Self::EXPORT_METAL_SHARED_EVENT_INFO_EXT => Some("EXPORT_METAL_SHARED_EVENT_INFO_EXT"),
+ Self::IMPORT_METAL_SHARED_EVENT_INFO_EXT => Some("IMPORT_METAL_SHARED_EVENT_INFO_EXT"),
+ Self::QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV => {
+ Some("QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV")
+ }
+ Self::CHECKPOINT_DATA_2_NV => Some("CHECKPOINT_DATA_2_NV"),
+ Self::PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT")
+ }
+ Self::DESCRIPTOR_ADDRESS_INFO_EXT => Some("DESCRIPTOR_ADDRESS_INFO_EXT"),
+ Self::DESCRIPTOR_GET_INFO_EXT => Some("DESCRIPTOR_GET_INFO_EXT"),
+ Self::BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT => {
+ Some("BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT")
+ }
+ Self::IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT => {
+ Some("IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT")
+ }
+ Self::IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT => {
+ Some("IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT")
+ }
+ Self::SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT => {
+ Some("SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT")
+ }
+ Self::OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT => {
+ Some("OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT")
+ }
+ Self::DESCRIPTOR_BUFFER_BINDING_INFO_EXT => Some("DESCRIPTOR_BUFFER_BINDING_INFO_EXT"),
+ Self::DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT => {
+ Some("DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT")
+ }
+ Self::ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT => {
+ Some("ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT")
+ }
+ Self::GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT => {
+ Some("GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD => {
+ Some("PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD")
+ }
+ Self::PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR => {
+ Some("PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR")
+ }
+ Self::PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR => {
+ Some("PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR => {
+ Some("PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR")
+ }
+ Self::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV => {
+ Some("PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV")
+ }
+ Self::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV")
+ }
+ Self::PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV => {
+ Some("PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV")
+ }
+ Self::ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV => {
+ Some("ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV")
+ }
+ Self::PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV")
+ }
+ Self::ACCELERATION_STRUCTURE_MOTION_INFO_NV => {
+ Some("ACCELERATION_STRUCTURE_MOTION_INFO_NV")
+ }
+ Self::PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT")
+ }
+ Self::COPY_COMMAND_TRANSFORM_INFO_QCOM => Some("COPY_COMMAND_TRANSFORM_INFO_QCOM"),
+ Self::PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR => {
+ Some("PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR")
+ }
+ Self::PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT")
+ }
+ Self::IMAGE_COMPRESSION_CONTROL_EXT => Some("IMAGE_COMPRESSION_CONTROL_EXT"),
+ Self::SUBRESOURCE_LAYOUT_2_EXT => Some("SUBRESOURCE_LAYOUT_2_EXT"),
+ Self::IMAGE_SUBRESOURCE_2_EXT => Some("IMAGE_SUBRESOURCE_2_EXT"),
+ Self::IMAGE_COMPRESSION_PROPERTIES_EXT => Some("IMAGE_COMPRESSION_PROPERTIES_EXT"),
+ Self::PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_FAULT_FEATURES_EXT => Some("PHYSICAL_DEVICE_FAULT_FEATURES_EXT"),
+ Self::DEVICE_FAULT_COUNTS_EXT => Some("DEVICE_FAULT_COUNTS_EXT"),
+ Self::DEVICE_FAULT_INFO_EXT => Some("DEVICE_FAULT_INFO_EXT"),
+ Self::PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT")
+ }
+ Self::DIRECTFB_SURFACE_CREATE_INFO_EXT => Some("DIRECTFB_SURFACE_CREATE_INFO_EXT"),
+ Self::PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT")
+ }
+ Self::VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT => {
+ Some("VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT")
+ }
+ Self::VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT => {
+ Some("VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT")
+ }
+ Self::PHYSICAL_DEVICE_DRM_PROPERTIES_EXT => Some("PHYSICAL_DEVICE_DRM_PROPERTIES_EXT"),
+ Self::PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT")
+ }
+ Self::DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT => {
+ Some("DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT")
+ }
+ Self::PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT")
+ }
+ Self::PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT => {
+ Some("PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT")
+ }
+ Self::IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA => {
+ Some("IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA")
+ }
+ Self::MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA => {
+ Some("MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA")
+ }
+ Self::MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA => {
+ Some("MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA")
+ }
+ Self::IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA => {
+ Some("IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA")
+ }
+ Self::SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA => {
+ Some("SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA")
+ }
+ Self::BUFFER_COLLECTION_CREATE_INFO_FUCHSIA => {
+ Some("BUFFER_COLLECTION_CREATE_INFO_FUCHSIA")
+ }
+ Self::IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA => {
+ Some("IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA")
+ }
+ Self::BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA => {
+ Some("BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA")
+ }
+ Self::BUFFER_COLLECTION_PROPERTIES_FUCHSIA => {
+ Some("BUFFER_COLLECTION_PROPERTIES_FUCHSIA")
+ }
+ Self::BUFFER_CONSTRAINTS_INFO_FUCHSIA => Some("BUFFER_CONSTRAINTS_INFO_FUCHSIA"),
+ Self::BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA => {
+ Some("BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA")
+ }
+ Self::IMAGE_CONSTRAINTS_INFO_FUCHSIA => Some("IMAGE_CONSTRAINTS_INFO_FUCHSIA"),
+ Self::IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA => {
+ Some("IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA")
+ }
+ Self::SYSMEM_COLOR_SPACE_FUCHSIA => Some("SYSMEM_COLOR_SPACE_FUCHSIA"),
+ Self::BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA => {
+ Some("BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA")
+ }
+ Self::SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI => {
+ Some("SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI")
+ }
+ Self::PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI => {
+ Some("PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI")
+ }
+ Self::PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI => {
+ Some("PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI")
+ }
+ Self::PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI => {
+ Some("PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI")
+ }
+ Self::MEMORY_GET_REMOTE_ADDRESS_INFO_NV => Some("MEMORY_GET_REMOTE_ADDRESS_INFO_NV"),
+ Self::PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV")
+ }
+ Self::PIPELINE_PROPERTIES_IDENTIFIER_EXT => Some("PIPELINE_PROPERTIES_IDENTIFIER_EXT"),
+ Self::PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT")
+ }
+ Self::SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT => {
+ Some("SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT")
+ }
+ Self::MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT => {
+ Some("MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT")
+ }
+ Self::SCREEN_SURFACE_CREATE_INFO_QNX => Some("SCREEN_SURFACE_CREATE_INFO_QNX"),
+ Self::PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT")
+ }
+ Self::PIPELINE_COLOR_WRITE_CREATE_INFO_EXT => {
+ Some("PIPELINE_COLOR_WRITE_CREATE_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR => {
+ Some("PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR")
+ }
+ Self::PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT")
+ }
+ Self::IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT => Some("IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT"),
+ Self::PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT")
+ }
+ Self::MICROMAP_BUILD_INFO_EXT => Some("MICROMAP_BUILD_INFO_EXT"),
+ Self::MICROMAP_VERSION_INFO_EXT => Some("MICROMAP_VERSION_INFO_EXT"),
+ Self::COPY_MICROMAP_INFO_EXT => Some("COPY_MICROMAP_INFO_EXT"),
+ Self::COPY_MICROMAP_TO_MEMORY_INFO_EXT => Some("COPY_MICROMAP_TO_MEMORY_INFO_EXT"),
+ Self::COPY_MEMORY_TO_MICROMAP_INFO_EXT => Some("COPY_MEMORY_TO_MICROMAP_INFO_EXT"),
+ Self::PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT")
+ }
+ Self::MICROMAP_CREATE_INFO_EXT => Some("MICROMAP_CREATE_INFO_EXT"),
+ Self::MICROMAP_BUILD_SIZES_INFO_EXT => Some("MICROMAP_BUILD_SIZES_INFO_EXT"),
+ Self::ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT => {
+ Some("ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT")
+ }
+ Self::PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT")
+ }
+ Self::SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT => {
+ Some("SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE => {
+ Some("PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE")
+ }
+ Self::DESCRIPTOR_SET_BINDING_REFERENCE_VALVE => {
+ Some("DESCRIPTOR_SET_BINDING_REFERENCE_VALVE")
+ }
+ Self::DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE => {
+ Some("DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE")
+ }
+ Self::PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM => {
+ Some("PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM")
+ }
+ Self::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM => {
+ Some("PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM")
+ }
+ Self::SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM => {
+ Some("SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM")
+ }
+ Self::PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV")
+ }
+ Self::PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV => {
+ Some("PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV")
+ }
+ Self::PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV")
+ }
+ Self::PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV => {
+ Some("PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV")
+ }
+ Self::PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV")
+ }
+ Self::PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM => {
+ Some("PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM")
+ }
+ Self::PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM => {
+ Some("PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM")
+ }
+ Self::IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM => {
+ Some("IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM")
+ }
+ Self::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT")
+ }
+ Self::RENDER_PASS_CREATION_CONTROL_EXT => Some("RENDER_PASS_CREATION_CONTROL_EXT"),
+ Self::RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT => {
+ Some("RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT")
+ }
+ Self::RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT => {
+ Some("RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT")
+ }
+ Self::DIRECT_DRIVER_LOADING_INFO_LUNARG => Some("DIRECT_DRIVER_LOADING_INFO_LUNARG"),
+ Self::DIRECT_DRIVER_LOADING_LIST_LUNARG => Some("DIRECT_DRIVER_LOADING_LIST_LUNARG"),
+ Self::PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT => {
+ Some("PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT")
+ }
+ Self::PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT => {
+ Some("PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT")
+ }
+ Self::SHADER_MODULE_IDENTIFIER_EXT => Some("SHADER_MODULE_IDENTIFIER_EXT"),
+ Self::PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV")
+ }
+ Self::PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV => {
+ Some("PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV")
+ }
+ Self::OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV => Some("OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV"),
+ Self::OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV => {
+ Some("OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV")
+ }
+ Self::OPTICAL_FLOW_SESSION_CREATE_INFO_NV => {
+ Some("OPTICAL_FLOW_SESSION_CREATE_INFO_NV")
+ }
+ Self::OPTICAL_FLOW_EXECUTE_INFO_NV => Some("OPTICAL_FLOW_EXECUTE_INFO_NV"),
+ Self::OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV => {
+ Some("OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV")
+ }
+ Self::PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT")
+ }
+ Self::PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM => {
+ Some("PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM")
+ }
+ Self::TILE_PROPERTIES_QCOM => Some("TILE_PROPERTIES_QCOM"),
+ Self::PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC => {
+ Some("PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC")
+ }
+ Self::AMIGO_PROFILING_SUBMIT_INFO_SEC => Some("AMIGO_PROFILING_SUBMIT_INFO_SEC"),
+ Self::PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM => {
+ Some("PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM")
+ }
+ Self::PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV => {
+ Some("PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV")
+ }
+ Self::PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV => {
+ Some("PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV")
+ }
+ Self::PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT => {
+ Some("PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT")
+ }
+ Self::MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT => {
+ Some("MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM => {
+ Some("PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM => {
+ Some("PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM")
+ }
+ Self::PHYSICAL_DEVICE_SUBGROUP_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_SUBGROUP_PROPERTIES")
+ }
+ Self::BIND_BUFFER_MEMORY_INFO => Some("BIND_BUFFER_MEMORY_INFO"),
+ Self::BIND_IMAGE_MEMORY_INFO => Some("BIND_IMAGE_MEMORY_INFO"),
+ Self::PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES => {
+ Some("PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES")
+ }
+ Self::MEMORY_DEDICATED_REQUIREMENTS => Some("MEMORY_DEDICATED_REQUIREMENTS"),
+ Self::MEMORY_DEDICATED_ALLOCATE_INFO => Some("MEMORY_DEDICATED_ALLOCATE_INFO"),
+ Self::MEMORY_ALLOCATE_FLAGS_INFO => Some("MEMORY_ALLOCATE_FLAGS_INFO"),
+ Self::DEVICE_GROUP_RENDER_PASS_BEGIN_INFO => {
+ Some("DEVICE_GROUP_RENDER_PASS_BEGIN_INFO")
+ }
+ Self::DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO => {
+ Some("DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO")
+ }
+ Self::DEVICE_GROUP_SUBMIT_INFO => Some("DEVICE_GROUP_SUBMIT_INFO"),
+ Self::DEVICE_GROUP_BIND_SPARSE_INFO => Some("DEVICE_GROUP_BIND_SPARSE_INFO"),
+ Self::BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO => {
+ Some("BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO")
+ }
+ Self::BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO => {
+ Some("BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO")
+ }
+ Self::PHYSICAL_DEVICE_GROUP_PROPERTIES => Some("PHYSICAL_DEVICE_GROUP_PROPERTIES"),
+ Self::DEVICE_GROUP_DEVICE_CREATE_INFO => Some("DEVICE_GROUP_DEVICE_CREATE_INFO"),
+ Self::BUFFER_MEMORY_REQUIREMENTS_INFO_2 => Some("BUFFER_MEMORY_REQUIREMENTS_INFO_2"),
+ Self::IMAGE_MEMORY_REQUIREMENTS_INFO_2 => Some("IMAGE_MEMORY_REQUIREMENTS_INFO_2"),
+ Self::IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2 => {
+ Some("IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2")
+ }
+ Self::MEMORY_REQUIREMENTS_2 => Some("MEMORY_REQUIREMENTS_2"),
+ Self::SPARSE_IMAGE_MEMORY_REQUIREMENTS_2 => Some("SPARSE_IMAGE_MEMORY_REQUIREMENTS_2"),
+ Self::PHYSICAL_DEVICE_FEATURES_2 => Some("PHYSICAL_DEVICE_FEATURES_2"),
+ Self::PHYSICAL_DEVICE_PROPERTIES_2 => Some("PHYSICAL_DEVICE_PROPERTIES_2"),
+ Self::FORMAT_PROPERTIES_2 => Some("FORMAT_PROPERTIES_2"),
+ Self::IMAGE_FORMAT_PROPERTIES_2 => Some("IMAGE_FORMAT_PROPERTIES_2"),
+ Self::PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2 => {
+ Some("PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2")
+ }
+ Self::QUEUE_FAMILY_PROPERTIES_2 => Some("QUEUE_FAMILY_PROPERTIES_2"),
+ Self::PHYSICAL_DEVICE_MEMORY_PROPERTIES_2 => {
+ Some("PHYSICAL_DEVICE_MEMORY_PROPERTIES_2")
+ }
+ Self::SPARSE_IMAGE_FORMAT_PROPERTIES_2 => Some("SPARSE_IMAGE_FORMAT_PROPERTIES_2"),
+ Self::PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2 => {
+ Some("PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2")
+ }
+ Self::PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES")
+ }
+ Self::RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO => {
+ Some("RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO")
+ }
+ Self::IMAGE_VIEW_USAGE_CREATE_INFO => Some("IMAGE_VIEW_USAGE_CREATE_INFO"),
+ Self::PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO => {
+ Some("PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO")
+ }
+ Self::RENDER_PASS_MULTIVIEW_CREATE_INFO => Some("RENDER_PASS_MULTIVIEW_CREATE_INFO"),
+ Self::PHYSICAL_DEVICE_MULTIVIEW_FEATURES => Some("PHYSICAL_DEVICE_MULTIVIEW_FEATURES"),
+ Self::PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES")
+ }
+ Self::PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES => {
+ Some("PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES")
+ }
+ Self::PROTECTED_SUBMIT_INFO => Some("PROTECTED_SUBMIT_INFO"),
+ Self::PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES => {
+ Some("PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES")
+ }
+ Self::DEVICE_QUEUE_INFO_2 => Some("DEVICE_QUEUE_INFO_2"),
+ Self::SAMPLER_YCBCR_CONVERSION_CREATE_INFO => {
+ Some("SAMPLER_YCBCR_CONVERSION_CREATE_INFO")
+ }
+ Self::SAMPLER_YCBCR_CONVERSION_INFO => Some("SAMPLER_YCBCR_CONVERSION_INFO"),
+ Self::BIND_IMAGE_PLANE_MEMORY_INFO => Some("BIND_IMAGE_PLANE_MEMORY_INFO"),
+ Self::IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO => {
+ Some("IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO")
+ }
+ Self::PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES => {
+ Some("PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES")
+ }
+ Self::SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES => {
+ Some("SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES")
+ }
+ Self::DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO => {
+ Some("DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO")
+ }
+ Self::PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO => {
+ Some("PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO")
+ }
+ Self::EXTERNAL_IMAGE_FORMAT_PROPERTIES => Some("EXTERNAL_IMAGE_FORMAT_PROPERTIES"),
+ Self::PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO => {
+ Some("PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO")
+ }
+ Self::EXTERNAL_BUFFER_PROPERTIES => Some("EXTERNAL_BUFFER_PROPERTIES"),
+ Self::PHYSICAL_DEVICE_ID_PROPERTIES => Some("PHYSICAL_DEVICE_ID_PROPERTIES"),
+ Self::EXTERNAL_MEMORY_BUFFER_CREATE_INFO => Some("EXTERNAL_MEMORY_BUFFER_CREATE_INFO"),
+ Self::EXTERNAL_MEMORY_IMAGE_CREATE_INFO => Some("EXTERNAL_MEMORY_IMAGE_CREATE_INFO"),
+ Self::EXPORT_MEMORY_ALLOCATE_INFO => Some("EXPORT_MEMORY_ALLOCATE_INFO"),
+ Self::PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO => {
+ Some("PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO")
+ }
+ Self::EXTERNAL_FENCE_PROPERTIES => Some("EXTERNAL_FENCE_PROPERTIES"),
+ Self::EXPORT_FENCE_CREATE_INFO => Some("EXPORT_FENCE_CREATE_INFO"),
+ Self::EXPORT_SEMAPHORE_CREATE_INFO => Some("EXPORT_SEMAPHORE_CREATE_INFO"),
+ Self::PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO => {
+ Some("PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO")
+ }
+ Self::EXTERNAL_SEMAPHORE_PROPERTIES => Some("EXTERNAL_SEMAPHORE_PROPERTIES"),
+ Self::PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES")
+ }
+ Self::DESCRIPTOR_SET_LAYOUT_SUPPORT => Some("DESCRIPTOR_SET_LAYOUT_SUPPORT"),
+ Self::PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES => {
+ Some("PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_VULKAN_1_1_FEATURES => {
+ Some("PHYSICAL_DEVICE_VULKAN_1_1_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES")
+ }
+ Self::PHYSICAL_DEVICE_VULKAN_1_2_FEATURES => {
+ Some("PHYSICAL_DEVICE_VULKAN_1_2_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES")
+ }
+ Self::IMAGE_FORMAT_LIST_CREATE_INFO => Some("IMAGE_FORMAT_LIST_CREATE_INFO"),
+ Self::ATTACHMENT_DESCRIPTION_2 => Some("ATTACHMENT_DESCRIPTION_2"),
+ Self::ATTACHMENT_REFERENCE_2 => Some("ATTACHMENT_REFERENCE_2"),
+ Self::SUBPASS_DESCRIPTION_2 => Some("SUBPASS_DESCRIPTION_2"),
+ Self::SUBPASS_DEPENDENCY_2 => Some("SUBPASS_DEPENDENCY_2"),
+ Self::RENDER_PASS_CREATE_INFO_2 => Some("RENDER_PASS_CREATE_INFO_2"),
+ Self::SUBPASS_BEGIN_INFO => Some("SUBPASS_BEGIN_INFO"),
+ Self::SUBPASS_END_INFO => Some("SUBPASS_END_INFO"),
+ Self::PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES => {
+ Some("PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_DRIVER_PROPERTIES => Some("PHYSICAL_DEVICE_DRIVER_PROPERTIES"),
+ Self::PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES => {
+ Some("PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES => {
+ Some("PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES")
+ }
+ Self::DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO => {
+ Some("DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO")
+ }
+ Self::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES => {
+ Some("PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES")
+ }
+ Self::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO => {
+ Some("DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO")
+ }
+ Self::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT => {
+ Some("DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT")
+ }
+ Self::PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES")
+ }
+ Self::SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE => {
+ Some("SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE")
+ }
+ Self::PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES => {
+ Some("PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES")
+ }
+ Self::IMAGE_STENCIL_USAGE_CREATE_INFO => Some("IMAGE_STENCIL_USAGE_CREATE_INFO"),
+ Self::PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES")
+ }
+ Self::SAMPLER_REDUCTION_MODE_CREATE_INFO => Some("SAMPLER_REDUCTION_MODE_CREATE_INFO"),
+ Self::PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES => {
+ Some("PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES => {
+ Some("PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES")
+ }
+ Self::FRAMEBUFFER_ATTACHMENTS_CREATE_INFO => {
+ Some("FRAMEBUFFER_ATTACHMENTS_CREATE_INFO")
+ }
+ Self::FRAMEBUFFER_ATTACHMENT_IMAGE_INFO => Some("FRAMEBUFFER_ATTACHMENT_IMAGE_INFO"),
+ Self::RENDER_PASS_ATTACHMENT_BEGIN_INFO => Some("RENDER_PASS_ATTACHMENT_BEGIN_INFO"),
+ Self::PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES => {
+ Some("PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES => {
+ Some("PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES => {
+ Some("PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES")
+ }
+ Self::ATTACHMENT_REFERENCE_STENCIL_LAYOUT => {
+ Some("ATTACHMENT_REFERENCE_STENCIL_LAYOUT")
+ }
+ Self::ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT => {
+ Some("ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT")
+ }
+ Self::PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES => {
+ Some("PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES => {
+ Some("PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES")
+ }
+ Self::SEMAPHORE_TYPE_CREATE_INFO => Some("SEMAPHORE_TYPE_CREATE_INFO"),
+ Self::TIMELINE_SEMAPHORE_SUBMIT_INFO => Some("TIMELINE_SEMAPHORE_SUBMIT_INFO"),
+ Self::SEMAPHORE_WAIT_INFO => Some("SEMAPHORE_WAIT_INFO"),
+ Self::SEMAPHORE_SIGNAL_INFO => Some("SEMAPHORE_SIGNAL_INFO"),
+ Self::PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES => {
+ Some("PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES")
+ }
+ Self::BUFFER_DEVICE_ADDRESS_INFO => Some("BUFFER_DEVICE_ADDRESS_INFO"),
+ Self::BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO => {
+ Some("BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO")
+ }
+ Self::MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO => {
+ Some("MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO")
+ }
+ Self::DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO => {
+ Some("DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO")
+ }
+ Self::PHYSICAL_DEVICE_VULKAN_1_3_FEATURES => {
+ Some("PHYSICAL_DEVICE_VULKAN_1_3_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES")
+ }
+ Self::PIPELINE_CREATION_FEEDBACK_CREATE_INFO => {
+ Some("PIPELINE_CREATION_FEEDBACK_CREATE_INFO")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES => {
+ Some("PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_TOOL_PROPERTIES => Some("PHYSICAL_DEVICE_TOOL_PROPERTIES"),
+ Self::PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES => {
+ Some("PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES => {
+ Some("PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES")
+ }
+ Self::DEVICE_PRIVATE_DATA_CREATE_INFO => Some("DEVICE_PRIVATE_DATA_CREATE_INFO"),
+ Self::PRIVATE_DATA_SLOT_CREATE_INFO => Some("PRIVATE_DATA_SLOT_CREATE_INFO"),
+ Self::PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES => {
+ Some("PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES")
+ }
+ Self::MEMORY_BARRIER_2 => Some("MEMORY_BARRIER_2"),
+ Self::BUFFER_MEMORY_BARRIER_2 => Some("BUFFER_MEMORY_BARRIER_2"),
+ Self::IMAGE_MEMORY_BARRIER_2 => Some("IMAGE_MEMORY_BARRIER_2"),
+ Self::DEPENDENCY_INFO => Some("DEPENDENCY_INFO"),
+ Self::SUBMIT_INFO_2 => Some("SUBMIT_INFO_2"),
+ Self::SEMAPHORE_SUBMIT_INFO => Some("SEMAPHORE_SUBMIT_INFO"),
+ Self::COMMAND_BUFFER_SUBMIT_INFO => Some("COMMAND_BUFFER_SUBMIT_INFO"),
+ Self::PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES => {
+ Some("PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES => {
+ Some("PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES => {
+ Some("PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES")
+ }
+ Self::COPY_BUFFER_INFO_2 => Some("COPY_BUFFER_INFO_2"),
+ Self::COPY_IMAGE_INFO_2 => Some("COPY_IMAGE_INFO_2"),
+ Self::COPY_BUFFER_TO_IMAGE_INFO_2 => Some("COPY_BUFFER_TO_IMAGE_INFO_2"),
+ Self::COPY_IMAGE_TO_BUFFER_INFO_2 => Some("COPY_IMAGE_TO_BUFFER_INFO_2"),
+ Self::BLIT_IMAGE_INFO_2 => Some("BLIT_IMAGE_INFO_2"),
+ Self::RESOLVE_IMAGE_INFO_2 => Some("RESOLVE_IMAGE_INFO_2"),
+ Self::BUFFER_COPY_2 => Some("BUFFER_COPY_2"),
+ Self::IMAGE_COPY_2 => Some("IMAGE_COPY_2"),
+ Self::IMAGE_BLIT_2 => Some("IMAGE_BLIT_2"),
+ Self::BUFFER_IMAGE_COPY_2 => Some("BUFFER_IMAGE_COPY_2"),
+ Self::IMAGE_RESOLVE_2 => Some("IMAGE_RESOLVE_2"),
+ Self::PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES")
+ }
+ Self::PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO => {
+ Some("PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO")
+ }
+ Self::PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES => {
+ Some("PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES => {
+ Some("PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES")
+ }
+ Self::WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK => {
+ Some("WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK")
+ }
+ Self::DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO => {
+ Some("DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO")
+ }
+ Self::PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES => {
+ Some("PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES")
+ }
+ Self::RENDERING_INFO => Some("RENDERING_INFO"),
+ Self::RENDERING_ATTACHMENT_INFO => Some("RENDERING_ATTACHMENT_INFO"),
+ Self::PIPELINE_RENDERING_CREATE_INFO => Some("PIPELINE_RENDERING_CREATE_INFO"),
+ Self::PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES => {
+ Some("PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES")
+ }
+ Self::COMMAND_BUFFER_INHERITANCE_RENDERING_INFO => {
+ Some("COMMAND_BUFFER_INHERITANCE_RENDERING_INFO")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES => {
+ Some("PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES")
+ }
+ Self::PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES")
+ }
+ Self::FORMAT_PROPERTIES_3 => Some("FORMAT_PROPERTIES_3"),
+ Self::PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES => {
+ Some("PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES")
+ }
+ Self::PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES => {
+ Some("PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES")
+ }
+ Self::DEVICE_BUFFER_MEMORY_REQUIREMENTS => Some("DEVICE_BUFFER_MEMORY_REQUIREMENTS"),
+ Self::DEVICE_IMAGE_MEMORY_REQUIREMENTS => Some("DEVICE_IMAGE_MEMORY_REQUIREMENTS"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+// NOTE: machine-generated `fmt::Debug` impls (ash Vulkan bindings, derived from
+// vk.xml). Do not hand-edit; regenerate instead.
+// Bitmask pattern: KNOWN maps each defined bit to its display name, and
+// `debug_flags` presumably renders the set bits in KNOWN order (plus any
+// unrecognized remainder) — TODO confirm against `debug_flags` in this module.
+impl fmt::Debug for SubgroupFeatureFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (SubgroupFeatureFlags::BASIC.0, "BASIC"),
+ (SubgroupFeatureFlags::VOTE.0, "VOTE"),
+ (SubgroupFeatureFlags::ARITHMETIC.0, "ARITHMETIC"),
+ (SubgroupFeatureFlags::BALLOT.0, "BALLOT"),
+ (SubgroupFeatureFlags::SHUFFLE.0, "SHUFFLE"),
+ (SubgroupFeatureFlags::SHUFFLE_RELATIVE.0, "SHUFFLE_RELATIVE"),
+ (SubgroupFeatureFlags::CLUSTERED.0, "CLUSTERED"),
+ (SubgroupFeatureFlags::QUAD.0, "QUAD"),
+ (SubgroupFeatureFlags::PARTITIONED_NV.0, "PARTITIONED_NV"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for SubmitFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(SubmitFlags::PROTECTED.0, "PROTECTED")];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+// Scalar-enum pattern: print the variant's registry name when known, otherwise
+// fall back to the `Debug` of the raw inner value (`self.0`).
+impl fmt::Debug for SubpassContents {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::INLINE => Some("INLINE"),
+ Self::SECONDARY_COMMAND_BUFFERS => Some("SECONDARY_COMMAND_BUFFERS"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+// Generated Debug impls (ash Vulkan bindings) — regenerate rather than editing.
+// Vendor/extension suffixes (NVX, QCOM, EXT) come straight from the Vulkan
+// registry; KNOWN order determines the order names appear in the output.
+impl fmt::Debug for SubpassDescriptionFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ SubpassDescriptionFlags::PER_VIEW_ATTRIBUTES_NVX.0,
+ "PER_VIEW_ATTRIBUTES_NVX",
+ ),
+ (
+ SubpassDescriptionFlags::PER_VIEW_POSITION_X_ONLY_NVX.0,
+ "PER_VIEW_POSITION_X_ONLY_NVX",
+ ),
+ (
+ SubpassDescriptionFlags::FRAGMENT_REGION_QCOM.0,
+ "FRAGMENT_REGION_QCOM",
+ ),
+ (
+ SubpassDescriptionFlags::SHADER_RESOLVE_QCOM.0,
+ "SHADER_RESOLVE_QCOM",
+ ),
+ (
+ SubpassDescriptionFlags::RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_EXT.0,
+ "RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_EXT",
+ ),
+ (
+ SubpassDescriptionFlags::RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT.0,
+ "RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT",
+ ),
+ (
+ SubpassDescriptionFlags::RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT.0,
+ "RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT",
+ ),
+ (
+ SubpassDescriptionFlags::ENABLE_LEGACY_DITHERING_EXT.0,
+ "ENABLE_LEGACY_DITHERING_EXT",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+// VK_EXT_subpass_merge_feedback status codes: named variant if recognized,
+// otherwise the raw value's Debug output.
+impl fmt::Debug for SubpassMergeStatusEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::MERGED => Some("MERGED"),
+ Self::DISALLOWED => Some("DISALLOWED"),
+ Self::NOT_MERGED_SIDE_EFFECTS => Some("NOT_MERGED_SIDE_EFFECTS"),
+ Self::NOT_MERGED_SAMPLES_MISMATCH => Some("NOT_MERGED_SAMPLES_MISMATCH"),
+ Self::NOT_MERGED_VIEWS_MISMATCH => Some("NOT_MERGED_VIEWS_MISMATCH"),
+ Self::NOT_MERGED_ALIASING => Some("NOT_MERGED_ALIASING"),
+ Self::NOT_MERGED_DEPENDENCIES => Some("NOT_MERGED_DEPENDENCIES"),
+ Self::NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT => {
+ Some("NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT")
+ }
+ Self::NOT_MERGED_TOO_MANY_ATTACHMENTS => Some("NOT_MERGED_TOO_MANY_ATTACHMENTS"),
+ Self::NOT_MERGED_INSUFFICIENT_STORAGE => Some("NOT_MERGED_INSUFFICIENT_STORAGE"),
+ Self::NOT_MERGED_DEPTH_STENCIL_COUNT => Some("NOT_MERGED_DEPTH_STENCIL_COUNT"),
+ Self::NOT_MERGED_RESOLVE_ATTACHMENT_REUSE => {
+ Some("NOT_MERGED_RESOLVE_ATTACHMENT_REUSE")
+ }
+ Self::NOT_MERGED_SINGLE_SUBPASS => Some("NOT_MERGED_SINGLE_SUBPASS"),
+ Self::NOT_MERGED_UNSPECIFIED => Some("NOT_MERGED_UNSPECIFIED"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+// Generated Debug impls for surface/swapchain bitmasks (ash Vulkan bindings);
+// regenerate rather than editing. `debug_flags` presumably prints the names of
+// set bits from KNOWN — TODO confirm against its definition in this module.
+impl fmt::Debug for SurfaceCounterFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(SurfaceCounterFlagsEXT::VBLANK.0, "VBLANK")];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for SurfaceTransformFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (SurfaceTransformFlagsKHR::IDENTITY.0, "IDENTITY"),
+ (SurfaceTransformFlagsKHR::ROTATE_90.0, "ROTATE_90"),
+ (SurfaceTransformFlagsKHR::ROTATE_180.0, "ROTATE_180"),
+ (SurfaceTransformFlagsKHR::ROTATE_270.0, "ROTATE_270"),
+ (
+ SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR.0,
+ "HORIZONTAL_MIRROR",
+ ),
+ (
+ SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR_ROTATE_90.0,
+ "HORIZONTAL_MIRROR_ROTATE_90",
+ ),
+ (
+ SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR_ROTATE_180.0,
+ "HORIZONTAL_MIRROR_ROTATE_180",
+ ),
+ (
+ SurfaceTransformFlagsKHR::HORIZONTAL_MIRROR_ROTATE_270.0,
+ "HORIZONTAL_MIRROR_ROTATE_270",
+ ),
+ (SurfaceTransformFlagsKHR::INHERIT.0, "INHERIT"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for SwapchainCreateFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ SwapchainCreateFlagsKHR::SPLIT_INSTANCE_BIND_REGIONS.0,
+ "SPLIT_INSTANCE_BIND_REGIONS",
+ ),
+ (SwapchainCreateFlagsKHR::PROTECTED.0, "PROTECTED"),
+ (SwapchainCreateFlagsKHR::MUTABLE_FORMAT.0, "MUTABLE_FORMAT"),
+ (
+ SwapchainCreateFlagsKHR::DEFERRED_MEMORY_ALLOCATION_EXT.0,
+ "DEFERRED_MEMORY_ALLOCATION_EXT",
+ ),
+ // RESERVED_4_EXT: a registry-reserved bit with no ratified meaning yet.
+ (SwapchainCreateFlagsKHR::RESERVED_4_EXT.0, "RESERVED_4_EXT"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for SwapchainImageUsageFlagsANDROID {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(SwapchainImageUsageFlagsANDROID::SHARED.0, "SHARED")];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+// Generated Debug impls (ash Vulkan bindings); regenerate rather than editing.
+// Scalar enums print the registry variant name when known, else the raw value.
+impl fmt::Debug for SystemAllocationScope {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::COMMAND => Some("COMMAND"),
+ Self::OBJECT => Some("OBJECT"),
+ Self::CACHE => Some("CACHE"),
+ Self::DEVICE => Some("DEVICE"),
+ Self::INSTANCE => Some("INSTANCE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for TessellationDomainOrigin {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::UPPER_LEFT => Some("UPPER_LEFT"),
+ Self::LOWER_LEFT => Some("LOWER_LEFT"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for TimeDomainEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::DEVICE => Some("DEVICE"),
+ Self::CLOCK_MONOTONIC => Some("CLOCK_MONOTONIC"),
+ Self::CLOCK_MONOTONIC_RAW => Some("CLOCK_MONOTONIC_RAW"),
+ Self::QUERY_PERFORMANCE_COUNTER => Some("QUERY_PERFORMANCE_COUNTER"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+// Bitmask: KNOWN bit/name table consumed by `debug_flags`; order is the
+// output order of the names.
+impl fmt::Debug for ToolPurposeFlags {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (ToolPurposeFlags::VALIDATION.0, "VALIDATION"),
+ (ToolPurposeFlags::PROFILING.0, "PROFILING"),
+ (ToolPurposeFlags::TRACING.0, "TRACING"),
+ (
+ ToolPurposeFlags::ADDITIONAL_FEATURES.0,
+ "ADDITIONAL_FEATURES",
+ ),
+ (ToolPurposeFlags::MODIFYING_FEATURES.0, "MODIFYING_FEATURES"),
+ (
+ ToolPurposeFlags::DEBUG_REPORTING_EXT.0,
+ "DEBUG_REPORTING_EXT",
+ ),
+ (ToolPurposeFlags::DEBUG_MARKERS_EXT.0, "DEBUG_MARKERS_EXT"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+// Generated Debug impls (ash Vulkan bindings); regenerate rather than editing.
+// An empty KNOWN table means the registry defines no bits for this flags type
+// yet; `debug_flags` then has no names to print for any set bits.
+impl fmt::Debug for ValidationCacheCreateFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ValidationCacheHeaderVersionEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::ONE => Some("ONE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for ValidationCheckEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::ALL => Some("ALL"),
+ Self::SHADERS => Some("SHADERS"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for ValidationFeatureDisableEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::ALL => Some("ALL"),
+ Self::SHADERS => Some("SHADERS"),
+ Self::THREAD_SAFETY => Some("THREAD_SAFETY"),
+ Self::API_PARAMETERS => Some("API_PARAMETERS"),
+ Self::OBJECT_LIFETIMES => Some("OBJECT_LIFETIMES"),
+ Self::CORE_CHECKS => Some("CORE_CHECKS"),
+ Self::UNIQUE_HANDLES => Some("UNIQUE_HANDLES"),
+ Self::SHADER_VALIDATION_CACHE => Some("SHADER_VALIDATION_CACHE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for ValidationFeatureEnableEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::GPU_ASSISTED => Some("GPU_ASSISTED"),
+ Self::GPU_ASSISTED_RESERVE_BINDING_SLOT => Some("GPU_ASSISTED_RESERVE_BINDING_SLOT"),
+ Self::BEST_PRACTICES => Some("BEST_PRACTICES"),
+ Self::DEBUG_PRINTF => Some("DEBUG_PRINTF"),
+ Self::SYNCHRONIZATION_VALIDATION => Some("SYNCHRONIZATION_VALIDATION"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+// Only Khronos-registered (non-PCI) vendor IDs have named variants; ordinary
+// PCI vendor IDs fall through to the numeric fallback.
+impl fmt::Debug for VendorId {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::VIV => Some("VIV"),
+ Self::VSI => Some("VSI"),
+ Self::KAZAN => Some("KAZAN"),
+ Self::CODEPLAY => Some("CODEPLAY"),
+ Self::MESA => Some("MESA"),
+ Self::POCL => Some("POCL"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for VertexInputRate {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::VERTEX => Some("VERTEX"),
+ Self::INSTANCE => Some("INSTANCE"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+// Generated Debug impls (ash Vulkan bindings); regenerate rather than editing.
+// Empty KNOWN tables = the registry defines no bits for that flags type yet.
+impl fmt::Debug for ViSurfaceCreateFlagsNN {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoBeginCodingFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoCapabilityFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ VideoCapabilityFlagsKHR::PROTECTED_CONTENT.0,
+ "PROTECTED_CONTENT",
+ ),
+ (
+ VideoCapabilityFlagsKHR::SEPARATE_REFERENCE_IMAGES.0,
+ "SEPARATE_REFERENCE_IMAGES",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+// NOTE: INVALID and the TYPE_* names mirror the registry's enum names; TYPE_
+// prefixes exist because Rust identifiers cannot start with a digit.
+impl fmt::Debug for VideoChromaSubsamplingFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (VideoChromaSubsamplingFlagsKHR::INVALID.0, "INVALID"),
+ (VideoChromaSubsamplingFlagsKHR::MONOCHROME.0, "MONOCHROME"),
+ (VideoChromaSubsamplingFlagsKHR::TYPE_420.0, "TYPE_420"),
+ (VideoChromaSubsamplingFlagsKHR::TYPE_422.0, "TYPE_422"),
+ (VideoChromaSubsamplingFlagsKHR::TYPE_444.0, "TYPE_444"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoCodecOperationFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (VideoCodecOperationFlagsKHR::NONE.0, "NONE"),
+ (
+ VideoCodecOperationFlagsKHR::ENCODE_H264_EXT.0,
+ "ENCODE_H264_EXT",
+ ),
+ (
+ VideoCodecOperationFlagsKHR::ENCODE_H265_EXT.0,
+ "ENCODE_H265_EXT",
+ ),
+ (VideoCodecOperationFlagsKHR::DECODE_H264.0, "DECODE_H264"),
+ (VideoCodecOperationFlagsKHR::DECODE_H265.0, "DECODE_H265"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoCodingControlFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (VideoCodingControlFlagsKHR::RESET.0, "RESET"),
+ (
+ VideoCodingControlFlagsKHR::ENCODE_RATE_CONTROL.0,
+ "ENCODE_RATE_CONTROL",
+ ),
+ (
+ VideoCodingControlFlagsKHR::ENCODE_RATE_CONTROL_LAYER.0,
+ "ENCODE_RATE_CONTROL_LAYER",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoComponentBitDepthFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (VideoComponentBitDepthFlagsKHR::INVALID.0, "INVALID"),
+ (VideoComponentBitDepthFlagsKHR::TYPE_8.0, "TYPE_8"),
+ (VideoComponentBitDepthFlagsKHR::TYPE_10.0, "TYPE_10"),
+ (VideoComponentBitDepthFlagsKHR::TYPE_12.0, "TYPE_12"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoDecodeCapabilityFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ VideoDecodeCapabilityFlagsKHR::DPB_AND_OUTPUT_COINCIDE.0,
+ "DPB_AND_OUTPUT_COINCIDE",
+ ),
+ (
+ VideoDecodeCapabilityFlagsKHR::DPB_AND_OUTPUT_DISTINCT.0,
+ "DPB_AND_OUTPUT_DISTINCT",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoDecodeFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+// Generated Debug impls for the video-decode/encode bitmasks (ash Vulkan
+// bindings); regenerate rather than editing. KNOWN order = output order of
+// names produced by `debug_flags`.
+impl fmt::Debug for VideoDecodeH264PictureLayoutFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ VideoDecodeH264PictureLayoutFlagsKHR::PROGRESSIVE.0,
+ "PROGRESSIVE",
+ ),
+ (
+ VideoDecodeH264PictureLayoutFlagsKHR::INTERLACED_INTERLEAVED_LINES.0,
+ "INTERLACED_INTERLEAVED_LINES",
+ ),
+ (
+ VideoDecodeH264PictureLayoutFlagsKHR::INTERLACED_SEPARATE_PLANES.0,
+ "INTERLACED_SEPARATE_PLANES",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoDecodeUsageFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (VideoDecodeUsageFlagsKHR::DEFAULT.0, "DEFAULT"),
+ (VideoDecodeUsageFlagsKHR::TRANSCODING.0, "TRANSCODING"),
+ (VideoDecodeUsageFlagsKHR::OFFLINE.0, "OFFLINE"),
+ (VideoDecodeUsageFlagsKHR::STREAMING.0, "STREAMING"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoEncodeCapabilityFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(
+ VideoEncodeCapabilityFlagsKHR::PRECEDING_EXTERNALLY_ENCODED_BYTES.0,
+ "PRECEDING_EXTERNALLY_ENCODED_BYTES",
+ )];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoEncodeContentFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (VideoEncodeContentFlagsKHR::DEFAULT.0, "DEFAULT"),
+ (VideoEncodeContentFlagsKHR::CAMERA.0, "CAMERA"),
+ (VideoEncodeContentFlagsKHR::DESKTOP.0, "DESKTOP"),
+ (VideoEncodeContentFlagsKHR::RENDERED.0, "RENDERED"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoEncodeFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // No bits defined for this flags type in the registry yet.
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoEncodeH264CapabilityFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ VideoEncodeH264CapabilityFlagsEXT::DIRECT_8X8_INFERENCE_ENABLED.0,
+ "DIRECT_8X8_INFERENCE_ENABLED",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::DIRECT_8X8_INFERENCE_DISABLED.0,
+ "DIRECT_8X8_INFERENCE_DISABLED",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::SEPARATE_COLOUR_PLANE.0,
+ "SEPARATE_COLOUR_PLANE",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::QPPRIME_Y_ZERO_TRANSFORM_BYPASS.0,
+ "QPPRIME_Y_ZERO_TRANSFORM_BYPASS",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::SCALING_LISTS.0,
+ "SCALING_LISTS",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::HRD_COMPLIANCE.0,
+ "HRD_COMPLIANCE",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::CHROMA_QP_OFFSET.0,
+ "CHROMA_QP_OFFSET",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::SECOND_CHROMA_QP_OFFSET.0,
+ "SECOND_CHROMA_QP_OFFSET",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::PIC_INIT_QP_MINUS26.0,
+ "PIC_INIT_QP_MINUS26",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::WEIGHTED_PRED.0,
+ "WEIGHTED_PRED",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::WEIGHTED_BIPRED_EXPLICIT.0,
+ "WEIGHTED_BIPRED_EXPLICIT",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::WEIGHTED_BIPRED_IMPLICIT.0,
+ "WEIGHTED_BIPRED_IMPLICIT",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::WEIGHTED_PRED_NO_TABLE.0,
+ "WEIGHTED_PRED_NO_TABLE",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::TRANSFORM_8X8.0,
+ "TRANSFORM_8X8",
+ ),
+ (VideoEncodeH264CapabilityFlagsEXT::CABAC.0, "CABAC"),
+ (VideoEncodeH264CapabilityFlagsEXT::CAVLC.0, "CAVLC"),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::DEBLOCKING_FILTER_DISABLED.0,
+ "DEBLOCKING_FILTER_DISABLED",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::DEBLOCKING_FILTER_ENABLED.0,
+ "DEBLOCKING_FILTER_ENABLED",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::DEBLOCKING_FILTER_PARTIAL.0,
+ "DEBLOCKING_FILTER_PARTIAL",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::DISABLE_DIRECT_SPATIAL_MV_PRED.0,
+ "DISABLE_DIRECT_SPATIAL_MV_PRED",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::MULTIPLE_SLICE_PER_FRAME.0,
+ "MULTIPLE_SLICE_PER_FRAME",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::SLICE_MB_COUNT.0,
+ "SLICE_MB_COUNT",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::ROW_UNALIGNED_SLICE.0,
+ "ROW_UNALIGNED_SLICE",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::DIFFERENT_SLICE_TYPE.0,
+ "DIFFERENT_SLICE_TYPE",
+ ),
+ (
+ VideoEncodeH264CapabilityFlagsEXT::B_FRAME_IN_L1_LIST.0,
+ "B_FRAME_IN_L1_LIST",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoEncodeH264InputModeFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (VideoEncodeH264InputModeFlagsEXT::FRAME.0, "FRAME"),
+ (VideoEncodeH264InputModeFlagsEXT::SLICE.0, "SLICE"),
+ (VideoEncodeH264InputModeFlagsEXT::NON_VCL.0, "NON_VCL"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoEncodeH264OutputModeFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (VideoEncodeH264OutputModeFlagsEXT::FRAME.0, "FRAME"),
+ (VideoEncodeH264OutputModeFlagsEXT::SLICE.0, "SLICE"),
+ (VideoEncodeH264OutputModeFlagsEXT::NON_VCL.0, "NON_VCL"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+// Generated Debug impls (ash Vulkan bindings, provisional VK_EXT_video_encode_*
+// extensions); regenerate rather than editing. Scalar enums fall back to the
+// raw value's Debug for unrecognized variants; bitmasks go through
+// `debug_flags` with the KNOWN bit/name table.
+impl fmt::Debug for VideoEncodeH264RateControlStructureEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::UNKNOWN => Some("UNKNOWN"),
+ Self::FLAT => Some("FLAT"),
+ Self::DYADIC => Some("DYADIC"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for VideoEncodeH265CapabilityFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ VideoEncodeH265CapabilityFlagsEXT::SEPARATE_COLOUR_PLANE.0,
+ "SEPARATE_COLOUR_PLANE",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::SCALING_LISTS.0,
+ "SCALING_LISTS",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::SAMPLE_ADAPTIVE_OFFSET_ENABLED.0,
+ "SAMPLE_ADAPTIVE_OFFSET_ENABLED",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::PCM_ENABLE.0,
+ "PCM_ENABLE",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::SPS_TEMPORAL_MVP_ENABLED.0,
+ "SPS_TEMPORAL_MVP_ENABLED",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::HRD_COMPLIANCE.0,
+ "HRD_COMPLIANCE",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::INIT_QP_MINUS26.0,
+ "INIT_QP_MINUS26",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::LOG2_PARALLEL_MERGE_LEVEL_MINUS2.0,
+ "LOG2_PARALLEL_MERGE_LEVEL_MINUS2",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::SIGN_DATA_HIDING_ENABLED.0,
+ "SIGN_DATA_HIDING_ENABLED",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::TRANSFORM_SKIP_ENABLED.0,
+ "TRANSFORM_SKIP_ENABLED",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::TRANSFORM_SKIP_DISABLED.0,
+ "TRANSFORM_SKIP_DISABLED",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT.0,
+ "PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::WEIGHTED_PRED.0,
+ "WEIGHTED_PRED",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::WEIGHTED_BIPRED.0,
+ "WEIGHTED_BIPRED",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::WEIGHTED_PRED_NO_TABLE.0,
+ "WEIGHTED_PRED_NO_TABLE",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::TRANSQUANT_BYPASS_ENABLED.0,
+ "TRANSQUANT_BYPASS_ENABLED",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::ENTROPY_CODING_SYNC_ENABLED.0,
+ "ENTROPY_CODING_SYNC_ENABLED",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::DEBLOCKING_FILTER_OVERRIDE_ENABLED.0,
+ "DEBLOCKING_FILTER_OVERRIDE_ENABLED",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::MULTIPLE_TILE_PER_FRAME.0,
+ "MULTIPLE_TILE_PER_FRAME",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::MULTIPLE_SLICE_PER_TILE.0,
+ "MULTIPLE_SLICE_PER_TILE",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::MULTIPLE_TILE_PER_SLICE.0,
+ "MULTIPLE_TILE_PER_SLICE",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::SLICE_SEGMENT_CTB_COUNT.0,
+ "SLICE_SEGMENT_CTB_COUNT",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::ROW_UNALIGNED_SLICE_SEGMENT.0,
+ "ROW_UNALIGNED_SLICE_SEGMENT",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::DEPENDENT_SLICE_SEGMENT.0,
+ "DEPENDENT_SLICE_SEGMENT",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::DIFFERENT_SLICE_TYPE.0,
+ "DIFFERENT_SLICE_TYPE",
+ ),
+ (
+ VideoEncodeH265CapabilityFlagsEXT::B_FRAME_IN_L1_LIST.0,
+ "B_FRAME_IN_L1_LIST",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoEncodeH265CtbSizeFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (VideoEncodeH265CtbSizeFlagsEXT::TYPE_16.0, "TYPE_16"),
+ (VideoEncodeH265CtbSizeFlagsEXT::TYPE_32.0, "TYPE_32"),
+ (VideoEncodeH265CtbSizeFlagsEXT::TYPE_64.0, "TYPE_64"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoEncodeH265InputModeFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (VideoEncodeH265InputModeFlagsEXT::FRAME.0, "FRAME"),
+ (
+ VideoEncodeH265InputModeFlagsEXT::SLICE_SEGMENT.0,
+ "SLICE_SEGMENT",
+ ),
+ (VideoEncodeH265InputModeFlagsEXT::NON_VCL.0, "NON_VCL"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoEncodeH265OutputModeFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (VideoEncodeH265OutputModeFlagsEXT::FRAME.0, "FRAME"),
+ (
+ VideoEncodeH265OutputModeFlagsEXT::SLICE_SEGMENT.0,
+ "SLICE_SEGMENT",
+ ),
+ (VideoEncodeH265OutputModeFlagsEXT::NON_VCL.0, "NON_VCL"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoEncodeH265RateControlStructureEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::UNKNOWN => Some("UNKNOWN"),
+ Self::FLAT => Some("FLAT"),
+ Self::DYADIC => Some("DYADIC"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for VideoEncodeH265TransformBlockSizeFlagsEXT {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (
+ VideoEncodeH265TransformBlockSizeFlagsEXT::TYPE_4.0,
+ "TYPE_4",
+ ),
+ (
+ VideoEncodeH265TransformBlockSizeFlagsEXT::TYPE_8.0,
+ "TYPE_8",
+ ),
+ (
+ VideoEncodeH265TransformBlockSizeFlagsEXT::TYPE_16.0,
+ "TYPE_16",
+ ),
+ (
+ VideoEncodeH265TransformBlockSizeFlagsEXT::TYPE_32.0,
+ "TYPE_32",
+ ),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoEncodeRateControlFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ // No bits defined for this flags type in the registry yet.
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoEncodeRateControlModeFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (VideoEncodeRateControlModeFlagsKHR::NONE.0, "NONE"),
+ (VideoEncodeRateControlModeFlagsKHR::CBR.0, "CBR"),
+ (VideoEncodeRateControlModeFlagsKHR::VBR.0, "VBR"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoEncodeTuningModeKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::DEFAULT => Some("DEFAULT"),
+ Self::HIGH_QUALITY => Some("HIGH_QUALITY"),
+ Self::LOW_LATENCY => Some("LOW_LATENCY"),
+ Self::ULTRA_LOW_LATENCY => Some("ULTRA_LOW_LATENCY"),
+ Self::LOSSLESS => Some("LOSSLESS"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for VideoEncodeUsageFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[
+ (VideoEncodeUsageFlagsKHR::DEFAULT.0, "DEFAULT"),
+ (VideoEncodeUsageFlagsKHR::TRANSCODING.0, "TRANSCODING"),
+ (VideoEncodeUsageFlagsKHR::STREAMING.0, "STREAMING"),
+ (VideoEncodeUsageFlagsKHR::RECORDING.0, "RECORDING"),
+ (VideoEncodeUsageFlagsKHR::CONFERENCING.0, "CONFERENCING"),
+ ];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoEndCodingFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoSessionCreateFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[(
+ VideoSessionCreateFlagsKHR::PROTECTED_CONTENT.0,
+ "PROTECTED_CONTENT",
+ )];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for VideoSessionParametersCreateFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for ViewportCoordinateSwizzleNV {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::POSITIVE_X => Some("POSITIVE_X"),
+ Self::NEGATIVE_X => Some("NEGATIVE_X"),
+ Self::POSITIVE_Y => Some("POSITIVE_Y"),
+ Self::NEGATIVE_Y => Some("NEGATIVE_Y"),
+ Self::POSITIVE_Z => Some("POSITIVE_Z"),
+ Self::NEGATIVE_Z => Some("NEGATIVE_Z"),
+ Self::POSITIVE_W => Some("POSITIVE_W"),
+ Self::NEGATIVE_W => Some("NEGATIVE_W"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for WaylandSurfaceCreateFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for Win32SurfaceCreateFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for XcbSurfaceCreateFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
+impl fmt::Debug for XlibSurfaceCreateFlagsKHR {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ const KNOWN: &[(Flags, &str)] = &[];
+ debug_flags(f, KNOWN, self.0)
+ }
+}
diff --git a/third_party/rust/ash/src/vk/constants.rs b/third_party/rust/ash/src/vk/constants.rs
new file mode 100644
index 0000000000..78da55ec71
--- /dev/null
+++ b/third_party/rust/ash/src/vk/constants.rs
@@ -0,0 +1,27 @@
use crate::vk::definitions::*;
// Fixed capacities of the embedded char/byte arrays in Vulkan info structs.
pub const MAX_PHYSICAL_DEVICE_NAME_SIZE: usize = 256;
pub const UUID_SIZE: usize = 16;
pub const LUID_SIZE: usize = 8;
pub const MAX_EXTENSION_NAME_SIZE: usize = 256;
pub const MAX_DESCRIPTION_SIZE: usize = 256;
pub const MAX_MEMORY_TYPES: usize = 32;
#[doc = "The maximum number of unique memory heaps, each of which supporting 1 or more memory types"]
pub const MAX_MEMORY_HEAPS: usize = 16;
// Magic LOD value (VK_LOD_CLAMP_NONE) meaning "do not clamp the sampler LOD".
pub const LOD_CLAMP_NONE: f32 = 1000.00;
// `!0` (all bits set, i.e. the type's MAX) is Vulkan's conventional
// "remaining"/"unused"/"ignored" sentinel.
pub const REMAINING_MIP_LEVELS: u32 = !0;
pub const REMAINING_ARRAY_LAYERS: u32 = !0;
pub const WHOLE_SIZE: u64 = !0;
pub const ATTACHMENT_UNUSED: u32 = !0;
pub const TRUE: Bool32 = 1;
pub const FALSE: Bool32 = 0;
pub const QUEUE_FAMILY_IGNORED: u32 = !0;
// `!1` == u32::MAX - 1 and `!2` == u32::MAX - 2: the next sentinels below
// QUEUE_FAMILY_IGNORED, matching VK_QUEUE_FAMILY_EXTERNAL / _FOREIGN_EXT.
pub const QUEUE_FAMILY_EXTERNAL: u32 = !1;
pub const QUEUE_FAMILY_FOREIGN_EXT: u32 = !2;
pub const SUBPASS_EXTERNAL: u32 = !0;
pub const MAX_DEVICE_GROUP_SIZE: usize = 32;
pub const MAX_DRIVER_NAME_SIZE: usize = 256;
pub const MAX_DRIVER_INFO_SIZE: usize = 256;
pub const SHADER_UNUSED_KHR: u32 = !0;
pub const MAX_GLOBAL_PRIORITY_SIZE_KHR: usize = 16;
pub const MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT: usize = 32;
// NV alias kept for source compatibility with the older NV ray-tracing names.
pub const SHADER_UNUSED_NV: u32 = SHADER_UNUSED_KHR;
diff --git a/third_party/rust/ash/src/vk/definitions.rs b/third_party/rust/ash/src/vk/definitions.rs
new file mode 100644
index 0000000000..c73770b05e
--- /dev/null
+++ b/third_party/rust/ash/src/vk/definitions.rs
@@ -0,0 +1,73178 @@
+use crate::vk::aliases::*;
+use crate::vk::bitflags::*;
+use crate::vk::constants::*;
+use crate::vk::enums::*;
+use crate::vk::native::*;
+use crate::vk::platform_types::*;
+use crate::vk::prelude::*;
+use crate::vk::{ptr_chain_iter, Handle};
+use std::fmt;
+use std::os::raw::*;
#[deprecated = "This define is deprecated. VK_MAKE_API_VERSION should be used instead."]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_MAKE_VERSION.html>"]
pub const fn make_version(major: u32, minor: u32, patch: u32) -> u32 {
    // Legacy packing (no variant field): major in bits 22.., minor in
    // bits 12..=21, patch in bits 0..=11.
    major << 22 | minor << 12 | patch
}
#[deprecated = "This define is deprecated. VK_API_VERSION_MAJOR should be used instead."]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_VERSION_MAJOR.html>"]
pub const fn version_major(version: u32) -> u32 {
    // Legacy encoding: the major number is everything above bit 21.
    version >> 22
}
#[deprecated = "This define is deprecated. VK_API_VERSION_MINOR should be used instead."]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_VERSION_MINOR.html>"]
pub const fn version_minor(version: u32) -> u32 {
    // The minor number occupies the 10 bits 12..=21.
    (version >> 12) & 0x3ff
}
#[deprecated = "This define is deprecated. VK_API_VERSION_PATCH should be used instead."]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_VERSION_PATCH.html>"]
pub const fn version_patch(version: u32) -> u32 {
    // The patch number occupies the low 12 bits.
    version & 0xfff
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_MAKE_API_VERSION.html>"]
pub const fn make_api_version(variant: u32, major: u32, minor: u32, patch: u32) -> u32 {
    // Packed layout, high to low: variant (3 bits), major (7 bits),
    // minor (10 bits), patch (12 bits).
    variant << 29 | major << 22 | minor << 12 | patch
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_API_VERSION_VARIANT.html>"]
pub const fn api_version_variant(version: u32) -> u32 {
    // The variant is the top 3 bits; no mask needed after the shift.
    version >> 29
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_API_VERSION_MAJOR.html>"]
pub const fn api_version_major(version: u32) -> u32 {
    // The major number occupies the 7 bits 22..=28.
    (version >> 22) & 0x7f
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_API_VERSION_MINOR.html>"]
pub const fn api_version_minor(version: u32) -> u32 {
    // The minor number occupies the 10 bits 12..=21.
    (version >> 12) & 0x3ff
}
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_API_VERSION_PATCH.html>"]
pub const fn api_version_patch(version: u32) -> u32 {
    // The patch number occupies the low 12 bits.
    version & 0xfff
}
// Canonical packed version numbers for each released core API revision
// (variant 0, patch 0), built with `make_api_version` above.
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_API_VERSION_1_0.html>"]
pub const API_VERSION_1_0: u32 = make_api_version(0, 1, 0, 0);
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_API_VERSION_1_1.html>"]
pub const API_VERSION_1_1: u32 = make_api_version(0, 1, 1, 0);
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_API_VERSION_1_2.html>"]
pub const API_VERSION_1_2: u32 = make_api_version(0, 1, 2, 0);
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_API_VERSION_1_3.html>"]
pub const API_VERSION_1_3: u32 = make_api_version(0, 1, 3, 0);
// Patch revision of the vk.xml registry these bindings were generated from.
pub const HEADER_VERSION: u32 = 238u32;
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VK_HEADER_VERSION_COMPLETE.html>"]
pub const HEADER_VERSION_COMPLETE: u32 = make_api_version(0, 1, 3, HEADER_VERSION);
// Plain-integer aliases for the C API's scalar typedefs.
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSampleMask.html>"]
pub type SampleMask = u32;
// Vulkan booleans are 32-bit; the `TRUE`/`FALSE` constants provide the values.
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBool32.html>"]
pub type Bool32 = u32;
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFlags.html>"]
pub type Flags = u32;
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFlags64.html>"]
pub type Flags64 = u64;
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceSize.html>"]
pub type DeviceSize = u64;
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceAddress.html>"]
pub type DeviceAddress = u64;
// Reserved bitmask wrapper types. Each is a #[repr(transparent)] newtype over
// `Flags` (u32) with no bits defined at this API revision; `vk_bitflags_wrapped!`
// presumably supplies the usual bitwise-operator/empty/contains impls — see
// macros.rs to confirm.
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueryPoolCreateFlags.html>"]
pub struct QueryPoolCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(QueryPoolCreateFlags, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineDynamicStateCreateFlags.html>"]
pub struct PipelineDynamicStateCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineDynamicStateCreateFlags, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineMultisampleStateCreateFlags.html>"]
pub struct PipelineMultisampleStateCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineMultisampleStateCreateFlags, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRasterizationStateCreateFlags.html>"]
pub struct PipelineRasterizationStateCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineRasterizationStateCreateFlags, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineViewportStateCreateFlags.html>"]
pub struct PipelineViewportStateCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineViewportStateCreateFlags, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineTessellationStateCreateFlags.html>"]
pub struct PipelineTessellationStateCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineTessellationStateCreateFlags, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineInputAssemblyStateCreateFlags.html>"]
pub struct PipelineInputAssemblyStateCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineInputAssemblyStateCreateFlags, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineVertexInputStateCreateFlags.html>"]
pub struct PipelineVertexInputStateCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineVertexInputStateCreateFlags, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferViewCreateFlags.html>"]
pub struct BufferViewCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(BufferViewCreateFlags, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceCreateFlags.html>"]
pub struct DeviceCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(DeviceCreateFlags, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryMapFlags.html>"]
pub struct MemoryMapFlags(pub(crate) Flags);
vk_bitflags_wrapped!(MemoryMapFlags, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorPoolResetFlags.html>"]
pub struct DescriptorPoolResetFlags(pub(crate) Flags);
vk_bitflags_wrapped!(DescriptorPoolResetFlags, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorUpdateTemplateCreateFlags.html>"]
pub struct DescriptorUpdateTemplateCreateFlags(pub(crate) Flags);
vk_bitflags_wrapped!(DescriptorUpdateTemplateCreateFlags, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureMotionInfoFlagsNV.html>"]
pub struct AccelerationStructureMotionInfoFlagsNV(pub(crate) Flags);
vk_bitflags_wrapped!(AccelerationStructureMotionInfoFlagsNV, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureMotionInstanceFlagsNV.html>"]
pub struct AccelerationStructureMotionInstanceFlagsNV(pub(crate) Flags);
vk_bitflags_wrapped!(AccelerationStructureMotionInstanceFlagsNV, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDirectDriverLoadingFlagsLUNARG.html>"]
pub struct DirectDriverLoadingFlagsLUNARG(pub(crate) Flags);
vk_bitflags_wrapped!(DirectDriverLoadingFlagsLUNARG, Flags);
// Display and per-platform WSI surface creation flags.
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayModeCreateFlagsKHR.html>"]
pub struct DisplayModeCreateFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(DisplayModeCreateFlagsKHR, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplaySurfaceCreateFlagsKHR.html>"]
pub struct DisplaySurfaceCreateFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(DisplaySurfaceCreateFlagsKHR, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAndroidSurfaceCreateFlagsKHR.html>"]
pub struct AndroidSurfaceCreateFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(AndroidSurfaceCreateFlagsKHR, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkViSurfaceCreateFlagsNN.html>"]
pub struct ViSurfaceCreateFlagsNN(pub(crate) Flags);
vk_bitflags_wrapped!(ViSurfaceCreateFlagsNN, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkWaylandSurfaceCreateFlagsKHR.html>"]
pub struct WaylandSurfaceCreateFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(WaylandSurfaceCreateFlagsKHR, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkWin32SurfaceCreateFlagsKHR.html>"]
pub struct Win32SurfaceCreateFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(Win32SurfaceCreateFlagsKHR, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkXlibSurfaceCreateFlagsKHR.html>"]
pub struct XlibSurfaceCreateFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(XlibSurfaceCreateFlagsKHR, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkXcbSurfaceCreateFlagsKHR.html>"]
pub struct XcbSurfaceCreateFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(XcbSurfaceCreateFlagsKHR, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDirectFBSurfaceCreateFlagsEXT.html>"]
pub struct DirectFBSurfaceCreateFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(DirectFBSurfaceCreateFlagsEXT, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkIOSSurfaceCreateFlagsMVK.html>"]
pub struct IOSSurfaceCreateFlagsMVK(pub(crate) Flags);
vk_bitflags_wrapped!(IOSSurfaceCreateFlagsMVK, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMacOSSurfaceCreateFlagsMVK.html>"]
pub struct MacOSSurfaceCreateFlagsMVK(pub(crate) Flags);
vk_bitflags_wrapped!(MacOSSurfaceCreateFlagsMVK, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMetalSurfaceCreateFlagsEXT.html>"]
pub struct MetalSurfaceCreateFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(MetalSurfaceCreateFlagsEXT, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImagePipeSurfaceCreateFlagsFUCHSIA.html>"]
pub struct ImagePipeSurfaceCreateFlagsFUCHSIA(pub(crate) Flags);
vk_bitflags_wrapped!(ImagePipeSurfaceCreateFlagsFUCHSIA, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkStreamDescriptorSurfaceCreateFlagsGGP.html>"]
pub struct StreamDescriptorSurfaceCreateFlagsGGP(pub(crate) Flags);
vk_bitflags_wrapped!(StreamDescriptorSurfaceCreateFlagsGGP, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkHeadlessSurfaceCreateFlagsEXT.html>"]
pub struct HeadlessSurfaceCreateFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(HeadlessSurfaceCreateFlagsEXT, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkScreenSurfaceCreateFlagsQNX.html>"]
pub struct ScreenSurfaceCreateFlagsQNX(pub(crate) Flags);
vk_bitflags_wrapped!(ScreenSurfaceCreateFlagsQNX, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandPoolTrimFlags.html>"]
pub struct CommandPoolTrimFlags(pub(crate) Flags);
vk_bitflags_wrapped!(CommandPoolTrimFlags, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineViewportSwizzleStateCreateFlagsNV.html>"]
pub struct PipelineViewportSwizzleStateCreateFlagsNV(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineViewportSwizzleStateCreateFlagsNV, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineDiscardRectangleStateCreateFlagsEXT.html>"]
pub struct PipelineDiscardRectangleStateCreateFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineDiscardRectangleStateCreateFlagsEXT, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCoverageToColorStateCreateFlagsNV.html>"]
pub struct PipelineCoverageToColorStateCreateFlagsNV(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineCoverageToColorStateCreateFlagsNV, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCoverageModulationStateCreateFlagsNV.html>"]
pub struct PipelineCoverageModulationStateCreateFlagsNV(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineCoverageModulationStateCreateFlagsNV, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCoverageReductionStateCreateFlagsNV.html>"]
pub struct PipelineCoverageReductionStateCreateFlagsNV(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineCoverageReductionStateCreateFlagsNV, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkValidationCacheCreateFlagsEXT.html>"]
pub struct ValidationCacheCreateFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(ValidationCacheCreateFlagsEXT, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugUtilsMessengerCreateFlagsEXT.html>"]
pub struct DebugUtilsMessengerCreateFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(DebugUtilsMessengerCreateFlagsEXT, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugUtilsMessengerCallbackDataFlagsEXT.html>"]
pub struct DebugUtilsMessengerCallbackDataFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(DebugUtilsMessengerCallbackDataFlagsEXT, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceMemoryReportFlagsEXT.html>"]
pub struct DeviceMemoryReportFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(DeviceMemoryReportFlagsEXT, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRasterizationConservativeStateCreateFlagsEXT.html>"]
pub struct PipelineRasterizationConservativeStateCreateFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineRasterizationConservativeStateCreateFlagsEXT, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRasterizationStateStreamCreateFlagsEXT.html>"]
pub struct PipelineRasterizationStateStreamCreateFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineRasterizationStateStreamCreateFlagsEXT, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRasterizationDepthClipStateCreateFlagsEXT.html>"]
pub struct PipelineRasterizationDepthClipStateCreateFlagsEXT(pub(crate) Flags);
vk_bitflags_wrapped!(PipelineRasterizationDepthClipStateCreateFlagsEXT, Flags);
// Video-coding (VK_KHR_video_*) reserved flags types.
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoSessionParametersCreateFlagsKHR.html>"]
pub struct VideoSessionParametersCreateFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(VideoSessionParametersCreateFlagsKHR, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoBeginCodingFlagsKHR.html>"]
pub struct VideoBeginCodingFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(VideoBeginCodingFlagsKHR, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEndCodingFlagsKHR.html>"]
pub struct VideoEndCodingFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(VideoEndCodingFlagsKHR, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeFlagsKHR.html>"]
pub struct VideoDecodeFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(VideoDecodeFlagsKHR, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeFlagsKHR.html>"]
pub struct VideoEncodeFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(VideoEncodeFlagsKHR, Flags);
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeRateControlFlagsKHR.html>"]
pub struct VideoEncodeRateControlFlagsKHR(pub(crate) Flags);
vk_bitflags_wrapped!(VideoEncodeRateControlFlagsKHR, Flags);
+define_handle!(
+ Instance,
+ INSTANCE,
+ doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkInstance.html>"
+);
+define_handle ! (PhysicalDevice , PHYSICAL_DEVICE , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevice.html>") ;
+define_handle!(
+ Device,
+ DEVICE,
+ doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDevice.html>"
+);
+define_handle!(
+ Queue,
+ QUEUE,
+ doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueue.html>"
+);
+define_handle ! (CommandBuffer , COMMAND_BUFFER , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandBuffer.html>") ;
+handle_nondispatchable ! (DeviceMemory , DEVICE_MEMORY , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceMemory.html>") ;
+handle_nondispatchable ! (CommandPool , COMMAND_POOL , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandPool.html>") ;
+handle_nondispatchable!(
+ Buffer,
+ BUFFER,
+ doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBuffer.html>"
+);
+handle_nondispatchable!(
+ BufferView,
+ BUFFER_VIEW,
+ doc =
+ "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferView.html>"
+);
+handle_nondispatchable!(
+ Image,
+ IMAGE,
+ doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImage.html>"
+);
+handle_nondispatchable!(
+ ImageView,
+ IMAGE_VIEW,
+ doc =
+ "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageView.html>"
+);
+handle_nondispatchable ! (ShaderModule , SHADER_MODULE , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkShaderModule.html>") ;
+handle_nondispatchable!(
+ Pipeline,
+ PIPELINE,
+ doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipeline.html>"
+);
+handle_nondispatchable ! (PipelineLayout , PIPELINE_LAYOUT , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineLayout.html>") ;
+handle_nondispatchable!(
+ Sampler,
+ SAMPLER,
+ doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSampler.html>"
+);
+handle_nondispatchable ! (DescriptorSet , DESCRIPTOR_SET , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorSet.html>") ;
+handle_nondispatchable ! (DescriptorSetLayout , DESCRIPTOR_SET_LAYOUT , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorSetLayout.html>") ;
+handle_nondispatchable ! (DescriptorPool , DESCRIPTOR_POOL , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorPool.html>") ;
+handle_nondispatchable!(
+ Fence,
+ FENCE,
+ doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFence.html>"
+);
+handle_nondispatchable!(
+ Semaphore,
+ SEMAPHORE,
+ doc =
+ "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSemaphore.html>"
+);
+// Generated invocations of the `handle_nondispatchable!` macro, one per
+// Vulkan non-dispatchable handle type. Each call supplies the Rust type
+// name, the matching ObjectType constant name, and the spec page URL that
+// becomes the type's doc link.
+// NOTE(review): formatting is inconsistent (some calls expanded over
+// several lines, others token-spaced on one line) because this file is
+// machine-generated — do not hand-reformat; regenerate instead.
+handle_nondispatchable!(
+ Event,
+ EVENT,
+ doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkEvent.html>"
+);
+handle_nondispatchable!(
+ QueryPool,
+ QUERY_POOL,
+ doc =
+ "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueryPool.html>"
+);
+handle_nondispatchable ! (Framebuffer , FRAMEBUFFER , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFramebuffer.html>") ;
+handle_nondispatchable!(
+ RenderPass,
+ RENDER_PASS,
+ doc =
+ "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPass.html>"
+);
+handle_nondispatchable ! (PipelineCache , PIPELINE_CACHE , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCache.html>") ;
+handle_nondispatchable ! (IndirectCommandsLayoutNV , INDIRECT_COMMANDS_LAYOUT_NV , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkIndirectCommandsLayoutNV.html>") ;
+handle_nondispatchable ! (DescriptorUpdateTemplate , DESCRIPTOR_UPDATE_TEMPLATE , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorUpdateTemplate.html>") ;
+handle_nondispatchable ! (SamplerYcbcrConversion , SAMPLER_YCBCR_CONVERSION , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSamplerYcbcrConversion.html>") ;
+handle_nondispatchable ! (ValidationCacheEXT , VALIDATION_CACHE_EXT , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkValidationCacheEXT.html>") ;
+handle_nondispatchable ! (AccelerationStructureKHR , ACCELERATION_STRUCTURE_KHR , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureKHR.html>") ;
+handle_nondispatchable ! (AccelerationStructureNV , ACCELERATION_STRUCTURE_NV , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureNV.html>") ;
+handle_nondispatchable ! (PerformanceConfigurationINTEL , PERFORMANCE_CONFIGURATION_INTEL , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceConfigurationINTEL.html>") ;
+handle_nondispatchable ! (BufferCollectionFUCHSIA , BUFFER_COLLECTION_FUCHSIA , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferCollectionFUCHSIA.html>") ;
+handle_nondispatchable ! (DeferredOperationKHR , DEFERRED_OPERATION_KHR , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeferredOperationKHR.html>") ;
+handle_nondispatchable ! (PrivateDataSlot , PRIVATE_DATA_SLOT , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPrivateDataSlot.html>") ;
+handle_nondispatchable ! (CuModuleNVX , CU_MODULE_NVX , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCuModuleNVX.html>") ;
+handle_nondispatchable ! (CuFunctionNVX , CU_FUNCTION_NVX , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCuFunctionNVX.html>") ;
+handle_nondispatchable ! (OpticalFlowSessionNV , OPTICAL_FLOW_SESSION_NV , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOpticalFlowSessionNV.html>") ;
+handle_nondispatchable ! (MicromapEXT , MICROMAP_EXT , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMicromapEXT.html>") ;
+handle_nondispatchable!(
+ DisplayKHR,
+ DISPLAY_KHR,
+ doc =
+ "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayKHR.html>"
+);
+handle_nondispatchable ! (DisplayModeKHR , DISPLAY_MODE_KHR , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayModeKHR.html>") ;
+handle_nondispatchable!(
+ SurfaceKHR,
+ SURFACE_KHR,
+ doc =
+ "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfaceKHR.html>"
+);
+handle_nondispatchable ! (SwapchainKHR , SWAPCHAIN_KHR , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSwapchainKHR.html>") ;
+handle_nondispatchable ! (DebugReportCallbackEXT , DEBUG_REPORT_CALLBACK_EXT , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugReportCallbackEXT.html>") ;
+handle_nondispatchable ! (DebugUtilsMessengerEXT , DEBUG_UTILS_MESSENGER_EXT , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugUtilsMessengerEXT.html>") ;
+handle_nondispatchable ! (VideoSessionKHR , VIDEO_SESSION_KHR , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoSessionKHR.html>") ;
+handle_nondispatchable ! (VideoSessionParametersKHR , VIDEO_SESSION_PARAMETERS_KHR , doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoSessionParametersKHR.html>") ;
+// Function-pointer type aliases mirroring the Vulkan C callback typedefs
+// (allocation hooks, debug callbacks, generic void function). Each is an
+// `Option<unsafe extern "system" fn(...)>` so that a C NULL function
+// pointer maps to `None` (these callbacks are optional in the C API).
+// The `non_camel_case_types` allow keeps the C-style `PFN_vk*` names.
+#[allow(non_camel_case_types)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/PFN_vkInternalAllocationNotification.html>"]
+pub type PFN_vkInternalAllocationNotification = Option<
+ unsafe extern "system" fn(
+ p_user_data: *mut c_void,
+ size: usize,
+ allocation_type: InternalAllocationType,
+ allocation_scope: SystemAllocationScope,
+ ),
+>;
+#[allow(non_camel_case_types)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/PFN_vkInternalFreeNotification.html>"]
+pub type PFN_vkInternalFreeNotification = Option<
+ unsafe extern "system" fn(
+ p_user_data: *mut c_void,
+ size: usize,
+ allocation_type: InternalAllocationType,
+ allocation_scope: SystemAllocationScope,
+ ),
+>;
+#[allow(non_camel_case_types)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/PFN_vkReallocationFunction.html>"]
+pub type PFN_vkReallocationFunction = Option<
+ unsafe extern "system" fn(
+ p_user_data: *mut c_void,
+ p_original: *mut c_void,
+ size: usize,
+ alignment: usize,
+ allocation_scope: SystemAllocationScope,
+ ) -> *mut c_void,
+>;
+#[allow(non_camel_case_types)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/PFN_vkAllocationFunction.html>"]
+pub type PFN_vkAllocationFunction = Option<
+ unsafe extern "system" fn(
+ p_user_data: *mut c_void,
+ size: usize,
+ alignment: usize,
+ allocation_scope: SystemAllocationScope,
+ ) -> *mut c_void,
+>;
+#[allow(non_camel_case_types)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/PFN_vkFreeFunction.html>"]
+pub type PFN_vkFreeFunction =
+ Option<unsafe extern "system" fn(p_user_data: *mut c_void, p_memory: *mut c_void)>;
+#[allow(non_camel_case_types)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/PFN_vkVoidFunction.html>"]
+pub type PFN_vkVoidFunction = Option<unsafe extern "system" fn()>;
+#[allow(non_camel_case_types)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/PFN_vkDebugReportCallbackEXT.html>"]
+pub type PFN_vkDebugReportCallbackEXT = Option<
+ unsafe extern "system" fn(
+ flags: DebugReportFlagsEXT,
+ object_type: DebugReportObjectTypeEXT,
+ object: u64,
+ location: usize,
+ message_code: i32,
+ p_layer_prefix: *const c_char,
+ p_message: *const c_char,
+ p_user_data: *mut c_void,
+ ) -> Bool32,
+>;
+#[allow(non_camel_case_types)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/PFN_vkDebugUtilsMessengerCallbackEXT.html>"]
+pub type PFN_vkDebugUtilsMessengerCallbackEXT = Option<
+ unsafe extern "system" fn(
+ message_severity: DebugUtilsMessageSeverityFlagsEXT,
+ message_types: DebugUtilsMessageTypeFlagsEXT,
+ p_callback_data: *const DebugUtilsMessengerCallbackDataEXT,
+ p_user_data: *mut c_void,
+ ) -> Bool32,
+>;
+#[allow(non_camel_case_types)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/PFN_vkDeviceMemoryReportCallbackEXT.html>"]
+pub type PFN_vkDeviceMemoryReportCallbackEXT = Option<
+ unsafe extern "system" fn(
+ p_callback_data: *const DeviceMemoryReportCallbackDataEXT,
+ p_user_data: *mut c_void,
+ ),
+>;
+#[allow(non_camel_case_types)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/PFN_vkGetInstanceProcAddrLUNARG.html>"]
+pub type PFN_vkGetInstanceProcAddrLUNARG = Option<
+ unsafe extern "system" fn(instance: Instance, p_name: *const c_char) -> PFN_vkVoidFunction,
+>;
+// VkBaseOutStructure / VkBaseInStructure: generic struct headers used to
+// traverse `p_next` extension chains without knowing the concrete type.
+// "Out" chains are mutable (*mut Self), "In" chains are read-only
+// (*const Self). `s_type` defaults to a zeroed value because these base
+// types have no STRUCTURE_TYPE of their own — callers read it, they
+// don't set it.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBaseOutStructure.html>"]
+pub struct BaseOutStructure {
+ pub s_type: StructureType,
+ pub p_next: *mut Self,
+}
+impl ::std::default::Default for BaseOutStructure {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: unsafe { ::std::mem::zeroed() },
+ p_next: ::std::ptr::null_mut(),
+ }
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBaseInStructure.html>"]
+pub struct BaseInStructure {
+ pub s_type: StructureType,
+ pub p_next: *const Self,
+}
+impl ::std::default::Default for BaseInStructure {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: unsafe { ::std::mem::zeroed() },
+ p_next: ::std::ptr::null(),
+ }
+ }
+}
+// VkOffset2D: a signed 2-D offset (x, y). The builder is a
+// #[repr(transparent)] wrapper over the struct so `&Offset2DBuilder` can
+// be passed where `&Offset2D` is expected (via Deref). The `'a` lifetime
+// is unused here (no pointer fields) and exists only for generator
+// uniformity with pointer-carrying builders.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default, PartialEq, Eq, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOffset2D.html>"]
+pub struct Offset2D {
+ pub x: i32,
+ pub y: i32,
+}
+impl Offset2D {
+ pub fn builder<'a>() -> Offset2DBuilder<'a> {
+ Offset2DBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct Offset2DBuilder<'a> {
+ inner: Offset2D,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for Offset2DBuilder<'a> {
+ type Target = Offset2D;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for Offset2DBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> Offset2DBuilder<'a> {
+ #[inline]
+ pub fn x(mut self, x: i32) -> Self {
+ self.inner.x = x;
+ self
+ }
+ #[inline]
+ pub fn y(mut self, y: i32) -> Self {
+ self.inner.y = y;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> Offset2D {
+ self.inner
+ }
+}
+// VkOffset3D: a signed 3-D offset (x, y, z); same generated
+// struct + transparent builder pattern as Offset2D above.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default, PartialEq, Eq, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOffset3D.html>"]
+pub struct Offset3D {
+ pub x: i32,
+ pub y: i32,
+ pub z: i32,
+}
+impl Offset3D {
+ pub fn builder<'a>() -> Offset3DBuilder<'a> {
+ Offset3DBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct Offset3DBuilder<'a> {
+ inner: Offset3D,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for Offset3DBuilder<'a> {
+ type Target = Offset3D;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for Offset3DBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> Offset3DBuilder<'a> {
+ #[inline]
+ pub fn x(mut self, x: i32) -> Self {
+ self.inner.x = x;
+ self
+ }
+ #[inline]
+ pub fn y(mut self, y: i32) -> Self {
+ self.inner.y = y;
+ self
+ }
+ #[inline]
+ pub fn z(mut self, z: i32) -> Self {
+ self.inner.z = z;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> Offset3D {
+ self.inner
+ }
+}
+// VkExtent2D: an unsigned 2-D extent (width, height); same generated
+// struct + transparent builder pattern as the offset types above.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default, PartialEq, Eq, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExtent2D.html>"]
+pub struct Extent2D {
+ pub width: u32,
+ pub height: u32,
+}
+impl Extent2D {
+ pub fn builder<'a>() -> Extent2DBuilder<'a> {
+ Extent2DBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct Extent2DBuilder<'a> {
+ inner: Extent2D,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for Extent2DBuilder<'a> {
+ type Target = Extent2D;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for Extent2DBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> Extent2DBuilder<'a> {
+ #[inline]
+ pub fn width(mut self, width: u32) -> Self {
+ self.inner.width = width;
+ self
+ }
+ #[inline]
+ pub fn height(mut self, height: u32) -> Self {
+ self.inner.height = height;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> Extent2D {
+ self.inner
+ }
+}
+// VkExtent3D: an unsigned 3-D extent (width, height, depth); same
+// generated struct + transparent builder pattern as Extent2D above.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default, PartialEq, Eq, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExtent3D.html>"]
+pub struct Extent3D {
+ pub width: u32,
+ pub height: u32,
+ pub depth: u32,
+}
+impl Extent3D {
+ pub fn builder<'a>() -> Extent3DBuilder<'a> {
+ Extent3DBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct Extent3DBuilder<'a> {
+ inner: Extent3D,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for Extent3DBuilder<'a> {
+ type Target = Extent3D;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for Extent3DBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> Extent3DBuilder<'a> {
+ #[inline]
+ pub fn width(mut self, width: u32) -> Self {
+ self.inner.width = width;
+ self
+ }
+ #[inline]
+ pub fn height(mut self, height: u32) -> Self {
+ self.inner.height = height;
+ self
+ }
+ #[inline]
+ pub fn depth(mut self, depth: u32) -> Self {
+ self.inner.depth = depth;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> Extent3D {
+ self.inner
+ }
+}
+// VkViewport: viewport transform parameters, all f32. Note this struct
+// derives only Copy/Clone/Default — no PartialEq/Eq/Hash, because the
+// fields are floats (Eq/Hash are not derivable for f32).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkViewport.html>"]
+pub struct Viewport {
+ pub x: f32,
+ pub y: f32,
+ pub width: f32,
+ pub height: f32,
+ pub min_depth: f32,
+ pub max_depth: f32,
+}
+impl Viewport {
+ pub fn builder<'a>() -> ViewportBuilder<'a> {
+ ViewportBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ViewportBuilder<'a> {
+ inner: Viewport,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ViewportBuilder<'a> {
+ type Target = Viewport;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ViewportBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ViewportBuilder<'a> {
+ #[inline]
+ pub fn x(mut self, x: f32) -> Self {
+ self.inner.x = x;
+ self
+ }
+ #[inline]
+ pub fn y(mut self, y: f32) -> Self {
+ self.inner.y = y;
+ self
+ }
+ #[inline]
+ pub fn width(mut self, width: f32) -> Self {
+ self.inner.width = width;
+ self
+ }
+ #[inline]
+ pub fn height(mut self, height: f32) -> Self {
+ self.inner.height = height;
+ self
+ }
+ #[inline]
+ pub fn min_depth(mut self, min_depth: f32) -> Self {
+ self.inner.min_depth = min_depth;
+ self
+ }
+ #[inline]
+ pub fn max_depth(mut self, max_depth: f32) -> Self {
+ self.inner.max_depth = max_depth;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> Viewport {
+ self.inner
+ }
+}
+// VkRect2D: a rectangle composed of an Offset2D origin and an Extent2D
+// size; same generated struct + transparent builder pattern as above.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default, PartialEq, Eq, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRect2D.html>"]
+pub struct Rect2D {
+ pub offset: Offset2D,
+ pub extent: Extent2D,
+}
+impl Rect2D {
+ pub fn builder<'a>() -> Rect2DBuilder<'a> {
+ Rect2DBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct Rect2DBuilder<'a> {
+ inner: Rect2D,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for Rect2DBuilder<'a> {
+ type Target = Rect2D;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for Rect2DBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> Rect2DBuilder<'a> {
+ #[inline]
+ pub fn offset(mut self, offset: Offset2D) -> Self {
+ self.inner.offset = offset;
+ self
+ }
+ #[inline]
+ pub fn extent(mut self, extent: Extent2D) -> Self {
+ self.inner.extent = extent;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> Rect2D {
+ self.inner
+ }
+}
+// VkClearRect: a Rect2D plus the array-layer range (base_array_layer,
+// layer_count) it applies to; same generated struct + builder pattern.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default, PartialEq, Eq, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkClearRect.html>"]
+pub struct ClearRect {
+ pub rect: Rect2D,
+ pub base_array_layer: u32,
+ pub layer_count: u32,
+}
+impl ClearRect {
+ pub fn builder<'a>() -> ClearRectBuilder<'a> {
+ ClearRectBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ClearRectBuilder<'a> {
+ inner: ClearRect,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ClearRectBuilder<'a> {
+ type Target = ClearRect;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ClearRectBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ClearRectBuilder<'a> {
+ #[inline]
+ pub fn rect(mut self, rect: Rect2D) -> Self {
+ self.inner.rect = rect;
+ self
+ }
+ #[inline]
+ pub fn base_array_layer(mut self, base_array_layer: u32) -> Self {
+ self.inner.base_array_layer = base_array_layer;
+ self
+ }
+ #[inline]
+ pub fn layer_count(mut self, layer_count: u32) -> Self {
+ self.inner.layer_count = layer_count;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ClearRect {
+ self.inner
+ }
+}
+// VkComponentMapping: per-channel ComponentSwizzle for r/g/b/a. Derives
+// only Copy/Clone/Default (no Eq/Hash here, unlike the offset/extent
+// types); same generated transparent-builder pattern as above.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkComponentMapping.html>"]
+pub struct ComponentMapping {
+ pub r: ComponentSwizzle,
+ pub g: ComponentSwizzle,
+ pub b: ComponentSwizzle,
+ pub a: ComponentSwizzle,
+}
+impl ComponentMapping {
+ pub fn builder<'a>() -> ComponentMappingBuilder<'a> {
+ ComponentMappingBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ComponentMappingBuilder<'a> {
+ inner: ComponentMapping,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ComponentMappingBuilder<'a> {
+ type Target = ComponentMapping;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ComponentMappingBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ComponentMappingBuilder<'a> {
+ #[inline]
+ pub fn r(mut self, r: ComponentSwizzle) -> Self {
+ self.inner.r = r;
+ self
+ }
+ #[inline]
+ pub fn g(mut self, g: ComponentSwizzle) -> Self {
+ self.inner.g = g;
+ self
+ }
+ #[inline]
+ pub fn b(mut self, b: ComponentSwizzle) -> Self {
+ self.inner.b = b;
+ self
+ }
+ #[inline]
+ pub fn a(mut self, a: ComponentSwizzle) -> Self {
+ self.inner.a = a;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ComponentMapping {
+ self.inner
+ }
+}
+// VkPhysicalDeviceProperties. Debug is hand-written (not derived) so
+// `device_name` prints as a CStr rather than a raw c_char array; the
+// fixed-size arrays also force a manual Default using mem::zeroed().
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceProperties.html>"]
+pub struct PhysicalDeviceProperties {
+ pub api_version: u32,
+ pub driver_version: u32,
+ pub vendor_id: u32,
+ pub device_id: u32,
+ pub device_type: PhysicalDeviceType,
+ pub device_name: [c_char; MAX_PHYSICAL_DEVICE_NAME_SIZE],
+ pub pipeline_cache_uuid: [u8; UUID_SIZE],
+ pub limits: PhysicalDeviceLimits,
+ pub sparse_properties: PhysicalDeviceSparseProperties,
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for PhysicalDeviceProperties {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("PhysicalDeviceProperties")
+ .field("api_version", &self.api_version)
+ .field("driver_version", &self.driver_version)
+ .field("vendor_id", &self.vendor_id)
+ .field("device_id", &self.device_id)
+ .field("device_type", &self.device_type)
+ // SAFETY-relevant: CStr::from_ptr assumes device_name is
+ // NUL-terminated. Presumably the driver always writes a
+ // terminated string here — a hand-built unterminated array
+ // would read past the buffer. NOTE(review): confirm.
+ .field("device_name", &unsafe {
+ ::std::ffi::CStr::from_ptr(self.device_name.as_ptr())
+ })
+ .field("pipeline_cache_uuid", &self.pipeline_cache_uuid)
+ .field("limits", &self.limits)
+ .field("sparse_properties", &self.sparse_properties)
+ .finish()
+ }
+}
+impl ::std::default::Default for PhysicalDeviceProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ api_version: u32::default(),
+ driver_version: u32::default(),
+ vendor_id: u32::default(),
+ device_id: u32::default(),
+ device_type: PhysicalDeviceType::default(),
+ // Zeroing c_char/u8 arrays is sound; also yields an empty
+ // (immediately NUL-terminated) device_name.
+ device_name: unsafe { ::std::mem::zeroed() },
+ pipeline_cache_uuid: unsafe { ::std::mem::zeroed() },
+ limits: PhysicalDeviceLimits::default(),
+ sparse_properties: PhysicalDeviceSparseProperties::default(),
+ }
+ }
+}
+impl PhysicalDeviceProperties {
+ pub fn builder<'a>() -> PhysicalDevicePropertiesBuilder<'a> {
+ PhysicalDevicePropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDevicePropertiesBuilder<'a> {
+ inner: PhysicalDeviceProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PhysicalDevicePropertiesBuilder<'a> {
+ type Target = PhysicalDeviceProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDevicePropertiesBuilder<'a> {
+ #[inline]
+ pub fn api_version(mut self, api_version: u32) -> Self {
+ self.inner.api_version = api_version;
+ self
+ }
+ #[inline]
+ pub fn driver_version(mut self, driver_version: u32) -> Self {
+ self.inner.driver_version = driver_version;
+ self
+ }
+ #[inline]
+ pub fn vendor_id(mut self, vendor_id: u32) -> Self {
+ self.inner.vendor_id = vendor_id;
+ self
+ }
+ #[inline]
+ pub fn device_id(mut self, device_id: u32) -> Self {
+ self.inner.device_id = device_id;
+ self
+ }
+ #[inline]
+ pub fn device_type(mut self, device_type: PhysicalDeviceType) -> Self {
+ self.inner.device_type = device_type;
+ self
+ }
+ #[inline]
+ pub fn device_name(mut self, device_name: [c_char; MAX_PHYSICAL_DEVICE_NAME_SIZE]) -> Self {
+ self.inner.device_name = device_name;
+ self
+ }
+ #[inline]
+ pub fn pipeline_cache_uuid(mut self, pipeline_cache_uuid: [u8; UUID_SIZE]) -> Self {
+ self.inner.pipeline_cache_uuid = pipeline_cache_uuid;
+ self
+ }
+ #[inline]
+ pub fn limits(mut self, limits: PhysicalDeviceLimits) -> Self {
+ self.inner.limits = limits;
+ self
+ }
+ #[inline]
+ pub fn sparse_properties(mut self, sparse_properties: PhysicalDeviceSparseProperties) -> Self {
+ self.inner.sparse_properties = sparse_properties;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceProperties {
+ self.inner
+ }
+}
+// VkExtensionProperties: a fixed-size C-string extension name plus its
+// spec version. Manual Debug prints the name as a CStr (assumes
+// NUL-termination — true for driver-filled structs; NOTE(review):
+// confirm for hand-built values); manual Default zeroes the array.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExtensionProperties.html>"]
+pub struct ExtensionProperties {
+ pub extension_name: [c_char; MAX_EXTENSION_NAME_SIZE],
+ pub spec_version: u32,
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for ExtensionProperties {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("ExtensionProperties")
+ .field("extension_name", &unsafe {
+ ::std::ffi::CStr::from_ptr(self.extension_name.as_ptr())
+ })
+ .field("spec_version", &self.spec_version)
+ .finish()
+ }
+}
+impl ::std::default::Default for ExtensionProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ extension_name: unsafe { ::std::mem::zeroed() },
+ spec_version: u32::default(),
+ }
+ }
+}
+impl ExtensionProperties {
+ pub fn builder<'a>() -> ExtensionPropertiesBuilder<'a> {
+ ExtensionPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExtensionPropertiesBuilder<'a> {
+ inner: ExtensionProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ExtensionPropertiesBuilder<'a> {
+ type Target = ExtensionProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExtensionPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExtensionPropertiesBuilder<'a> {
+ #[inline]
+ pub fn extension_name(mut self, extension_name: [c_char; MAX_EXTENSION_NAME_SIZE]) -> Self {
+ self.inner.extension_name = extension_name;
+ self
+ }
+ #[inline]
+ pub fn spec_version(mut self, spec_version: u32) -> Self {
+ self.inner.spec_version = spec_version;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExtensionProperties {
+ self.inner
+ }
+}
+// VkLayerProperties: layer name/description as fixed-size C-string
+// arrays plus two version fields. Same manual Debug (CStr rendering of
+// the char arrays) and manual zeroing Default as ExtensionProperties.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkLayerProperties.html>"]
+pub struct LayerProperties {
+ pub layer_name: [c_char; MAX_EXTENSION_NAME_SIZE],
+ pub spec_version: u32,
+ pub implementation_version: u32,
+ pub description: [c_char; MAX_DESCRIPTION_SIZE],
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for LayerProperties {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("LayerProperties")
+ .field("layer_name", &unsafe {
+ ::std::ffi::CStr::from_ptr(self.layer_name.as_ptr())
+ })
+ .field("spec_version", &self.spec_version)
+ .field("implementation_version", &self.implementation_version)
+ .field("description", &unsafe {
+ ::std::ffi::CStr::from_ptr(self.description.as_ptr())
+ })
+ .finish()
+ }
+}
+impl ::std::default::Default for LayerProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ layer_name: unsafe { ::std::mem::zeroed() },
+ spec_version: u32::default(),
+ implementation_version: u32::default(),
+ description: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+impl LayerProperties {
+ pub fn builder<'a>() -> LayerPropertiesBuilder<'a> {
+ LayerPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct LayerPropertiesBuilder<'a> {
+ inner: LayerProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for LayerPropertiesBuilder<'a> {
+ type Target = LayerProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for LayerPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> LayerPropertiesBuilder<'a> {
+ #[inline]
+ pub fn layer_name(mut self, layer_name: [c_char; MAX_EXTENSION_NAME_SIZE]) -> Self {
+ self.inner.layer_name = layer_name;
+ self
+ }
+ #[inline]
+ pub fn spec_version(mut self, spec_version: u32) -> Self {
+ self.inner.spec_version = spec_version;
+ self
+ }
+ #[inline]
+ pub fn implementation_version(mut self, implementation_version: u32) -> Self {
+ self.inner.implementation_version = implementation_version;
+ self
+ }
+ #[inline]
+ pub fn description(mut self, description: [c_char; MAX_DESCRIPTION_SIZE]) -> Self {
+ self.inner.description = description;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> LayerProperties {
+ self.inner
+ }
+}
+// VkApplicationInfo: first sType-tagged struct in this window. Default
+// fills s_type from the TaggedStructure impl below and nulls all
+// pointers. The builder's `'a` lifetime is meaningful here: the
+// CStr-taking setters store raw pointers borrowed from `'a` data, and
+// build() discards that borrow (see the generated warning on build()).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkApplicationInfo.html>"]
+pub struct ApplicationInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub p_application_name: *const c_char,
+ pub application_version: u32,
+ pub p_engine_name: *const c_char,
+ pub engine_version: u32,
+ pub api_version: u32,
+}
+impl ::std::default::Default for ApplicationInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ p_application_name: ::std::ptr::null(),
+ application_version: u32::default(),
+ p_engine_name: ::std::ptr::null(),
+ engine_version: u32::default(),
+ api_version: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ApplicationInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::APPLICATION_INFO;
+}
+impl ApplicationInfo {
+ pub fn builder<'a>() -> ApplicationInfoBuilder<'a> {
+ ApplicationInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ApplicationInfoBuilder<'a> {
+ inner: ApplicationInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ApplicationInfoBuilder<'a> {
+ type Target = ApplicationInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ApplicationInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ApplicationInfoBuilder<'a> {
+ // Stores only the pointer; the &'a CStr must outlive any use of the
+ // built struct (the lifetime is erased by build()).
+ #[inline]
+ pub fn application_name(mut self, application_name: &'a ::std::ffi::CStr) -> Self {
+ self.inner.p_application_name = application_name.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn application_version(mut self, application_version: u32) -> Self {
+ self.inner.application_version = application_version;
+ self
+ }
+ #[inline]
+ pub fn engine_name(mut self, engine_name: &'a ::std::ffi::CStr) -> Self {
+ self.inner.p_engine_name = engine_name.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn engine_version(mut self, engine_version: u32) -> Self {
+ self.inner.engine_version = engine_version;
+ self
+ }
+ #[inline]
+ pub fn api_version(mut self, api_version: u32) -> Self {
+ self.inner.api_version = api_version;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ApplicationInfo {
+ self.inner
+ }
+}
+// VkAllocationCallbacks: user data pointer plus the five optional
+// allocation callbacks (see the PFN_* aliases earlier in this file).
+// Debug is hand-written to print each Option<fn> as a raw pointer
+// (fn pointers have no derived Debug through Option mapping like this).
+// NOTE(review): Default is all-None/null; presumably a default-built
+// value is a placeholder only — the Vulkan spec constrains which
+// callbacks may be NULL when the struct is actually passed; confirm
+// against VkAllocationCallbacks valid-usage before relying on it.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAllocationCallbacks.html>"]
+pub struct AllocationCallbacks {
+ pub p_user_data: *mut c_void,
+ pub pfn_allocation: PFN_vkAllocationFunction,
+ pub pfn_reallocation: PFN_vkReallocationFunction,
+ pub pfn_free: PFN_vkFreeFunction,
+ pub pfn_internal_allocation: PFN_vkInternalAllocationNotification,
+ pub pfn_internal_free: PFN_vkInternalFreeNotification,
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for AllocationCallbacks {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("AllocationCallbacks")
+ .field("p_user_data", &self.p_user_data)
+ .field(
+ "pfn_allocation",
+ &(self.pfn_allocation.map(|x| x as *const ())),
+ )
+ .field(
+ "pfn_reallocation",
+ &(self.pfn_reallocation.map(|x| x as *const ())),
+ )
+ .field("pfn_free", &(self.pfn_free.map(|x| x as *const ())))
+ .field(
+ "pfn_internal_allocation",
+ &(self.pfn_internal_allocation.map(|x| x as *const ())),
+ )
+ .field(
+ "pfn_internal_free",
+ &(self.pfn_internal_free.map(|x| x as *const ())),
+ )
+ .finish()
+ }
+}
+impl ::std::default::Default for AllocationCallbacks {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ p_user_data: ::std::ptr::null_mut(),
+ pfn_allocation: PFN_vkAllocationFunction::default(),
+ pfn_reallocation: PFN_vkReallocationFunction::default(),
+ pfn_free: PFN_vkFreeFunction::default(),
+ pfn_internal_allocation: PFN_vkInternalAllocationNotification::default(),
+ pfn_internal_free: PFN_vkInternalFreeNotification::default(),
+ }
+ }
+}
+impl AllocationCallbacks {
+ pub fn builder<'a>() -> AllocationCallbacksBuilder<'a> {
+ AllocationCallbacksBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct AllocationCallbacksBuilder<'a> {
+ inner: AllocationCallbacks,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AllocationCallbacksBuilder<'a> {
+ type Target = AllocationCallbacks;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for AllocationCallbacksBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> AllocationCallbacksBuilder<'a> {
+ #[inline]
+ pub fn user_data(mut self, user_data: *mut c_void) -> Self {
+ self.inner.p_user_data = user_data;
+ self
+ }
+ #[inline]
+ pub fn pfn_allocation(mut self, pfn_allocation: PFN_vkAllocationFunction) -> Self {
+ self.inner.pfn_allocation = pfn_allocation;
+ self
+ }
+ #[inline]
+ pub fn pfn_reallocation(mut self, pfn_reallocation: PFN_vkReallocationFunction) -> Self {
+ self.inner.pfn_reallocation = pfn_reallocation;
+ self
+ }
+ #[inline]
+ pub fn pfn_free(mut self, pfn_free: PFN_vkFreeFunction) -> Self {
+ self.inner.pfn_free = pfn_free;
+ self
+ }
+ #[inline]
+ pub fn pfn_internal_allocation(
+ mut self,
+ pfn_internal_allocation: PFN_vkInternalAllocationNotification,
+ ) -> Self {
+ self.inner.pfn_internal_allocation = pfn_internal_allocation;
+ self
+ }
+ #[inline]
+ pub fn pfn_internal_free(mut self, pfn_internal_free: PFN_vkInternalFreeNotification) -> Self {
+ self.inner.pfn_internal_free = pfn_internal_free;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> AllocationCallbacks {
+ self.inner
+ }
+}
+// Generated binding for VkDeviceQueueCreateInfo: requests `queue_count` queues
+// from queue family `queue_family_index`; `p_queue_priorities` points to one
+// normalized priority per requested queue.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceQueueCreateInfo.html>"]
+pub struct DeviceQueueCreateInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub flags: DeviceQueueCreateFlags,
+    pub queue_family_index: u32,
+    pub queue_count: u32,
+    pub p_queue_priorities: *const f32,
+}
+impl ::std::default::Default for DeviceQueueCreateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            // s_type is pre-filled so a default instance is already chain-valid.
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            flags: DeviceQueueCreateFlags::default(),
+            queue_family_index: u32::default(),
+            queue_count: u32::default(),
+            p_queue_priorities: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for DeviceQueueCreateInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_QUEUE_CREATE_INFO;
+}
+impl DeviceQueueCreateInfo {
+    pub fn builder<'a>() -> DeviceQueueCreateInfoBuilder<'a> {
+        DeviceQueueCreateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder; PhantomData<&'a ()> keeps borrowed slices alive
+// for as long as the builder is used.
+#[repr(transparent)]
+pub struct DeviceQueueCreateInfoBuilder<'a> {
+    inner: DeviceQueueCreateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait for structs that may legally extend this one via the p_next chain.
+pub unsafe trait ExtendsDeviceQueueCreateInfo {}
+impl<'a> ::std::ops::Deref for DeviceQueueCreateInfoBuilder<'a> {
+    type Target = DeviceQueueCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DeviceQueueCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DeviceQueueCreateInfoBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: DeviceQueueCreateFlags) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn queue_family_index(mut self, queue_family_index: u32) -> Self {
+        self.inner.queue_family_index = queue_family_index;
+        self
+    }
+    #[inline]
+    pub fn queue_priorities(mut self, queue_priorities: &'a [f32]) -> Self {
+        // Pointer and count are set together from the borrowed slice.
+        self.inner.queue_count = queue_priorities.len() as _;
+        self.inner.p_queue_priorities = queue_priorities.as_ptr();
+        self
+    }
+    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+    #[doc = r" valid extension structs can be pushed into the chain."]
+    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+    #[doc = r" chain will look like `A -> D -> B -> C`."]
+    pub fn push_next<T: ExtendsDeviceQueueCreateInfo>(mut self, next: &'a mut T) -> Self {
+        // SAFETY: the ExtendsDeviceQueueCreateInfo bound guarantees `next` begins with
+        // the standard s_type/p_next header, so its chain can be walked and spliced in
+        // front of the current p_next; `&'a mut T` keeps it alive and unaliased.
+        unsafe {
+            let next_ptr = <*const T>::cast(next);
+            let last_next = ptr_chain_iter(next).last().unwrap();
+            (*last_next).p_next = self.inner.p_next as _;
+            self.inner.p_next = next_ptr;
+        }
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DeviceQueueCreateInfo {
+        self.inner
+    }
+}
+// Generated binding for VkDeviceCreateInfo: parameters for vkCreateDevice —
+// the queues to create, enabled layers/extensions (as C-string arrays), and
+// an optional set of enabled physical-device features.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceCreateInfo.html>"]
+pub struct DeviceCreateInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub flags: DeviceCreateFlags,
+    pub queue_create_info_count: u32,
+    pub p_queue_create_infos: *const DeviceQueueCreateInfo,
+    pub enabled_layer_count: u32,
+    pub pp_enabled_layer_names: *const *const c_char,
+    pub enabled_extension_count: u32,
+    pub pp_enabled_extension_names: *const *const c_char,
+    pub p_enabled_features: *const PhysicalDeviceFeatures,
+}
+impl ::std::default::Default for DeviceCreateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            // s_type is pre-filled so a default instance is already chain-valid.
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            flags: DeviceCreateFlags::default(),
+            queue_create_info_count: u32::default(),
+            p_queue_create_infos: ::std::ptr::null(),
+            enabled_layer_count: u32::default(),
+            pp_enabled_layer_names: ::std::ptr::null(),
+            enabled_extension_count: u32::default(),
+            pp_enabled_extension_names: ::std::ptr::null(),
+            p_enabled_features: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for DeviceCreateInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_CREATE_INFO;
+}
+impl DeviceCreateInfo {
+    pub fn builder<'a>() -> DeviceCreateInfoBuilder<'a> {
+        DeviceCreateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder; PhantomData<&'a ()> keeps borrowed slices and
+// references alive for as long as the builder is used.
+#[repr(transparent)]
+pub struct DeviceCreateInfoBuilder<'a> {
+    inner: DeviceCreateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait for structs that may legally extend this one via the p_next chain.
+pub unsafe trait ExtendsDeviceCreateInfo {}
+impl<'a> ::std::ops::Deref for DeviceCreateInfoBuilder<'a> {
+    type Target = DeviceCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DeviceCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DeviceCreateInfoBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: DeviceCreateFlags) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn queue_create_infos(mut self, queue_create_infos: &'a [DeviceQueueCreateInfo]) -> Self {
+        // Pointer and count are set together from the borrowed slice.
+        self.inner.queue_create_info_count = queue_create_infos.len() as _;
+        self.inner.p_queue_create_infos = queue_create_infos.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn enabled_layer_names(mut self, enabled_layer_names: &'a [*const c_char]) -> Self {
+        self.inner.enabled_layer_count = enabled_layer_names.len() as _;
+        self.inner.pp_enabled_layer_names = enabled_layer_names.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn enabled_extension_names(mut self, enabled_extension_names: &'a [*const c_char]) -> Self {
+        self.inner.enabled_extension_count = enabled_extension_names.len() as _;
+        self.inner.pp_enabled_extension_names = enabled_extension_names.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn enabled_features(mut self, enabled_features: &'a PhysicalDeviceFeatures) -> Self {
+        self.inner.p_enabled_features = enabled_features;
+        self
+    }
+    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+    #[doc = r" valid extension structs can be pushed into the chain."]
+    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+    #[doc = r" chain will look like `A -> D -> B -> C`."]
+    pub fn push_next<T: ExtendsDeviceCreateInfo>(mut self, next: &'a mut T) -> Self {
+        // SAFETY: the ExtendsDeviceCreateInfo bound guarantees `next` begins with the
+        // standard s_type/p_next header, so its chain can be walked and spliced in
+        // front of the current p_next; `&'a mut T` keeps it alive and unaliased.
+        unsafe {
+            let next_ptr = <*const T>::cast(next);
+            let last_next = ptr_chain_iter(next).last().unwrap();
+            (*last_next).p_next = self.inner.p_next as _;
+            self.inner.p_next = next_ptr;
+        }
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DeviceCreateInfo {
+        self.inner
+    }
+}
+// Generated binding for VkInstanceCreateInfo: parameters for vkCreateInstance —
+// an optional ApplicationInfo plus the instance layers and extensions to enable
+// (each as a counted array of C strings).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkInstanceCreateInfo.html>"]
+pub struct InstanceCreateInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub flags: InstanceCreateFlags,
+    pub p_application_info: *const ApplicationInfo,
+    pub enabled_layer_count: u32,
+    pub pp_enabled_layer_names: *const *const c_char,
+    pub enabled_extension_count: u32,
+    pub pp_enabled_extension_names: *const *const c_char,
+}
+impl ::std::default::Default for InstanceCreateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            // s_type is pre-filled so a default instance is already chain-valid.
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            flags: InstanceCreateFlags::default(),
+            p_application_info: ::std::ptr::null(),
+            enabled_layer_count: u32::default(),
+            pp_enabled_layer_names: ::std::ptr::null(),
+            enabled_extension_count: u32::default(),
+            pp_enabled_extension_names: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for InstanceCreateInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::INSTANCE_CREATE_INFO;
+}
+impl InstanceCreateInfo {
+    pub fn builder<'a>() -> InstanceCreateInfoBuilder<'a> {
+        InstanceCreateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder; PhantomData<&'a ()> keeps borrowed slices and
+// references alive for as long as the builder is used.
+#[repr(transparent)]
+pub struct InstanceCreateInfoBuilder<'a> {
+    inner: InstanceCreateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait for structs that may legally extend this one via the p_next chain.
+pub unsafe trait ExtendsInstanceCreateInfo {}
+impl<'a> ::std::ops::Deref for InstanceCreateInfoBuilder<'a> {
+    type Target = InstanceCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for InstanceCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> InstanceCreateInfoBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: InstanceCreateFlags) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn application_info(mut self, application_info: &'a ApplicationInfo) -> Self {
+        self.inner.p_application_info = application_info;
+        self
+    }
+    #[inline]
+    pub fn enabled_layer_names(mut self, enabled_layer_names: &'a [*const c_char]) -> Self {
+        // Pointer and count are set together from the borrowed slice.
+        self.inner.enabled_layer_count = enabled_layer_names.len() as _;
+        self.inner.pp_enabled_layer_names = enabled_layer_names.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn enabled_extension_names(mut self, enabled_extension_names: &'a [*const c_char]) -> Self {
+        self.inner.enabled_extension_count = enabled_extension_names.len() as _;
+        self.inner.pp_enabled_extension_names = enabled_extension_names.as_ptr();
+        self
+    }
+    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+    #[doc = r" valid extension structs can be pushed into the chain."]
+    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+    #[doc = r" chain will look like `A -> D -> B -> C`."]
+    pub fn push_next<T: ExtendsInstanceCreateInfo>(mut self, next: &'a mut T) -> Self {
+        // SAFETY: the ExtendsInstanceCreateInfo bound guarantees `next` begins with the
+        // standard s_type/p_next header, so its chain can be walked and spliced in
+        // front of the current p_next; `&'a mut T` keeps it alive and unaliased.
+        unsafe {
+            let next_ptr = <*const T>::cast(next);
+            let last_next = ptr_chain_iter(next).last().unwrap();
+            (*last_next).p_next = self.inner.p_next as _;
+            self.inner.p_next = next_ptr;
+        }
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> InstanceCreateInfo {
+        self.inner
+    }
+}
+// Generated binding for VkQueueFamilyProperties: read-only description of one
+// queue family (capability flags, queue count, timestamp precision, and the
+// minimum image-transfer granularity). Plain data — no s_type/p_next header.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueueFamilyProperties.html>"]
+pub struct QueueFamilyProperties {
+    pub queue_flags: QueueFlags,
+    pub queue_count: u32,
+    pub timestamp_valid_bits: u32,
+    pub min_image_transfer_granularity: Extent3D,
+}
+impl QueueFamilyProperties {
+    pub fn builder<'a>() -> QueueFamilyPropertiesBuilder<'a> {
+        QueueFamilyPropertiesBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder; the lifetime parameter exists only for API
+// uniformity with builders that do borrow data.
+#[repr(transparent)]
+pub struct QueueFamilyPropertiesBuilder<'a> {
+    inner: QueueFamilyProperties,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for QueueFamilyPropertiesBuilder<'a> {
+    type Target = QueueFamilyProperties;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for QueueFamilyPropertiesBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> QueueFamilyPropertiesBuilder<'a> {
+    #[inline]
+    pub fn queue_flags(mut self, queue_flags: QueueFlags) -> Self {
+        self.inner.queue_flags = queue_flags;
+        self
+    }
+    #[inline]
+    pub fn queue_count(mut self, queue_count: u32) -> Self {
+        self.inner.queue_count = queue_count;
+        self
+    }
+    #[inline]
+    pub fn timestamp_valid_bits(mut self, timestamp_valid_bits: u32) -> Self {
+        self.inner.timestamp_valid_bits = timestamp_valid_bits;
+        self
+    }
+    #[inline]
+    pub fn min_image_transfer_granularity(
+        mut self,
+        min_image_transfer_granularity: Extent3D,
+    ) -> Self {
+        self.inner.min_image_transfer_granularity = min_image_transfer_granularity;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> QueueFamilyProperties {
+        self.inner
+    }
+}
+// Generated binding for VkPhysicalDeviceMemoryProperties: fixed-capacity arrays
+// of memory types and heaps; only the first memory_type_count / memory_heap_count
+// entries are meaningful.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMemoryProperties.html>"]
+pub struct PhysicalDeviceMemoryProperties {
+    pub memory_type_count: u32,
+    pub memory_types: [MemoryType; MAX_MEMORY_TYPES],
+    pub memory_heap_count: u32,
+    pub memory_heaps: [MemoryHeap; MAX_MEMORY_HEAPS],
+}
+impl ::std::default::Default for PhysicalDeviceMemoryProperties {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            memory_type_count: u32::default(),
+            // SAFETY: MemoryType/MemoryHeap are plain #[repr(C)] structs of flags
+            // and integers, so the all-zero bit pattern is a valid value for them.
+            memory_types: unsafe { ::std::mem::zeroed() },
+            memory_heap_count: u32::default(),
+            memory_heaps: unsafe { ::std::mem::zeroed() },
+        }
+    }
+}
+impl PhysicalDeviceMemoryProperties {
+    pub fn builder<'a>() -> PhysicalDeviceMemoryPropertiesBuilder<'a> {
+        PhysicalDeviceMemoryPropertiesBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder; the lifetime parameter exists only for API
+// uniformity with builders that do borrow data.
+#[repr(transparent)]
+pub struct PhysicalDeviceMemoryPropertiesBuilder<'a> {
+    inner: PhysicalDeviceMemoryProperties,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PhysicalDeviceMemoryPropertiesBuilder<'a> {
+    type Target = PhysicalDeviceMemoryProperties;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceMemoryPropertiesBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceMemoryPropertiesBuilder<'a> {
+    #[inline]
+    pub fn memory_type_count(mut self, memory_type_count: u32) -> Self {
+        self.inner.memory_type_count = memory_type_count;
+        self
+    }
+    #[inline]
+    pub fn memory_types(mut self, memory_types: [MemoryType; MAX_MEMORY_TYPES]) -> Self {
+        self.inner.memory_types = memory_types;
+        self
+    }
+    #[inline]
+    pub fn memory_heap_count(mut self, memory_heap_count: u32) -> Self {
+        self.inner.memory_heap_count = memory_heap_count;
+        self
+    }
+    #[inline]
+    pub fn memory_heaps(mut self, memory_heaps: [MemoryHeap; MAX_MEMORY_HEAPS]) -> Self {
+        self.inner.memory_heaps = memory_heaps;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceMemoryProperties {
+        self.inner
+    }
+}
+// Generated binding for VkMemoryAllocateInfo: size (in bytes) and memory-type
+// index for vkAllocateMemory; extensible via the p_next chain.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryAllocateInfo.html>"]
+pub struct MemoryAllocateInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub allocation_size: DeviceSize,
+    pub memory_type_index: u32,
+}
+impl ::std::default::Default for MemoryAllocateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            // s_type is pre-filled so a default instance is already chain-valid.
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            allocation_size: DeviceSize::default(),
+            memory_type_index: u32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for MemoryAllocateInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_ALLOCATE_INFO;
+}
+impl MemoryAllocateInfo {
+    pub fn builder<'a>() -> MemoryAllocateInfoBuilder<'a> {
+        MemoryAllocateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder; PhantomData<&'a ()> keeps any chained extension
+// structs alive for as long as the builder is used.
+#[repr(transparent)]
+pub struct MemoryAllocateInfoBuilder<'a> {
+    inner: MemoryAllocateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait for structs that may legally extend this one via the p_next chain.
+pub unsafe trait ExtendsMemoryAllocateInfo {}
+impl<'a> ::std::ops::Deref for MemoryAllocateInfoBuilder<'a> {
+    type Target = MemoryAllocateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for MemoryAllocateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> MemoryAllocateInfoBuilder<'a> {
+    #[inline]
+    pub fn allocation_size(mut self, allocation_size: DeviceSize) -> Self {
+        self.inner.allocation_size = allocation_size;
+        self
+    }
+    #[inline]
+    pub fn memory_type_index(mut self, memory_type_index: u32) -> Self {
+        self.inner.memory_type_index = memory_type_index;
+        self
+    }
+    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+    #[doc = r" valid extension structs can be pushed into the chain."]
+    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+    #[doc = r" chain will look like `A -> D -> B -> C`."]
+    pub fn push_next<T: ExtendsMemoryAllocateInfo>(mut self, next: &'a mut T) -> Self {
+        // SAFETY: the ExtendsMemoryAllocateInfo bound guarantees `next` begins with the
+        // standard s_type/p_next header, so its chain can be walked and spliced in
+        // front of the current p_next; `&'a mut T` keeps it alive and unaliased.
+        unsafe {
+            let next_ptr = <*const T>::cast(next);
+            let last_next = ptr_chain_iter(next).last().unwrap();
+            (*last_next).p_next = self.inner.p_next as _;
+            self.inner.p_next = next_ptr;
+        }
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> MemoryAllocateInfo {
+        self.inner
+    }
+}
+// Generated binding for VkMemoryRequirements: size/alignment of a resource's
+// backing allocation plus a bitmask of compatible memory-type indices.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryRequirements.html>"]
+pub struct MemoryRequirements {
+    pub size: DeviceSize,
+    pub alignment: DeviceSize,
+    pub memory_type_bits: u32,
+}
+impl MemoryRequirements {
+    pub fn builder<'a>() -> MemoryRequirementsBuilder<'a> {
+        MemoryRequirementsBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder; the lifetime parameter exists only for API
+// uniformity with builders that do borrow data.
+#[repr(transparent)]
+pub struct MemoryRequirementsBuilder<'a> {
+    inner: MemoryRequirements,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MemoryRequirementsBuilder<'a> {
+    type Target = MemoryRequirements;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for MemoryRequirementsBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> MemoryRequirementsBuilder<'a> {
+    #[inline]
+    pub fn size(mut self, size: DeviceSize) -> Self {
+        self.inner.size = size;
+        self
+    }
+    #[inline]
+    pub fn alignment(mut self, alignment: DeviceSize) -> Self {
+        self.inner.alignment = alignment;
+        self
+    }
+    #[inline]
+    pub fn memory_type_bits(mut self, memory_type_bits: u32) -> Self {
+        self.inner.memory_type_bits = memory_type_bits;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> MemoryRequirements {
+        self.inner
+    }
+}
+// Generated binding for VkSparseImageFormatProperties: per-aspect sparse-image
+// block size (image_granularity) and format flags.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSparseImageFormatProperties.html>"]
+pub struct SparseImageFormatProperties {
+    pub aspect_mask: ImageAspectFlags,
+    pub image_granularity: Extent3D,
+    pub flags: SparseImageFormatFlags,
+}
+impl SparseImageFormatProperties {
+    pub fn builder<'a>() -> SparseImageFormatPropertiesBuilder<'a> {
+        SparseImageFormatPropertiesBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder; the lifetime parameter exists only for API
+// uniformity with builders that do borrow data.
+#[repr(transparent)]
+pub struct SparseImageFormatPropertiesBuilder<'a> {
+    inner: SparseImageFormatProperties,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SparseImageFormatPropertiesBuilder<'a> {
+    type Target = SparseImageFormatProperties;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for SparseImageFormatPropertiesBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> SparseImageFormatPropertiesBuilder<'a> {
+    #[inline]
+    pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> Self {
+        self.inner.aspect_mask = aspect_mask;
+        self
+    }
+    #[inline]
+    pub fn image_granularity(mut self, image_granularity: Extent3D) -> Self {
+        self.inner.image_granularity = image_granularity;
+        self
+    }
+    #[inline]
+    pub fn flags(mut self, flags: SparseImageFormatFlags) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> SparseImageFormatProperties {
+        self.inner
+    }
+}
+// Generated binding for VkSparseImageMemoryRequirements: sparse format
+// properties plus the mip-tail region layout (first LOD, size, offset, stride).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSparseImageMemoryRequirements.html>"]
+pub struct SparseImageMemoryRequirements {
+    pub format_properties: SparseImageFormatProperties,
+    pub image_mip_tail_first_lod: u32,
+    pub image_mip_tail_size: DeviceSize,
+    pub image_mip_tail_offset: DeviceSize,
+    pub image_mip_tail_stride: DeviceSize,
+}
+impl SparseImageMemoryRequirements {
+    pub fn builder<'a>() -> SparseImageMemoryRequirementsBuilder<'a> {
+        SparseImageMemoryRequirementsBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder; the lifetime parameter exists only for API
+// uniformity with builders that do borrow data.
+#[repr(transparent)]
+pub struct SparseImageMemoryRequirementsBuilder<'a> {
+    inner: SparseImageMemoryRequirements,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SparseImageMemoryRequirementsBuilder<'a> {
+    type Target = SparseImageMemoryRequirements;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for SparseImageMemoryRequirementsBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> SparseImageMemoryRequirementsBuilder<'a> {
+    #[inline]
+    pub fn format_properties(mut self, format_properties: SparseImageFormatProperties) -> Self {
+        self.inner.format_properties = format_properties;
+        self
+    }
+    #[inline]
+    pub fn image_mip_tail_first_lod(mut self, image_mip_tail_first_lod: u32) -> Self {
+        self.inner.image_mip_tail_first_lod = image_mip_tail_first_lod;
+        self
+    }
+    #[inline]
+    pub fn image_mip_tail_size(mut self, image_mip_tail_size: DeviceSize) -> Self {
+        self.inner.image_mip_tail_size = image_mip_tail_size;
+        self
+    }
+    #[inline]
+    pub fn image_mip_tail_offset(mut self, image_mip_tail_offset: DeviceSize) -> Self {
+        self.inner.image_mip_tail_offset = image_mip_tail_offset;
+        self
+    }
+    #[inline]
+    pub fn image_mip_tail_stride(mut self, image_mip_tail_stride: DeviceSize) -> Self {
+        self.inner.image_mip_tail_stride = image_mip_tail_stride;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> SparseImageMemoryRequirements {
+        self.inner
+    }
+}
+// Generated binding for VkMemoryType: property flags of one memory type and
+// the index of the heap it lives in.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryType.html>"]
+pub struct MemoryType {
+    pub property_flags: MemoryPropertyFlags,
+    pub heap_index: u32,
+}
+impl MemoryType {
+    pub fn builder<'a>() -> MemoryTypeBuilder<'a> {
+        MemoryTypeBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder; the lifetime parameter exists only for API
+// uniformity with builders that do borrow data.
+#[repr(transparent)]
+pub struct MemoryTypeBuilder<'a> {
+    inner: MemoryType,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MemoryTypeBuilder<'a> {
+    type Target = MemoryType;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for MemoryTypeBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> MemoryTypeBuilder<'a> {
+    #[inline]
+    pub fn property_flags(mut self, property_flags: MemoryPropertyFlags) -> Self {
+        self.inner.property_flags = property_flags;
+        self
+    }
+    #[inline]
+    pub fn heap_index(mut self, heap_index: u32) -> Self {
+        self.inner.heap_index = heap_index;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> MemoryType {
+        self.inner
+    }
+}
+// Generated binding for VkMemoryHeap: size in bytes and flags (e.g. DEVICE_LOCAL)
+// of one memory heap.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryHeap.html>"]
+pub struct MemoryHeap {
+    pub size: DeviceSize,
+    pub flags: MemoryHeapFlags,
+}
+impl MemoryHeap {
+    pub fn builder<'a>() -> MemoryHeapBuilder<'a> {
+        MemoryHeapBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder; the lifetime parameter exists only for API
+// uniformity with builders that do borrow data.
+#[repr(transparent)]
+pub struct MemoryHeapBuilder<'a> {
+    inner: MemoryHeap,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MemoryHeapBuilder<'a> {
+    type Target = MemoryHeap;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for MemoryHeapBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> MemoryHeapBuilder<'a> {
+    #[inline]
+    pub fn size(mut self, size: DeviceSize) -> Self {
+        self.inner.size = size;
+        self
+    }
+    #[inline]
+    pub fn flags(mut self, flags: MemoryHeapFlags) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> MemoryHeap {
+        self.inner
+    }
+}
+// Generated binding for VkMappedMemoryRange: identifies a (memory, offset, size)
+// range for vkFlushMappedMemoryRanges / vkInvalidateMappedMemoryRanges.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMappedMemoryRange.html>"]
+pub struct MappedMemoryRange {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub memory: DeviceMemory,
+    pub offset: DeviceSize,
+    pub size: DeviceSize,
+}
+impl ::std::default::Default for MappedMemoryRange {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            // s_type is pre-filled so a default instance is already chain-valid.
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            memory: DeviceMemory::default(),
+            offset: DeviceSize::default(),
+            size: DeviceSize::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for MappedMemoryRange {
+    const STRUCTURE_TYPE: StructureType = StructureType::MAPPED_MEMORY_RANGE;
+}
+impl MappedMemoryRange {
+    pub fn builder<'a>() -> MappedMemoryRangeBuilder<'a> {
+        MappedMemoryRangeBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder; the lifetime parameter exists only for API
+// uniformity with builders that do borrow data.
+#[repr(transparent)]
+pub struct MappedMemoryRangeBuilder<'a> {
+    inner: MappedMemoryRange,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MappedMemoryRangeBuilder<'a> {
+    type Target = MappedMemoryRange;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for MappedMemoryRangeBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> MappedMemoryRangeBuilder<'a> {
+    #[inline]
+    pub fn memory(mut self, memory: DeviceMemory) -> Self {
+        self.inner.memory = memory;
+        self
+    }
+    #[inline]
+    pub fn offset(mut self, offset: DeviceSize) -> Self {
+        self.inner.offset = offset;
+        self
+    }
+    #[inline]
+    pub fn size(mut self, size: DeviceSize) -> Self {
+        self.inner.size = size;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> MappedMemoryRange {
+        self.inner
+    }
+}
+// Generated binding for VkFormatProperties: supported feature flags for a
+// format under linear tiling, optimal tiling, and buffer usage.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFormatProperties.html>"]
+pub struct FormatProperties {
+    pub linear_tiling_features: FormatFeatureFlags,
+    pub optimal_tiling_features: FormatFeatureFlags,
+    pub buffer_features: FormatFeatureFlags,
+}
+impl FormatProperties {
+    pub fn builder<'a>() -> FormatPropertiesBuilder<'a> {
+        FormatPropertiesBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder; the lifetime parameter exists only for API
+// uniformity with builders that do borrow data.
+#[repr(transparent)]
+pub struct FormatPropertiesBuilder<'a> {
+    inner: FormatProperties,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for FormatPropertiesBuilder<'a> {
+    type Target = FormatProperties;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for FormatPropertiesBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> FormatPropertiesBuilder<'a> {
+    #[inline]
+    pub fn linear_tiling_features(mut self, linear_tiling_features: FormatFeatureFlags) -> Self {
+        self.inner.linear_tiling_features = linear_tiling_features;
+        self
+    }
+    #[inline]
+    pub fn optimal_tiling_features(mut self, optimal_tiling_features: FormatFeatureFlags) -> Self {
+        self.inner.optimal_tiling_features = optimal_tiling_features;
+        self
+    }
+    #[inline]
+    pub fn buffer_features(mut self, buffer_features: FormatFeatureFlags) -> Self {
+        self.inner.buffer_features = buffer_features;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> FormatProperties {
+        self.inner
+    }
+}
+// VkImageFormatProperties: plain output struct (no s_type/p_next), Default derived.
+// Generated code; see #[doc] link for field semantics.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageFormatProperties.html>"]
+pub struct ImageFormatProperties {
+ pub max_extent: Extent3D,
+ pub max_mip_levels: u32,
+ pub max_array_layers: u32,
+ pub sample_counts: SampleCountFlags,
+ pub max_resource_size: DeviceSize,
+}
+impl ImageFormatProperties {
+ pub fn builder<'a>() -> ImageFormatPropertiesBuilder<'a> {
+ ImageFormatPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; Deref/DerefMut expose the inner struct directly.
+#[repr(transparent)]
+pub struct ImageFormatPropertiesBuilder<'a> {
+ inner: ImageFormatProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImageFormatPropertiesBuilder<'a> {
+ type Target = ImageFormatProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageFormatPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable by-value setters for each field.
+impl<'a> ImageFormatPropertiesBuilder<'a> {
+ #[inline]
+ pub fn max_extent(mut self, max_extent: Extent3D) -> Self {
+ self.inner.max_extent = max_extent;
+ self
+ }
+ #[inline]
+ pub fn max_mip_levels(mut self, max_mip_levels: u32) -> Self {
+ self.inner.max_mip_levels = max_mip_levels;
+ self
+ }
+ #[inline]
+ pub fn max_array_layers(mut self, max_array_layers: u32) -> Self {
+ self.inner.max_array_layers = max_array_layers;
+ self
+ }
+ #[inline]
+ pub fn sample_counts(mut self, sample_counts: SampleCountFlags) -> Self {
+ self.inner.sample_counts = sample_counts;
+ self
+ }
+ #[inline]
+ pub fn max_resource_size(mut self, max_resource_size: DeviceSize) -> Self {
+ self.inner.max_resource_size = max_resource_size;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageFormatProperties {
+ self.inner
+ }
+}
+// VkDescriptorBufferInfo: plain struct (no s_type/p_next), Default derived.
+// Generated code; see #[doc] link for field semantics.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorBufferInfo.html>"]
+pub struct DescriptorBufferInfo {
+ pub buffer: Buffer,
+ pub offset: DeviceSize,
+ pub range: DeviceSize,
+}
+impl DescriptorBufferInfo {
+ pub fn builder<'a>() -> DescriptorBufferInfoBuilder<'a> {
+ DescriptorBufferInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; Deref/DerefMut expose the inner struct directly.
+#[repr(transparent)]
+pub struct DescriptorBufferInfoBuilder<'a> {
+ inner: DescriptorBufferInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DescriptorBufferInfoBuilder<'a> {
+ type Target = DescriptorBufferInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DescriptorBufferInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable by-value setters for each field.
+impl<'a> DescriptorBufferInfoBuilder<'a> {
+ #[inline]
+ pub fn buffer(mut self, buffer: Buffer) -> Self {
+ self.inner.buffer = buffer;
+ self
+ }
+ #[inline]
+ pub fn offset(mut self, offset: DeviceSize) -> Self {
+ self.inner.offset = offset;
+ self
+ }
+ #[inline]
+ pub fn range(mut self, range: DeviceSize) -> Self {
+ self.inner.range = range;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DescriptorBufferInfo {
+ self.inner
+ }
+}
+// VkDescriptorImageInfo: plain struct (no s_type/p_next), Default derived.
+// Generated code; see #[doc] link for field semantics.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorImageInfo.html>"]
+pub struct DescriptorImageInfo {
+ pub sampler: Sampler,
+ pub image_view: ImageView,
+ pub image_layout: ImageLayout,
+}
+impl DescriptorImageInfo {
+ pub fn builder<'a>() -> DescriptorImageInfoBuilder<'a> {
+ DescriptorImageInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; Deref/DerefMut expose the inner struct directly.
+#[repr(transparent)]
+pub struct DescriptorImageInfoBuilder<'a> {
+ inner: DescriptorImageInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DescriptorImageInfoBuilder<'a> {
+ type Target = DescriptorImageInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DescriptorImageInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable by-value setters for each field.
+impl<'a> DescriptorImageInfoBuilder<'a> {
+ #[inline]
+ pub fn sampler(mut self, sampler: Sampler) -> Self {
+ self.inner.sampler = sampler;
+ self
+ }
+ #[inline]
+ pub fn image_view(mut self, image_view: ImageView) -> Self {
+ self.inner.image_view = image_view;
+ self
+ }
+ #[inline]
+ pub fn image_layout(mut self, image_layout: ImageLayout) -> Self {
+ self.inner.image_layout = image_layout;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DescriptorImageInfo {
+ self.inner
+ }
+}
+// VkWriteDescriptorSet: extensible input struct (has s_type/p_next). The three
+// p_* pointers alias borrowed slices; the builder's 'a lifetime keeps them alive.
+// Generated code; see #[doc] link for field semantics.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkWriteDescriptorSet.html>"]
+pub struct WriteDescriptorSet {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub dst_set: DescriptorSet,
+ pub dst_binding: u32,
+ pub dst_array_element: u32,
+ pub descriptor_count: u32,
+ pub descriptor_type: DescriptorType,
+ pub p_image_info: *const DescriptorImageInfo,
+ pub p_buffer_info: *const DescriptorBufferInfo,
+ pub p_texel_buffer_view: *const BufferView,
+}
+// `Default` pre-tags s_type and nulls every pointer, so the default value is
+// structurally valid (zero descriptors) before setters are applied.
+impl ::std::default::Default for WriteDescriptorSet {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ dst_set: DescriptorSet::default(),
+ dst_binding: u32::default(),
+ dst_array_element: u32::default(),
+ descriptor_count: u32::default(),
+ descriptor_type: DescriptorType::default(),
+ p_image_info: ::std::ptr::null(),
+ p_buffer_info: ::std::ptr::null(),
+ p_texel_buffer_view: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for WriteDescriptorSet {
+ const STRUCTURE_TYPE: StructureType = StructureType::WRITE_DESCRIPTOR_SET;
+}
+impl WriteDescriptorSet {
+ pub fn builder<'a>() -> WriteDescriptorSetBuilder<'a> {
+ WriteDescriptorSetBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct WriteDescriptorSetBuilder<'a> {
+ inner: WriteDescriptorSet,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait for structs allowed in this struct's p_next chain (see push_next).
+pub unsafe trait ExtendsWriteDescriptorSet {}
+impl<'a> ::std::ops::Deref for WriteDescriptorSetBuilder<'a> {
+ type Target = WriteDescriptorSet;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for WriteDescriptorSetBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> WriteDescriptorSetBuilder<'a> {
+ #[inline]
+ pub fn dst_set(mut self, dst_set: DescriptorSet) -> Self {
+ self.inner.dst_set = dst_set;
+ self
+ }
+ #[inline]
+ pub fn dst_binding(mut self, dst_binding: u32) -> Self {
+ self.inner.dst_binding = dst_binding;
+ self
+ }
+ #[inline]
+ pub fn dst_array_element(mut self, dst_array_element: u32) -> Self {
+ self.inner.dst_array_element = dst_array_element;
+ self
+ }
+ #[inline]
+ pub fn descriptor_type(mut self, descriptor_type: DescriptorType) -> Self {
+ self.inner.descriptor_type = descriptor_type;
+ self
+ }
+ // NOTE(review): each of the three slice setters below overwrites
+ // `descriptor_count` from its own slice length — calling more than one of
+ // them leaves the count of the *last* call; set at most one descriptor array.
+ #[inline]
+ pub fn image_info(mut self, image_info: &'a [DescriptorImageInfo]) -> Self {
+ self.inner.descriptor_count = image_info.len() as _;
+ self.inner.p_image_info = image_info.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn buffer_info(mut self, buffer_info: &'a [DescriptorBufferInfo]) -> Self {
+ self.inner.descriptor_count = buffer_info.len() as _;
+ self.inner.p_buffer_info = buffer_info.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn texel_buffer_view(mut self, texel_buffer_view: &'a [BufferView]) -> Self {
+ self.inner.descriptor_count = texel_buffer_view.len() as _;
+ self.inner.p_texel_buffer_view = texel_buffer_view.as_ptr();
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsWriteDescriptorSet>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ // Walk to the end of `next`'s own chain, splice the current chain
+ // after it, then make `next` the new head.
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> WriteDescriptorSet {
+ self.inner
+ }
+}
+// VkCopyDescriptorSet: extensible input struct (s_type/p_next) with no pointer
+// fields beyond p_next. Generated code; see #[doc] link for field semantics.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyDescriptorSet.html>"]
+pub struct CopyDescriptorSet {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src_set: DescriptorSet,
+ pub src_binding: u32,
+ pub src_array_element: u32,
+ pub dst_set: DescriptorSet,
+ pub dst_binding: u32,
+ pub dst_array_element: u32,
+ pub descriptor_count: u32,
+}
+// `Default` pre-tags s_type and nulls p_next.
+impl ::std::default::Default for CopyDescriptorSet {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src_set: DescriptorSet::default(),
+ src_binding: u32::default(),
+ src_array_element: u32::default(),
+ dst_set: DescriptorSet::default(),
+ dst_binding: u32::default(),
+ dst_array_element: u32::default(),
+ descriptor_count: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for CopyDescriptorSet {
+ const STRUCTURE_TYPE: StructureType = StructureType::COPY_DESCRIPTOR_SET;
+}
+impl CopyDescriptorSet {
+ pub fn builder<'a>() -> CopyDescriptorSetBuilder<'a> {
+ CopyDescriptorSetBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; Deref/DerefMut expose the inner struct directly.
+#[repr(transparent)]
+pub struct CopyDescriptorSetBuilder<'a> {
+ inner: CopyDescriptorSet,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CopyDescriptorSetBuilder<'a> {
+ type Target = CopyDescriptorSet;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for CopyDescriptorSetBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable by-value setters for each field.
+impl<'a> CopyDescriptorSetBuilder<'a> {
+ #[inline]
+ pub fn src_set(mut self, src_set: DescriptorSet) -> Self {
+ self.inner.src_set = src_set;
+ self
+ }
+ #[inline]
+ pub fn src_binding(mut self, src_binding: u32) -> Self {
+ self.inner.src_binding = src_binding;
+ self
+ }
+ #[inline]
+ pub fn src_array_element(mut self, src_array_element: u32) -> Self {
+ self.inner.src_array_element = src_array_element;
+ self
+ }
+ #[inline]
+ pub fn dst_set(mut self, dst_set: DescriptorSet) -> Self {
+ self.inner.dst_set = dst_set;
+ self
+ }
+ #[inline]
+ pub fn dst_binding(mut self, dst_binding: u32) -> Self {
+ self.inner.dst_binding = dst_binding;
+ self
+ }
+ #[inline]
+ pub fn dst_array_element(mut self, dst_array_element: u32) -> Self {
+ self.inner.dst_array_element = dst_array_element;
+ self
+ }
+ #[inline]
+ pub fn descriptor_count(mut self, descriptor_count: u32) -> Self {
+ self.inner.descriptor_count = descriptor_count;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> CopyDescriptorSet {
+ self.inner
+ }
+}
+// VkBufferCreateInfo: extensible input struct; p_queue_family_indices borrows a
+// caller slice (paired with queue_family_index_count). Generated code.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferCreateInfo.html>"]
+pub struct BufferCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: BufferCreateFlags,
+ pub size: DeviceSize,
+ pub usage: BufferUsageFlags,
+ pub sharing_mode: SharingMode,
+ pub queue_family_index_count: u32,
+ pub p_queue_family_indices: *const u32,
+}
+// `Default` pre-tags s_type and nulls both pointers.
+impl ::std::default::Default for BufferCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: BufferCreateFlags::default(),
+ size: DeviceSize::default(),
+ usage: BufferUsageFlags::default(),
+ sharing_mode: SharingMode::default(),
+ queue_family_index_count: u32::default(),
+ p_queue_family_indices: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for BufferCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_CREATE_INFO;
+}
+impl BufferCreateInfo {
+ pub fn builder<'a>() -> BufferCreateInfoBuilder<'a> {
+ BufferCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct BufferCreateInfoBuilder<'a> {
+ inner: BufferCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait for structs allowed in this struct's p_next chain (see push_next).
+pub unsafe trait ExtendsBufferCreateInfo {}
+impl<'a> ::std::ops::Deref for BufferCreateInfoBuilder<'a> {
+ type Target = BufferCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BufferCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BufferCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: BufferCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn size(mut self, size: DeviceSize) -> Self {
+ self.inner.size = size;
+ self
+ }
+ #[inline]
+ pub fn usage(mut self, usage: BufferUsageFlags) -> Self {
+ self.inner.usage = usage;
+ self
+ }
+ #[inline]
+ pub fn sharing_mode(mut self, sharing_mode: SharingMode) -> Self {
+ self.inner.sharing_mode = sharing_mode;
+ self
+ }
+ // Stores the slice's pointer and length; the 'a lifetime keeps the borrow alive.
+ #[inline]
+ pub fn queue_family_indices(mut self, queue_family_indices: &'a [u32]) -> Self {
+ self.inner.queue_family_index_count = queue_family_indices.len() as _;
+ self.inner.p_queue_family_indices = queue_family_indices.as_ptr();
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsBufferCreateInfo>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ // Splice the existing chain onto the tail of `next`, then head with `next`.
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BufferCreateInfo {
+ self.inner
+ }
+}
+// VkBufferViewCreateInfo: extensible input struct (s_type/p_next). Generated code.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferViewCreateInfo.html>"]
+pub struct BufferViewCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: BufferViewCreateFlags,
+ pub buffer: Buffer,
+ pub format: Format,
+ pub offset: DeviceSize,
+ pub range: DeviceSize,
+}
+// `Default` pre-tags s_type and nulls p_next.
+impl ::std::default::Default for BufferViewCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: BufferViewCreateFlags::default(),
+ buffer: Buffer::default(),
+ format: Format::default(),
+ offset: DeviceSize::default(),
+ range: DeviceSize::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for BufferViewCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_VIEW_CREATE_INFO;
+}
+impl BufferViewCreateInfo {
+ pub fn builder<'a>() -> BufferViewCreateInfoBuilder<'a> {
+ BufferViewCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct BufferViewCreateInfoBuilder<'a> {
+ inner: BufferViewCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait for structs allowed in this struct's p_next chain (see push_next).
+pub unsafe trait ExtendsBufferViewCreateInfo {}
+impl<'a> ::std::ops::Deref for BufferViewCreateInfoBuilder<'a> {
+ type Target = BufferViewCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BufferViewCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable by-value setters for each field.
+impl<'a> BufferViewCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: BufferViewCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn buffer(mut self, buffer: Buffer) -> Self {
+ self.inner.buffer = buffer;
+ self
+ }
+ #[inline]
+ pub fn format(mut self, format: Format) -> Self {
+ self.inner.format = format;
+ self
+ }
+ #[inline]
+ pub fn offset(mut self, offset: DeviceSize) -> Self {
+ self.inner.offset = offset;
+ self
+ }
+ #[inline]
+ pub fn range(mut self, range: DeviceSize) -> Self {
+ self.inner.range = range;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsBufferViewCreateInfo>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ // Splice the existing chain onto the tail of `next`, then head with `next`.
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BufferViewCreateInfo {
+ self.inner
+ }
+}
+// VkImageSubresource: plain struct (no s_type/p_next), Default derived.
+// Generated code; see #[doc] link for field semantics.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageSubresource.html>"]
+pub struct ImageSubresource {
+ pub aspect_mask: ImageAspectFlags,
+ pub mip_level: u32,
+ pub array_layer: u32,
+}
+impl ImageSubresource {
+ pub fn builder<'a>() -> ImageSubresourceBuilder<'a> {
+ ImageSubresourceBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; Deref/DerefMut expose the inner struct directly.
+#[repr(transparent)]
+pub struct ImageSubresourceBuilder<'a> {
+ inner: ImageSubresource,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImageSubresourceBuilder<'a> {
+ type Target = ImageSubresource;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageSubresourceBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable by-value setters for each field.
+impl<'a> ImageSubresourceBuilder<'a> {
+ #[inline]
+ pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> Self {
+ self.inner.aspect_mask = aspect_mask;
+ self
+ }
+ #[inline]
+ pub fn mip_level(mut self, mip_level: u32) -> Self {
+ self.inner.mip_level = mip_level;
+ self
+ }
+ #[inline]
+ pub fn array_layer(mut self, array_layer: u32) -> Self {
+ self.inner.array_layer = array_layer;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageSubresource {
+ self.inner
+ }
+}
+// VkImageSubresourceLayers: plain struct (no s_type/p_next), Default derived.
+// Generated code; see #[doc] link for field semantics.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageSubresourceLayers.html>"]
+pub struct ImageSubresourceLayers {
+ pub aspect_mask: ImageAspectFlags,
+ pub mip_level: u32,
+ pub base_array_layer: u32,
+ pub layer_count: u32,
+}
+impl ImageSubresourceLayers {
+ pub fn builder<'a>() -> ImageSubresourceLayersBuilder<'a> {
+ ImageSubresourceLayersBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; Deref/DerefMut expose the inner struct directly.
+#[repr(transparent)]
+pub struct ImageSubresourceLayersBuilder<'a> {
+ inner: ImageSubresourceLayers,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImageSubresourceLayersBuilder<'a> {
+ type Target = ImageSubresourceLayers;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageSubresourceLayersBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable by-value setters for each field.
+impl<'a> ImageSubresourceLayersBuilder<'a> {
+ #[inline]
+ pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> Self {
+ self.inner.aspect_mask = aspect_mask;
+ self
+ }
+ #[inline]
+ pub fn mip_level(mut self, mip_level: u32) -> Self {
+ self.inner.mip_level = mip_level;
+ self
+ }
+ #[inline]
+ pub fn base_array_layer(mut self, base_array_layer: u32) -> Self {
+ self.inner.base_array_layer = base_array_layer;
+ self
+ }
+ #[inline]
+ pub fn layer_count(mut self, layer_count: u32) -> Self {
+ self.inner.layer_count = layer_count;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageSubresourceLayers {
+ self.inner
+ }
+}
+// VkImageSubresourceRange: plain struct (no s_type/p_next), Default derived.
+// Generated code; see #[doc] link for field semantics.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageSubresourceRange.html>"]
+pub struct ImageSubresourceRange {
+ pub aspect_mask: ImageAspectFlags,
+ pub base_mip_level: u32,
+ pub level_count: u32,
+ pub base_array_layer: u32,
+ pub layer_count: u32,
+}
+impl ImageSubresourceRange {
+ pub fn builder<'a>() -> ImageSubresourceRangeBuilder<'a> {
+ ImageSubresourceRangeBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; Deref/DerefMut expose the inner struct directly.
+#[repr(transparent)]
+pub struct ImageSubresourceRangeBuilder<'a> {
+ inner: ImageSubresourceRange,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImageSubresourceRangeBuilder<'a> {
+ type Target = ImageSubresourceRange;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageSubresourceRangeBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable by-value setters for each field.
+impl<'a> ImageSubresourceRangeBuilder<'a> {
+ #[inline]
+ pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> Self {
+ self.inner.aspect_mask = aspect_mask;
+ self
+ }
+ #[inline]
+ pub fn base_mip_level(mut self, base_mip_level: u32) -> Self {
+ self.inner.base_mip_level = base_mip_level;
+ self
+ }
+ #[inline]
+ pub fn level_count(mut self, level_count: u32) -> Self {
+ self.inner.level_count = level_count;
+ self
+ }
+ #[inline]
+ pub fn base_array_layer(mut self, base_array_layer: u32) -> Self {
+ self.inner.base_array_layer = base_array_layer;
+ self
+ }
+ #[inline]
+ pub fn layer_count(mut self, layer_count: u32) -> Self {
+ self.inner.layer_count = layer_count;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageSubresourceRange {
+ self.inner
+ }
+}
+// VkMemoryBarrier: extensible input struct (s_type/p_next). Generated code.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryBarrier.html>"]
+pub struct MemoryBarrier {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src_access_mask: AccessFlags,
+ pub dst_access_mask: AccessFlags,
+}
+// `Default` pre-tags s_type and nulls p_next.
+impl ::std::default::Default for MemoryBarrier {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src_access_mask: AccessFlags::default(),
+ dst_access_mask: AccessFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MemoryBarrier {
+ const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_BARRIER;
+}
+impl MemoryBarrier {
+ pub fn builder<'a>() -> MemoryBarrierBuilder<'a> {
+ MemoryBarrierBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; Deref/DerefMut expose the inner struct directly.
+#[repr(transparent)]
+pub struct MemoryBarrierBuilder<'a> {
+ inner: MemoryBarrier,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MemoryBarrierBuilder<'a> {
+ type Target = MemoryBarrier;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MemoryBarrierBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable by-value setters for each field.
+impl<'a> MemoryBarrierBuilder<'a> {
+ #[inline]
+ pub fn src_access_mask(mut self, src_access_mask: AccessFlags) -> Self {
+ self.inner.src_access_mask = src_access_mask;
+ self
+ }
+ #[inline]
+ pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags) -> Self {
+ self.inner.dst_access_mask = dst_access_mask;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MemoryBarrier {
+ self.inner
+ }
+}
+// VkBufferMemoryBarrier: extensible input struct (s_type/p_next). Generated code.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferMemoryBarrier.html>"]
+pub struct BufferMemoryBarrier {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src_access_mask: AccessFlags,
+ pub dst_access_mask: AccessFlags,
+ pub src_queue_family_index: u32,
+ pub dst_queue_family_index: u32,
+ pub buffer: Buffer,
+ pub offset: DeviceSize,
+ pub size: DeviceSize,
+}
+// `Default` pre-tags s_type and nulls p_next.
+impl ::std::default::Default for BufferMemoryBarrier {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src_access_mask: AccessFlags::default(),
+ dst_access_mask: AccessFlags::default(),
+ src_queue_family_index: u32::default(),
+ dst_queue_family_index: u32::default(),
+ buffer: Buffer::default(),
+ offset: DeviceSize::default(),
+ size: DeviceSize::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for BufferMemoryBarrier {
+ const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_MEMORY_BARRIER;
+}
+impl BufferMemoryBarrier {
+ pub fn builder<'a>() -> BufferMemoryBarrierBuilder<'a> {
+ BufferMemoryBarrierBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; Deref/DerefMut expose the inner struct directly.
+#[repr(transparent)]
+pub struct BufferMemoryBarrierBuilder<'a> {
+ inner: BufferMemoryBarrier,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BufferMemoryBarrierBuilder<'a> {
+ type Target = BufferMemoryBarrier;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BufferMemoryBarrierBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable by-value setters for each field.
+impl<'a> BufferMemoryBarrierBuilder<'a> {
+ #[inline]
+ pub fn src_access_mask(mut self, src_access_mask: AccessFlags) -> Self {
+ self.inner.src_access_mask = src_access_mask;
+ self
+ }
+ #[inline]
+ pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags) -> Self {
+ self.inner.dst_access_mask = dst_access_mask;
+ self
+ }
+ #[inline]
+ pub fn src_queue_family_index(mut self, src_queue_family_index: u32) -> Self {
+ self.inner.src_queue_family_index = src_queue_family_index;
+ self
+ }
+ #[inline]
+ pub fn dst_queue_family_index(mut self, dst_queue_family_index: u32) -> Self {
+ self.inner.dst_queue_family_index = dst_queue_family_index;
+ self
+ }
+ #[inline]
+ pub fn buffer(mut self, buffer: Buffer) -> Self {
+ self.inner.buffer = buffer;
+ self
+ }
+ #[inline]
+ pub fn offset(mut self, offset: DeviceSize) -> Self {
+ self.inner.offset = offset;
+ self
+ }
+ #[inline]
+ pub fn size(mut self, size: DeviceSize) -> Self {
+ self.inner.size = size;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BufferMemoryBarrier {
+ self.inner
+ }
+}
// FFI mirror of VkImageMemoryBarrier. `#[repr(C)]` guarantees the exact field
// layout the Vulkan C API expects. Generated code — do not edit by hand.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageMemoryBarrier.html>"]
pub struct ImageMemoryBarrier {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub src_access_mask: AccessFlags,
    pub dst_access_mask: AccessFlags,
    pub old_layout: ImageLayout,
    pub new_layout: ImageLayout,
    pub src_queue_family_index: u32,
    pub dst_queue_family_index: u32,
    pub image: Image,
    pub subresource_range: ImageSubresourceRange,
}
// Hand-written Default (the struct holds a raw pointer, so it cannot be
// derived): pre-fills `s_type` with the struct's tag and nulls `p_next`.
impl ::std::default::Default for ImageMemoryBarrier {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            src_access_mask: AccessFlags::default(),
            dst_access_mask: AccessFlags::default(),
            old_layout: ImageLayout::default(),
            new_layout: ImageLayout::default(),
            src_queue_family_index: u32::default(),
            dst_queue_family_index: u32::default(),
            image: Image::default(),
            subresource_range: ImageSubresourceRange::default(),
        }
    }
}
// Associates the struct with its VkStructureType discriminant for p_next chains.
unsafe impl TaggedStructure for ImageMemoryBarrier {
    const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_MEMORY_BARRIER;
}
impl ImageMemoryBarrier {
    // Entry point for the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> ImageMemoryBarrierBuilder<'a> {
        ImageMemoryBarrierBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder for `ImageMemoryBarrier`. `#[repr(transparent)]` plus the
// zero-sized `marker` means the builder has the same layout as the struct it
// wraps. Generated code — do not edit by hand.
#[repr(transparent)]
pub struct ImageMemoryBarrierBuilder<'a> {
    inner: ImageMemoryBarrier,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait: only structs allowed by the spec to extend VkImageMemoryBarrier
// implement this, gating `push_next` below.
pub unsafe trait ExtendsImageMemoryBarrier {}
impl<'a> ::std::ops::Deref for ImageMemoryBarrierBuilder<'a> {
    type Target = ImageMemoryBarrier;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ImageMemoryBarrierBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// By-value chainable setters, one per field.
impl<'a> ImageMemoryBarrierBuilder<'a> {
    #[inline]
    pub fn src_access_mask(mut self, src_access_mask: AccessFlags) -> Self {
        self.inner.src_access_mask = src_access_mask;
        self
    }
    #[inline]
    pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags) -> Self {
        self.inner.dst_access_mask = dst_access_mask;
        self
    }
    #[inline]
    pub fn old_layout(mut self, old_layout: ImageLayout) -> Self {
        self.inner.old_layout = old_layout;
        self
    }
    #[inline]
    pub fn new_layout(mut self, new_layout: ImageLayout) -> Self {
        self.inner.new_layout = new_layout;
        self
    }
    #[inline]
    pub fn src_queue_family_index(mut self, src_queue_family_index: u32) -> Self {
        self.inner.src_queue_family_index = src_queue_family_index;
        self
    }
    #[inline]
    pub fn dst_queue_family_index(mut self, dst_queue_family_index: u32) -> Self {
        self.inner.dst_queue_family_index = dst_queue_family_index;
        self
    }
    #[inline]
    pub fn image(mut self, image: Image) -> Self {
        self.inner.image = image;
        self
    }
    #[inline]
    pub fn subresource_range(mut self, subresource_range: ImageSubresourceRange) -> Self {
        self.inner.subresource_range = subresource_range;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsImageMemoryBarrier>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*const T>::cast(next);
            // Walk to the tail of `next`'s own p_next chain, attach the current
            // chain there, then make `next` the new head after the root.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ImageMemoryBarrier {
        self.inner
    }
}
// FFI mirror of VkImageCreateInfo; `p_queue_family_indices` is a borrowed
// C array described by `queue_family_index_count`. Generated code — do not
// edit by hand.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageCreateInfo.html>"]
pub struct ImageCreateInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: ImageCreateFlags,
    pub image_type: ImageType,
    pub format: Format,
    pub extent: Extent3D,
    pub mip_levels: u32,
    pub array_layers: u32,
    pub samples: SampleCountFlags,
    pub tiling: ImageTiling,
    pub usage: ImageUsageFlags,
    pub sharing_mode: SharingMode,
    pub queue_family_index_count: u32,
    pub p_queue_family_indices: *const u32,
    pub initial_layout: ImageLayout,
}
// Hand-written Default: correct `s_type` tag, null pointers, zero counts.
impl ::std::default::Default for ImageCreateInfo {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: ImageCreateFlags::default(),
            image_type: ImageType::default(),
            format: Format::default(),
            extent: Extent3D::default(),
            mip_levels: u32::default(),
            array_layers: u32::default(),
            samples: SampleCountFlags::default(),
            tiling: ImageTiling::default(),
            usage: ImageUsageFlags::default(),
            sharing_mode: SharingMode::default(),
            queue_family_index_count: u32::default(),
            p_queue_family_indices: ::std::ptr::null(),
            initial_layout: ImageLayout::default(),
        }
    }
}
unsafe impl TaggedStructure for ImageCreateInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_CREATE_INFO;
}
impl ImageCreateInfo {
    // Entry point for the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> ImageCreateInfoBuilder<'a> {
        ImageCreateInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder for `ImageCreateInfo`. The `'a` lifetime keeps the slice passed to
// `queue_family_indices` alive for as long as the builder borrows it.
// Generated code — do not edit by hand.
#[repr(transparent)]
pub struct ImageCreateInfoBuilder<'a> {
    inner: ImageCreateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait gating which extension structs may be chained via `push_next`.
pub unsafe trait ExtendsImageCreateInfo {}
impl<'a> ::std::ops::Deref for ImageCreateInfoBuilder<'a> {
    type Target = ImageCreateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ImageCreateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> ImageCreateInfoBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: ImageCreateFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn image_type(mut self, image_type: ImageType) -> Self {
        self.inner.image_type = image_type;
        self
    }
    #[inline]
    pub fn format(mut self, format: Format) -> Self {
        self.inner.format = format;
        self
    }
    #[inline]
    pub fn extent(mut self, extent: Extent3D) -> Self {
        self.inner.extent = extent;
        self
    }
    #[inline]
    pub fn mip_levels(mut self, mip_levels: u32) -> Self {
        self.inner.mip_levels = mip_levels;
        self
    }
    #[inline]
    pub fn array_layers(mut self, array_layers: u32) -> Self {
        self.inner.array_layers = array_layers;
        self
    }
    #[inline]
    pub fn samples(mut self, samples: SampleCountFlags) -> Self {
        self.inner.samples = samples;
        self
    }
    #[inline]
    pub fn tiling(mut self, tiling: ImageTiling) -> Self {
        self.inner.tiling = tiling;
        self
    }
    #[inline]
    pub fn usage(mut self, usage: ImageUsageFlags) -> Self {
        self.inner.usage = usage;
        self
    }
    #[inline]
    pub fn sharing_mode(mut self, sharing_mode: SharingMode) -> Self {
        self.inner.sharing_mode = sharing_mode;
        self
    }
    // Slice setter: records both the element count and the borrowed pointer
    // (count + pointer must always be set together to stay consistent).
    #[inline]
    pub fn queue_family_indices(mut self, queue_family_indices: &'a [u32]) -> Self {
        self.inner.queue_family_index_count = queue_family_indices.len() as _;
        self.inner.p_queue_family_indices = queue_family_indices.as_ptr();
        self
    }
    #[inline]
    pub fn initial_layout(mut self, initial_layout: ImageLayout) -> Self {
        self.inner.initial_layout = initial_layout;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsImageCreateInfo>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*const T>::cast(next);
            // Splice `next`'s chain between the root struct and the existing chain.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ImageCreateInfo {
        self.inner
    }
}
// FFI mirror of VkSubresourceLayout. No `s_type`/`p_next` (not an extensible
// struct), so `Default` can be derived. Generated code — do not edit by hand.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubresourceLayout.html>"]
pub struct SubresourceLayout {
    pub offset: DeviceSize,
    pub size: DeviceSize,
    pub row_pitch: DeviceSize,
    pub array_pitch: DeviceSize,
    pub depth_pitch: DeviceSize,
}
impl SubresourceLayout {
    // Entry point for the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> SubresourceLayoutBuilder<'a> {
        SubresourceLayoutBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder wrapper; `marker` is a zero-sized lifetime placeholder (no borrows
// are stored for this struct).
#[repr(transparent)]
pub struct SubresourceLayoutBuilder<'a> {
    inner: SubresourceLayout,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SubresourceLayoutBuilder<'a> {
    type Target = SubresourceLayout;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for SubresourceLayoutBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// By-value chainable setters, one per field.
impl<'a> SubresourceLayoutBuilder<'a> {
    #[inline]
    pub fn offset(mut self, offset: DeviceSize) -> Self {
        self.inner.offset = offset;
        self
    }
    #[inline]
    pub fn size(mut self, size: DeviceSize) -> Self {
        self.inner.size = size;
        self
    }
    #[inline]
    pub fn row_pitch(mut self, row_pitch: DeviceSize) -> Self {
        self.inner.row_pitch = row_pitch;
        self
    }
    #[inline]
    pub fn array_pitch(mut self, array_pitch: DeviceSize) -> Self {
        self.inner.array_pitch = array_pitch;
        self
    }
    #[inline]
    pub fn depth_pitch(mut self, depth_pitch: DeviceSize) -> Self {
        self.inner.depth_pitch = depth_pitch;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> SubresourceLayout {
        self.inner
    }
}
// FFI mirror of VkImageViewCreateInfo. Generated code — do not edit by hand.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageViewCreateInfo.html>"]
pub struct ImageViewCreateInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: ImageViewCreateFlags,
    pub image: Image,
    pub view_type: ImageViewType,
    pub format: Format,
    pub components: ComponentMapping,
    pub subresource_range: ImageSubresourceRange,
}
// Hand-written Default: correct `s_type` tag and null `p_next`.
impl ::std::default::Default for ImageViewCreateInfo {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: ImageViewCreateFlags::default(),
            image: Image::default(),
            view_type: ImageViewType::default(),
            format: Format::default(),
            components: ComponentMapping::default(),
            subresource_range: ImageSubresourceRange::default(),
        }
    }
}
unsafe impl TaggedStructure for ImageViewCreateInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_CREATE_INFO;
}
impl ImageViewCreateInfo {
    // Entry point for the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> ImageViewCreateInfoBuilder<'a> {
        ImageViewCreateInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder for `ImageViewCreateInfo`. Generated code — do not edit by hand.
#[repr(transparent)]
pub struct ImageViewCreateInfoBuilder<'a> {
    inner: ImageViewCreateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait gating which extension structs may be chained via `push_next`.
pub unsafe trait ExtendsImageViewCreateInfo {}
impl<'a> ::std::ops::Deref for ImageViewCreateInfoBuilder<'a> {
    type Target = ImageViewCreateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ImageViewCreateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// By-value chainable setters, one per field.
impl<'a> ImageViewCreateInfoBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: ImageViewCreateFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn image(mut self, image: Image) -> Self {
        self.inner.image = image;
        self
    }
    #[inline]
    pub fn view_type(mut self, view_type: ImageViewType) -> Self {
        self.inner.view_type = view_type;
        self
    }
    #[inline]
    pub fn format(mut self, format: Format) -> Self {
        self.inner.format = format;
        self
    }
    #[inline]
    pub fn components(mut self, components: ComponentMapping) -> Self {
        self.inner.components = components;
        self
    }
    #[inline]
    pub fn subresource_range(mut self, subresource_range: ImageSubresourceRange) -> Self {
        self.inner.subresource_range = subresource_range;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsImageViewCreateInfo>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*const T>::cast(next);
            // Splice `next`'s chain between the root struct and the existing chain.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ImageViewCreateInfo {
        self.inner
    }
}
// FFI mirror of VkBufferCopy (plain data, derivable Default). Generated code —
// do not edit by hand.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferCopy.html>"]
pub struct BufferCopy {
    pub src_offset: DeviceSize,
    pub dst_offset: DeviceSize,
    pub size: DeviceSize,
}
impl BufferCopy {
    // Entry point for the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> BufferCopyBuilder<'a> {
        BufferCopyBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct BufferCopyBuilder<'a> {
    inner: BufferCopy,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for BufferCopyBuilder<'a> {
    type Target = BufferCopy;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for BufferCopyBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// By-value chainable setters, one per field.
impl<'a> BufferCopyBuilder<'a> {
    #[inline]
    pub fn src_offset(mut self, src_offset: DeviceSize) -> Self {
        self.inner.src_offset = src_offset;
        self
    }
    #[inline]
    pub fn dst_offset(mut self, dst_offset: DeviceSize) -> Self {
        self.inner.dst_offset = dst_offset;
        self
    }
    #[inline]
    pub fn size(mut self, size: DeviceSize) -> Self {
        self.inner.size = size;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> BufferCopy {
        self.inner
    }
}
// FFI mirror of VkSparseMemoryBind (plain data, derivable Default). Generated
// code — do not edit by hand.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSparseMemoryBind.html>"]
pub struct SparseMemoryBind {
    pub resource_offset: DeviceSize,
    pub size: DeviceSize,
    pub memory: DeviceMemory,
    pub memory_offset: DeviceSize,
    pub flags: SparseMemoryBindFlags,
}
impl SparseMemoryBind {
    // Entry point for the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> SparseMemoryBindBuilder<'a> {
        SparseMemoryBindBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct SparseMemoryBindBuilder<'a> {
    inner: SparseMemoryBind,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SparseMemoryBindBuilder<'a> {
    type Target = SparseMemoryBind;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for SparseMemoryBindBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// By-value chainable setters, one per field.
impl<'a> SparseMemoryBindBuilder<'a> {
    #[inline]
    pub fn resource_offset(mut self, resource_offset: DeviceSize) -> Self {
        self.inner.resource_offset = resource_offset;
        self
    }
    #[inline]
    pub fn size(mut self, size: DeviceSize) -> Self {
        self.inner.size = size;
        self
    }
    #[inline]
    pub fn memory(mut self, memory: DeviceMemory) -> Self {
        self.inner.memory = memory;
        self
    }
    #[inline]
    pub fn memory_offset(mut self, memory_offset: DeviceSize) -> Self {
        self.inner.memory_offset = memory_offset;
        self
    }
    #[inline]
    pub fn flags(mut self, flags: SparseMemoryBindFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> SparseMemoryBind {
        self.inner
    }
}
// FFI mirror of VkSparseImageMemoryBind (plain data, derivable Default).
// Generated code — do not edit by hand.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSparseImageMemoryBind.html>"]
pub struct SparseImageMemoryBind {
    pub subresource: ImageSubresource,
    pub offset: Offset3D,
    pub extent: Extent3D,
    pub memory: DeviceMemory,
    pub memory_offset: DeviceSize,
    pub flags: SparseMemoryBindFlags,
}
impl SparseImageMemoryBind {
    // Entry point for the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> SparseImageMemoryBindBuilder<'a> {
        SparseImageMemoryBindBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct SparseImageMemoryBindBuilder<'a> {
    inner: SparseImageMemoryBind,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SparseImageMemoryBindBuilder<'a> {
    type Target = SparseImageMemoryBind;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for SparseImageMemoryBindBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// By-value chainable setters, one per field.
impl<'a> SparseImageMemoryBindBuilder<'a> {
    #[inline]
    pub fn subresource(mut self, subresource: ImageSubresource) -> Self {
        self.inner.subresource = subresource;
        self
    }
    #[inline]
    pub fn offset(mut self, offset: Offset3D) -> Self {
        self.inner.offset = offset;
        self
    }
    #[inline]
    pub fn extent(mut self, extent: Extent3D) -> Self {
        self.inner.extent = extent;
        self
    }
    #[inline]
    pub fn memory(mut self, memory: DeviceMemory) -> Self {
        self.inner.memory = memory;
        self
    }
    #[inline]
    pub fn memory_offset(mut self, memory_offset: DeviceSize) -> Self {
        self.inner.memory_offset = memory_offset;
        self
    }
    #[inline]
    pub fn flags(mut self, flags: SparseMemoryBindFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> SparseImageMemoryBind {
        self.inner
    }
}
// FFI mirror of VkSparseBufferMemoryBindInfo; holds a borrowed C array of
// binds (`bind_count` + `p_binds`). Generated code — do not edit by hand.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSparseBufferMemoryBindInfo.html>"]
pub struct SparseBufferMemoryBindInfo {
    pub buffer: Buffer,
    pub bind_count: u32,
    pub p_binds: *const SparseMemoryBind,
}
// Hand-written Default (raw pointer field prevents derive): zero count, null array.
impl ::std::default::Default for SparseBufferMemoryBindInfo {
    #[inline]
    fn default() -> Self {
        Self {
            buffer: Buffer::default(),
            bind_count: u32::default(),
            p_binds: ::std::ptr::null(),
        }
    }
}
impl SparseBufferMemoryBindInfo {
    // Entry point for the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> SparseBufferMemoryBindInfoBuilder<'a> {
        SparseBufferMemoryBindInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct SparseBufferMemoryBindInfoBuilder<'a> {
    inner: SparseBufferMemoryBindInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SparseBufferMemoryBindInfoBuilder<'a> {
    type Target = SparseBufferMemoryBindInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for SparseBufferMemoryBindInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> SparseBufferMemoryBindInfoBuilder<'a> {
    #[inline]
    pub fn buffer(mut self, buffer: Buffer) -> Self {
        self.inner.buffer = buffer;
        self
    }
    // Slice setter: stores count and borrowed pointer together.
    #[inline]
    pub fn binds(mut self, binds: &'a [SparseMemoryBind]) -> Self {
        self.inner.bind_count = binds.len() as _;
        self.inner.p_binds = binds.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> SparseBufferMemoryBindInfo {
        self.inner
    }
}
// FFI mirror of VkSparseImageOpaqueMemoryBindInfo; holds a borrowed C array of
// binds (`bind_count` + `p_binds`). Generated code — do not edit by hand.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSparseImageOpaqueMemoryBindInfo.html>"]
pub struct SparseImageOpaqueMemoryBindInfo {
    pub image: Image,
    pub bind_count: u32,
    pub p_binds: *const SparseMemoryBind,
}
// Hand-written Default (raw pointer field prevents derive): zero count, null array.
impl ::std::default::Default for SparseImageOpaqueMemoryBindInfo {
    #[inline]
    fn default() -> Self {
        Self {
            image: Image::default(),
            bind_count: u32::default(),
            p_binds: ::std::ptr::null(),
        }
    }
}
impl SparseImageOpaqueMemoryBindInfo {
    // Entry point for the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> SparseImageOpaqueMemoryBindInfoBuilder<'a> {
        SparseImageOpaqueMemoryBindInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct SparseImageOpaqueMemoryBindInfoBuilder<'a> {
    inner: SparseImageOpaqueMemoryBindInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SparseImageOpaqueMemoryBindInfoBuilder<'a> {
    type Target = SparseImageOpaqueMemoryBindInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for SparseImageOpaqueMemoryBindInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> SparseImageOpaqueMemoryBindInfoBuilder<'a> {
    #[inline]
    pub fn image(mut self, image: Image) -> Self {
        self.inner.image = image;
        self
    }
    // Slice setter: stores count and borrowed pointer together.
    #[inline]
    pub fn binds(mut self, binds: &'a [SparseMemoryBind]) -> Self {
        self.inner.bind_count = binds.len() as _;
        self.inner.p_binds = binds.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> SparseImageOpaqueMemoryBindInfo {
        self.inner
    }
}
// FFI mirror of VkSparseImageMemoryBindInfo; like the opaque variant above but
// the borrowed array holds `SparseImageMemoryBind` elements. Generated code —
// do not edit by hand.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSparseImageMemoryBindInfo.html>"]
pub struct SparseImageMemoryBindInfo {
    pub image: Image,
    pub bind_count: u32,
    pub p_binds: *const SparseImageMemoryBind,
}
// Hand-written Default (raw pointer field prevents derive): zero count, null array.
impl ::std::default::Default for SparseImageMemoryBindInfo {
    #[inline]
    fn default() -> Self {
        Self {
            image: Image::default(),
            bind_count: u32::default(),
            p_binds: ::std::ptr::null(),
        }
    }
}
impl SparseImageMemoryBindInfo {
    // Entry point for the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> SparseImageMemoryBindInfoBuilder<'a> {
        SparseImageMemoryBindInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct SparseImageMemoryBindInfoBuilder<'a> {
    inner: SparseImageMemoryBindInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SparseImageMemoryBindInfoBuilder<'a> {
    type Target = SparseImageMemoryBindInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for SparseImageMemoryBindInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> SparseImageMemoryBindInfoBuilder<'a> {
    #[inline]
    pub fn image(mut self, image: Image) -> Self {
        self.inner.image = image;
        self
    }
    // Slice setter: stores count and borrowed pointer together.
    #[inline]
    pub fn binds(mut self, binds: &'a [SparseImageMemoryBind]) -> Self {
        self.inner.bind_count = binds.len() as _;
        self.inner.p_binds = binds.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> SparseImageMemoryBindInfo {
        self.inner
    }
}
// FFI mirror of VkBindSparseInfo: five borrowed count/pointer array pairs
// (wait semaphores, buffer/opaque-image/image binds, signal semaphores).
// Generated code — do not edit by hand.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBindSparseInfo.html>"]
pub struct BindSparseInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub wait_semaphore_count: u32,
    pub p_wait_semaphores: *const Semaphore,
    pub buffer_bind_count: u32,
    pub p_buffer_binds: *const SparseBufferMemoryBindInfo,
    pub image_opaque_bind_count: u32,
    pub p_image_opaque_binds: *const SparseImageOpaqueMemoryBindInfo,
    pub image_bind_count: u32,
    pub p_image_binds: *const SparseImageMemoryBindInfo,
    pub signal_semaphore_count: u32,
    pub p_signal_semaphores: *const Semaphore,
}
// Hand-written Default: correct `s_type` tag, null pointers, zero counts.
impl ::std::default::Default for BindSparseInfo {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            wait_semaphore_count: u32::default(),
            p_wait_semaphores: ::std::ptr::null(),
            buffer_bind_count: u32::default(),
            p_buffer_binds: ::std::ptr::null(),
            image_opaque_bind_count: u32::default(),
            p_image_opaque_binds: ::std::ptr::null(),
            image_bind_count: u32::default(),
            p_image_binds: ::std::ptr::null(),
            signal_semaphore_count: u32::default(),
            p_signal_semaphores: ::std::ptr::null(),
        }
    }
}
unsafe impl TaggedStructure for BindSparseInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::BIND_SPARSE_INFO;
}
impl BindSparseInfo {
    // Entry point for the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> BindSparseInfoBuilder<'a> {
        BindSparseInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder for `BindSparseInfo`. Each slice setter below stores a count plus a
// pointer borrowed for `'a`. Generated code — do not edit by hand.
#[repr(transparent)]
pub struct BindSparseInfoBuilder<'a> {
    inner: BindSparseInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait gating which extension structs may be chained via `push_next`.
pub unsafe trait ExtendsBindSparseInfo {}
impl<'a> ::std::ops::Deref for BindSparseInfoBuilder<'a> {
    type Target = BindSparseInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for BindSparseInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> BindSparseInfoBuilder<'a> {
    #[inline]
    pub fn wait_semaphores(mut self, wait_semaphores: &'a [Semaphore]) -> Self {
        self.inner.wait_semaphore_count = wait_semaphores.len() as _;
        self.inner.p_wait_semaphores = wait_semaphores.as_ptr();
        self
    }
    #[inline]
    pub fn buffer_binds(mut self, buffer_binds: &'a [SparseBufferMemoryBindInfo]) -> Self {
        self.inner.buffer_bind_count = buffer_binds.len() as _;
        self.inner.p_buffer_binds = buffer_binds.as_ptr();
        self
    }
    #[inline]
    pub fn image_opaque_binds(
        mut self,
        image_opaque_binds: &'a [SparseImageOpaqueMemoryBindInfo],
    ) -> Self {
        self.inner.image_opaque_bind_count = image_opaque_binds.len() as _;
        self.inner.p_image_opaque_binds = image_opaque_binds.as_ptr();
        self
    }
    #[inline]
    pub fn image_binds(mut self, image_binds: &'a [SparseImageMemoryBindInfo]) -> Self {
        self.inner.image_bind_count = image_binds.len() as _;
        self.inner.p_image_binds = image_binds.as_ptr();
        self
    }
    #[inline]
    pub fn signal_semaphores(mut self, signal_semaphores: &'a [Semaphore]) -> Self {
        self.inner.signal_semaphore_count = signal_semaphores.len() as _;
        self.inner.p_signal_semaphores = signal_semaphores.as_ptr();
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsBindSparseInfo>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*const T>::cast(next);
            // Splice `next`'s chain between the root struct and the existing chain.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> BindSparseInfo {
        self.inner
    }
}
// FFI mirror of VkImageCopy (plain data, derivable Default). Generated code —
// do not edit by hand.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageCopy.html>"]
pub struct ImageCopy {
    pub src_subresource: ImageSubresourceLayers,
    pub src_offset: Offset3D,
    pub dst_subresource: ImageSubresourceLayers,
    pub dst_offset: Offset3D,
    pub extent: Extent3D,
}
impl ImageCopy {
    // Entry point for the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> ImageCopyBuilder<'a> {
        ImageCopyBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct ImageCopyBuilder<'a> {
    inner: ImageCopy,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ImageCopyBuilder<'a> {
    type Target = ImageCopy;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ImageCopyBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// By-value chainable setters, one per field.
impl<'a> ImageCopyBuilder<'a> {
    #[inline]
    pub fn src_subresource(mut self, src_subresource: ImageSubresourceLayers) -> Self {
        self.inner.src_subresource = src_subresource;
        self
    }
    #[inline]
    pub fn src_offset(mut self, src_offset: Offset3D) -> Self {
        self.inner.src_offset = src_offset;
        self
    }
    #[inline]
    pub fn dst_subresource(mut self, dst_subresource: ImageSubresourceLayers) -> Self {
        self.inner.dst_subresource = dst_subresource;
        self
    }
    #[inline]
    pub fn dst_offset(mut self, dst_offset: Offset3D) -> Self {
        self.inner.dst_offset = dst_offset;
        self
    }
    #[inline]
    pub fn extent(mut self, extent: Extent3D) -> Self {
        self.inner.extent = extent;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ImageCopy {
        self.inner
    }
}
// FFI mirror of VkImageBlit. Generated code — do not edit by hand.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageBlit.html>"]
pub struct ImageBlit {
    pub src_subresource: ImageSubresourceLayers,
    pub src_offsets: [Offset3D; 2],
    pub dst_subresource: ImageSubresourceLayers,
    pub dst_offsets: [Offset3D; 2],
}
impl ::std::default::Default for ImageBlit {
    #[inline]
    fn default() -> Self {
        Self {
            src_subresource: ImageSubresourceLayers::default(),
            // Generator uses zeroed() for the fixed-size arrays; presumably the
            // all-zero bit pattern is a valid Offset3D (plain C struct) —
            // NOTE(review): relies on the generator's layout guarantees.
            src_offsets: unsafe { ::std::mem::zeroed() },
            dst_subresource: ImageSubresourceLayers::default(),
            dst_offsets: unsafe { ::std::mem::zeroed() },
        }
    }
}
impl ImageBlit {
    // Entry point for the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> ImageBlitBuilder<'a> {
        ImageBlitBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct ImageBlitBuilder<'a> {
    inner: ImageBlit,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ImageBlitBuilder<'a> {
    type Target = ImageBlit;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ImageBlitBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
+impl<'a> ImageBlitBuilder<'a> {
+ #[inline]
+ pub fn src_subresource(mut self, src_subresource: ImageSubresourceLayers) -> Self {
+ self.inner.src_subresource = src_subresource;
+ self
+ }
+ #[inline]
+ pub fn src_offsets(mut self, src_offsets: [Offset3D; 2]) -> Self {
+ self.inner.src_offsets = src_offsets;
+ self
+ }
+ #[inline]
+ pub fn dst_subresource(mut self, dst_subresource: ImageSubresourceLayers) -> Self {
+ self.inner.dst_subresource = dst_subresource;
+ self
+ }
+ #[inline]
+ pub fn dst_offsets(mut self, dst_offsets: [Offset3D; 2]) -> Self {
+ self.inner.dst_offsets = dst_offsets;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageBlit {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferImageCopy.html>"]
+pub struct BufferImageCopy {
+ pub buffer_offset: DeviceSize,
+ pub buffer_row_length: u32,
+ pub buffer_image_height: u32,
+ pub image_subresource: ImageSubresourceLayers,
+ pub image_offset: Offset3D,
+ pub image_extent: Extent3D,
+}
+impl BufferImageCopy {
+ pub fn builder<'a>() -> BufferImageCopyBuilder<'a> {
+ BufferImageCopyBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct BufferImageCopyBuilder<'a> {
+ inner: BufferImageCopy,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BufferImageCopyBuilder<'a> {
+ type Target = BufferImageCopy;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BufferImageCopyBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BufferImageCopyBuilder<'a> {
+ #[inline]
+ pub fn buffer_offset(mut self, buffer_offset: DeviceSize) -> Self {
+ self.inner.buffer_offset = buffer_offset;
+ self
+ }
+ #[inline]
+ pub fn buffer_row_length(mut self, buffer_row_length: u32) -> Self {
+ self.inner.buffer_row_length = buffer_row_length;
+ self
+ }
+ #[inline]
+ pub fn buffer_image_height(mut self, buffer_image_height: u32) -> Self {
+ self.inner.buffer_image_height = buffer_image_height;
+ self
+ }
+ #[inline]
+ pub fn image_subresource(mut self, image_subresource: ImageSubresourceLayers) -> Self {
+ self.inner.image_subresource = image_subresource;
+ self
+ }
+ #[inline]
+ pub fn image_offset(mut self, image_offset: Offset3D) -> Self {
+ self.inner.image_offset = image_offset;
+ self
+ }
+ #[inline]
+ pub fn image_extent(mut self, image_extent: Extent3D) -> Self {
+ self.inner.image_extent = image_extent;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BufferImageCopy {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyMemoryIndirectCommandNV.html>"]
+pub struct CopyMemoryIndirectCommandNV {
+ pub src_address: DeviceAddress,
+ pub dst_address: DeviceAddress,
+ pub size: DeviceSize,
+}
+impl CopyMemoryIndirectCommandNV {
+ pub fn builder<'a>() -> CopyMemoryIndirectCommandNVBuilder<'a> {
+ CopyMemoryIndirectCommandNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct CopyMemoryIndirectCommandNVBuilder<'a> {
+ inner: CopyMemoryIndirectCommandNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CopyMemoryIndirectCommandNVBuilder<'a> {
+ type Target = CopyMemoryIndirectCommandNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for CopyMemoryIndirectCommandNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> CopyMemoryIndirectCommandNVBuilder<'a> {
+ #[inline]
+ pub fn src_address(mut self, src_address: DeviceAddress) -> Self {
+ self.inner.src_address = src_address;
+ self
+ }
+ #[inline]
+ pub fn dst_address(mut self, dst_address: DeviceAddress) -> Self {
+ self.inner.dst_address = dst_address;
+ self
+ }
+ #[inline]
+ pub fn size(mut self, size: DeviceSize) -> Self {
+ self.inner.size = size;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> CopyMemoryIndirectCommandNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyMemoryToImageIndirectCommandNV.html>"]
+pub struct CopyMemoryToImageIndirectCommandNV {
+ pub src_address: DeviceAddress,
+ pub buffer_row_length: u32,
+ pub buffer_image_height: u32,
+ pub image_subresource: ImageSubresourceLayers,
+ pub image_offset: Offset3D,
+ pub image_extent: Extent3D,
+}
+impl CopyMemoryToImageIndirectCommandNV {
+ pub fn builder<'a>() -> CopyMemoryToImageIndirectCommandNVBuilder<'a> {
+ CopyMemoryToImageIndirectCommandNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct CopyMemoryToImageIndirectCommandNVBuilder<'a> {
+ inner: CopyMemoryToImageIndirectCommandNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CopyMemoryToImageIndirectCommandNVBuilder<'a> {
+ type Target = CopyMemoryToImageIndirectCommandNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for CopyMemoryToImageIndirectCommandNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> CopyMemoryToImageIndirectCommandNVBuilder<'a> {
+ #[inline]
+ pub fn src_address(mut self, src_address: DeviceAddress) -> Self {
+ self.inner.src_address = src_address;
+ self
+ }
+ #[inline]
+ pub fn buffer_row_length(mut self, buffer_row_length: u32) -> Self {
+ self.inner.buffer_row_length = buffer_row_length;
+ self
+ }
+ #[inline]
+ pub fn buffer_image_height(mut self, buffer_image_height: u32) -> Self {
+ self.inner.buffer_image_height = buffer_image_height;
+ self
+ }
+ #[inline]
+ pub fn image_subresource(mut self, image_subresource: ImageSubresourceLayers) -> Self {
+ self.inner.image_subresource = image_subresource;
+ self
+ }
+ #[inline]
+ pub fn image_offset(mut self, image_offset: Offset3D) -> Self {
+ self.inner.image_offset = image_offset;
+ self
+ }
+ #[inline]
+ pub fn image_extent(mut self, image_extent: Extent3D) -> Self {
+ self.inner.image_extent = image_extent;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> CopyMemoryToImageIndirectCommandNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageResolve.html>"]
+pub struct ImageResolve {
+ pub src_subresource: ImageSubresourceLayers,
+ pub src_offset: Offset3D,
+ pub dst_subresource: ImageSubresourceLayers,
+ pub dst_offset: Offset3D,
+ pub extent: Extent3D,
+}
+impl ImageResolve {
+ pub fn builder<'a>() -> ImageResolveBuilder<'a> {
+ ImageResolveBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImageResolveBuilder<'a> {
+ inner: ImageResolve,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImageResolveBuilder<'a> {
+ type Target = ImageResolve;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageResolveBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImageResolveBuilder<'a> {
+ #[inline]
+ pub fn src_subresource(mut self, src_subresource: ImageSubresourceLayers) -> Self {
+ self.inner.src_subresource = src_subresource;
+ self
+ }
+ #[inline]
+ pub fn src_offset(mut self, src_offset: Offset3D) -> Self {
+ self.inner.src_offset = src_offset;
+ self
+ }
+ #[inline]
+ pub fn dst_subresource(mut self, dst_subresource: ImageSubresourceLayers) -> Self {
+ self.inner.dst_subresource = dst_subresource;
+ self
+ }
+ #[inline]
+ pub fn dst_offset(mut self, dst_offset: Offset3D) -> Self {
+ self.inner.dst_offset = dst_offset;
+ self
+ }
+ #[inline]
+ pub fn extent(mut self, extent: Extent3D) -> Self {
+ self.inner.extent = extent;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageResolve {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkShaderModuleCreateInfo.html>"]
+pub struct ShaderModuleCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: ShaderModuleCreateFlags,
+ pub code_size: usize,
+ pub p_code: *const u32,
+}
+impl ::std::default::Default for ShaderModuleCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: ShaderModuleCreateFlags::default(),
+ code_size: usize::default(),
+ p_code: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ShaderModuleCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::SHADER_MODULE_CREATE_INFO;
+}
+impl ShaderModuleCreateInfo {
+ pub fn builder<'a>() -> ShaderModuleCreateInfoBuilder<'a> {
+ ShaderModuleCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ShaderModuleCreateInfoBuilder<'a> {
+ inner: ShaderModuleCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPipelineShaderStageCreateInfo for ShaderModuleCreateInfoBuilder<'_> {}
+unsafe impl ExtendsPipelineShaderStageCreateInfo for ShaderModuleCreateInfo {}
+pub unsafe trait ExtendsShaderModuleCreateInfo {}
+impl<'a> ::std::ops::Deref for ShaderModuleCreateInfoBuilder<'a> {
+ type Target = ShaderModuleCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ShaderModuleCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ShaderModuleCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: ShaderModuleCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn code(mut self, code: &'a [u32]) -> Self {
+ self.inner.code_size = code.len() * 4;
+ self.inner.p_code = code.as_ptr();
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsShaderModuleCreateInfo>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ShaderModuleCreateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorSetLayoutBinding.html>"]
+pub struct DescriptorSetLayoutBinding {
+ pub binding: u32,
+ pub descriptor_type: DescriptorType,
+ pub descriptor_count: u32,
+ pub stage_flags: ShaderStageFlags,
+ pub p_immutable_samplers: *const Sampler,
+}
+impl ::std::default::Default for DescriptorSetLayoutBinding {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ binding: u32::default(),
+ descriptor_type: DescriptorType::default(),
+ descriptor_count: u32::default(),
+ stage_flags: ShaderStageFlags::default(),
+ p_immutable_samplers: ::std::ptr::null(),
+ }
+ }
+}
+impl DescriptorSetLayoutBinding {
+ pub fn builder<'a>() -> DescriptorSetLayoutBindingBuilder<'a> {
+ DescriptorSetLayoutBindingBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DescriptorSetLayoutBindingBuilder<'a> {
+ inner: DescriptorSetLayoutBinding,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DescriptorSetLayoutBindingBuilder<'a> {
+ type Target = DescriptorSetLayoutBinding;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DescriptorSetLayoutBindingBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DescriptorSetLayoutBindingBuilder<'a> {
+ #[inline]
+ pub fn binding(mut self, binding: u32) -> Self {
+ self.inner.binding = binding;
+ self
+ }
+ #[inline]
+ pub fn descriptor_type(mut self, descriptor_type: DescriptorType) -> Self {
+ self.inner.descriptor_type = descriptor_type;
+ self
+ }
+ #[inline]
+ pub fn descriptor_count(mut self, descriptor_count: u32) -> Self {
+ self.inner.descriptor_count = descriptor_count;
+ self
+ }
+ #[inline]
+ pub fn stage_flags(mut self, stage_flags: ShaderStageFlags) -> Self {
+ self.inner.stage_flags = stage_flags;
+ self
+ }
+ #[inline]
+ pub fn immutable_samplers(mut self, immutable_samplers: &'a [Sampler]) -> Self {
+ self.inner.descriptor_count = immutable_samplers.len() as _;
+ self.inner.p_immutable_samplers = immutable_samplers.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DescriptorSetLayoutBinding {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorSetLayoutCreateInfo.html>"]
+pub struct DescriptorSetLayoutCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: DescriptorSetLayoutCreateFlags,
+ pub binding_count: u32,
+ pub p_bindings: *const DescriptorSetLayoutBinding,
+}
+impl ::std::default::Default for DescriptorSetLayoutCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: DescriptorSetLayoutCreateFlags::default(),
+ binding_count: u32::default(),
+ p_bindings: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DescriptorSetLayoutCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+}
+impl DescriptorSetLayoutCreateInfo {
+ pub fn builder<'a>() -> DescriptorSetLayoutCreateInfoBuilder<'a> {
+ DescriptorSetLayoutCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DescriptorSetLayoutCreateInfoBuilder<'a> {
+ inner: DescriptorSetLayoutCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsDescriptorSetLayoutCreateInfo {}
+impl<'a> ::std::ops::Deref for DescriptorSetLayoutCreateInfoBuilder<'a> {
+ type Target = DescriptorSetLayoutCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DescriptorSetLayoutCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DescriptorSetLayoutCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: DescriptorSetLayoutCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn bindings(mut self, bindings: &'a [DescriptorSetLayoutBinding]) -> Self {
+ self.inner.binding_count = bindings.len() as _;
+ self.inner.p_bindings = bindings.as_ptr();
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsDescriptorSetLayoutCreateInfo>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DescriptorSetLayoutCreateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorPoolSize.html>"]
+pub struct DescriptorPoolSize {
+ pub ty: DescriptorType,
+ pub descriptor_count: u32,
+}
+impl DescriptorPoolSize {
+ pub fn builder<'a>() -> DescriptorPoolSizeBuilder<'a> {
+ DescriptorPoolSizeBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DescriptorPoolSizeBuilder<'a> {
+ inner: DescriptorPoolSize,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DescriptorPoolSizeBuilder<'a> {
+ type Target = DescriptorPoolSize;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DescriptorPoolSizeBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DescriptorPoolSizeBuilder<'a> {
+ #[inline]
+ pub fn ty(mut self, ty: DescriptorType) -> Self {
+ self.inner.ty = ty;
+ self
+ }
+ #[inline]
+ pub fn descriptor_count(mut self, descriptor_count: u32) -> Self {
+ self.inner.descriptor_count = descriptor_count;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DescriptorPoolSize {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorPoolCreateInfo.html>"]
+pub struct DescriptorPoolCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: DescriptorPoolCreateFlags,
+ pub max_sets: u32,
+ pub pool_size_count: u32,
+ pub p_pool_sizes: *const DescriptorPoolSize,
+}
+impl ::std::default::Default for DescriptorPoolCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: DescriptorPoolCreateFlags::default(),
+ max_sets: u32::default(),
+ pool_size_count: u32::default(),
+ p_pool_sizes: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DescriptorPoolCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_POOL_CREATE_INFO;
+}
+impl DescriptorPoolCreateInfo {
+ pub fn builder<'a>() -> DescriptorPoolCreateInfoBuilder<'a> {
+ DescriptorPoolCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DescriptorPoolCreateInfoBuilder<'a> {
+ inner: DescriptorPoolCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsDescriptorPoolCreateInfo {}
+impl<'a> ::std::ops::Deref for DescriptorPoolCreateInfoBuilder<'a> {
+ type Target = DescriptorPoolCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DescriptorPoolCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DescriptorPoolCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: DescriptorPoolCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn max_sets(mut self, max_sets: u32) -> Self {
+ self.inner.max_sets = max_sets;
+ self
+ }
+ #[inline]
+ pub fn pool_sizes(mut self, pool_sizes: &'a [DescriptorPoolSize]) -> Self {
+ self.inner.pool_size_count = pool_sizes.len() as _;
+ self.inner.p_pool_sizes = pool_sizes.as_ptr();
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsDescriptorPoolCreateInfo>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DescriptorPoolCreateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorSetAllocateInfo.html>"]
+pub struct DescriptorSetAllocateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub descriptor_pool: DescriptorPool,
+ pub descriptor_set_count: u32,
+ pub p_set_layouts: *const DescriptorSetLayout,
+}
+impl ::std::default::Default for DescriptorSetAllocateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ descriptor_pool: DescriptorPool::default(),
+ descriptor_set_count: u32::default(),
+ p_set_layouts: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DescriptorSetAllocateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_SET_ALLOCATE_INFO;
+}
+impl DescriptorSetAllocateInfo {
+ pub fn builder<'a>() -> DescriptorSetAllocateInfoBuilder<'a> {
+ DescriptorSetAllocateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DescriptorSetAllocateInfoBuilder<'a> {
+ inner: DescriptorSetAllocateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsDescriptorSetAllocateInfo {}
+impl<'a> ::std::ops::Deref for DescriptorSetAllocateInfoBuilder<'a> {
+ type Target = DescriptorSetAllocateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DescriptorSetAllocateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DescriptorSetAllocateInfoBuilder<'a> {
+ #[inline]
+ pub fn descriptor_pool(mut self, descriptor_pool: DescriptorPool) -> Self {
+ self.inner.descriptor_pool = descriptor_pool;
+ self
+ }
+ #[inline]
+ pub fn set_layouts(mut self, set_layouts: &'a [DescriptorSetLayout]) -> Self {
+ self.inner.descriptor_set_count = set_layouts.len() as _;
+ self.inner.p_set_layouts = set_layouts.as_ptr();
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsDescriptorSetAllocateInfo>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DescriptorSetAllocateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSpecializationMapEntry.html>"]
+pub struct SpecializationMapEntry {
+ pub constant_id: u32,
+ pub offset: u32,
+ pub size: usize,
+}
+impl SpecializationMapEntry {
+ pub fn builder<'a>() -> SpecializationMapEntryBuilder<'a> {
+ SpecializationMapEntryBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SpecializationMapEntryBuilder<'a> {
+ inner: SpecializationMapEntry,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SpecializationMapEntryBuilder<'a> {
+ type Target = SpecializationMapEntry;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SpecializationMapEntryBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SpecializationMapEntryBuilder<'a> {
+ #[inline]
+ pub fn constant_id(mut self, constant_id: u32) -> Self {
+ self.inner.constant_id = constant_id;
+ self
+ }
+ #[inline]
+ pub fn offset(mut self, offset: u32) -> Self {
+ self.inner.offset = offset;
+ self
+ }
+ #[inline]
+ pub fn size(mut self, size: usize) -> Self {
+ self.inner.size = size;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SpecializationMapEntry {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSpecializationInfo.html>"]
+pub struct SpecializationInfo {
+ pub map_entry_count: u32,
+ pub p_map_entries: *const SpecializationMapEntry,
+ pub data_size: usize,
+ pub p_data: *const c_void,
+}
+impl ::std::default::Default for SpecializationInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ map_entry_count: u32::default(),
+ p_map_entries: ::std::ptr::null(),
+ data_size: usize::default(),
+ p_data: ::std::ptr::null(),
+ }
+ }
+}
+impl SpecializationInfo {
+ pub fn builder<'a>() -> SpecializationInfoBuilder<'a> {
+ SpecializationInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SpecializationInfoBuilder<'a> {
+ inner: SpecializationInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SpecializationInfoBuilder<'a> {
+ type Target = SpecializationInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SpecializationInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SpecializationInfoBuilder<'a> {
+ #[inline]
+ pub fn map_entries(mut self, map_entries: &'a [SpecializationMapEntry]) -> Self {
+ self.inner.map_entry_count = map_entries.len() as _;
+ self.inner.p_map_entries = map_entries.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn data(mut self, data: &'a [u8]) -> Self {
+ self.inner.data_size = data.len();
+ self.inner.p_data = data.as_ptr().cast();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SpecializationInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineShaderStageCreateInfo.html>"]
+pub struct PipelineShaderStageCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: PipelineShaderStageCreateFlags,
+ pub stage: ShaderStageFlags,
+ pub module: ShaderModule,
+ pub p_name: *const c_char,
+ pub p_specialization_info: *const SpecializationInfo,
+}
+impl ::std::default::Default for PipelineShaderStageCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: PipelineShaderStageCreateFlags::default(),
+ stage: ShaderStageFlags::default(),
+ module: ShaderModule::default(),
+ p_name: ::std::ptr::null(),
+ p_specialization_info: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineShaderStageCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_SHADER_STAGE_CREATE_INFO;
+}
+impl PipelineShaderStageCreateInfo {
+ pub fn builder<'a>() -> PipelineShaderStageCreateInfoBuilder<'a> {
+ PipelineShaderStageCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineShaderStageCreateInfoBuilder<'a> {
+ inner: PipelineShaderStageCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsPipelineShaderStageCreateInfo {}
+impl<'a> ::std::ops::Deref for PipelineShaderStageCreateInfoBuilder<'a> {
+ type Target = PipelineShaderStageCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineShaderStageCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineShaderStageCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: PipelineShaderStageCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn stage(mut self, stage: ShaderStageFlags) -> Self {
+ self.inner.stage = stage;
+ self
+ }
+ #[inline]
+ pub fn module(mut self, module: ShaderModule) -> Self {
+ self.inner.module = module;
+ self
+ }
+ #[inline]
+ pub fn name(mut self, name: &'a ::std::ffi::CStr) -> Self {
+ self.inner.p_name = name.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn specialization_info(mut self, specialization_info: &'a SpecializationInfo) -> Self {
+ self.inner.p_specialization_info = specialization_info;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsPipelineShaderStageCreateInfo>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineShaderStageCreateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkComputePipelineCreateInfo.html>"]
+pub struct ComputePipelineCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: PipelineCreateFlags,
+ pub stage: PipelineShaderStageCreateInfo,
+ pub layout: PipelineLayout,
+ pub base_pipeline_handle: Pipeline,
+ pub base_pipeline_index: i32,
+}
+impl ::std::default::Default for ComputePipelineCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: PipelineCreateFlags::default(),
+ stage: PipelineShaderStageCreateInfo::default(),
+ layout: PipelineLayout::default(),
+ base_pipeline_handle: Pipeline::default(),
+ base_pipeline_index: i32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ComputePipelineCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::COMPUTE_PIPELINE_CREATE_INFO;
+}
+impl ComputePipelineCreateInfo {
+ pub fn builder<'a>() -> ComputePipelineCreateInfoBuilder<'a> {
+ ComputePipelineCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ComputePipelineCreateInfoBuilder<'a> {
+ inner: ComputePipelineCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsComputePipelineCreateInfo {}
+impl<'a> ::std::ops::Deref for ComputePipelineCreateInfoBuilder<'a> {
+ type Target = ComputePipelineCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ComputePipelineCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ComputePipelineCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: PipelineCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn stage(mut self, stage: PipelineShaderStageCreateInfo) -> Self {
+ self.inner.stage = stage;
+ self
+ }
+ #[inline]
+ pub fn layout(mut self, layout: PipelineLayout) -> Self {
+ self.inner.layout = layout;
+ self
+ }
+ #[inline]
+ pub fn base_pipeline_handle(mut self, base_pipeline_handle: Pipeline) -> Self {
+ self.inner.base_pipeline_handle = base_pipeline_handle;
+ self
+ }
+ #[inline]
+ pub fn base_pipeline_index(mut self, base_pipeline_index: i32) -> Self {
+ self.inner.base_pipeline_index = base_pipeline_index;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsComputePipelineCreateInfo>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ComputePipelineCreateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVertexInputBindingDescription.html>"]
+pub struct VertexInputBindingDescription {
+ pub binding: u32,
+ pub stride: u32,
+ pub input_rate: VertexInputRate,
+}
+impl VertexInputBindingDescription {
+ pub fn builder<'a>() -> VertexInputBindingDescriptionBuilder<'a> {
+ VertexInputBindingDescriptionBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VertexInputBindingDescriptionBuilder<'a> {
+ inner: VertexInputBindingDescription,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for VertexInputBindingDescriptionBuilder<'a> {
+ type Target = VertexInputBindingDescription;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VertexInputBindingDescriptionBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VertexInputBindingDescriptionBuilder<'a> {
+ #[inline]
+ pub fn binding(mut self, binding: u32) -> Self {
+ self.inner.binding = binding;
+ self
+ }
+ #[inline]
+ pub fn stride(mut self, stride: u32) -> Self {
+ self.inner.stride = stride;
+ self
+ }
+ #[inline]
+ pub fn input_rate(mut self, input_rate: VertexInputRate) -> Self {
+ self.inner.input_rate = input_rate;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VertexInputBindingDescription {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVertexInputAttributeDescription.html>"]
+pub struct VertexInputAttributeDescription {
+ pub location: u32,
+ pub binding: u32,
+ pub format: Format,
+ pub offset: u32,
+}
+impl VertexInputAttributeDescription {
+ pub fn builder<'a>() -> VertexInputAttributeDescriptionBuilder<'a> {
+ VertexInputAttributeDescriptionBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VertexInputAttributeDescriptionBuilder<'a> {
+ inner: VertexInputAttributeDescription,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for VertexInputAttributeDescriptionBuilder<'a> {
+ type Target = VertexInputAttributeDescription;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VertexInputAttributeDescriptionBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VertexInputAttributeDescriptionBuilder<'a> {
+ #[inline]
+ pub fn location(mut self, location: u32) -> Self {
+ self.inner.location = location;
+ self
+ }
+ #[inline]
+ pub fn binding(mut self, binding: u32) -> Self {
+ self.inner.binding = binding;
+ self
+ }
+ #[inline]
+ pub fn format(mut self, format: Format) -> Self {
+ self.inner.format = format;
+ self
+ }
+ #[inline]
+ pub fn offset(mut self, offset: u32) -> Self {
+ self.inner.offset = offset;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VertexInputAttributeDescription {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineVertexInputStateCreateInfo.html>"]
+pub struct PipelineVertexInputStateCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: PipelineVertexInputStateCreateFlags,
+ pub vertex_binding_description_count: u32,
+ pub p_vertex_binding_descriptions: *const VertexInputBindingDescription,
+ pub vertex_attribute_description_count: u32,
+ pub p_vertex_attribute_descriptions: *const VertexInputAttributeDescription,
+}
+impl ::std::default::Default for PipelineVertexInputStateCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: PipelineVertexInputStateCreateFlags::default(),
+ vertex_binding_description_count: u32::default(),
+ p_vertex_binding_descriptions: ::std::ptr::null(),
+ vertex_attribute_description_count: u32::default(),
+ p_vertex_attribute_descriptions: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineVertexInputStateCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
+}
+impl PipelineVertexInputStateCreateInfo {
+ pub fn builder<'a>() -> PipelineVertexInputStateCreateInfoBuilder<'a> {
+ PipelineVertexInputStateCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineVertexInputStateCreateInfoBuilder<'a> {
+ inner: PipelineVertexInputStateCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsPipelineVertexInputStateCreateInfo {}
+impl<'a> ::std::ops::Deref for PipelineVertexInputStateCreateInfoBuilder<'a> {
+ type Target = PipelineVertexInputStateCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineVertexInputStateCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineVertexInputStateCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: PipelineVertexInputStateCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn vertex_binding_descriptions(
+ mut self,
+ vertex_binding_descriptions: &'a [VertexInputBindingDescription],
+ ) -> Self {
+ self.inner.vertex_binding_description_count = vertex_binding_descriptions.len() as _;
+ self.inner.p_vertex_binding_descriptions = vertex_binding_descriptions.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn vertex_attribute_descriptions(
+ mut self,
+ vertex_attribute_descriptions: &'a [VertexInputAttributeDescription],
+ ) -> Self {
+ self.inner.vertex_attribute_description_count = vertex_attribute_descriptions.len() as _;
+ self.inner.p_vertex_attribute_descriptions = vertex_attribute_descriptions.as_ptr();
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsPipelineVertexInputStateCreateInfo>(
+ mut self,
+ next: &'a mut T,
+ ) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineVertexInputStateCreateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineInputAssemblyStateCreateInfo.html>"]
+pub struct PipelineInputAssemblyStateCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: PipelineInputAssemblyStateCreateFlags,
+ pub topology: PrimitiveTopology,
+ pub primitive_restart_enable: Bool32,
+}
+impl ::std::default::Default for PipelineInputAssemblyStateCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: PipelineInputAssemblyStateCreateFlags::default(),
+ topology: PrimitiveTopology::default(),
+ primitive_restart_enable: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineInputAssemblyStateCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
+}
+impl PipelineInputAssemblyStateCreateInfo {
+ pub fn builder<'a>() -> PipelineInputAssemblyStateCreateInfoBuilder<'a> {
+ PipelineInputAssemblyStateCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineInputAssemblyStateCreateInfoBuilder<'a> {
+ inner: PipelineInputAssemblyStateCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PipelineInputAssemblyStateCreateInfoBuilder<'a> {
+ type Target = PipelineInputAssemblyStateCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineInputAssemblyStateCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineInputAssemblyStateCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: PipelineInputAssemblyStateCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn topology(mut self, topology: PrimitiveTopology) -> Self {
+ self.inner.topology = topology;
+ self
+ }
+ #[inline]
+ pub fn primitive_restart_enable(mut self, primitive_restart_enable: bool) -> Self {
+ self.inner.primitive_restart_enable = primitive_restart_enable.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineInputAssemblyStateCreateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineTessellationStateCreateInfo.html>"]
+pub struct PipelineTessellationStateCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: PipelineTessellationStateCreateFlags,
+ pub patch_control_points: u32,
+}
+impl ::std::default::Default for PipelineTessellationStateCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: PipelineTessellationStateCreateFlags::default(),
+ patch_control_points: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineTessellationStateCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_TESSELLATION_STATE_CREATE_INFO;
+}
+impl PipelineTessellationStateCreateInfo {
+ pub fn builder<'a>() -> PipelineTessellationStateCreateInfoBuilder<'a> {
+ PipelineTessellationStateCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineTessellationStateCreateInfoBuilder<'a> {
+ inner: PipelineTessellationStateCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsPipelineTessellationStateCreateInfo {}
+impl<'a> ::std::ops::Deref for PipelineTessellationStateCreateInfoBuilder<'a> {
+ type Target = PipelineTessellationStateCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineTessellationStateCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineTessellationStateCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: PipelineTessellationStateCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn patch_control_points(mut self, patch_control_points: u32) -> Self {
+ self.inner.patch_control_points = patch_control_points;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsPipelineTessellationStateCreateInfo>(
+ mut self,
+ next: &'a mut T,
+ ) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineTessellationStateCreateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineViewportStateCreateInfo.html>"]
+pub struct PipelineViewportStateCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: PipelineViewportStateCreateFlags,
+ pub viewport_count: u32,
+ pub p_viewports: *const Viewport,
+ pub scissor_count: u32,
+ pub p_scissors: *const Rect2D,
+}
+impl ::std::default::Default for PipelineViewportStateCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: PipelineViewportStateCreateFlags::default(),
+ viewport_count: u32::default(),
+ p_viewports: ::std::ptr::null(),
+ scissor_count: u32::default(),
+ p_scissors: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineViewportStateCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_VIEWPORT_STATE_CREATE_INFO;
+}
+impl PipelineViewportStateCreateInfo {
+ pub fn builder<'a>() -> PipelineViewportStateCreateInfoBuilder<'a> {
+ PipelineViewportStateCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineViewportStateCreateInfoBuilder<'a> {
+ inner: PipelineViewportStateCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsPipelineViewportStateCreateInfo {}
+impl<'a> ::std::ops::Deref for PipelineViewportStateCreateInfoBuilder<'a> {
+ type Target = PipelineViewportStateCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineViewportStateCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineViewportStateCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: PipelineViewportStateCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn viewport_count(mut self, viewport_count: u32) -> Self {
+ self.inner.viewport_count = viewport_count;
+ self
+ }
+ #[inline]
+ pub fn viewports(mut self, viewports: &'a [Viewport]) -> Self {
+ self.inner.viewport_count = viewports.len() as _;
+ self.inner.p_viewports = viewports.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn scissor_count(mut self, scissor_count: u32) -> Self {
+ self.inner.scissor_count = scissor_count;
+ self
+ }
+ #[inline]
+ pub fn scissors(mut self, scissors: &'a [Rect2D]) -> Self {
+ self.inner.scissor_count = scissors.len() as _;
+ self.inner.p_scissors = scissors.as_ptr();
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsPipelineViewportStateCreateInfo>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineViewportStateCreateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRasterizationStateCreateInfo.html>"]
+pub struct PipelineRasterizationStateCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: PipelineRasterizationStateCreateFlags,
+ pub depth_clamp_enable: Bool32,
+ pub rasterizer_discard_enable: Bool32,
+ pub polygon_mode: PolygonMode,
+ pub cull_mode: CullModeFlags,
+ pub front_face: FrontFace,
+ pub depth_bias_enable: Bool32,
+ pub depth_bias_constant_factor: f32,
+ pub depth_bias_clamp: f32,
+ pub depth_bias_slope_factor: f32,
+ pub line_width: f32,
+}
+impl ::std::default::Default for PipelineRasterizationStateCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: PipelineRasterizationStateCreateFlags::default(),
+ depth_clamp_enable: Bool32::default(),
+ rasterizer_discard_enable: Bool32::default(),
+ polygon_mode: PolygonMode::default(),
+ cull_mode: CullModeFlags::default(),
+ front_face: FrontFace::default(),
+ depth_bias_enable: Bool32::default(),
+ depth_bias_constant_factor: f32::default(),
+ depth_bias_clamp: f32::default(),
+ depth_bias_slope_factor: f32::default(),
+ line_width: f32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineRasterizationStateCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
+}
+impl PipelineRasterizationStateCreateInfo {
+ pub fn builder<'a>() -> PipelineRasterizationStateCreateInfoBuilder<'a> {
+ PipelineRasterizationStateCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineRasterizationStateCreateInfoBuilder<'a> {
+ inner: PipelineRasterizationStateCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsPipelineRasterizationStateCreateInfo {}
+impl<'a> ::std::ops::Deref for PipelineRasterizationStateCreateInfoBuilder<'a> {
+ type Target = PipelineRasterizationStateCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineRasterizationStateCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineRasterizationStateCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: PipelineRasterizationStateCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn depth_clamp_enable(mut self, depth_clamp_enable: bool) -> Self {
+ self.inner.depth_clamp_enable = depth_clamp_enable.into();
+ self
+ }
+ #[inline]
+ pub fn rasterizer_discard_enable(mut self, rasterizer_discard_enable: bool) -> Self {
+ self.inner.rasterizer_discard_enable = rasterizer_discard_enable.into();
+ self
+ }
+ #[inline]
+ pub fn polygon_mode(mut self, polygon_mode: PolygonMode) -> Self {
+ self.inner.polygon_mode = polygon_mode;
+ self
+ }
+ #[inline]
+ pub fn cull_mode(mut self, cull_mode: CullModeFlags) -> Self {
+ self.inner.cull_mode = cull_mode;
+ self
+ }
+ #[inline]
+ pub fn front_face(mut self, front_face: FrontFace) -> Self {
+ self.inner.front_face = front_face;
+ self
+ }
+ #[inline]
+ pub fn depth_bias_enable(mut self, depth_bias_enable: bool) -> Self {
+ self.inner.depth_bias_enable = depth_bias_enable.into();
+ self
+ }
+ #[inline]
+ pub fn depth_bias_constant_factor(mut self, depth_bias_constant_factor: f32) -> Self {
+ self.inner.depth_bias_constant_factor = depth_bias_constant_factor;
+ self
+ }
+ #[inline]
+ pub fn depth_bias_clamp(mut self, depth_bias_clamp: f32) -> Self {
+ self.inner.depth_bias_clamp = depth_bias_clamp;
+ self
+ }
+ #[inline]
+ pub fn depth_bias_slope_factor(mut self, depth_bias_slope_factor: f32) -> Self {
+ self.inner.depth_bias_slope_factor = depth_bias_slope_factor;
+ self
+ }
+ #[inline]
+ pub fn line_width(mut self, line_width: f32) -> Self {
+ self.inner.line_width = line_width;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsPipelineRasterizationStateCreateInfo>(
+ mut self,
+ next: &'a mut T,
+ ) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineRasterizationStateCreateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineMultisampleStateCreateInfo.html>"]
+pub struct PipelineMultisampleStateCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: PipelineMultisampleStateCreateFlags,
+ pub rasterization_samples: SampleCountFlags,
+ pub sample_shading_enable: Bool32,
+ pub min_sample_shading: f32,
+ pub p_sample_mask: *const SampleMask,
+ pub alpha_to_coverage_enable: Bool32,
+ pub alpha_to_one_enable: Bool32,
+}
+impl ::std::default::Default for PipelineMultisampleStateCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: PipelineMultisampleStateCreateFlags::default(),
+ rasterization_samples: SampleCountFlags::default(),
+ sample_shading_enable: Bool32::default(),
+ min_sample_shading: f32::default(),
+ p_sample_mask: ::std::ptr::null(),
+ alpha_to_coverage_enable: Bool32::default(),
+ alpha_to_one_enable: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineMultisampleStateCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
+}
+impl PipelineMultisampleStateCreateInfo {
+ pub fn builder<'a>() -> PipelineMultisampleStateCreateInfoBuilder<'a> {
+ PipelineMultisampleStateCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// --- Builder plumbing for PipelineMultisampleStateCreateInfo (generated) ---
+// `#[repr(transparent)]` guarantees the builder has the exact layout of the raw
+// struct, so a reference to the builder can be handed to Vulkan entry points
+// directly (callers reach the raw struct through the Deref impls below).
+#[repr(transparent)]
+pub struct PipelineMultisampleStateCreateInfoBuilder<'a> {
+    inner: PipelineMultisampleStateCreateInfo,
+    // 'a ties the builder to any borrowed data stored as raw pointers (e.g. the
+    // sample-mask slice), so those borrows cannot be dropped while the builder lives.
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait for p_next extension structs accepted by `push_next` below.
+// SAFETY: implementors must be structs that are legal in the pNext chain of
+// VkPipelineMultisampleStateCreateInfo, laid out with s_type/p_next at the front.
+pub unsafe trait ExtendsPipelineMultisampleStateCreateInfo {}
+impl<'a> ::std::ops::Deref for PipelineMultisampleStateCreateInfoBuilder<'a> {
+    type Target = PipelineMultisampleStateCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineMultisampleStateCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters: each consumes the builder and returns it.
+impl<'a> PipelineMultisampleStateCreateInfoBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: PipelineMultisampleStateCreateFlags) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn rasterization_samples(mut self, rasterization_samples: SampleCountFlags) -> Self {
+        self.inner.rasterization_samples = rasterization_samples;
+        self
+    }
+    #[inline]
+    pub fn sample_shading_enable(mut self, sample_shading_enable: bool) -> Self {
+        // bool -> Bool32 (VK_TRUE/VK_FALSE) conversion via Into.
+        self.inner.sample_shading_enable = sample_shading_enable.into();
+        self
+    }
+    #[inline]
+    pub fn min_sample_shading(mut self, min_sample_shading: f32) -> Self {
+        self.inner.min_sample_shading = min_sample_shading;
+        self
+    }
+    #[doc = r" Sets `p_sample_mask` to `null` if the slice is empty. The mask will"]
+    #[doc = r" be treated as if it has all bits set to `1`."]
+    #[doc = r""]
+    #[doc = r" See <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineMultisampleStateCreateInfo.html#_description>"]
+    #[doc = r" for more details."]
+    #[inline]
+    pub fn sample_mask(mut self, sample_mask: &'a [SampleMask]) -> Self {
+        // Note: there is no sample_mask_count field in the C struct; the mask
+        // length is implied by rasterization_samples, hence the null-if-empty rule.
+        self.inner.p_sample_mask = if sample_mask.is_empty() {
+            std::ptr::null()
+        } else {
+            sample_mask.as_ptr()
+        };
+        self
+    }
+    #[inline]
+    pub fn alpha_to_coverage_enable(mut self, alpha_to_coverage_enable: bool) -> Self {
+        self.inner.alpha_to_coverage_enable = alpha_to_coverage_enable.into();
+        self
+    }
+    #[inline]
+    pub fn alpha_to_one_enable(mut self, alpha_to_one_enable: bool) -> Self {
+        self.inner.alpha_to_one_enable = alpha_to_one_enable.into();
+        self
+    }
+    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+    #[doc = r" valid extension structs can be pushed into the chain."]
+    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+    #[doc = r" chain will look like `A -> D -> B -> C`."]
+    pub fn push_next<T: ExtendsPipelineMultisampleStateCreateInfo>(
+        mut self,
+        next: &'a mut T,
+    ) -> Self {
+        unsafe {
+            let next_ptr = <*const T>::cast(next);
+            // Walk `next`'s own pNext chain to its tail, then splice the existing
+            // chain after it: root -> next .. next_tail -> old chain.
+            let last_next = ptr_chain_iter(next).last().unwrap();
+            (*last_next).p_next = self.inner.p_next as _;
+            self.inner.p_next = next_ptr;
+        }
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineMultisampleStateCreateInfo {
+        self.inner
+    }
+}
+// Per-attachment blend state; plain-old-data (no s_type/p_next), hence the
+// derived Default instead of a hand-written impl.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineColorBlendAttachmentState.html>"]
+pub struct PipelineColorBlendAttachmentState {
+    pub blend_enable: Bool32,
+    pub src_color_blend_factor: BlendFactor,
+    pub dst_color_blend_factor: BlendFactor,
+    pub color_blend_op: BlendOp,
+    pub src_alpha_blend_factor: BlendFactor,
+    pub dst_alpha_blend_factor: BlendFactor,
+    pub alpha_blend_op: BlendOp,
+    pub color_write_mask: ColorComponentFlags,
+}
+impl PipelineColorBlendAttachmentState {
+    // Entry point to the chained-setter builder; starts from Default::default().
+    pub fn builder<'a>() -> PipelineColorBlendAttachmentStateBuilder<'a> {
+        PipelineColorBlendAttachmentStateBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-identical wrapper so &Builder can stand in for &raw-struct via Deref.
+#[repr(transparent)]
+pub struct PipelineColorBlendAttachmentStateBuilder<'a> {
+    inner: PipelineColorBlendAttachmentState,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PipelineColorBlendAttachmentStateBuilder<'a> {
+    type Target = PipelineColorBlendAttachmentState;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineColorBlendAttachmentStateBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters (no push_next: this struct has no pNext chain).
+impl<'a> PipelineColorBlendAttachmentStateBuilder<'a> {
+    #[inline]
+    pub fn blend_enable(mut self, blend_enable: bool) -> Self {
+        self.inner.blend_enable = blend_enable.into();
+        self
+    }
+    #[inline]
+    pub fn src_color_blend_factor(mut self, src_color_blend_factor: BlendFactor) -> Self {
+        self.inner.src_color_blend_factor = src_color_blend_factor;
+        self
+    }
+    #[inline]
+    pub fn dst_color_blend_factor(mut self, dst_color_blend_factor: BlendFactor) -> Self {
+        self.inner.dst_color_blend_factor = dst_color_blend_factor;
+        self
+    }
+    #[inline]
+    pub fn color_blend_op(mut self, color_blend_op: BlendOp) -> Self {
+        self.inner.color_blend_op = color_blend_op;
+        self
+    }
+    #[inline]
+    pub fn src_alpha_blend_factor(mut self, src_alpha_blend_factor: BlendFactor) -> Self {
+        self.inner.src_alpha_blend_factor = src_alpha_blend_factor;
+        self
+    }
+    #[inline]
+    pub fn dst_alpha_blend_factor(mut self, dst_alpha_blend_factor: BlendFactor) -> Self {
+        self.inner.dst_alpha_blend_factor = dst_alpha_blend_factor;
+        self
+    }
+    #[inline]
+    pub fn alpha_blend_op(mut self, alpha_blend_op: BlendOp) -> Self {
+        self.inner.alpha_blend_op = alpha_blend_op;
+        self
+    }
+    #[inline]
+    pub fn color_write_mask(mut self, color_write_mask: ColorComponentFlags) -> Self {
+        self.inner.color_write_mask = color_write_mask;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineColorBlendAttachmentState {
+        self.inner
+    }
+}
+// C-layout mirror of VkPipelineColorBlendStateCreateInfo. The count/pointer
+// pair (attachment_count, p_attachments) is kept in sync by the builder's
+// `attachments` setter.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineColorBlendStateCreateInfo.html>"]
+pub struct PipelineColorBlendStateCreateInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub flags: PipelineColorBlendStateCreateFlags,
+    pub logic_op_enable: Bool32,
+    pub logic_op: LogicOp,
+    pub attachment_count: u32,
+    pub p_attachments: *const PipelineColorBlendAttachmentState,
+    pub blend_constants: [f32; 4],
+}
+// Zero-initialized defaults with the correct sType pre-filled, so a
+// default-constructed struct is immediately valid as a chain root.
+impl ::std::default::Default for PipelineColorBlendStateCreateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            flags: PipelineColorBlendStateCreateFlags::default(),
+            logic_op_enable: Bool32::default(),
+            logic_op: LogicOp::default(),
+            attachment_count: u32::default(),
+            p_attachments: ::std::ptr::null(),
+            // Safe literal instead of `unsafe { ::std::mem::zeroed() }`: +0.0f32
+            // is the all-zero bit pattern, so the value is bit-identical and no
+            // `unsafe` block is needed for a plain float array.
+            blend_constants: [0.0; 4],
+        }
+    }
+}
+// Associates the struct with its VkStructureType tag (used by Default above).
+// SAFETY: the constant must match the sType Vulkan expects for this struct.
+unsafe impl TaggedStructure for PipelineColorBlendStateCreateInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
+}
+impl PipelineColorBlendStateCreateInfo {
+    // Entry point to the chained-setter builder; starts from Default::default().
+    pub fn builder<'a>() -> PipelineColorBlendStateCreateInfoBuilder<'a> {
+        PipelineColorBlendStateCreateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-identical wrapper; 'a ties it to the borrowed attachments slice.
+#[repr(transparent)]
+pub struct PipelineColorBlendStateCreateInfoBuilder<'a> {
+    inner: PipelineColorBlendStateCreateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait for p_next extension structs accepted by `push_next` below.
+// SAFETY: implementors must be valid pNext extensions of this create-info.
+pub unsafe trait ExtendsPipelineColorBlendStateCreateInfo {}
+impl<'a> ::std::ops::Deref for PipelineColorBlendStateCreateInfoBuilder<'a> {
+    type Target = PipelineColorBlendStateCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineColorBlendStateCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters.
+impl<'a> PipelineColorBlendStateCreateInfoBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: PipelineColorBlendStateCreateFlags) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn logic_op_enable(mut self, logic_op_enable: bool) -> Self {
+        self.inner.logic_op_enable = logic_op_enable.into();
+        self
+    }
+    #[inline]
+    pub fn logic_op(mut self, logic_op: LogicOp) -> Self {
+        self.inner.logic_op = logic_op;
+        self
+    }
+    #[inline]
+    pub fn attachments(mut self, attachments: &'a [PipelineColorBlendAttachmentState]) -> Self {
+        // Keeps attachment_count and p_attachments consistent from one slice.
+        self.inner.attachment_count = attachments.len() as _;
+        self.inner.p_attachments = attachments.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn blend_constants(mut self, blend_constants: [f32; 4]) -> Self {
+        self.inner.blend_constants = blend_constants;
+        self
+    }
+    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+    #[doc = r" valid extension structs can be pushed into the chain."]
+    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+    #[doc = r" chain will look like `A -> D -> B -> C`."]
+    pub fn push_next<T: ExtendsPipelineColorBlendStateCreateInfo>(
+        mut self,
+        next: &'a mut T,
+    ) -> Self {
+        unsafe {
+            let next_ptr = <*const T>::cast(next);
+            // Splice the existing chain after the tail of `next`'s own chain.
+            let last_next = ptr_chain_iter(next).last().unwrap();
+            (*last_next).p_next = self.inner.p_next as _;
+            self.inner.p_next = next_ptr;
+        }
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineColorBlendStateCreateInfo {
+        self.inner
+    }
+}
+// C-layout mirror of VkPipelineDynamicStateCreateInfo; the count/pointer pair
+// is filled together by the builder's `dynamic_states` setter.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineDynamicStateCreateInfo.html>"]
+pub struct PipelineDynamicStateCreateInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub flags: PipelineDynamicStateCreateFlags,
+    pub dynamic_state_count: u32,
+    pub p_dynamic_states: *const DynamicState,
+}
+// Zeroed defaults with the correct sType pre-filled.
+impl ::std::default::Default for PipelineDynamicStateCreateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            flags: PipelineDynamicStateCreateFlags::default(),
+            dynamic_state_count: u32::default(),
+            p_dynamic_states: ::std::ptr::null(),
+        }
+    }
+}
+// SAFETY: constant must match the sType Vulkan expects for this struct.
+unsafe impl TaggedStructure for PipelineDynamicStateCreateInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_DYNAMIC_STATE_CREATE_INFO;
+}
+impl PipelineDynamicStateCreateInfo {
+    // Entry point to the chained-setter builder.
+    pub fn builder<'a>() -> PipelineDynamicStateCreateInfoBuilder<'a> {
+        PipelineDynamicStateCreateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-identical wrapper; 'a ties it to the borrowed dynamic-states slice.
+#[repr(transparent)]
+pub struct PipelineDynamicStateCreateInfoBuilder<'a> {
+    inner: PipelineDynamicStateCreateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PipelineDynamicStateCreateInfoBuilder<'a> {
+    type Target = PipelineDynamicStateCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineDynamicStateCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters (no push_next: no extension trait declared here).
+impl<'a> PipelineDynamicStateCreateInfoBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: PipelineDynamicStateCreateFlags) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn dynamic_states(mut self, dynamic_states: &'a [DynamicState]) -> Self {
+        self.inner.dynamic_state_count = dynamic_states.len() as _;
+        self.inner.p_dynamic_states = dynamic_states.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineDynamicStateCreateInfo {
+        self.inner
+    }
+}
+// Plain-old-data stencil state (no s_type/p_next), used twice inside
+// PipelineDepthStencilStateCreateInfo (front/back faces).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkStencilOpState.html>"]
+pub struct StencilOpState {
+    pub fail_op: StencilOp,
+    pub pass_op: StencilOp,
+    pub depth_fail_op: StencilOp,
+    pub compare_op: CompareOp,
+    pub compare_mask: u32,
+    pub write_mask: u32,
+    pub reference: u32,
+}
+impl StencilOpState {
+    // Entry point to the chained-setter builder; starts from Default::default().
+    pub fn builder<'a>() -> StencilOpStateBuilder<'a> {
+        StencilOpStateBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-identical wrapper so &Builder can stand in for &StencilOpState.
+#[repr(transparent)]
+pub struct StencilOpStateBuilder<'a> {
+    inner: StencilOpState,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for StencilOpStateBuilder<'a> {
+    type Target = StencilOpState;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for StencilOpStateBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters.
+impl<'a> StencilOpStateBuilder<'a> {
+    #[inline]
+    pub fn fail_op(mut self, fail_op: StencilOp) -> Self {
+        self.inner.fail_op = fail_op;
+        self
+    }
+    #[inline]
+    pub fn pass_op(mut self, pass_op: StencilOp) -> Self {
+        self.inner.pass_op = pass_op;
+        self
+    }
+    #[inline]
+    pub fn depth_fail_op(mut self, depth_fail_op: StencilOp) -> Self {
+        self.inner.depth_fail_op = depth_fail_op;
+        self
+    }
+    #[inline]
+    pub fn compare_op(mut self, compare_op: CompareOp) -> Self {
+        self.inner.compare_op = compare_op;
+        self
+    }
+    #[inline]
+    pub fn compare_mask(mut self, compare_mask: u32) -> Self {
+        self.inner.compare_mask = compare_mask;
+        self
+    }
+    #[inline]
+    pub fn write_mask(mut self, write_mask: u32) -> Self {
+        self.inner.write_mask = write_mask;
+        self
+    }
+    #[inline]
+    pub fn reference(mut self, reference: u32) -> Self {
+        self.inner.reference = reference;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> StencilOpState {
+        self.inner
+    }
+}
+// C-layout mirror of VkPipelineDepthStencilStateCreateInfo; embeds two
+// StencilOpState values (front/back) by value rather than by pointer.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineDepthStencilStateCreateInfo.html>"]
+pub struct PipelineDepthStencilStateCreateInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub flags: PipelineDepthStencilStateCreateFlags,
+    pub depth_test_enable: Bool32,
+    pub depth_write_enable: Bool32,
+    pub depth_compare_op: CompareOp,
+    pub depth_bounds_test_enable: Bool32,
+    pub stencil_test_enable: Bool32,
+    pub front: StencilOpState,
+    pub back: StencilOpState,
+    pub min_depth_bounds: f32,
+    pub max_depth_bounds: f32,
+}
+// Zeroed defaults with the correct sType pre-filled.
+impl ::std::default::Default for PipelineDepthStencilStateCreateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            flags: PipelineDepthStencilStateCreateFlags::default(),
+            depth_test_enable: Bool32::default(),
+            depth_write_enable: Bool32::default(),
+            depth_compare_op: CompareOp::default(),
+            depth_bounds_test_enable: Bool32::default(),
+            stencil_test_enable: Bool32::default(),
+            front: StencilOpState::default(),
+            back: StencilOpState::default(),
+            min_depth_bounds: f32::default(),
+            max_depth_bounds: f32::default(),
+        }
+    }
+}
+// SAFETY: constant must match the sType Vulkan expects for this struct.
+unsafe impl TaggedStructure for PipelineDepthStencilStateCreateInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
+}
+impl PipelineDepthStencilStateCreateInfo {
+    // Entry point to the chained-setter builder.
+    pub fn builder<'a>() -> PipelineDepthStencilStateCreateInfoBuilder<'a> {
+        PipelineDepthStencilStateCreateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-identical wrapper over the raw struct.
+#[repr(transparent)]
+pub struct PipelineDepthStencilStateCreateInfoBuilder<'a> {
+    inner: PipelineDepthStencilStateCreateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PipelineDepthStencilStateCreateInfoBuilder<'a> {
+    type Target = PipelineDepthStencilStateCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineDepthStencilStateCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters.
+impl<'a> PipelineDepthStencilStateCreateInfoBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: PipelineDepthStencilStateCreateFlags) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn depth_test_enable(mut self, depth_test_enable: bool) -> Self {
+        self.inner.depth_test_enable = depth_test_enable.into();
+        self
+    }
+    #[inline]
+    pub fn depth_write_enable(mut self, depth_write_enable: bool) -> Self {
+        self.inner.depth_write_enable = depth_write_enable.into();
+        self
+    }
+    #[inline]
+    pub fn depth_compare_op(mut self, depth_compare_op: CompareOp) -> Self {
+        self.inner.depth_compare_op = depth_compare_op;
+        self
+    }
+    #[inline]
+    pub fn depth_bounds_test_enable(mut self, depth_bounds_test_enable: bool) -> Self {
+        self.inner.depth_bounds_test_enable = depth_bounds_test_enable.into();
+        self
+    }
+    #[inline]
+    pub fn stencil_test_enable(mut self, stencil_test_enable: bool) -> Self {
+        self.inner.stencil_test_enable = stencil_test_enable.into();
+        self
+    }
+    #[inline]
+    pub fn front(mut self, front: StencilOpState) -> Self {
+        self.inner.front = front;
+        self
+    }
+    #[inline]
+    pub fn back(mut self, back: StencilOpState) -> Self {
+        self.inner.back = back;
+        self
+    }
+    #[inline]
+    pub fn min_depth_bounds(mut self, min_depth_bounds: f32) -> Self {
+        self.inner.min_depth_bounds = min_depth_bounds;
+        self
+    }
+    #[inline]
+    pub fn max_depth_bounds(mut self, max_depth_bounds: f32) -> Self {
+        self.inner.max_depth_bounds = max_depth_bounds;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineDepthStencilStateCreateInfo {
+        self.inner
+    }
+}
+// C-layout mirror of VkGraphicsPipelineCreateInfo: the root struct passed to
+// vkCreateGraphicsPipelines. All p_* state pointers default to null; the
+// builder setters store borrows of the sub-state create-infos as raw pointers.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkGraphicsPipelineCreateInfo.html>"]
+pub struct GraphicsPipelineCreateInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub flags: PipelineCreateFlags,
+    pub stage_count: u32,
+    pub p_stages: *const PipelineShaderStageCreateInfo,
+    pub p_vertex_input_state: *const PipelineVertexInputStateCreateInfo,
+    pub p_input_assembly_state: *const PipelineInputAssemblyStateCreateInfo,
+    pub p_tessellation_state: *const PipelineTessellationStateCreateInfo,
+    pub p_viewport_state: *const PipelineViewportStateCreateInfo,
+    pub p_rasterization_state: *const PipelineRasterizationStateCreateInfo,
+    pub p_multisample_state: *const PipelineMultisampleStateCreateInfo,
+    pub p_depth_stencil_state: *const PipelineDepthStencilStateCreateInfo,
+    pub p_color_blend_state: *const PipelineColorBlendStateCreateInfo,
+    pub p_dynamic_state: *const PipelineDynamicStateCreateInfo,
+    pub layout: PipelineLayout,
+    pub render_pass: RenderPass,
+    pub subpass: u32,
+    pub base_pipeline_handle: Pipeline,
+    pub base_pipeline_index: i32,
+}
+// Zeroed/null defaults with the correct sType pre-filled.
+impl ::std::default::Default for GraphicsPipelineCreateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            flags: PipelineCreateFlags::default(),
+            stage_count: u32::default(),
+            p_stages: ::std::ptr::null(),
+            p_vertex_input_state: ::std::ptr::null(),
+            p_input_assembly_state: ::std::ptr::null(),
+            p_tessellation_state: ::std::ptr::null(),
+            p_viewport_state: ::std::ptr::null(),
+            p_rasterization_state: ::std::ptr::null(),
+            p_multisample_state: ::std::ptr::null(),
+            p_depth_stencil_state: ::std::ptr::null(),
+            p_color_blend_state: ::std::ptr::null(),
+            p_dynamic_state: ::std::ptr::null(),
+            layout: PipelineLayout::default(),
+            render_pass: RenderPass::default(),
+            subpass: u32::default(),
+            base_pipeline_handle: Pipeline::default(),
+            base_pipeline_index: i32::default(),
+        }
+    }
+}
+// SAFETY: constant must match the sType Vulkan expects for this struct.
+unsafe impl TaggedStructure for GraphicsPipelineCreateInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::GRAPHICS_PIPELINE_CREATE_INFO;
+}
+impl GraphicsPipelineCreateInfo {
+    // Entry point to the chained-setter builder.
+    pub fn builder<'a>() -> GraphicsPipelineCreateInfoBuilder<'a> {
+        GraphicsPipelineCreateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-identical wrapper; 'a covers every borrowed sub-state and slice.
+#[repr(transparent)]
+pub struct GraphicsPipelineCreateInfoBuilder<'a> {
+    inner: GraphicsPipelineCreateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait for p_next extension structs accepted by `push_next` below.
+// SAFETY: implementors must be valid pNext extensions of this create-info.
+pub unsafe trait ExtendsGraphicsPipelineCreateInfo {}
+impl<'a> ::std::ops::Deref for GraphicsPipelineCreateInfoBuilder<'a> {
+    type Target = GraphicsPipelineCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for GraphicsPipelineCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters; reference parameters decay to raw pointers in `inner`.
+impl<'a> GraphicsPipelineCreateInfoBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: PipelineCreateFlags) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn stages(mut self, stages: &'a [PipelineShaderStageCreateInfo]) -> Self {
+        // Keeps stage_count and p_stages consistent from one slice.
+        self.inner.stage_count = stages.len() as _;
+        self.inner.p_stages = stages.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn vertex_input_state(
+        mut self,
+        vertex_input_state: &'a PipelineVertexInputStateCreateInfo,
+    ) -> Self {
+        self.inner.p_vertex_input_state = vertex_input_state;
+        self
+    }
+    #[inline]
+    pub fn input_assembly_state(
+        mut self,
+        input_assembly_state: &'a PipelineInputAssemblyStateCreateInfo,
+    ) -> Self {
+        self.inner.p_input_assembly_state = input_assembly_state;
+        self
+    }
+    #[inline]
+    pub fn tessellation_state(
+        mut self,
+        tessellation_state: &'a PipelineTessellationStateCreateInfo,
+    ) -> Self {
+        self.inner.p_tessellation_state = tessellation_state;
+        self
+    }
+    #[inline]
+    pub fn viewport_state(mut self, viewport_state: &'a PipelineViewportStateCreateInfo) -> Self {
+        self.inner.p_viewport_state = viewport_state;
+        self
+    }
+    #[inline]
+    pub fn rasterization_state(
+        mut self,
+        rasterization_state: &'a PipelineRasterizationStateCreateInfo,
+    ) -> Self {
+        self.inner.p_rasterization_state = rasterization_state;
+        self
+    }
+    #[inline]
+    pub fn multisample_state(
+        mut self,
+        multisample_state: &'a PipelineMultisampleStateCreateInfo,
+    ) -> Self {
+        self.inner.p_multisample_state = multisample_state;
+        self
+    }
+    #[inline]
+    pub fn depth_stencil_state(
+        mut self,
+        depth_stencil_state: &'a PipelineDepthStencilStateCreateInfo,
+    ) -> Self {
+        self.inner.p_depth_stencil_state = depth_stencil_state;
+        self
+    }
+    #[inline]
+    pub fn color_blend_state(
+        mut self,
+        color_blend_state: &'a PipelineColorBlendStateCreateInfo,
+    ) -> Self {
+        self.inner.p_color_blend_state = color_blend_state;
+        self
+    }
+    #[inline]
+    pub fn dynamic_state(mut self, dynamic_state: &'a PipelineDynamicStateCreateInfo) -> Self {
+        self.inner.p_dynamic_state = dynamic_state;
+        self
+    }
+    #[inline]
+    pub fn layout(mut self, layout: PipelineLayout) -> Self {
+        self.inner.layout = layout;
+        self
+    }
+    #[inline]
+    pub fn render_pass(mut self, render_pass: RenderPass) -> Self {
+        self.inner.render_pass = render_pass;
+        self
+    }
+    #[inline]
+    pub fn subpass(mut self, subpass: u32) -> Self {
+        self.inner.subpass = subpass;
+        self
+    }
+    #[inline]
+    pub fn base_pipeline_handle(mut self, base_pipeline_handle: Pipeline) -> Self {
+        self.inner.base_pipeline_handle = base_pipeline_handle;
+        self
+    }
+    #[inline]
+    pub fn base_pipeline_index(mut self, base_pipeline_index: i32) -> Self {
+        self.inner.base_pipeline_index = base_pipeline_index;
+        self
+    }
+    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+    #[doc = r" valid extension structs can be pushed into the chain."]
+    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+    #[doc = r" chain will look like `A -> D -> B -> C`."]
+    pub fn push_next<T: ExtendsGraphicsPipelineCreateInfo>(mut self, next: &'a mut T) -> Self {
+        unsafe {
+            let next_ptr = <*const T>::cast(next);
+            // Splice the existing chain after the tail of `next`'s own chain.
+            let last_next = ptr_chain_iter(next).last().unwrap();
+            (*last_next).p_next = self.inner.p_next as _;
+            self.inner.p_next = next_ptr;
+        }
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> GraphicsPipelineCreateInfo {
+        self.inner
+    }
+}
+// C-layout mirror of VkPipelineCacheCreateInfo; carries an opaque byte blob
+// (size + untyped pointer) of previously serialized cache data.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCacheCreateInfo.html>"]
+pub struct PipelineCacheCreateInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub flags: PipelineCacheCreateFlags,
+    pub initial_data_size: usize,
+    pub p_initial_data: *const c_void,
+}
+// Zeroed defaults with the correct sType pre-filled.
+impl ::std::default::Default for PipelineCacheCreateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            flags: PipelineCacheCreateFlags::default(),
+            initial_data_size: usize::default(),
+            p_initial_data: ::std::ptr::null(),
+        }
+    }
+}
+// SAFETY: constant must match the sType Vulkan expects for this struct.
+unsafe impl TaggedStructure for PipelineCacheCreateInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_CACHE_CREATE_INFO;
+}
+impl PipelineCacheCreateInfo {
+    // Entry point to the chained-setter builder.
+    pub fn builder<'a>() -> PipelineCacheCreateInfoBuilder<'a> {
+        PipelineCacheCreateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-identical wrapper; 'a ties it to the borrowed initial-data slice.
+#[repr(transparent)]
+pub struct PipelineCacheCreateInfoBuilder<'a> {
+    inner: PipelineCacheCreateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PipelineCacheCreateInfoBuilder<'a> {
+    type Target = PipelineCacheCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineCacheCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters.
+impl<'a> PipelineCacheCreateInfoBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: PipelineCacheCreateFlags) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn initial_data(mut self, initial_data: &'a [u8]) -> Self {
+        // Size and pointer are filled together; `.cast()` erases &[u8] to *const c_void.
+        self.inner.initial_data_size = initial_data.len();
+        self.inner.p_initial_data = initial_data.as_ptr().cast();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineCacheCreateInfo {
+        self.inner
+    }
+}
+// C-layout mirror of VkPipelineCacheHeaderVersionOne: the on-disk header of
+// serialized pipeline-cache data. No s_type/p_next — this is a file format
+// header, not a pNext-chainable struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCacheHeaderVersionOne.html>"]
+pub struct PipelineCacheHeaderVersionOne {
+    pub header_size: u32,
+    pub header_version: PipelineCacheHeaderVersion,
+    pub vendor_id: u32,
+    pub device_id: u32,
+    pub pipeline_cache_uuid: [u8; UUID_SIZE],
+}
+// All-zero defaults for the cache-file header.
+impl ::std::default::Default for PipelineCacheHeaderVersionOne {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            header_size: u32::default(),
+            header_version: PipelineCacheHeaderVersion::default(),
+            vendor_id: u32::default(),
+            device_id: u32::default(),
+            // Safe literal instead of `unsafe { ::std::mem::zeroed() }`: a u8
+            // array of zeros is the same value with no unsafe block required.
+            pipeline_cache_uuid: [0; UUID_SIZE],
+        }
+    }
+}
+impl PipelineCacheHeaderVersionOne {
+    // Entry point to the chained-setter builder; starts from Default::default().
+    pub fn builder<'a>() -> PipelineCacheHeaderVersionOneBuilder<'a> {
+        PipelineCacheHeaderVersionOneBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-identical wrapper so &Builder can stand in for the raw header struct.
+#[repr(transparent)]
+pub struct PipelineCacheHeaderVersionOneBuilder<'a> {
+    inner: PipelineCacheHeaderVersionOne,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PipelineCacheHeaderVersionOneBuilder<'a> {
+    type Target = PipelineCacheHeaderVersionOne;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineCacheHeaderVersionOneBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters.
+impl<'a> PipelineCacheHeaderVersionOneBuilder<'a> {
+    #[inline]
+    pub fn header_size(mut self, header_size: u32) -> Self {
+        self.inner.header_size = header_size;
+        self
+    }
+    #[inline]
+    pub fn header_version(mut self, header_version: PipelineCacheHeaderVersion) -> Self {
+        self.inner.header_version = header_version;
+        self
+    }
+    #[inline]
+    pub fn vendor_id(mut self, vendor_id: u32) -> Self {
+        self.inner.vendor_id = vendor_id;
+        self
+    }
+    #[inline]
+    pub fn device_id(mut self, device_id: u32) -> Self {
+        self.inner.device_id = device_id;
+        self
+    }
+    #[inline]
+    pub fn pipeline_cache_uuid(mut self, pipeline_cache_uuid: [u8; UUID_SIZE]) -> Self {
+        self.inner.pipeline_cache_uuid = pipeline_cache_uuid;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineCacheHeaderVersionOne {
+        self.inner
+    }
+}
+// Plain-old-data push-constant range (shader stages + byte offset/size);
+// no s_type/p_next, so Default can be derived.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPushConstantRange.html>"]
+pub struct PushConstantRange {
+    pub stage_flags: ShaderStageFlags,
+    pub offset: u32,
+    pub size: u32,
+}
+impl PushConstantRange {
+    // Entry point to the chained-setter builder; starts from Default::default().
+    pub fn builder<'a>() -> PushConstantRangeBuilder<'a> {
+        PushConstantRangeBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-identical wrapper so &Builder can stand in for &PushConstantRange.
+#[repr(transparent)]
+pub struct PushConstantRangeBuilder<'a> {
+    inner: PushConstantRange,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PushConstantRangeBuilder<'a> {
+    type Target = PushConstantRange;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PushConstantRangeBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters.
+impl<'a> PushConstantRangeBuilder<'a> {
+    #[inline]
+    pub fn stage_flags(mut self, stage_flags: ShaderStageFlags) -> Self {
+        self.inner.stage_flags = stage_flags;
+        self
+    }
+    #[inline]
+    pub fn offset(mut self, offset: u32) -> Self {
+        self.inner.offset = offset;
+        self
+    }
+    #[inline]
+    pub fn size(mut self, size: u32) -> Self {
+        self.inner.size = size;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PushConstantRange {
+        self.inner
+    }
+}
+// C-layout mirror of VkPipelineLayoutCreateInfo; two count/pointer pairs
+// (set layouts, push-constant ranges), each kept in sync by its builder setter.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineLayoutCreateInfo.html>"]
+pub struct PipelineLayoutCreateInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub flags: PipelineLayoutCreateFlags,
+    pub set_layout_count: u32,
+    pub p_set_layouts: *const DescriptorSetLayout,
+    pub push_constant_range_count: u32,
+    pub p_push_constant_ranges: *const PushConstantRange,
+}
+// Zeroed defaults with the correct sType pre-filled.
+impl ::std::default::Default for PipelineLayoutCreateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            flags: PipelineLayoutCreateFlags::default(),
+            set_layout_count: u32::default(),
+            p_set_layouts: ::std::ptr::null(),
+            push_constant_range_count: u32::default(),
+            p_push_constant_ranges: ::std::ptr::null(),
+        }
+    }
+}
+// SAFETY: constant must match the sType Vulkan expects for this struct.
+unsafe impl TaggedStructure for PipelineLayoutCreateInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_LAYOUT_CREATE_INFO;
+}
+impl PipelineLayoutCreateInfo {
+    // Entry point to the chained-setter builder.
+    pub fn builder<'a>() -> PipelineLayoutCreateInfoBuilder<'a> {
+        PipelineLayoutCreateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-identical wrapper; 'a ties it to the borrowed slices.
+#[repr(transparent)]
+pub struct PipelineLayoutCreateInfoBuilder<'a> {
+    inner: PipelineLayoutCreateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PipelineLayoutCreateInfoBuilder<'a> {
+    type Target = PipelineLayoutCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineLayoutCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters.
+impl<'a> PipelineLayoutCreateInfoBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: PipelineLayoutCreateFlags) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn set_layouts(mut self, set_layouts: &'a [DescriptorSetLayout]) -> Self {
+        self.inner.set_layout_count = set_layouts.len() as _;
+        self.inner.p_set_layouts = set_layouts.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn push_constant_ranges(mut self, push_constant_ranges: &'a [PushConstantRange]) -> Self {
+        self.inner.push_constant_range_count = push_constant_ranges.len() as _;
+        self.inner.p_push_constant_ranges = push_constant_ranges.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineLayoutCreateInfo {
+        self.inner
+    }
+}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSamplerCreateInfo.html>"]
// C-layout mirror of VkSamplerCreateInfo. All payload fields are plain values
// (no embedded arrays/pointers besides p_next); booleans use the C-ABI Bool32.
pub struct SamplerCreateInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: SamplerCreateFlags,
    pub mag_filter: Filter,
    pub min_filter: Filter,
    pub mipmap_mode: SamplerMipmapMode,
    pub address_mode_u: SamplerAddressMode,
    pub address_mode_v: SamplerAddressMode,
    pub address_mode_w: SamplerAddressMode,
    pub mip_lod_bias: f32,
    pub anisotropy_enable: Bool32,
    pub max_anisotropy: f32,
    pub compare_enable: Bool32,
    pub compare_op: CompareOp,
    pub min_lod: f32,
    pub max_lod: f32,
    pub border_color: BorderColor,
    pub unnormalized_coordinates: Bool32,
}
impl ::std::default::Default for SamplerCreateInfo {
    #[inline]
    fn default() -> Self {
        // Pre-fills the mandatory s_type tag; every other field takes its
        // type's default.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: SamplerCreateFlags::default(),
            mag_filter: Filter::default(),
            min_filter: Filter::default(),
            mipmap_mode: SamplerMipmapMode::default(),
            address_mode_u: SamplerAddressMode::default(),
            address_mode_v: SamplerAddressMode::default(),
            address_mode_w: SamplerAddressMode::default(),
            mip_lod_bias: f32::default(),
            anisotropy_enable: Bool32::default(),
            max_anisotropy: f32::default(),
            compare_enable: Bool32::default(),
            compare_op: CompareOp::default(),
            min_lod: f32::default(),
            max_lod: f32::default(),
            border_color: BorderColor::default(),
            unnormalized_coordinates: Bool32::default(),
        }
    }
}
unsafe impl TaggedStructure for SamplerCreateInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_CREATE_INFO;
}
impl SamplerCreateInfo {
    pub fn builder<'a>() -> SamplerCreateInfoBuilder<'a> {
        SamplerCreateInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct SamplerCreateInfoBuilder<'a> {
    inner: SamplerCreateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait for structs that may be appended to this struct's p_next chain
// via `push_next` below.
pub unsafe trait ExtendsSamplerCreateInfo {}
impl<'a> ::std::ops::Deref for SamplerCreateInfoBuilder<'a> {
    type Target = SamplerCreateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for SamplerCreateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> SamplerCreateInfoBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: SamplerCreateFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn mag_filter(mut self, mag_filter: Filter) -> Self {
        self.inner.mag_filter = mag_filter;
        self
    }
    #[inline]
    pub fn min_filter(mut self, min_filter: Filter) -> Self {
        self.inner.min_filter = min_filter;
        self
    }
    #[inline]
    pub fn mipmap_mode(mut self, mipmap_mode: SamplerMipmapMode) -> Self {
        self.inner.mipmap_mode = mipmap_mode;
        self
    }
    #[inline]
    pub fn address_mode_u(mut self, address_mode_u: SamplerAddressMode) -> Self {
        self.inner.address_mode_u = address_mode_u;
        self
    }
    #[inline]
    pub fn address_mode_v(mut self, address_mode_v: SamplerAddressMode) -> Self {
        self.inner.address_mode_v = address_mode_v;
        self
    }
    #[inline]
    pub fn address_mode_w(mut self, address_mode_w: SamplerAddressMode) -> Self {
        self.inner.address_mode_w = address_mode_w;
        self
    }
    #[inline]
    pub fn mip_lod_bias(mut self, mip_lod_bias: f32) -> Self {
        self.inner.mip_lod_bias = mip_lod_bias;
        self
    }
    #[inline]
    // Rust `bool` is converted to the FFI Bool32 representation via `into()`.
    pub fn anisotropy_enable(mut self, anisotropy_enable: bool) -> Self {
        self.inner.anisotropy_enable = anisotropy_enable.into();
        self
    }
    #[inline]
    pub fn max_anisotropy(mut self, max_anisotropy: f32) -> Self {
        self.inner.max_anisotropy = max_anisotropy;
        self
    }
    #[inline]
    pub fn compare_enable(mut self, compare_enable: bool) -> Self {
        self.inner.compare_enable = compare_enable.into();
        self
    }
    #[inline]
    pub fn compare_op(mut self, compare_op: CompareOp) -> Self {
        self.inner.compare_op = compare_op;
        self
    }
    #[inline]
    pub fn min_lod(mut self, min_lod: f32) -> Self {
        self.inner.min_lod = min_lod;
        self
    }
    #[inline]
    pub fn max_lod(mut self, max_lod: f32) -> Self {
        self.inner.max_lod = max_lod;
        self
    }
    #[inline]
    pub fn border_color(mut self, border_color: BorderColor) -> Self {
        self.inner.border_color = border_color;
        self
    }
    #[inline]
    pub fn unnormalized_coordinates(mut self, unnormalized_coordinates: bool) -> Self {
        self.inner.unnormalized_coordinates = unnormalized_coordinates.into();
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsSamplerCreateInfo>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*const T>::cast(next);
            // Walk to the end of `next`'s own chain so an already-linked chain
            // is spliced in whole; its tail is pointed at the current chain.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> SamplerCreateInfo {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandPoolCreateInfo.html>"]
// C-layout mirror of VkCommandPoolCreateInfo: flags plus the queue family the
// pool's command buffers will be submitted to.
pub struct CommandPoolCreateInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: CommandPoolCreateFlags,
    pub queue_family_index: u32,
}
impl ::std::default::Default for CommandPoolCreateInfo {
    #[inline]
    fn default() -> Self {
        // Pre-fills the mandatory s_type tag; p_next starts null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: CommandPoolCreateFlags::default(),
            queue_family_index: u32::default(),
        }
    }
}
unsafe impl TaggedStructure for CommandPoolCreateInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::COMMAND_POOL_CREATE_INFO;
}
impl CommandPoolCreateInfo {
    pub fn builder<'a>() -> CommandPoolCreateInfoBuilder<'a> {
        CommandPoolCreateInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct CommandPoolCreateInfoBuilder<'a> {
    inner: CommandPoolCreateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for CommandPoolCreateInfoBuilder<'a> {
    type Target = CommandPoolCreateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for CommandPoolCreateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> CommandPoolCreateInfoBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: CommandPoolCreateFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn queue_family_index(mut self, queue_family_index: u32) -> Self {
        self.inner.queue_family_index = queue_family_index;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> CommandPoolCreateInfo {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandBufferAllocateInfo.html>"]
// C-layout mirror of VkCommandBufferAllocateInfo: which pool to allocate
// from, the buffer level, and how many buffers to allocate.
pub struct CommandBufferAllocateInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub command_pool: CommandPool,
    pub level: CommandBufferLevel,
    pub command_buffer_count: u32,
}
impl ::std::default::Default for CommandBufferAllocateInfo {
    #[inline]
    fn default() -> Self {
        // Pre-fills the mandatory s_type tag; p_next starts null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            command_pool: CommandPool::default(),
            level: CommandBufferLevel::default(),
            command_buffer_count: u32::default(),
        }
    }
}
unsafe impl TaggedStructure for CommandBufferAllocateInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::COMMAND_BUFFER_ALLOCATE_INFO;
}
impl CommandBufferAllocateInfo {
    pub fn builder<'a>() -> CommandBufferAllocateInfoBuilder<'a> {
        CommandBufferAllocateInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct CommandBufferAllocateInfoBuilder<'a> {
    inner: CommandBufferAllocateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for CommandBufferAllocateInfoBuilder<'a> {
    type Target = CommandBufferAllocateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for CommandBufferAllocateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> CommandBufferAllocateInfoBuilder<'a> {
    #[inline]
    pub fn command_pool(mut self, command_pool: CommandPool) -> Self {
        self.inner.command_pool = command_pool;
        self
    }
    #[inline]
    pub fn level(mut self, level: CommandBufferLevel) -> Self {
        self.inner.level = level;
        self
    }
    #[inline]
    pub fn command_buffer_count(mut self, command_buffer_count: u32) -> Self {
        self.inner.command_buffer_count = command_buffer_count;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> CommandBufferAllocateInfo {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandBufferInheritanceInfo.html>"]
// C-layout mirror of VkCommandBufferInheritanceInfo: state a secondary
// command buffer inherits (render pass/subpass/framebuffer and query state).
pub struct CommandBufferInheritanceInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub render_pass: RenderPass,
    pub subpass: u32,
    pub framebuffer: Framebuffer,
    pub occlusion_query_enable: Bool32,
    pub query_flags: QueryControlFlags,
    pub pipeline_statistics: QueryPipelineStatisticFlags,
}
impl ::std::default::Default for CommandBufferInheritanceInfo {
    #[inline]
    fn default() -> Self {
        // Pre-fills the mandatory s_type tag; p_next starts null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            render_pass: RenderPass::default(),
            subpass: u32::default(),
            framebuffer: Framebuffer::default(),
            occlusion_query_enable: Bool32::default(),
            query_flags: QueryControlFlags::default(),
            pipeline_statistics: QueryPipelineStatisticFlags::default(),
        }
    }
}
unsafe impl TaggedStructure for CommandBufferInheritanceInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::COMMAND_BUFFER_INHERITANCE_INFO;
}
impl CommandBufferInheritanceInfo {
    pub fn builder<'a>() -> CommandBufferInheritanceInfoBuilder<'a> {
        CommandBufferInheritanceInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct CommandBufferInheritanceInfoBuilder<'a> {
    inner: CommandBufferInheritanceInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait for structs that may be appended to this struct's p_next chain
// via `push_next` below.
pub unsafe trait ExtendsCommandBufferInheritanceInfo {}
impl<'a> ::std::ops::Deref for CommandBufferInheritanceInfoBuilder<'a> {
    type Target = CommandBufferInheritanceInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for CommandBufferInheritanceInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> CommandBufferInheritanceInfoBuilder<'a> {
    #[inline]
    pub fn render_pass(mut self, render_pass: RenderPass) -> Self {
        self.inner.render_pass = render_pass;
        self
    }
    #[inline]
    pub fn subpass(mut self, subpass: u32) -> Self {
        self.inner.subpass = subpass;
        self
    }
    #[inline]
    pub fn framebuffer(mut self, framebuffer: Framebuffer) -> Self {
        self.inner.framebuffer = framebuffer;
        self
    }
    #[inline]
    // Rust `bool` is converted to the FFI Bool32 representation via `into()`.
    pub fn occlusion_query_enable(mut self, occlusion_query_enable: bool) -> Self {
        self.inner.occlusion_query_enable = occlusion_query_enable.into();
        self
    }
    #[inline]
    pub fn query_flags(mut self, query_flags: QueryControlFlags) -> Self {
        self.inner.query_flags = query_flags;
        self
    }
    #[inline]
    pub fn pipeline_statistics(mut self, pipeline_statistics: QueryPipelineStatisticFlags) -> Self {
        self.inner.pipeline_statistics = pipeline_statistics;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsCommandBufferInheritanceInfo>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*const T>::cast(next);
            // Walk to the end of `next`'s own chain so an already-linked chain
            // is spliced in whole; its tail is pointed at the current chain.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> CommandBufferInheritanceInfo {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandBufferBeginInfo.html>"]
// C-layout mirror of VkCommandBufferBeginInfo. p_inheritance_info is a raw
// borrow of a CommandBufferInheritanceInfo that must outlive uses of this.
pub struct CommandBufferBeginInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: CommandBufferUsageFlags,
    pub p_inheritance_info: *const CommandBufferInheritanceInfo,
}
impl ::std::default::Default for CommandBufferBeginInfo {
    #[inline]
    fn default() -> Self {
        // Pre-fills the mandatory s_type tag; pointers start null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: CommandBufferUsageFlags::default(),
            p_inheritance_info: ::std::ptr::null(),
        }
    }
}
unsafe impl TaggedStructure for CommandBufferBeginInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::COMMAND_BUFFER_BEGIN_INFO;
}
impl CommandBufferBeginInfo {
    pub fn builder<'a>() -> CommandBufferBeginInfoBuilder<'a> {
        CommandBufferBeginInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct CommandBufferBeginInfoBuilder<'a> {
    inner: CommandBufferBeginInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait for structs that may be appended to this struct's p_next chain
// via `push_next` below.
pub unsafe trait ExtendsCommandBufferBeginInfo {}
impl<'a> ::std::ops::Deref for CommandBufferBeginInfoBuilder<'a> {
    type Target = CommandBufferBeginInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for CommandBufferBeginInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> CommandBufferBeginInfoBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: CommandBufferUsageFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    // The `&'a` reference coerces to the raw `*const` field; the builder's
    // lifetime keeps the borrow alive until `build()` discards it.
    pub fn inheritance_info(mut self, inheritance_info: &'a CommandBufferInheritanceInfo) -> Self {
        self.inner.p_inheritance_info = inheritance_info;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsCommandBufferBeginInfo>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*const T>::cast(next);
            // Walk to the end of `next`'s own chain so an already-linked chain
            // is spliced in whole; its tail is pointed at the current chain.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> CommandBufferBeginInfo {
        self.inner
    }
}
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPassBeginInfo.html>"]
// C-layout mirror of VkRenderPassBeginInfo. Debug is hand-written below
// (not derived) because p_clear_values points at a union without Debug.
pub struct RenderPassBeginInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub render_pass: RenderPass,
    pub framebuffer: Framebuffer,
    pub render_area: Rect2D,
    pub clear_value_count: u32,
    pub p_clear_values: *const ClearValue,
}
#[cfg(feature = "debug")]
impl fmt::Debug for RenderPassBeginInfo {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("RenderPassBeginInfo")
            .field("s_type", &self.s_type)
            .field("p_next", &self.p_next)
            .field("render_pass", &self.render_pass)
            .field("framebuffer", &self.framebuffer)
            .field("render_area", &self.render_area)
            .field("clear_value_count", &self.clear_value_count)
            // ClearValue is a union with no Debug impl, so print a placeholder.
            .field("p_clear_values", &"union")
            .finish()
    }
}
impl ::std::default::Default for RenderPassBeginInfo {
    #[inline]
    fn default() -> Self {
        // Pre-fills the mandatory s_type tag; counts zero, pointers null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            render_pass: RenderPass::default(),
            framebuffer: Framebuffer::default(),
            render_area: Rect2D::default(),
            clear_value_count: u32::default(),
            p_clear_values: ::std::ptr::null(),
        }
    }
}
unsafe impl TaggedStructure for RenderPassBeginInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_BEGIN_INFO;
}
impl RenderPassBeginInfo {
    pub fn builder<'a>() -> RenderPassBeginInfoBuilder<'a> {
        RenderPassBeginInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct RenderPassBeginInfoBuilder<'a> {
    inner: RenderPassBeginInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait for structs that may be appended to this struct's p_next chain
// via `push_next` below.
pub unsafe trait ExtendsRenderPassBeginInfo {}
impl<'a> ::std::ops::Deref for RenderPassBeginInfoBuilder<'a> {
    type Target = RenderPassBeginInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for RenderPassBeginInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> RenderPassBeginInfoBuilder<'a> {
    #[inline]
    pub fn render_pass(mut self, render_pass: RenderPass) -> Self {
        self.inner.render_pass = render_pass;
        self
    }
    #[inline]
    pub fn framebuffer(mut self, framebuffer: Framebuffer) -> Self {
        self.inner.framebuffer = framebuffer;
        self
    }
    #[inline]
    pub fn render_area(mut self, render_area: Rect2D) -> Self {
        self.inner.render_area = render_area;
        self
    }
    #[inline]
    // Writes both the count and the pointer from one slice, so the pair
    // cannot drift out of sync.
    pub fn clear_values(mut self, clear_values: &'a [ClearValue]) -> Self {
        self.inner.clear_value_count = clear_values.len() as _;
        self.inner.p_clear_values = clear_values.as_ptr();
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsRenderPassBeginInfo>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*const T>::cast(next);
            // Walk to the end of `next`'s own chain so an already-linked chain
            // is spliced in whole; its tail is pointed at the current chain.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> RenderPassBeginInfo {
        self.inner
    }
}
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkClearColorValue.html>"]
// C union: the same 16 bytes reinterpreted as float, signed, or unsigned
// RGBA, matching the attachment's format.
pub union ClearColorValue {
    pub float32: [f32; 4],
    pub int32: [i32; 4],
    pub uint32: [u32; 4],
}
impl ::std::default::Default for ClearColorValue {
    #[inline]
    fn default() -> Self {
        // SAFETY: every variant is a plain array of primitive numbers, so the
        // all-zero bit pattern is a valid value for the union.
        unsafe { ::std::mem::zeroed() }
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkClearDepthStencilValue.html>"]
// Plain C-layout pair; no s_type/p_next because this is not an extensible
// Vulkan structure, just a value used inside ClearValue.
pub struct ClearDepthStencilValue {
    pub depth: f32,
    pub stencil: u32,
}
impl ClearDepthStencilValue {
    pub fn builder<'a>() -> ClearDepthStencilValueBuilder<'a> {
        ClearDepthStencilValueBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct ClearDepthStencilValueBuilder<'a> {
    inner: ClearDepthStencilValue,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ClearDepthStencilValueBuilder<'a> {
    type Target = ClearDepthStencilValue;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ClearDepthStencilValueBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> ClearDepthStencilValueBuilder<'a> {
    #[inline]
    pub fn depth(mut self, depth: f32) -> Self {
        self.inner.depth = depth;
        self
    }
    #[inline]
    pub fn stencil(mut self, stencil: u32) -> Self {
        self.inner.stencil = stencil;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ClearDepthStencilValue {
        self.inner
    }
}
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkClearValue.html>"]
// C union: interpreted as a color clear for color attachments or a
// depth/stencil clear for depth/stencil attachments.
pub union ClearValue {
    pub color: ClearColorValue,
    pub depth_stencil: ClearDepthStencilValue,
}
impl ::std::default::Default for ClearValue {
    #[inline]
    fn default() -> Self {
        // SAFETY: both variants are plain aggregates of f32/i32/u32, so the
        // all-zero bit pattern is a valid value for the union.
        unsafe { ::std::mem::zeroed() }
    }
}
#[repr(C)]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkClearAttachment.html>"]
// C-layout mirror of VkClearAttachment. Debug is hand-written below because
// the embedded ClearValue is a union without a Debug impl.
pub struct ClearAttachment {
    pub aspect_mask: ImageAspectFlags,
    pub color_attachment: u32,
    pub clear_value: ClearValue,
}
#[cfg(feature = "debug")]
impl fmt::Debug for ClearAttachment {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("ClearAttachment")
            .field("aspect_mask", &self.aspect_mask)
            .field("color_attachment", &self.color_attachment)
            // ClearValue is a union with no Debug impl, so print a placeholder.
            .field("clear_value", &"union")
            .finish()
    }
}
impl ClearAttachment {
    pub fn builder<'a>() -> ClearAttachmentBuilder<'a> {
        ClearAttachmentBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct ClearAttachmentBuilder<'a> {
    inner: ClearAttachment,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ClearAttachmentBuilder<'a> {
    type Target = ClearAttachment;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ClearAttachmentBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> ClearAttachmentBuilder<'a> {
    #[inline]
    pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> Self {
        self.inner.aspect_mask = aspect_mask;
        self
    }
    #[inline]
    pub fn color_attachment(mut self, color_attachment: u32) -> Self {
        self.inner.color_attachment = color_attachment;
        self
    }
    #[inline]
    pub fn clear_value(mut self, clear_value: ClearValue) -> Self {
        self.inner.clear_value = clear_value;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ClearAttachment {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAttachmentDescription.html>"]
// C-layout mirror of VkAttachmentDescription (all value fields; no
// s_type/p_next because this struct is not extensible).
pub struct AttachmentDescription {
    pub flags: AttachmentDescriptionFlags,
    pub format: Format,
    pub samples: SampleCountFlags,
    pub load_op: AttachmentLoadOp,
    pub store_op: AttachmentStoreOp,
    pub stencil_load_op: AttachmentLoadOp,
    pub stencil_store_op: AttachmentStoreOp,
    pub initial_layout: ImageLayout,
    pub final_layout: ImageLayout,
}
impl AttachmentDescription {
    pub fn builder<'a>() -> AttachmentDescriptionBuilder<'a> {
        AttachmentDescriptionBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct AttachmentDescriptionBuilder<'a> {
    inner: AttachmentDescription,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for AttachmentDescriptionBuilder<'a> {
    type Target = AttachmentDescription;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for AttachmentDescriptionBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> AttachmentDescriptionBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: AttachmentDescriptionFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn format(mut self, format: Format) -> Self {
        self.inner.format = format;
        self
    }
    #[inline]
    pub fn samples(mut self, samples: SampleCountFlags) -> Self {
        self.inner.samples = samples;
        self
    }
    #[inline]
    pub fn load_op(mut self, load_op: AttachmentLoadOp) -> Self {
        self.inner.load_op = load_op;
        self
    }
    #[inline]
    pub fn store_op(mut self, store_op: AttachmentStoreOp) -> Self {
        self.inner.store_op = store_op;
        self
    }
    #[inline]
    pub fn stencil_load_op(mut self, stencil_load_op: AttachmentLoadOp) -> Self {
        self.inner.stencil_load_op = stencil_load_op;
        self
    }
    #[inline]
    pub fn stencil_store_op(mut self, stencil_store_op: AttachmentStoreOp) -> Self {
        self.inner.stencil_store_op = stencil_store_op;
        self
    }
    #[inline]
    pub fn initial_layout(mut self, initial_layout: ImageLayout) -> Self {
        self.inner.initial_layout = initial_layout;
        self
    }
    #[inline]
    pub fn final_layout(mut self, final_layout: ImageLayout) -> Self {
        self.inner.final_layout = final_layout;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> AttachmentDescription {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAttachmentReference.html>"]
// C-layout pair: an index into a render pass's attachment array plus the
// layout the attachment is in during the subpass.
pub struct AttachmentReference {
    pub attachment: u32,
    pub layout: ImageLayout,
}
impl AttachmentReference {
    pub fn builder<'a>() -> AttachmentReferenceBuilder<'a> {
        AttachmentReferenceBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct AttachmentReferenceBuilder<'a> {
    inner: AttachmentReference,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for AttachmentReferenceBuilder<'a> {
    type Target = AttachmentReference;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for AttachmentReferenceBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> AttachmentReferenceBuilder<'a> {
    #[inline]
    pub fn attachment(mut self, attachment: u32) -> Self {
        self.inner.attachment = attachment;
        self
    }
    #[inline]
    pub fn layout(mut self, layout: ImageLayout) -> Self {
        self.inner.layout = layout;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> AttachmentReference {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubpassDescription.html>"]
// C-layout mirror of VkSubpassDescription. Note there is no dedicated
// resolve-attachment count field: p_resolve_attachments shares
// color_attachment_count (see the resolve_attachments setter below).
pub struct SubpassDescription {
    pub flags: SubpassDescriptionFlags,
    pub pipeline_bind_point: PipelineBindPoint,
    pub input_attachment_count: u32,
    pub p_input_attachments: *const AttachmentReference,
    pub color_attachment_count: u32,
    pub p_color_attachments: *const AttachmentReference,
    pub p_resolve_attachments: *const AttachmentReference,
    pub p_depth_stencil_attachment: *const AttachmentReference,
    pub preserve_attachment_count: u32,
    pub p_preserve_attachments: *const u32,
}
impl ::std::default::Default for SubpassDescription {
    #[inline]
    fn default() -> Self {
        // All counts zero and all pointers null: an empty subpass description.
        Self {
            flags: SubpassDescriptionFlags::default(),
            pipeline_bind_point: PipelineBindPoint::default(),
            input_attachment_count: u32::default(),
            p_input_attachments: ::std::ptr::null(),
            color_attachment_count: u32::default(),
            p_color_attachments: ::std::ptr::null(),
            p_resolve_attachments: ::std::ptr::null(),
            p_depth_stencil_attachment: ::std::ptr::null(),
            preserve_attachment_count: u32::default(),
            p_preserve_attachments: ::std::ptr::null(),
        }
    }
}
impl SubpassDescription {
    pub fn builder<'a>() -> SubpassDescriptionBuilder<'a> {
        SubpassDescriptionBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct SubpassDescriptionBuilder<'a> {
    inner: SubpassDescription,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SubpassDescriptionBuilder<'a> {
    type Target = SubpassDescription;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for SubpassDescriptionBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> SubpassDescriptionBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: SubpassDescriptionFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn pipeline_bind_point(mut self, pipeline_bind_point: PipelineBindPoint) -> Self {
        self.inner.pipeline_bind_point = pipeline_bind_point;
        self
    }
    #[inline]
    pub fn input_attachments(mut self, input_attachments: &'a [AttachmentReference]) -> Self {
        self.inner.input_attachment_count = input_attachments.len() as _;
        self.inner.p_input_attachments = input_attachments.as_ptr();
        self
    }
    #[inline]
    pub fn color_attachments(mut self, color_attachments: &'a [AttachmentReference]) -> Self {
        self.inner.color_attachment_count = color_attachments.len() as _;
        self.inner.p_color_attachments = color_attachments.as_ptr();
        self
    }
    #[inline]
    // NOTE(review): writing color_attachment_count here is intentional, not a
    // copy-paste slip — per the Vulkan spec, pResolveAttachments (when
    // non-null) has exactly colorAttachmentCount elements, so the two setters
    // share the same count field and callers must pass equal-length slices.
    pub fn resolve_attachments(mut self, resolve_attachments: &'a [AttachmentReference]) -> Self {
        self.inner.color_attachment_count = resolve_attachments.len() as _;
        self.inner.p_resolve_attachments = resolve_attachments.as_ptr();
        self
    }
    #[inline]
    // The `&'a` reference coerces to the raw `*const` field; the builder's
    // lifetime keeps the borrow alive until `build()` discards it.
    pub fn depth_stencil_attachment(
        mut self,
        depth_stencil_attachment: &'a AttachmentReference,
    ) -> Self {
        self.inner.p_depth_stencil_attachment = depth_stencil_attachment;
        self
    }
    #[inline]
    pub fn preserve_attachments(mut self, preserve_attachments: &'a [u32]) -> Self {
        self.inner.preserve_attachment_count = preserve_attachments.len() as _;
        self.inner.p_preserve_attachments = preserve_attachments.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> SubpassDescription {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubpassDependency.html>"]
pub struct SubpassDependency {
    // C-layout mirror of VkSubpassDependency; passed to Vulkan by pointer,
    // so field order and types must stay in sync with the C header.
    pub src_subpass: u32,
    pub dst_subpass: u32,
    pub src_stage_mask: PipelineStageFlags,
    pub dst_stage_mask: PipelineStageFlags,
    pub src_access_mask: AccessFlags,
    pub dst_access_mask: AccessFlags,
    pub dependency_flags: DependencyFlags,
}
+impl SubpassDependency {
+ pub fn builder<'a>() -> SubpassDependencyBuilder<'a> {
+ SubpassDependencyBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
#[repr(transparent)]
pub struct SubpassDependencyBuilder<'a> {
    // Transparent wrapper over the raw struct; `'a` is unused here (no pointer
    // fields) but kept for API uniformity with the other builders.
    inner: SubpassDependency,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Lets a builder reference be used directly as `&SubpassDependency`.
impl<'a> ::std::ops::Deref for SubpassDependencyBuilder<'a> {
    type Target = SubpassDependency;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
// Mutable counterpart of the Deref impl above.
impl<'a> ::std::ops::DerefMut for SubpassDependencyBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> SubpassDependencyBuilder<'a> {
    // Consuming setters; all fields are plain values, so each setter is a
    // simple field write.
    #[inline]
    pub fn src_subpass(mut self, src_subpass: u32) -> Self {
        self.inner.src_subpass = src_subpass;
        self
    }
    #[inline]
    pub fn dst_subpass(mut self, dst_subpass: u32) -> Self {
        self.inner.dst_subpass = dst_subpass;
        self
    }
    #[inline]
    pub fn src_stage_mask(mut self, src_stage_mask: PipelineStageFlags) -> Self {
        self.inner.src_stage_mask = src_stage_mask;
        self
    }
    #[inline]
    pub fn dst_stage_mask(mut self, dst_stage_mask: PipelineStageFlags) -> Self {
        self.inner.dst_stage_mask = dst_stage_mask;
        self
    }
    #[inline]
    pub fn src_access_mask(mut self, src_access_mask: AccessFlags) -> Self {
        self.inner.src_access_mask = src_access_mask;
        self
    }
    #[inline]
    pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags) -> Self {
        self.inner.dst_access_mask = dst_access_mask;
        self
    }
    #[inline]
    pub fn dependency_flags(mut self, dependency_flags: DependencyFlags) -> Self {
        self.inner.dependency_flags = dependency_flags;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> SubpassDependency {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPassCreateInfo.html>"]
pub struct RenderPassCreateInfo {
    // C-layout mirror of VkRenderPassCreateInfo. `s_type`/`p_next` form the
    // standard Vulkan structure header used for p_next extension chains.
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: RenderPassCreateFlags,
    // Count/pointer pairs describing the render pass contents.
    pub attachment_count: u32,
    pub p_attachments: *const AttachmentDescription,
    pub subpass_count: u32,
    pub p_subpasses: *const SubpassDescription,
    pub dependency_count: u32,
    pub p_dependencies: *const SubpassDependency,
}
+impl ::std::default::Default for RenderPassCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: RenderPassCreateFlags::default(),
+ attachment_count: u32::default(),
+ p_attachments: ::std::ptr::null(),
+ subpass_count: u32::default(),
+ p_subpasses: ::std::ptr::null(),
+ dependency_count: u32::default(),
+ p_dependencies: ::std::ptr::null(),
+ }
+ }
+}
// Associates the struct with its VkStructureType tag; `unsafe` because the
// trait's users rely on the tag matching the struct's actual layout.
unsafe impl TaggedStructure for RenderPassCreateInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_CREATE_INFO;
}
+impl RenderPassCreateInfo {
+ pub fn builder<'a>() -> RenderPassCreateInfoBuilder<'a> {
+ RenderPassCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
#[repr(transparent)]
pub struct RenderPassCreateInfoBuilder<'a> {
    // Transparent wrapper: same layout as the raw struct. `'a` keeps the
    // slices and p_next extension structs borrowed while the builder lives.
    inner: RenderPassCreateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
/// Marker for extension structs that may be chained into
/// [`RenderPassCreateInfo`]'s `p_next` via `push_next`. `unsafe` because
/// implementors promise a Vulkan-compatible `s_type`/`p_next` header layout
/// (presumably enforced by the code generator — TODO confirm).
pub unsafe trait ExtendsRenderPassCreateInfo {}
// Lets a builder reference be used directly as `&RenderPassCreateInfo`.
impl<'a> ::std::ops::Deref for RenderPassCreateInfoBuilder<'a> {
    type Target = RenderPassCreateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
// Mutable counterpart of the Deref impl above.
impl<'a> ::std::ops::DerefMut for RenderPassCreateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> RenderPassCreateInfoBuilder<'a> {
    // Consuming setters; slice setters record both pointer and element count.
    #[inline]
    pub fn flags(mut self, flags: RenderPassCreateFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn attachments(mut self, attachments: &'a [AttachmentDescription]) -> Self {
        self.inner.attachment_count = attachments.len() as _;
        self.inner.p_attachments = attachments.as_ptr();
        self
    }
    #[inline]
    pub fn subpasses(mut self, subpasses: &'a [SubpassDescription]) -> Self {
        self.inner.subpass_count = subpasses.len() as _;
        self.inner.p_subpasses = subpasses.as_ptr();
        self
    }
    #[inline]
    pub fn dependencies(mut self, dependencies: &'a [SubpassDependency]) -> Self {
        self.inner.dependency_count = dependencies.len() as _;
        self.inner.p_dependencies = dependencies.as_ptr();
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsRenderPassCreateInfo>(mut self, next: &'a mut T) -> Self {
        // SAFETY(review): relies on `T: ExtendsRenderPassCreateInfo` implying a
        // Vulkan structure header (s_type + p_next) at offset 0, so the cast and
        // chain walk are sound. Walks `next`'s own chain to its tail, attaches
        // the existing chain there, then makes `next` the new head.
        unsafe {
            let next_ptr = <*const T>::cast(next);
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> RenderPassCreateInfo {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkEventCreateInfo.html>"]
pub struct EventCreateInfo {
    // C-layout mirror of VkEventCreateInfo: standard s_type/p_next header
    // plus the creation flags.
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: EventCreateFlags,
}
+impl ::std::default::Default for EventCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: EventCreateFlags::default(),
+ }
+ }
+}
// Associates the struct with its VkStructureType tag; `unsafe` because the
// trait's users rely on the tag matching the struct's actual layout.
unsafe impl TaggedStructure for EventCreateInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::EVENT_CREATE_INFO;
}
+impl EventCreateInfo {
+ pub fn builder<'a>() -> EventCreateInfoBuilder<'a> {
+ EventCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
#[repr(transparent)]
pub struct EventCreateInfoBuilder<'a> {
    // Transparent wrapper; `'a` keeps p_next extension structs borrowed while
    // the builder lives.
    inner: EventCreateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
/// Marker for extension structs that may be chained into
/// [`EventCreateInfo`]'s `p_next` via `push_next`. `unsafe` because
/// implementors promise a Vulkan-compatible `s_type`/`p_next` header layout
/// (presumably enforced by the code generator — TODO confirm).
pub unsafe trait ExtendsEventCreateInfo {}
// Lets a builder reference be used directly as `&EventCreateInfo`.
impl<'a> ::std::ops::Deref for EventCreateInfoBuilder<'a> {
    type Target = EventCreateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
// Mutable counterpart of the Deref impl above.
impl<'a> ::std::ops::DerefMut for EventCreateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> EventCreateInfoBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: EventCreateFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsEventCreateInfo>(mut self, next: &'a mut T) -> Self {
        // SAFETY(review): relies on `T: ExtendsEventCreateInfo` implying a
        // Vulkan structure header (s_type + p_next) at offset 0. Splices the
        // existing chain after `next`'s tail, then heads the chain with `next`.
        unsafe {
            let next_ptr = <*const T>::cast(next);
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> EventCreateInfo {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFenceCreateInfo.html>"]
pub struct FenceCreateInfo {
    // C-layout mirror of VkFenceCreateInfo: standard s_type/p_next header
    // plus the creation flags (e.g. SIGNALED).
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: FenceCreateFlags,
}
+impl ::std::default::Default for FenceCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: FenceCreateFlags::default(),
+ }
+ }
+}
// Associates the struct with its VkStructureType tag; `unsafe` because the
// trait's users rely on the tag matching the struct's actual layout.
unsafe impl TaggedStructure for FenceCreateInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::FENCE_CREATE_INFO;
}
+impl FenceCreateInfo {
+ pub fn builder<'a>() -> FenceCreateInfoBuilder<'a> {
+ FenceCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
#[repr(transparent)]
pub struct FenceCreateInfoBuilder<'a> {
    // Transparent wrapper; `'a` keeps p_next extension structs borrowed while
    // the builder lives.
    inner: FenceCreateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
/// Marker for extension structs that may be chained into
/// [`FenceCreateInfo`]'s `p_next` via `push_next`. `unsafe` because
/// implementors promise a Vulkan-compatible `s_type`/`p_next` header layout
/// (presumably enforced by the code generator — TODO confirm).
pub unsafe trait ExtendsFenceCreateInfo {}
// Lets a builder reference be used directly as `&FenceCreateInfo`.
impl<'a> ::std::ops::Deref for FenceCreateInfoBuilder<'a> {
    type Target = FenceCreateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
// Mutable counterpart of the Deref impl above.
impl<'a> ::std::ops::DerefMut for FenceCreateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> FenceCreateInfoBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: FenceCreateFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsFenceCreateInfo>(mut self, next: &'a mut T) -> Self {
        // SAFETY(review): relies on `T: ExtendsFenceCreateInfo` implying a
        // Vulkan structure header (s_type + p_next) at offset 0. Splices the
        // existing chain after `next`'s tail, then heads the chain with `next`.
        unsafe {
            let next_ptr = <*const T>::cast(next);
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> FenceCreateInfo {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFeatures.html>"]
pub struct PhysicalDeviceFeatures {
    // C-layout mirror of VkPhysicalDeviceFeatures. Every field is a Bool32
    // (VK_TRUE/VK_FALSE) feature toggle; the builder exposes each one as a
    // Rust `bool`. Field order must match the C header exactly.
    pub robust_buffer_access: Bool32,
    pub full_draw_index_uint32: Bool32,
    pub image_cube_array: Bool32,
    pub independent_blend: Bool32,
    pub geometry_shader: Bool32,
    pub tessellation_shader: Bool32,
    pub sample_rate_shading: Bool32,
    pub dual_src_blend: Bool32,
    pub logic_op: Bool32,
    pub multi_draw_indirect: Bool32,
    pub draw_indirect_first_instance: Bool32,
    pub depth_clamp: Bool32,
    pub depth_bias_clamp: Bool32,
    pub fill_mode_non_solid: Bool32,
    pub depth_bounds: Bool32,
    pub wide_lines: Bool32,
    pub large_points: Bool32,
    pub alpha_to_one: Bool32,
    pub multi_viewport: Bool32,
    pub sampler_anisotropy: Bool32,
    pub texture_compression_etc2: Bool32,
    pub texture_compression_astc_ldr: Bool32,
    pub texture_compression_bc: Bool32,
    pub occlusion_query_precise: Bool32,
    pub pipeline_statistics_query: Bool32,
    pub vertex_pipeline_stores_and_atomics: Bool32,
    pub fragment_stores_and_atomics: Bool32,
    pub shader_tessellation_and_geometry_point_size: Bool32,
    pub shader_image_gather_extended: Bool32,
    pub shader_storage_image_extended_formats: Bool32,
    pub shader_storage_image_multisample: Bool32,
    pub shader_storage_image_read_without_format: Bool32,
    pub shader_storage_image_write_without_format: Bool32,
    pub shader_uniform_buffer_array_dynamic_indexing: Bool32,
    pub shader_sampled_image_array_dynamic_indexing: Bool32,
    pub shader_storage_buffer_array_dynamic_indexing: Bool32,
    pub shader_storage_image_array_dynamic_indexing: Bool32,
    pub shader_clip_distance: Bool32,
    pub shader_cull_distance: Bool32,
    pub shader_float64: Bool32,
    pub shader_int64: Bool32,
    pub shader_int16: Bool32,
    pub shader_resource_residency: Bool32,
    pub shader_resource_min_lod: Bool32,
    pub sparse_binding: Bool32,
    pub sparse_residency_buffer: Bool32,
    pub sparse_residency_image2_d: Bool32,
    pub sparse_residency_image3_d: Bool32,
    pub sparse_residency2_samples: Bool32,
    pub sparse_residency4_samples: Bool32,
    pub sparse_residency8_samples: Bool32,
    pub sparse_residency16_samples: Bool32,
    pub sparse_residency_aliased: Bool32,
    pub variable_multisample_rate: Bool32,
    pub inherited_queries: Bool32,
}
+impl PhysicalDeviceFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceFeaturesBuilder<'a> {
+ PhysicalDeviceFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
#[repr(transparent)]
pub struct PhysicalDeviceFeaturesBuilder<'a> {
    // Transparent wrapper over the raw struct; `'a` is unused here (no pointer
    // fields) but kept for API uniformity with the other builders.
    inner: PhysicalDeviceFeatures,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Lets a builder reference be used directly as `&PhysicalDeviceFeatures`.
impl<'a> ::std::ops::Deref for PhysicalDeviceFeaturesBuilder<'a> {
    type Target = PhysicalDeviceFeatures;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
// Mutable counterpart of the Deref impl above.
impl<'a> ::std::ops::DerefMut for PhysicalDeviceFeaturesBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceFeaturesBuilder<'a> {
    // One consuming setter per feature toggle. Each takes a Rust `bool` and
    // stores it as a Bool32 via `.into()` (true -> VK_TRUE, false -> VK_FALSE).
    #[inline]
    pub fn robust_buffer_access(mut self, robust_buffer_access: bool) -> Self {
        self.inner.robust_buffer_access = robust_buffer_access.into();
        self
    }
    #[inline]
    pub fn full_draw_index_uint32(mut self, full_draw_index_uint32: bool) -> Self {
        self.inner.full_draw_index_uint32 = full_draw_index_uint32.into();
        self
    }
    #[inline]
    pub fn image_cube_array(mut self, image_cube_array: bool) -> Self {
        self.inner.image_cube_array = image_cube_array.into();
        self
    }
    #[inline]
    pub fn independent_blend(mut self, independent_blend: bool) -> Self {
        self.inner.independent_blend = independent_blend.into();
        self
    }
    #[inline]
    pub fn geometry_shader(mut self, geometry_shader: bool) -> Self {
        self.inner.geometry_shader = geometry_shader.into();
        self
    }
    #[inline]
    pub fn tessellation_shader(mut self, tessellation_shader: bool) -> Self {
        self.inner.tessellation_shader = tessellation_shader.into();
        self
    }
    #[inline]
    pub fn sample_rate_shading(mut self, sample_rate_shading: bool) -> Self {
        self.inner.sample_rate_shading = sample_rate_shading.into();
        self
    }
    #[inline]
    pub fn dual_src_blend(mut self, dual_src_blend: bool) -> Self {
        self.inner.dual_src_blend = dual_src_blend.into();
        self
    }
    #[inline]
    pub fn logic_op(mut self, logic_op: bool) -> Self {
        self.inner.logic_op = logic_op.into();
        self
    }
    #[inline]
    pub fn multi_draw_indirect(mut self, multi_draw_indirect: bool) -> Self {
        self.inner.multi_draw_indirect = multi_draw_indirect.into();
        self
    }
    #[inline]
    pub fn draw_indirect_first_instance(mut self, draw_indirect_first_instance: bool) -> Self {
        self.inner.draw_indirect_first_instance = draw_indirect_first_instance.into();
        self
    }
    #[inline]
    pub fn depth_clamp(mut self, depth_clamp: bool) -> Self {
        self.inner.depth_clamp = depth_clamp.into();
        self
    }
    #[inline]
    pub fn depth_bias_clamp(mut self, depth_bias_clamp: bool) -> Self {
        self.inner.depth_bias_clamp = depth_bias_clamp.into();
        self
    }
    #[inline]
    pub fn fill_mode_non_solid(mut self, fill_mode_non_solid: bool) -> Self {
        self.inner.fill_mode_non_solid = fill_mode_non_solid.into();
        self
    }
    #[inline]
    pub fn depth_bounds(mut self, depth_bounds: bool) -> Self {
        self.inner.depth_bounds = depth_bounds.into();
        self
    }
    #[inline]
    pub fn wide_lines(mut self, wide_lines: bool) -> Self {
        self.inner.wide_lines = wide_lines.into();
        self
    }
    #[inline]
    pub fn large_points(mut self, large_points: bool) -> Self {
        self.inner.large_points = large_points.into();
        self
    }
    #[inline]
    pub fn alpha_to_one(mut self, alpha_to_one: bool) -> Self {
        self.inner.alpha_to_one = alpha_to_one.into();
        self
    }
    #[inline]
    pub fn multi_viewport(mut self, multi_viewport: bool) -> Self {
        self.inner.multi_viewport = multi_viewport.into();
        self
    }
    #[inline]
    pub fn sampler_anisotropy(mut self, sampler_anisotropy: bool) -> Self {
        self.inner.sampler_anisotropy = sampler_anisotropy.into();
        self
    }
    #[inline]
    pub fn texture_compression_etc2(mut self, texture_compression_etc2: bool) -> Self {
        self.inner.texture_compression_etc2 = texture_compression_etc2.into();
        self
    }
    #[inline]
    pub fn texture_compression_astc_ldr(mut self, texture_compression_astc_ldr: bool) -> Self {
        self.inner.texture_compression_astc_ldr = texture_compression_astc_ldr.into();
        self
    }
    #[inline]
    pub fn texture_compression_bc(mut self, texture_compression_bc: bool) -> Self {
        self.inner.texture_compression_bc = texture_compression_bc.into();
        self
    }
    #[inline]
    pub fn occlusion_query_precise(mut self, occlusion_query_precise: bool) -> Self {
        self.inner.occlusion_query_precise = occlusion_query_precise.into();
        self
    }
    #[inline]
    pub fn pipeline_statistics_query(mut self, pipeline_statistics_query: bool) -> Self {
        self.inner.pipeline_statistics_query = pipeline_statistics_query.into();
        self
    }
    #[inline]
    pub fn vertex_pipeline_stores_and_atomics(
        mut self,
        vertex_pipeline_stores_and_atomics: bool,
    ) -> Self {
        self.inner.vertex_pipeline_stores_and_atomics = vertex_pipeline_stores_and_atomics.into();
        self
    }
    #[inline]
    pub fn fragment_stores_and_atomics(mut self, fragment_stores_and_atomics: bool) -> Self {
        self.inner.fragment_stores_and_atomics = fragment_stores_and_atomics.into();
        self
    }
    #[inline]
    pub fn shader_tessellation_and_geometry_point_size(
        mut self,
        shader_tessellation_and_geometry_point_size: bool,
    ) -> Self {
        self.inner.shader_tessellation_and_geometry_point_size =
            shader_tessellation_and_geometry_point_size.into();
        self
    }
    #[inline]
    pub fn shader_image_gather_extended(mut self, shader_image_gather_extended: bool) -> Self {
        self.inner.shader_image_gather_extended = shader_image_gather_extended.into();
        self
    }
    #[inline]
    pub fn shader_storage_image_extended_formats(
        mut self,
        shader_storage_image_extended_formats: bool,
    ) -> Self {
        self.inner.shader_storage_image_extended_formats =
            shader_storage_image_extended_formats.into();
        self
    }
    #[inline]
    pub fn shader_storage_image_multisample(
        mut self,
        shader_storage_image_multisample: bool,
    ) -> Self {
        self.inner.shader_storage_image_multisample = shader_storage_image_multisample.into();
        self
    }
    #[inline]
    pub fn shader_storage_image_read_without_format(
        mut self,
        shader_storage_image_read_without_format: bool,
    ) -> Self {
        self.inner.shader_storage_image_read_without_format =
            shader_storage_image_read_without_format.into();
        self
    }
    #[inline]
    pub fn shader_storage_image_write_without_format(
        mut self,
        shader_storage_image_write_without_format: bool,
    ) -> Self {
        self.inner.shader_storage_image_write_without_format =
            shader_storage_image_write_without_format.into();
        self
    }
    #[inline]
    pub fn shader_uniform_buffer_array_dynamic_indexing(
        mut self,
        shader_uniform_buffer_array_dynamic_indexing: bool,
    ) -> Self {
        self.inner.shader_uniform_buffer_array_dynamic_indexing =
            shader_uniform_buffer_array_dynamic_indexing.into();
        self
    }
    #[inline]
    pub fn shader_sampled_image_array_dynamic_indexing(
        mut self,
        shader_sampled_image_array_dynamic_indexing: bool,
    ) -> Self {
        self.inner.shader_sampled_image_array_dynamic_indexing =
            shader_sampled_image_array_dynamic_indexing.into();
        self
    }
    #[inline]
    pub fn shader_storage_buffer_array_dynamic_indexing(
        mut self,
        shader_storage_buffer_array_dynamic_indexing: bool,
    ) -> Self {
        self.inner.shader_storage_buffer_array_dynamic_indexing =
            shader_storage_buffer_array_dynamic_indexing.into();
        self
    }
    #[inline]
    pub fn shader_storage_image_array_dynamic_indexing(
        mut self,
        shader_storage_image_array_dynamic_indexing: bool,
    ) -> Self {
        self.inner.shader_storage_image_array_dynamic_indexing =
            shader_storage_image_array_dynamic_indexing.into();
        self
    }
    #[inline]
    pub fn shader_clip_distance(mut self, shader_clip_distance: bool) -> Self {
        self.inner.shader_clip_distance = shader_clip_distance.into();
        self
    }
    #[inline]
    pub fn shader_cull_distance(mut self, shader_cull_distance: bool) -> Self {
        self.inner.shader_cull_distance = shader_cull_distance.into();
        self
    }
    #[inline]
    pub fn shader_float64(mut self, shader_float64: bool) -> Self {
        self.inner.shader_float64 = shader_float64.into();
        self
    }
    #[inline]
    pub fn shader_int64(mut self, shader_int64: bool) -> Self {
        self.inner.shader_int64 = shader_int64.into();
        self
    }
    #[inline]
    pub fn shader_int16(mut self, shader_int16: bool) -> Self {
        self.inner.shader_int16 = shader_int16.into();
        self
    }
    #[inline]
    pub fn shader_resource_residency(mut self, shader_resource_residency: bool) -> Self {
        self.inner.shader_resource_residency = shader_resource_residency.into();
        self
    }
    #[inline]
    pub fn shader_resource_min_lod(mut self, shader_resource_min_lod: bool) -> Self {
        self.inner.shader_resource_min_lod = shader_resource_min_lod.into();
        self
    }
    #[inline]
    pub fn sparse_binding(mut self, sparse_binding: bool) -> Self {
        self.inner.sparse_binding = sparse_binding.into();
        self
    }
    #[inline]
    pub fn sparse_residency_buffer(mut self, sparse_residency_buffer: bool) -> Self {
        self.inner.sparse_residency_buffer = sparse_residency_buffer.into();
        self
    }
    #[inline]
    pub fn sparse_residency_image2_d(mut self, sparse_residency_image2_d: bool) -> Self {
        self.inner.sparse_residency_image2_d = sparse_residency_image2_d.into();
        self
    }
    #[inline]
    pub fn sparse_residency_image3_d(mut self, sparse_residency_image3_d: bool) -> Self {
        self.inner.sparse_residency_image3_d = sparse_residency_image3_d.into();
        self
    }
    #[inline]
    pub fn sparse_residency2_samples(mut self, sparse_residency2_samples: bool) -> Self {
        self.inner.sparse_residency2_samples = sparse_residency2_samples.into();
        self
    }
    #[inline]
    pub fn sparse_residency4_samples(mut self, sparse_residency4_samples: bool) -> Self {
        self.inner.sparse_residency4_samples = sparse_residency4_samples.into();
        self
    }
    #[inline]
    pub fn sparse_residency8_samples(mut self, sparse_residency8_samples: bool) -> Self {
        self.inner.sparse_residency8_samples = sparse_residency8_samples.into();
        self
    }
    #[inline]
    pub fn sparse_residency16_samples(mut self, sparse_residency16_samples: bool) -> Self {
        self.inner.sparse_residency16_samples = sparse_residency16_samples.into();
        self
    }
    #[inline]
    pub fn sparse_residency_aliased(mut self, sparse_residency_aliased: bool) -> Self {
        self.inner.sparse_residency_aliased = sparse_residency_aliased.into();
        self
    }
    #[inline]
    pub fn variable_multisample_rate(mut self, variable_multisample_rate: bool) -> Self {
        self.inner.variable_multisample_rate = variable_multisample_rate.into();
        self
    }
    #[inline]
    pub fn inherited_queries(mut self, inherited_queries: bool) -> Self {
        self.inner.inherited_queries = inherited_queries.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceFeatures {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceSparseProperties.html>"]
pub struct PhysicalDeviceSparseProperties {
    // C-layout mirror of VkPhysicalDeviceSparseProperties; all fields are
    // Bool32 capability flags reported by the implementation.
    pub residency_standard2_d_block_shape: Bool32,
    pub residency_standard2_d_multisample_block_shape: Bool32,
    pub residency_standard3_d_block_shape: Bool32,
    pub residency_aligned_mip_size: Bool32,
    pub residency_non_resident_strict: Bool32,
}
+impl PhysicalDeviceSparseProperties {
+ pub fn builder<'a>() -> PhysicalDeviceSparsePropertiesBuilder<'a> {
+ PhysicalDeviceSparsePropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
#[repr(transparent)]
pub struct PhysicalDeviceSparsePropertiesBuilder<'a> {
    // Transparent wrapper over the raw struct; `'a` is unused here (no pointer
    // fields) but kept for API uniformity with the other builders.
    inner: PhysicalDeviceSparseProperties,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Lets a builder reference be used directly as `&PhysicalDeviceSparseProperties`.
impl<'a> ::std::ops::Deref for PhysicalDeviceSparsePropertiesBuilder<'a> {
    type Target = PhysicalDeviceSparseProperties;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
// Mutable counterpart of the Deref impl above.
impl<'a> ::std::ops::DerefMut for PhysicalDeviceSparsePropertiesBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceSparsePropertiesBuilder<'a> {
    // Consuming setters; each takes a Rust `bool` and stores it as a Bool32
    // via `.into()` (true -> VK_TRUE, false -> VK_FALSE).
    #[inline]
    pub fn residency_standard2_d_block_shape(
        mut self,
        residency_standard2_d_block_shape: bool,
    ) -> Self {
        self.inner.residency_standard2_d_block_shape = residency_standard2_d_block_shape.into();
        self
    }
    #[inline]
    pub fn residency_standard2_d_multisample_block_shape(
        mut self,
        residency_standard2_d_multisample_block_shape: bool,
    ) -> Self {
        self.inner.residency_standard2_d_multisample_block_shape =
            residency_standard2_d_multisample_block_shape.into();
        self
    }
    #[inline]
    pub fn residency_standard3_d_block_shape(
        mut self,
        residency_standard3_d_block_shape: bool,
    ) -> Self {
        self.inner.residency_standard3_d_block_shape = residency_standard3_d_block_shape.into();
        self
    }
    #[inline]
    pub fn residency_aligned_mip_size(mut self, residency_aligned_mip_size: bool) -> Self {
        self.inner.residency_aligned_mip_size = residency_aligned_mip_size.into();
        self
    }
    #[inline]
    pub fn residency_non_resident_strict(mut self, residency_non_resident_strict: bool) -> Self {
        self.inner.residency_non_resident_strict = residency_non_resident_strict.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceSparseProperties {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceLimits.html>"]
pub struct PhysicalDeviceLimits {
    // C-layout mirror of VkPhysicalDeviceLimits: implementation-reported
    // limits, filled in by the driver. Field order must match the C header.
    pub max_image_dimension1_d: u32,
    pub max_image_dimension2_d: u32,
    pub max_image_dimension3_d: u32,
    pub max_image_dimension_cube: u32,
    pub max_image_array_layers: u32,
    pub max_texel_buffer_elements: u32,
    pub max_uniform_buffer_range: u32,
    pub max_storage_buffer_range: u32,
    pub max_push_constants_size: u32,
    pub max_memory_allocation_count: u32,
    pub max_sampler_allocation_count: u32,
    pub buffer_image_granularity: DeviceSize,
    pub sparse_address_space_size: DeviceSize,
    pub max_bound_descriptor_sets: u32,
    pub max_per_stage_descriptor_samplers: u32,
    pub max_per_stage_descriptor_uniform_buffers: u32,
    pub max_per_stage_descriptor_storage_buffers: u32,
    pub max_per_stage_descriptor_sampled_images: u32,
    pub max_per_stage_descriptor_storage_images: u32,
    pub max_per_stage_descriptor_input_attachments: u32,
    pub max_per_stage_resources: u32,
    pub max_descriptor_set_samplers: u32,
    pub max_descriptor_set_uniform_buffers: u32,
    pub max_descriptor_set_uniform_buffers_dynamic: u32,
    pub max_descriptor_set_storage_buffers: u32,
    pub max_descriptor_set_storage_buffers_dynamic: u32,
    pub max_descriptor_set_sampled_images: u32,
    pub max_descriptor_set_storage_images: u32,
    pub max_descriptor_set_input_attachments: u32,
    pub max_vertex_input_attributes: u32,
    pub max_vertex_input_bindings: u32,
    pub max_vertex_input_attribute_offset: u32,
    pub max_vertex_input_binding_stride: u32,
    pub max_vertex_output_components: u32,
    pub max_tessellation_generation_level: u32,
    pub max_tessellation_patch_size: u32,
    pub max_tessellation_control_per_vertex_input_components: u32,
    pub max_tessellation_control_per_vertex_output_components: u32,
    pub max_tessellation_control_per_patch_output_components: u32,
    pub max_tessellation_control_total_output_components: u32,
    pub max_tessellation_evaluation_input_components: u32,
    pub max_tessellation_evaluation_output_components: u32,
    pub max_geometry_shader_invocations: u32,
    pub max_geometry_input_components: u32,
    pub max_geometry_output_components: u32,
    pub max_geometry_output_vertices: u32,
    pub max_geometry_total_output_components: u32,
    pub max_fragment_input_components: u32,
    pub max_fragment_output_attachments: u32,
    pub max_fragment_dual_src_attachments: u32,
    pub max_fragment_combined_output_resources: u32,
    pub max_compute_shared_memory_size: u32,
    // [x, y, z] dispatch limits.
    pub max_compute_work_group_count: [u32; 3],
    pub max_compute_work_group_invocations: u32,
    pub max_compute_work_group_size: [u32; 3],
    pub sub_pixel_precision_bits: u32,
    pub sub_texel_precision_bits: u32,
    pub mipmap_precision_bits: u32,
    pub max_draw_indexed_index_value: u32,
    pub max_draw_indirect_count: u32,
    pub max_sampler_lod_bias: f32,
    pub max_sampler_anisotropy: f32,
    pub max_viewports: u32,
    // [width, height] / [min, max] pairs.
    pub max_viewport_dimensions: [u32; 2],
    pub viewport_bounds_range: [f32; 2],
    pub viewport_sub_pixel_bits: u32,
    pub min_memory_map_alignment: usize,
    pub min_texel_buffer_offset_alignment: DeviceSize,
    pub min_uniform_buffer_offset_alignment: DeviceSize,
    pub min_storage_buffer_offset_alignment: DeviceSize,
    pub min_texel_offset: i32,
    pub max_texel_offset: u32,
    pub min_texel_gather_offset: i32,
    pub max_texel_gather_offset: u32,
    pub min_interpolation_offset: f32,
    pub max_interpolation_offset: f32,
    pub sub_pixel_interpolation_offset_bits: u32,
    pub max_framebuffer_width: u32,
    pub max_framebuffer_height: u32,
    pub max_framebuffer_layers: u32,
    pub framebuffer_color_sample_counts: SampleCountFlags,
    pub framebuffer_depth_sample_counts: SampleCountFlags,
    pub framebuffer_stencil_sample_counts: SampleCountFlags,
    pub framebuffer_no_attachments_sample_counts: SampleCountFlags,
    pub max_color_attachments: u32,
    pub sampled_image_color_sample_counts: SampleCountFlags,
    pub sampled_image_integer_sample_counts: SampleCountFlags,
    pub sampled_image_depth_sample_counts: SampleCountFlags,
    pub sampled_image_stencil_sample_counts: SampleCountFlags,
    pub storage_image_sample_counts: SampleCountFlags,
    pub max_sample_mask_words: u32,
    pub timestamp_compute_and_graphics: Bool32,
    pub timestamp_period: f32,
    pub max_clip_distances: u32,
    pub max_cull_distances: u32,
    pub max_combined_clip_and_cull_distances: u32,
    pub discrete_queue_priorities: u32,
    // [min, max] ranges with their granularities below.
    pub point_size_range: [f32; 2],
    pub line_width_range: [f32; 2],
    pub point_size_granularity: f32,
    pub line_width_granularity: f32,
    pub strict_lines: Bool32,
    pub standard_sample_locations: Bool32,
    pub optimal_buffer_copy_offset_alignment: DeviceSize,
    pub optimal_buffer_copy_row_pitch_alignment: DeviceSize,
    pub non_coherent_atom_size: DeviceSize,
}
+impl ::std::default::Default for PhysicalDeviceLimits {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ max_image_dimension1_d: u32::default(),
+ max_image_dimension2_d: u32::default(),
+ max_image_dimension3_d: u32::default(),
+ max_image_dimension_cube: u32::default(),
+ max_image_array_layers: u32::default(),
+ max_texel_buffer_elements: u32::default(),
+ max_uniform_buffer_range: u32::default(),
+ max_storage_buffer_range: u32::default(),
+ max_push_constants_size: u32::default(),
+ max_memory_allocation_count: u32::default(),
+ max_sampler_allocation_count: u32::default(),
+ buffer_image_granularity: DeviceSize::default(),
+ sparse_address_space_size: DeviceSize::default(),
+ max_bound_descriptor_sets: u32::default(),
+ max_per_stage_descriptor_samplers: u32::default(),
+ max_per_stage_descriptor_uniform_buffers: u32::default(),
+ max_per_stage_descriptor_storage_buffers: u32::default(),
+ max_per_stage_descriptor_sampled_images: u32::default(),
+ max_per_stage_descriptor_storage_images: u32::default(),
+ max_per_stage_descriptor_input_attachments: u32::default(),
+ max_per_stage_resources: u32::default(),
+ max_descriptor_set_samplers: u32::default(),
+ max_descriptor_set_uniform_buffers: u32::default(),
+ max_descriptor_set_uniform_buffers_dynamic: u32::default(),
+ max_descriptor_set_storage_buffers: u32::default(),
+ max_descriptor_set_storage_buffers_dynamic: u32::default(),
+ max_descriptor_set_sampled_images: u32::default(),
+ max_descriptor_set_storage_images: u32::default(),
+ max_descriptor_set_input_attachments: u32::default(),
+ max_vertex_input_attributes: u32::default(),
+ max_vertex_input_bindings: u32::default(),
+ max_vertex_input_attribute_offset: u32::default(),
+ max_vertex_input_binding_stride: u32::default(),
+ max_vertex_output_components: u32::default(),
+ max_tessellation_generation_level: u32::default(),
+ max_tessellation_patch_size: u32::default(),
+ max_tessellation_control_per_vertex_input_components: u32::default(),
+ max_tessellation_control_per_vertex_output_components: u32::default(),
+ max_tessellation_control_per_patch_output_components: u32::default(),
+ max_tessellation_control_total_output_components: u32::default(),
+ max_tessellation_evaluation_input_components: u32::default(),
+ max_tessellation_evaluation_output_components: u32::default(),
+ max_geometry_shader_invocations: u32::default(),
+ max_geometry_input_components: u32::default(),
+ max_geometry_output_components: u32::default(),
+ max_geometry_output_vertices: u32::default(),
+ max_geometry_total_output_components: u32::default(),
+ max_fragment_input_components: u32::default(),
+ max_fragment_output_attachments: u32::default(),
+ max_fragment_dual_src_attachments: u32::default(),
+ max_fragment_combined_output_resources: u32::default(),
+ max_compute_shared_memory_size: u32::default(),
+ max_compute_work_group_count: unsafe { ::std::mem::zeroed() },
+ max_compute_work_group_invocations: u32::default(),
+ max_compute_work_group_size: unsafe { ::std::mem::zeroed() },
+ sub_pixel_precision_bits: u32::default(),
+ sub_texel_precision_bits: u32::default(),
+ mipmap_precision_bits: u32::default(),
+ max_draw_indexed_index_value: u32::default(),
+ max_draw_indirect_count: u32::default(),
+ max_sampler_lod_bias: f32::default(),
+ max_sampler_anisotropy: f32::default(),
+ max_viewports: u32::default(),
+ max_viewport_dimensions: unsafe { ::std::mem::zeroed() },
+ viewport_bounds_range: unsafe { ::std::mem::zeroed() },
+ viewport_sub_pixel_bits: u32::default(),
+ min_memory_map_alignment: usize::default(),
+ min_texel_buffer_offset_alignment: DeviceSize::default(),
+ min_uniform_buffer_offset_alignment: DeviceSize::default(),
+ min_storage_buffer_offset_alignment: DeviceSize::default(),
+ min_texel_offset: i32::default(),
+ max_texel_offset: u32::default(),
+ min_texel_gather_offset: i32::default(),
+ max_texel_gather_offset: u32::default(),
+ min_interpolation_offset: f32::default(),
+ max_interpolation_offset: f32::default(),
+ sub_pixel_interpolation_offset_bits: u32::default(),
+ max_framebuffer_width: u32::default(),
+ max_framebuffer_height: u32::default(),
+ max_framebuffer_layers: u32::default(),
+ framebuffer_color_sample_counts: SampleCountFlags::default(),
+ framebuffer_depth_sample_counts: SampleCountFlags::default(),
+ framebuffer_stencil_sample_counts: SampleCountFlags::default(),
+ framebuffer_no_attachments_sample_counts: SampleCountFlags::default(),
+ max_color_attachments: u32::default(),
+ sampled_image_color_sample_counts: SampleCountFlags::default(),
+ sampled_image_integer_sample_counts: SampleCountFlags::default(),
+ sampled_image_depth_sample_counts: SampleCountFlags::default(),
+ sampled_image_stencil_sample_counts: SampleCountFlags::default(),
+ storage_image_sample_counts: SampleCountFlags::default(),
+ max_sample_mask_words: u32::default(),
+ timestamp_compute_and_graphics: Bool32::default(),
+ timestamp_period: f32::default(),
+ max_clip_distances: u32::default(),
+ max_cull_distances: u32::default(),
+ max_combined_clip_and_cull_distances: u32::default(),
+ discrete_queue_priorities: u32::default(),
+ point_size_range: unsafe { ::std::mem::zeroed() },
+ line_width_range: unsafe { ::std::mem::zeroed() },
+ point_size_granularity: f32::default(),
+ line_width_granularity: f32::default(),
+ strict_lines: Bool32::default(),
+ standard_sample_locations: Bool32::default(),
+ optimal_buffer_copy_offset_alignment: DeviceSize::default(),
+ optimal_buffer_copy_row_pitch_alignment: DeviceSize::default(),
+ non_coherent_atom_size: DeviceSize::default(),
+ }
+ }
+}
+impl PhysicalDeviceLimits {
+ pub fn builder<'a>() -> PhysicalDeviceLimitsBuilder<'a> {
+ PhysicalDeviceLimitsBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
// Builder wrapper around `PhysicalDeviceLimits`. `repr(transparent)` gives it
// the exact layout of the wrapped struct, so (via `Deref`) a reference to the
// builder can be passed where a reference to the struct is expected.
#[repr(transparent)]
pub struct PhysicalDeviceLimitsBuilder<'a> {
    inner: PhysicalDeviceLimits,
    // Carries the builder lifetime parameter; no borrowed data is stored here.
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Allows `&PhysicalDeviceLimitsBuilder` to be used as `&PhysicalDeviceLimits`.
impl<'a> ::std::ops::Deref for PhysicalDeviceLimitsBuilder<'a> {
    type Target = PhysicalDeviceLimits;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
// Mutable counterpart: `&mut PhysicalDeviceLimitsBuilder` as
// `&mut PhysicalDeviceLimits`.
impl<'a> ::std::ops::DerefMut for PhysicalDeviceLimitsBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceLimitsBuilder<'a> {
    // Generated field-for-field setters: each consumes the builder, overwrites
    // exactly one field of the wrapped `PhysicalDeviceLimits`, and returns the
    // builder so calls can be chained.
    #[inline]
    pub fn max_image_dimension1_d(mut self, max_image_dimension1_d: u32) -> Self {
        self.inner.max_image_dimension1_d = max_image_dimension1_d;
        self
    }
    #[inline]
    pub fn max_image_dimension2_d(mut self, max_image_dimension2_d: u32) -> Self {
        self.inner.max_image_dimension2_d = max_image_dimension2_d;
        self
    }
    #[inline]
    pub fn max_image_dimension3_d(mut self, max_image_dimension3_d: u32) -> Self {
        self.inner.max_image_dimension3_d = max_image_dimension3_d;
        self
    }
    #[inline]
    pub fn max_image_dimension_cube(mut self, max_image_dimension_cube: u32) -> Self {
        self.inner.max_image_dimension_cube = max_image_dimension_cube;
        self
    }
    #[inline]
    pub fn max_image_array_layers(mut self, max_image_array_layers: u32) -> Self {
        self.inner.max_image_array_layers = max_image_array_layers;
        self
    }
    #[inline]
    pub fn max_texel_buffer_elements(mut self, max_texel_buffer_elements: u32) -> Self {
        self.inner.max_texel_buffer_elements = max_texel_buffer_elements;
        self
    }
    #[inline]
    pub fn max_uniform_buffer_range(mut self, max_uniform_buffer_range: u32) -> Self {
        self.inner.max_uniform_buffer_range = max_uniform_buffer_range;
        self
    }
    #[inline]
    pub fn max_storage_buffer_range(mut self, max_storage_buffer_range: u32) -> Self {
        self.inner.max_storage_buffer_range = max_storage_buffer_range;
        self
    }
    #[inline]
    pub fn max_push_constants_size(mut self, max_push_constants_size: u32) -> Self {
        self.inner.max_push_constants_size = max_push_constants_size;
        self
    }
    #[inline]
    pub fn max_memory_allocation_count(mut self, max_memory_allocation_count: u32) -> Self {
        self.inner.max_memory_allocation_count = max_memory_allocation_count;
        self
    }
    #[inline]
    pub fn max_sampler_allocation_count(mut self, max_sampler_allocation_count: u32) -> Self {
        self.inner.max_sampler_allocation_count = max_sampler_allocation_count;
        self
    }
    #[inline]
    pub fn buffer_image_granularity(mut self, buffer_image_granularity: DeviceSize) -> Self {
        self.inner.buffer_image_granularity = buffer_image_granularity;
        self
    }
    #[inline]
    pub fn sparse_address_space_size(mut self, sparse_address_space_size: DeviceSize) -> Self {
        self.inner.sparse_address_space_size = sparse_address_space_size;
        self
    }
    #[inline]
    pub fn max_bound_descriptor_sets(mut self, max_bound_descriptor_sets: u32) -> Self {
        self.inner.max_bound_descriptor_sets = max_bound_descriptor_sets;
        self
    }
    #[inline]
    pub fn max_per_stage_descriptor_samplers(
        mut self,
        max_per_stage_descriptor_samplers: u32,
    ) -> Self {
        self.inner.max_per_stage_descriptor_samplers = max_per_stage_descriptor_samplers;
        self
    }
    #[inline]
    pub fn max_per_stage_descriptor_uniform_buffers(
        mut self,
        max_per_stage_descriptor_uniform_buffers: u32,
    ) -> Self {
        self.inner.max_per_stage_descriptor_uniform_buffers =
            max_per_stage_descriptor_uniform_buffers;
        self
    }
    #[inline]
    pub fn max_per_stage_descriptor_storage_buffers(
        mut self,
        max_per_stage_descriptor_storage_buffers: u32,
    ) -> Self {
        self.inner.max_per_stage_descriptor_storage_buffers =
            max_per_stage_descriptor_storage_buffers;
        self
    }
    #[inline]
    pub fn max_per_stage_descriptor_sampled_images(
        mut self,
        max_per_stage_descriptor_sampled_images: u32,
    ) -> Self {
        self.inner.max_per_stage_descriptor_sampled_images =
            max_per_stage_descriptor_sampled_images;
        self
    }
    #[inline]
    pub fn max_per_stage_descriptor_storage_images(
        mut self,
        max_per_stage_descriptor_storage_images: u32,
    ) -> Self {
        self.inner.max_per_stage_descriptor_storage_images =
            max_per_stage_descriptor_storage_images;
        self
    }
    #[inline]
    pub fn max_per_stage_descriptor_input_attachments(
        mut self,
        max_per_stage_descriptor_input_attachments: u32,
    ) -> Self {
        self.inner.max_per_stage_descriptor_input_attachments =
            max_per_stage_descriptor_input_attachments;
        self
    }
    #[inline]
    pub fn max_per_stage_resources(mut self, max_per_stage_resources: u32) -> Self {
        self.inner.max_per_stage_resources = max_per_stage_resources;
        self
    }
    #[inline]
    pub fn max_descriptor_set_samplers(mut self, max_descriptor_set_samplers: u32) -> Self {
        self.inner.max_descriptor_set_samplers = max_descriptor_set_samplers;
        self
    }
    #[inline]
    pub fn max_descriptor_set_uniform_buffers(
        mut self,
        max_descriptor_set_uniform_buffers: u32,
    ) -> Self {
        self.inner.max_descriptor_set_uniform_buffers = max_descriptor_set_uniform_buffers;
        self
    }
    #[inline]
    pub fn max_descriptor_set_uniform_buffers_dynamic(
        mut self,
        max_descriptor_set_uniform_buffers_dynamic: u32,
    ) -> Self {
        self.inner.max_descriptor_set_uniform_buffers_dynamic =
            max_descriptor_set_uniform_buffers_dynamic;
        self
    }
    #[inline]
    pub fn max_descriptor_set_storage_buffers(
        mut self,
        max_descriptor_set_storage_buffers: u32,
    ) -> Self {
        self.inner.max_descriptor_set_storage_buffers = max_descriptor_set_storage_buffers;
        self
    }
    #[inline]
    pub fn max_descriptor_set_storage_buffers_dynamic(
        mut self,
        max_descriptor_set_storage_buffers_dynamic: u32,
    ) -> Self {
        self.inner.max_descriptor_set_storage_buffers_dynamic =
            max_descriptor_set_storage_buffers_dynamic;
        self
    }
    #[inline]
    pub fn max_descriptor_set_sampled_images(
        mut self,
        max_descriptor_set_sampled_images: u32,
    ) -> Self {
        self.inner.max_descriptor_set_sampled_images = max_descriptor_set_sampled_images;
        self
    }
    #[inline]
    pub fn max_descriptor_set_storage_images(
        mut self,
        max_descriptor_set_storage_images: u32,
    ) -> Self {
        self.inner.max_descriptor_set_storage_images = max_descriptor_set_storage_images;
        self
    }
    #[inline]
    pub fn max_descriptor_set_input_attachments(
        mut self,
        max_descriptor_set_input_attachments: u32,
    ) -> Self {
        self.inner.max_descriptor_set_input_attachments = max_descriptor_set_input_attachments;
        self
    }
    #[inline]
    pub fn max_vertex_input_attributes(mut self, max_vertex_input_attributes: u32) -> Self {
        self.inner.max_vertex_input_attributes = max_vertex_input_attributes;
        self
    }
    #[inline]
    pub fn max_vertex_input_bindings(mut self, max_vertex_input_bindings: u32) -> Self {
        self.inner.max_vertex_input_bindings = max_vertex_input_bindings;
        self
    }
    #[inline]
    pub fn max_vertex_input_attribute_offset(
        mut self,
        max_vertex_input_attribute_offset: u32,
    ) -> Self {
        self.inner.max_vertex_input_attribute_offset = max_vertex_input_attribute_offset;
        self
    }
    #[inline]
    pub fn max_vertex_input_binding_stride(mut self, max_vertex_input_binding_stride: u32) -> Self {
        self.inner.max_vertex_input_binding_stride = max_vertex_input_binding_stride;
        self
    }
    #[inline]
    pub fn max_vertex_output_components(mut self, max_vertex_output_components: u32) -> Self {
        self.inner.max_vertex_output_components = max_vertex_output_components;
        self
    }
    #[inline]
    pub fn max_tessellation_generation_level(
        mut self,
        max_tessellation_generation_level: u32,
    ) -> Self {
        self.inner.max_tessellation_generation_level = max_tessellation_generation_level;
        self
    }
    #[inline]
    pub fn max_tessellation_patch_size(mut self, max_tessellation_patch_size: u32) -> Self {
        self.inner.max_tessellation_patch_size = max_tessellation_patch_size;
        self
    }
    #[inline]
    pub fn max_tessellation_control_per_vertex_input_components(
        mut self,
        max_tessellation_control_per_vertex_input_components: u32,
    ) -> Self {
        self.inner
            .max_tessellation_control_per_vertex_input_components =
            max_tessellation_control_per_vertex_input_components;
        self
    }
    #[inline]
    pub fn max_tessellation_control_per_vertex_output_components(
        mut self,
        max_tessellation_control_per_vertex_output_components: u32,
    ) -> Self {
        self.inner
            .max_tessellation_control_per_vertex_output_components =
            max_tessellation_control_per_vertex_output_components;
        self
    }
    #[inline]
    pub fn max_tessellation_control_per_patch_output_components(
        mut self,
        max_tessellation_control_per_patch_output_components: u32,
    ) -> Self {
        self.inner
            .max_tessellation_control_per_patch_output_components =
            max_tessellation_control_per_patch_output_components;
        self
    }
    #[inline]
    pub fn max_tessellation_control_total_output_components(
        mut self,
        max_tessellation_control_total_output_components: u32,
    ) -> Self {
        self.inner.max_tessellation_control_total_output_components =
            max_tessellation_control_total_output_components;
        self
    }
    #[inline]
    pub fn max_tessellation_evaluation_input_components(
        mut self,
        max_tessellation_evaluation_input_components: u32,
    ) -> Self {
        self.inner.max_tessellation_evaluation_input_components =
            max_tessellation_evaluation_input_components;
        self
    }
    #[inline]
    pub fn max_tessellation_evaluation_output_components(
        mut self,
        max_tessellation_evaluation_output_components: u32,
    ) -> Self {
        self.inner.max_tessellation_evaluation_output_components =
            max_tessellation_evaluation_output_components;
        self
    }
    #[inline]
    pub fn max_geometry_shader_invocations(mut self, max_geometry_shader_invocations: u32) -> Self {
        self.inner.max_geometry_shader_invocations = max_geometry_shader_invocations;
        self
    }
    #[inline]
    pub fn max_geometry_input_components(mut self, max_geometry_input_components: u32) -> Self {
        self.inner.max_geometry_input_components = max_geometry_input_components;
        self
    }
    #[inline]
    pub fn max_geometry_output_components(mut self, max_geometry_output_components: u32) -> Self {
        self.inner.max_geometry_output_components = max_geometry_output_components;
        self
    }
    #[inline]
    pub fn max_geometry_output_vertices(mut self, max_geometry_output_vertices: u32) -> Self {
        self.inner.max_geometry_output_vertices = max_geometry_output_vertices;
        self
    }
    #[inline]
    pub fn max_geometry_total_output_components(
        mut self,
        max_geometry_total_output_components: u32,
    ) -> Self {
        self.inner.max_geometry_total_output_components = max_geometry_total_output_components;
        self
    }
    #[inline]
    pub fn max_fragment_input_components(mut self, max_fragment_input_components: u32) -> Self {
        self.inner.max_fragment_input_components = max_fragment_input_components;
        self
    }
    #[inline]
    pub fn max_fragment_output_attachments(mut self, max_fragment_output_attachments: u32) -> Self {
        self.inner.max_fragment_output_attachments = max_fragment_output_attachments;
        self
    }
    #[inline]
    pub fn max_fragment_dual_src_attachments(
        mut self,
        max_fragment_dual_src_attachments: u32,
    ) -> Self {
        self.inner.max_fragment_dual_src_attachments = max_fragment_dual_src_attachments;
        self
    }
    #[inline]
    pub fn max_fragment_combined_output_resources(
        mut self,
        max_fragment_combined_output_resources: u32,
    ) -> Self {
        self.inner.max_fragment_combined_output_resources = max_fragment_combined_output_resources;
        self
    }
    #[inline]
    pub fn max_compute_shared_memory_size(mut self, max_compute_shared_memory_size: u32) -> Self {
        self.inner.max_compute_shared_memory_size = max_compute_shared_memory_size;
        self
    }
    #[inline]
    pub fn max_compute_work_group_count(mut self, max_compute_work_group_count: [u32; 3]) -> Self {
        self.inner.max_compute_work_group_count = max_compute_work_group_count;
        self
    }
    #[inline]
    pub fn max_compute_work_group_invocations(
        mut self,
        max_compute_work_group_invocations: u32,
    ) -> Self {
        self.inner.max_compute_work_group_invocations = max_compute_work_group_invocations;
        self
    }
    #[inline]
    pub fn max_compute_work_group_size(mut self, max_compute_work_group_size: [u32; 3]) -> Self {
        self.inner.max_compute_work_group_size = max_compute_work_group_size;
        self
    }
    #[inline]
    pub fn sub_pixel_precision_bits(mut self, sub_pixel_precision_bits: u32) -> Self {
        self.inner.sub_pixel_precision_bits = sub_pixel_precision_bits;
        self
    }
    #[inline]
    pub fn sub_texel_precision_bits(mut self, sub_texel_precision_bits: u32) -> Self {
        self.inner.sub_texel_precision_bits = sub_texel_precision_bits;
        self
    }
    #[inline]
    pub fn mipmap_precision_bits(mut self, mipmap_precision_bits: u32) -> Self {
        self.inner.mipmap_precision_bits = mipmap_precision_bits;
        self
    }
    #[inline]
    pub fn max_draw_indexed_index_value(mut self, max_draw_indexed_index_value: u32) -> Self {
        self.inner.max_draw_indexed_index_value = max_draw_indexed_index_value;
        self
    }
    #[inline]
    pub fn max_draw_indirect_count(mut self, max_draw_indirect_count: u32) -> Self {
        self.inner.max_draw_indirect_count = max_draw_indirect_count;
        self
    }
    #[inline]
    pub fn max_sampler_lod_bias(mut self, max_sampler_lod_bias: f32) -> Self {
        self.inner.max_sampler_lod_bias = max_sampler_lod_bias;
        self
    }
    #[inline]
    pub fn max_sampler_anisotropy(mut self, max_sampler_anisotropy: f32) -> Self {
        self.inner.max_sampler_anisotropy = max_sampler_anisotropy;
        self
    }
    #[inline]
    pub fn max_viewports(mut self, max_viewports: u32) -> Self {
        self.inner.max_viewports = max_viewports;
        self
    }
    #[inline]
    pub fn max_viewport_dimensions(mut self, max_viewport_dimensions: [u32; 2]) -> Self {
        self.inner.max_viewport_dimensions = max_viewport_dimensions;
        self
    }
    #[inline]
    pub fn viewport_bounds_range(mut self, viewport_bounds_range: [f32; 2]) -> Self {
        self.inner.viewport_bounds_range = viewport_bounds_range;
        self
    }
    #[inline]
    pub fn viewport_sub_pixel_bits(mut self, viewport_sub_pixel_bits: u32) -> Self {
        self.inner.viewport_sub_pixel_bits = viewport_sub_pixel_bits;
        self
    }
    #[inline]
    pub fn min_memory_map_alignment(mut self, min_memory_map_alignment: usize) -> Self {
        self.inner.min_memory_map_alignment = min_memory_map_alignment;
        self
    }
    #[inline]
    pub fn min_texel_buffer_offset_alignment(
        mut self,
        min_texel_buffer_offset_alignment: DeviceSize,
    ) -> Self {
        self.inner.min_texel_buffer_offset_alignment = min_texel_buffer_offset_alignment;
        self
    }
    #[inline]
    pub fn min_uniform_buffer_offset_alignment(
        mut self,
        min_uniform_buffer_offset_alignment: DeviceSize,
    ) -> Self {
        self.inner.min_uniform_buffer_offset_alignment = min_uniform_buffer_offset_alignment;
        self
    }
    #[inline]
    pub fn min_storage_buffer_offset_alignment(
        mut self,
        min_storage_buffer_offset_alignment: DeviceSize,
    ) -> Self {
        self.inner.min_storage_buffer_offset_alignment = min_storage_buffer_offset_alignment;
        self
    }
    #[inline]
    pub fn min_texel_offset(mut self, min_texel_offset: i32) -> Self {
        self.inner.min_texel_offset = min_texel_offset;
        self
    }
    #[inline]
    pub fn max_texel_offset(mut self, max_texel_offset: u32) -> Self {
        self.inner.max_texel_offset = max_texel_offset;
        self
    }
    #[inline]
    pub fn min_texel_gather_offset(mut self, min_texel_gather_offset: i32) -> Self {
        self.inner.min_texel_gather_offset = min_texel_gather_offset;
        self
    }
    #[inline]
    pub fn max_texel_gather_offset(mut self, max_texel_gather_offset: u32) -> Self {
        self.inner.max_texel_gather_offset = max_texel_gather_offset;
        self
    }
    #[inline]
    pub fn min_interpolation_offset(mut self, min_interpolation_offset: f32) -> Self {
        self.inner.min_interpolation_offset = min_interpolation_offset;
        self
    }
    #[inline]
    pub fn max_interpolation_offset(mut self, max_interpolation_offset: f32) -> Self {
        self.inner.max_interpolation_offset = max_interpolation_offset;
        self
    }
    #[inline]
    pub fn sub_pixel_interpolation_offset_bits(
        mut self,
        sub_pixel_interpolation_offset_bits: u32,
    ) -> Self {
        self.inner.sub_pixel_interpolation_offset_bits = sub_pixel_interpolation_offset_bits;
        self
    }
    #[inline]
    pub fn max_framebuffer_width(mut self, max_framebuffer_width: u32) -> Self {
        self.inner.max_framebuffer_width = max_framebuffer_width;
        self
    }
    #[inline]
    pub fn max_framebuffer_height(mut self, max_framebuffer_height: u32) -> Self {
        self.inner.max_framebuffer_height = max_framebuffer_height;
        self
    }
    #[inline]
    pub fn max_framebuffer_layers(mut self, max_framebuffer_layers: u32) -> Self {
        self.inner.max_framebuffer_layers = max_framebuffer_layers;
        self
    }
    #[inline]
    pub fn framebuffer_color_sample_counts(
        mut self,
        framebuffer_color_sample_counts: SampleCountFlags,
    ) -> Self {
        self.inner.framebuffer_color_sample_counts = framebuffer_color_sample_counts;
        self
    }
    #[inline]
    pub fn framebuffer_depth_sample_counts(
        mut self,
        framebuffer_depth_sample_counts: SampleCountFlags,
    ) -> Self {
        self.inner.framebuffer_depth_sample_counts = framebuffer_depth_sample_counts;
        self
    }
    #[inline]
    pub fn framebuffer_stencil_sample_counts(
        mut self,
        framebuffer_stencil_sample_counts: SampleCountFlags,
    ) -> Self {
        self.inner.framebuffer_stencil_sample_counts = framebuffer_stencil_sample_counts;
        self
    }
    #[inline]
    pub fn framebuffer_no_attachments_sample_counts(
        mut self,
        framebuffer_no_attachments_sample_counts: SampleCountFlags,
    ) -> Self {
        self.inner.framebuffer_no_attachments_sample_counts =
            framebuffer_no_attachments_sample_counts;
        self
    }
    #[inline]
    pub fn max_color_attachments(mut self, max_color_attachments: u32) -> Self {
        self.inner.max_color_attachments = max_color_attachments;
        self
    }
    #[inline]
    pub fn sampled_image_color_sample_counts(
        mut self,
        sampled_image_color_sample_counts: SampleCountFlags,
    ) -> Self {
        self.inner.sampled_image_color_sample_counts = sampled_image_color_sample_counts;
        self
    }
    #[inline]
    pub fn sampled_image_integer_sample_counts(
        mut self,
        sampled_image_integer_sample_counts: SampleCountFlags,
    ) -> Self {
        self.inner.sampled_image_integer_sample_counts = sampled_image_integer_sample_counts;
        self
    }
    #[inline]
    pub fn sampled_image_depth_sample_counts(
        mut self,
        sampled_image_depth_sample_counts: SampleCountFlags,
    ) -> Self {
        self.inner.sampled_image_depth_sample_counts = sampled_image_depth_sample_counts;
        self
    }
    #[inline]
    pub fn sampled_image_stencil_sample_counts(
        mut self,
        sampled_image_stencil_sample_counts: SampleCountFlags,
    ) -> Self {
        self.inner.sampled_image_stencil_sample_counts = sampled_image_stencil_sample_counts;
        self
    }
    #[inline]
    pub fn storage_image_sample_counts(
        mut self,
        storage_image_sample_counts: SampleCountFlags,
    ) -> Self {
        self.inner.storage_image_sample_counts = storage_image_sample_counts;
        self
    }
    #[inline]
    pub fn max_sample_mask_words(mut self, max_sample_mask_words: u32) -> Self {
        self.inner.max_sample_mask_words = max_sample_mask_words;
        self
    }
    // Boolean setters accept a Rust `bool` and store it as a Vulkan `Bool32`
    // via `.into()`.
    #[inline]
    pub fn timestamp_compute_and_graphics(mut self, timestamp_compute_and_graphics: bool) -> Self {
        self.inner.timestamp_compute_and_graphics = timestamp_compute_and_graphics.into();
        self
    }
    #[inline]
    pub fn timestamp_period(mut self, timestamp_period: f32) -> Self {
        self.inner.timestamp_period = timestamp_period;
        self
    }
    #[inline]
    pub fn max_clip_distances(mut self, max_clip_distances: u32) -> Self {
        self.inner.max_clip_distances = max_clip_distances;
        self
    }
    #[inline]
    pub fn max_cull_distances(mut self, max_cull_distances: u32) -> Self {
        self.inner.max_cull_distances = max_cull_distances;
        self
    }
    #[inline]
    pub fn max_combined_clip_and_cull_distances(
        mut self,
        max_combined_clip_and_cull_distances: u32,
    ) -> Self {
        self.inner.max_combined_clip_and_cull_distances = max_combined_clip_and_cull_distances;
        self
    }
    #[inline]
    pub fn discrete_queue_priorities(mut self, discrete_queue_priorities: u32) -> Self {
        self.inner.discrete_queue_priorities = discrete_queue_priorities;
        self
    }
    #[inline]
    pub fn point_size_range(mut self, point_size_range: [f32; 2]) -> Self {
        self.inner.point_size_range = point_size_range;
        self
    }
    #[inline]
    pub fn line_width_range(mut self, line_width_range: [f32; 2]) -> Self {
        self.inner.line_width_range = line_width_range;
        self
    }
    #[inline]
    pub fn point_size_granularity(mut self, point_size_granularity: f32) -> Self {
        self.inner.point_size_granularity = point_size_granularity;
        self
    }
    #[inline]
    pub fn line_width_granularity(mut self, line_width_granularity: f32) -> Self {
        self.inner.line_width_granularity = line_width_granularity;
        self
    }
    #[inline]
    pub fn strict_lines(mut self, strict_lines: bool) -> Self {
        self.inner.strict_lines = strict_lines.into();
        self
    }
    #[inline]
    pub fn standard_sample_locations(mut self, standard_sample_locations: bool) -> Self {
        self.inner.standard_sample_locations = standard_sample_locations.into();
        self
    }
    #[inline]
    pub fn optimal_buffer_copy_offset_alignment(
        mut self,
        optimal_buffer_copy_offset_alignment: DeviceSize,
    ) -> Self {
        self.inner.optimal_buffer_copy_offset_alignment = optimal_buffer_copy_offset_alignment;
        self
    }
    #[inline]
    pub fn optimal_buffer_copy_row_pitch_alignment(
        mut self,
        optimal_buffer_copy_row_pitch_alignment: DeviceSize,
    ) -> Self {
        self.inner.optimal_buffer_copy_row_pitch_alignment =
            optimal_buffer_copy_row_pitch_alignment;
        self
    }
    #[inline]
    pub fn non_coherent_atom_size(mut self, non_coherent_atom_size: DeviceSize) -> Self {
        self.inner.non_coherent_atom_size = non_coherent_atom_size;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceLimits {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSemaphoreCreateInfo.html>"]
pub struct SemaphoreCreateInfo {
    // Structure tag; `Default` sets this to `SEMAPHORE_CREATE_INFO`.
    pub s_type: StructureType,
    // Head of the extension (`pNext`) chain; null when no extensions are used.
    pub p_next: *const c_void,
    pub flags: SemaphoreCreateFlags,
}
+impl ::std::default::Default for SemaphoreCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: SemaphoreCreateFlags::default(),
+ }
+ }
+}
// Binds this struct to its `VkStructureType` tag so generic code (and
// `Default`) can stamp `s_type` correctly.
unsafe impl TaggedStructure for SemaphoreCreateInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_CREATE_INFO;
}
+impl SemaphoreCreateInfo {
+ pub fn builder<'a>() -> SemaphoreCreateInfoBuilder<'a> {
+ SemaphoreCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
// Builder wrapper around `SemaphoreCreateInfo`; `repr(transparent)` makes its
// layout identical to the wrapped struct so `Deref` references are usable
// in its place.
#[repr(transparent)]
pub struct SemaphoreCreateInfoBuilder<'a> {
    inner: SemaphoreCreateInfo,
    // Ties the lifetime of pushed extension structs (`push_next`) to the builder.
    marker: ::std::marker::PhantomData<&'a ()>,
}
/// Marker trait for structs that may be pushed onto a `SemaphoreCreateInfo`
/// `p_next` chain via `push_next`.
///
/// # Safety
/// NOTE(review): implementors are presumably required to be `#[repr(C)]`
/// Vulkan structures that the spec lists as valid `VkSemaphoreCreateInfo`
/// extensions (so they can be reinterpreted as a base out-structure) —
/// confirm against the binding generator's rules before implementing by hand.
pub unsafe trait ExtendsSemaphoreCreateInfo {}
// Allows `&SemaphoreCreateInfoBuilder` to be used as `&SemaphoreCreateInfo`.
impl<'a> ::std::ops::Deref for SemaphoreCreateInfoBuilder<'a> {
    type Target = SemaphoreCreateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
// Mutable counterpart of the `Deref` impl above it in the generated output.
impl<'a> ::std::ops::DerefMut for SemaphoreCreateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> SemaphoreCreateInfoBuilder<'a> {
    // Chainable setter for the (currently reserved) semaphore creation flags.
    #[inline]
    pub fn flags(mut self, flags: SemaphoreCreateFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsSemaphoreCreateInfo>(mut self, next: &'a mut T) -> Self {
        unsafe {
            // SAFETY(review): relies on `T` being a Vulkan extension struct
            // whose leading fields form an s_type/p_next base structure — an
            // invariant promised by the `unsafe` `ExtendsSemaphoreCreateInfo`
            // impl. `ptr_chain_iter` (defined elsewhere in this file)
            // presumably walks `next`'s own p_next chain; confirm its contract.
            let next_ptr = <*const T>::cast(next);
            // Splice `next`'s chain in front of the current head: the last
            // element of `next`'s chain points at the old head, then `next`
            // becomes the new head.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> SemaphoreCreateInfo {
        self.inner
    }
}
// C-layout mirror of VkQueryPoolCreateInfo; field order is ABI and must not
// be changed.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueryPoolCreateInfo.html>"]
pub struct QueryPoolCreateInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: QueryPoolCreateFlags,
    pub query_type: QueryType,
    pub query_count: u32,
    pub pipeline_statistics: QueryPipelineStatisticFlags,
}
impl ::std::default::Default for QueryPoolCreateInfo {
    #[inline]
    fn default() -> Self {
        Self {
            // `s_type` comes from the TaggedStructure impl below; the chain
            // starts empty.
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: QueryPoolCreateFlags::default(),
            query_type: QueryType::default(),
            query_count: u32::default(),
            pipeline_statistics: QueryPipelineStatisticFlags::default(),
        }
    }
}
unsafe impl TaggedStructure for QueryPoolCreateInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::QUERY_POOL_CREATE_INFO;
}
impl QueryPoolCreateInfo {
    // Entry point of the fluent builder, starting from `Self::default()`.
    pub fn builder<'a>() -> QueryPoolCreateInfoBuilder<'a> {
        QueryPoolCreateInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper; `'a` ties borrowed data in the `p_next` chain to its
// source.
#[repr(transparent)]
pub struct QueryPoolCreateInfoBuilder<'a> {
    inner: QueryPoolCreateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait for structs allowed in this struct's `p_next` chain.
pub unsafe trait ExtendsQueryPoolCreateInfo {}
impl<'a> ::std::ops::Deref for QueryPoolCreateInfoBuilder<'a> {
    type Target = QueryPoolCreateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for QueryPoolCreateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> QueryPoolCreateInfoBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: QueryPoolCreateFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn query_type(mut self, query_type: QueryType) -> Self {
        self.inner.query_type = query_type;
        self
    }
    #[inline]
    pub fn query_count(mut self, query_count: u32) -> Self {
        self.inner.query_count = query_count;
        self
    }
    #[inline]
    pub fn pipeline_statistics(mut self, pipeline_statistics: QueryPipelineStatisticFlags) -> Self {
        self.inner.pipeline_statistics = pipeline_statistics;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsQueryPoolCreateInfo>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*const T>::cast(next);
            // Splice the existing chain onto the end of `next`'s own chain,
            // then install `next` as the new head; write order matters.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> QueryPoolCreateInfo {
        self.inner
    }
}
// C-layout mirror of VkFramebufferCreateInfo; field order is ABI and must not
// be changed.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFramebufferCreateInfo.html>"]
pub struct FramebufferCreateInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: FramebufferCreateFlags,
    pub render_pass: RenderPass,
    // `attachment_count` / `p_attachments` describe one slice; the builder's
    // `attachments()` setter keeps them in sync.
    pub attachment_count: u32,
    pub p_attachments: *const ImageView,
    pub width: u32,
    pub height: u32,
    pub layers: u32,
}
impl ::std::default::Default for FramebufferCreateInfo {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: FramebufferCreateFlags::default(),
            render_pass: RenderPass::default(),
            attachment_count: u32::default(),
            p_attachments: ::std::ptr::null(),
            width: u32::default(),
            height: u32::default(),
            layers: u32::default(),
        }
    }
}
unsafe impl TaggedStructure for FramebufferCreateInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::FRAMEBUFFER_CREATE_INFO;
}
impl FramebufferCreateInfo {
    // Entry point of the fluent builder, starting from `Self::default()`.
    pub fn builder<'a>() -> FramebufferCreateInfoBuilder<'a> {
        FramebufferCreateInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper; `'a` ties borrowed slices/pointers stored through the
// builder to their source.
#[repr(transparent)]
pub struct FramebufferCreateInfoBuilder<'a> {
    inner: FramebufferCreateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait for structs allowed in this struct's `p_next` chain.
pub unsafe trait ExtendsFramebufferCreateInfo {}
impl<'a> ::std::ops::Deref for FramebufferCreateInfoBuilder<'a> {
    type Target = FramebufferCreateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for FramebufferCreateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> FramebufferCreateInfoBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: FramebufferCreateFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn render_pass(mut self, render_pass: RenderPass) -> Self {
        self.inner.render_pass = render_pass;
        self
    }
    // Stores the slice's length and raw pointer; the slice must stay alive
    // for `'a` (enforced by the builder's lifetime, discarded by `build()`).
    #[inline]
    pub fn attachments(mut self, attachments: &'a [ImageView]) -> Self {
        self.inner.attachment_count = attachments.len() as _;
        self.inner.p_attachments = attachments.as_ptr();
        self
    }
    #[inline]
    pub fn width(mut self, width: u32) -> Self {
        self.inner.width = width;
        self
    }
    #[inline]
    pub fn height(mut self, height: u32) -> Self {
        self.inner.height = height;
        self
    }
    #[inline]
    pub fn layers(mut self, layers: u32) -> Self {
        self.inner.layers = layers;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsFramebufferCreateInfo>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*const T>::cast(next);
            // Splice the existing chain onto the end of `next`'s own chain,
            // then install `next` as the new head; write order matters.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> FramebufferCreateInfo {
        self.inner
    }
}
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDrawIndirectCommand.html>
///
/// C-layout mirror of `VkDrawIndirectCommand`; the field order is ABI.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
pub struct DrawIndirectCommand {
    pub vertex_count: u32,
    pub instance_count: u32,
    pub first_vertex: u32,
    pub first_instance: u32,
}
/// Fluent builder for [`DrawIndirectCommand`]; derefs to the wrapped struct.
#[repr(transparent)]
pub struct DrawIndirectCommandBuilder<'a> {
    inner: DrawIndirectCommand,
    marker: std::marker::PhantomData<&'a ()>,
}
impl DrawIndirectCommand {
    /// Starts a builder initialised with the all-zero default value.
    pub fn builder<'a>() -> DrawIndirectCommandBuilder<'a> {
        DrawIndirectCommandBuilder {
            inner: Default::default(),
            marker: std::marker::PhantomData,
        }
    }
}
impl<'a> std::ops::Deref for DrawIndirectCommandBuilder<'a> {
    type Target = DrawIndirectCommand;
    fn deref(&self) -> &DrawIndirectCommand {
        &self.inner
    }
}
impl<'a> std::ops::DerefMut for DrawIndirectCommandBuilder<'a> {
    fn deref_mut(&mut self) -> &mut DrawIndirectCommand {
        &mut self.inner
    }
}
impl<'a> DrawIndirectCommandBuilder<'a> {
    /// Sets `vertex_count`.
    #[inline]
    pub fn vertex_count(mut self, value: u32) -> Self {
        self.inner.vertex_count = value;
        self
    }
    /// Sets `instance_count`.
    #[inline]
    pub fn instance_count(mut self, value: u32) -> Self {
        self.inner.instance_count = value;
        self
    }
    /// Sets `first_vertex`.
    #[inline]
    pub fn first_vertex(mut self, value: u32) -> Self {
        self.inner.first_vertex = value;
        self
    }
    /// Sets `first_instance`.
    #[inline]
    pub fn first_instance(mut self, value: u32) -> Self {
        self.inner.first_instance = value;
        self
    }
    /// Unwraps the finished struct. Rarely needed: the builder already
    /// derefs to [`DrawIndirectCommand`], so it can be passed by reference.
    pub fn build(self) -> DrawIndirectCommand {
        self.inner
    }
}
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDrawIndexedIndirectCommand.html>
///
/// C-layout mirror of `VkDrawIndexedIndirectCommand`; the field order is ABI.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
pub struct DrawIndexedIndirectCommand {
    pub index_count: u32,
    pub instance_count: u32,
    pub first_index: u32,
    pub vertex_offset: i32,
    pub first_instance: u32,
}
/// Fluent builder for [`DrawIndexedIndirectCommand`]; derefs to the wrapped struct.
#[repr(transparent)]
pub struct DrawIndexedIndirectCommandBuilder<'a> {
    inner: DrawIndexedIndirectCommand,
    marker: std::marker::PhantomData<&'a ()>,
}
impl DrawIndexedIndirectCommand {
    /// Starts a builder initialised with the all-zero default value.
    pub fn builder<'a>() -> DrawIndexedIndirectCommandBuilder<'a> {
        DrawIndexedIndirectCommandBuilder {
            inner: Default::default(),
            marker: std::marker::PhantomData,
        }
    }
}
impl<'a> std::ops::Deref for DrawIndexedIndirectCommandBuilder<'a> {
    type Target = DrawIndexedIndirectCommand;
    fn deref(&self) -> &DrawIndexedIndirectCommand {
        &self.inner
    }
}
impl<'a> std::ops::DerefMut for DrawIndexedIndirectCommandBuilder<'a> {
    fn deref_mut(&mut self) -> &mut DrawIndexedIndirectCommand {
        &mut self.inner
    }
}
impl<'a> DrawIndexedIndirectCommandBuilder<'a> {
    /// Sets `index_count`.
    #[inline]
    pub fn index_count(mut self, value: u32) -> Self {
        self.inner.index_count = value;
        self
    }
    /// Sets `instance_count`.
    #[inline]
    pub fn instance_count(mut self, value: u32) -> Self {
        self.inner.instance_count = value;
        self
    }
    /// Sets `first_index`.
    #[inline]
    pub fn first_index(mut self, value: u32) -> Self {
        self.inner.first_index = value;
        self
    }
    /// Sets `vertex_offset` (signed: may bias indices downwards).
    #[inline]
    pub fn vertex_offset(mut self, value: i32) -> Self {
        self.inner.vertex_offset = value;
        self
    }
    /// Sets `first_instance`.
    #[inline]
    pub fn first_instance(mut self, value: u32) -> Self {
        self.inner.first_instance = value;
        self
    }
    /// Unwraps the finished struct. Rarely needed: the builder already
    /// derefs to [`DrawIndexedIndirectCommand`].
    pub fn build(self) -> DrawIndexedIndirectCommand {
        self.inner
    }
}
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDispatchIndirectCommand.html>
///
/// C-layout mirror of `VkDispatchIndirectCommand`; the field order is ABI.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
pub struct DispatchIndirectCommand {
    pub x: u32,
    pub y: u32,
    pub z: u32,
}
/// Fluent builder for [`DispatchIndirectCommand`]; derefs to the wrapped struct.
#[repr(transparent)]
pub struct DispatchIndirectCommandBuilder<'a> {
    inner: DispatchIndirectCommand,
    marker: std::marker::PhantomData<&'a ()>,
}
impl DispatchIndirectCommand {
    /// Starts a builder initialised with the all-zero default value.
    pub fn builder<'a>() -> DispatchIndirectCommandBuilder<'a> {
        DispatchIndirectCommandBuilder {
            inner: Default::default(),
            marker: std::marker::PhantomData,
        }
    }
}
impl<'a> std::ops::Deref for DispatchIndirectCommandBuilder<'a> {
    type Target = DispatchIndirectCommand;
    fn deref(&self) -> &DispatchIndirectCommand {
        &self.inner
    }
}
impl<'a> std::ops::DerefMut for DispatchIndirectCommandBuilder<'a> {
    fn deref_mut(&mut self) -> &mut DispatchIndirectCommand {
        &mut self.inner
    }
}
impl<'a> DispatchIndirectCommandBuilder<'a> {
    /// Sets `x`.
    #[inline]
    pub fn x(mut self, value: u32) -> Self {
        self.inner.x = value;
        self
    }
    /// Sets `y`.
    #[inline]
    pub fn y(mut self, value: u32) -> Self {
        self.inner.y = value;
        self
    }
    /// Sets `z`.
    #[inline]
    pub fn z(mut self, value: u32) -> Self {
        self.inner.z = value;
        self
    }
    /// Unwraps the finished struct. Rarely needed: the builder already
    /// derefs to [`DispatchIndirectCommand`].
    pub fn build(self) -> DispatchIndirectCommand {
        self.inner
    }
}
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMultiDrawInfoEXT.html>
///
/// C-layout mirror of `VkMultiDrawInfoEXT`; the field order is ABI.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
pub struct MultiDrawInfoEXT {
    pub first_vertex: u32,
    pub vertex_count: u32,
}
/// Fluent builder for [`MultiDrawInfoEXT`]; derefs to the wrapped struct.
#[repr(transparent)]
pub struct MultiDrawInfoEXTBuilder<'a> {
    inner: MultiDrawInfoEXT,
    marker: std::marker::PhantomData<&'a ()>,
}
impl MultiDrawInfoEXT {
    /// Starts a builder initialised with the all-zero default value.
    pub fn builder<'a>() -> MultiDrawInfoEXTBuilder<'a> {
        MultiDrawInfoEXTBuilder {
            inner: Default::default(),
            marker: std::marker::PhantomData,
        }
    }
}
impl<'a> std::ops::Deref for MultiDrawInfoEXTBuilder<'a> {
    type Target = MultiDrawInfoEXT;
    fn deref(&self) -> &MultiDrawInfoEXT {
        &self.inner
    }
}
impl<'a> std::ops::DerefMut for MultiDrawInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut MultiDrawInfoEXT {
        &mut self.inner
    }
}
impl<'a> MultiDrawInfoEXTBuilder<'a> {
    /// Sets `first_vertex`.
    #[inline]
    pub fn first_vertex(mut self, value: u32) -> Self {
        self.inner.first_vertex = value;
        self
    }
    /// Sets `vertex_count`.
    #[inline]
    pub fn vertex_count(mut self, value: u32) -> Self {
        self.inner.vertex_count = value;
        self
    }
    /// Unwraps the finished struct. Rarely needed: the builder already
    /// derefs to [`MultiDrawInfoEXT`].
    pub fn build(self) -> MultiDrawInfoEXT {
        self.inner
    }
}
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMultiDrawIndexedInfoEXT.html>
///
/// C-layout mirror of `VkMultiDrawIndexedInfoEXT`; the field order is ABI.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
pub struct MultiDrawIndexedInfoEXT {
    pub first_index: u32,
    pub index_count: u32,
    pub vertex_offset: i32,
}
/// Fluent builder for [`MultiDrawIndexedInfoEXT`]; derefs to the wrapped struct.
#[repr(transparent)]
pub struct MultiDrawIndexedInfoEXTBuilder<'a> {
    inner: MultiDrawIndexedInfoEXT,
    marker: std::marker::PhantomData<&'a ()>,
}
impl MultiDrawIndexedInfoEXT {
    /// Starts a builder initialised with the all-zero default value.
    pub fn builder<'a>() -> MultiDrawIndexedInfoEXTBuilder<'a> {
        MultiDrawIndexedInfoEXTBuilder {
            inner: Default::default(),
            marker: std::marker::PhantomData,
        }
    }
}
impl<'a> std::ops::Deref for MultiDrawIndexedInfoEXTBuilder<'a> {
    type Target = MultiDrawIndexedInfoEXT;
    fn deref(&self) -> &MultiDrawIndexedInfoEXT {
        &self.inner
    }
}
impl<'a> std::ops::DerefMut for MultiDrawIndexedInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut MultiDrawIndexedInfoEXT {
        &mut self.inner
    }
}
impl<'a> MultiDrawIndexedInfoEXTBuilder<'a> {
    /// Sets `first_index`.
    #[inline]
    pub fn first_index(mut self, value: u32) -> Self {
        self.inner.first_index = value;
        self
    }
    /// Sets `index_count`.
    #[inline]
    pub fn index_count(mut self, value: u32) -> Self {
        self.inner.index_count = value;
        self
    }
    /// Sets `vertex_offset` (signed: may bias indices downwards).
    #[inline]
    pub fn vertex_offset(mut self, value: i32) -> Self {
        self.inner.vertex_offset = value;
        self
    }
    /// Unwraps the finished struct. Rarely needed: the builder already
    /// derefs to [`MultiDrawIndexedInfoEXT`].
    pub fn build(self) -> MultiDrawIndexedInfoEXT {
        self.inner
    }
}
// C-layout mirror of VkSubmitInfo; field order is ABI and must not be
// changed. The count/pointer pairs each describe one borrowed slice.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubmitInfo.html>"]
pub struct SubmitInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub wait_semaphore_count: u32,
    pub p_wait_semaphores: *const Semaphore,
    // Per the Vulkan API, pWaitDstStageMask has waitSemaphoreCount entries,
    // so this array shares wait_semaphore_count with p_wait_semaphores.
    pub p_wait_dst_stage_mask: *const PipelineStageFlags,
    pub command_buffer_count: u32,
    pub p_command_buffers: *const CommandBuffer,
    pub signal_semaphore_count: u32,
    pub p_signal_semaphores: *const Semaphore,
}
impl ::std::default::Default for SubmitInfo {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            wait_semaphore_count: u32::default(),
            p_wait_semaphores: ::std::ptr::null(),
            p_wait_dst_stage_mask: ::std::ptr::null(),
            command_buffer_count: u32::default(),
            p_command_buffers: ::std::ptr::null(),
            signal_semaphore_count: u32::default(),
            p_signal_semaphores: ::std::ptr::null(),
        }
    }
}
unsafe impl TaggedStructure for SubmitInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::SUBMIT_INFO;
}
impl SubmitInfo {
    // Entry point of the fluent builder, starting from `Self::default()`.
    pub fn builder<'a>() -> SubmitInfoBuilder<'a> {
        SubmitInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper; `'a` ties borrowed slices stored through the builder
// to their source.
#[repr(transparent)]
pub struct SubmitInfoBuilder<'a> {
    inner: SubmitInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait for structs allowed in this struct's `p_next` chain.
pub unsafe trait ExtendsSubmitInfo {}
impl<'a> ::std::ops::Deref for SubmitInfoBuilder<'a> {
    type Target = SubmitInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for SubmitInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> SubmitInfoBuilder<'a> {
    #[inline]
    pub fn wait_semaphores(mut self, wait_semaphores: &'a [Semaphore]) -> Self {
        self.inner.wait_semaphore_count = wait_semaphores.len() as _;
        self.inner.p_wait_semaphores = wait_semaphores.as_ptr();
        self
    }
    #[inline]
    pub fn wait_dst_stage_mask(mut self, wait_dst_stage_mask: &'a [PipelineStageFlags]) -> Self {
        // NOTE(review): writing wait_semaphore_count here looks odd but
        // appears intentional — the stage-mask array shares its length with
        // p_wait_semaphores in the Vulkan API; confirm against the spec.
        self.inner.wait_semaphore_count = wait_dst_stage_mask.len() as _;
        self.inner.p_wait_dst_stage_mask = wait_dst_stage_mask.as_ptr();
        self
    }
    #[inline]
    pub fn command_buffers(mut self, command_buffers: &'a [CommandBuffer]) -> Self {
        self.inner.command_buffer_count = command_buffers.len() as _;
        self.inner.p_command_buffers = command_buffers.as_ptr();
        self
    }
    #[inline]
    pub fn signal_semaphores(mut self, signal_semaphores: &'a [Semaphore]) -> Self {
        self.inner.signal_semaphore_count = signal_semaphores.len() as _;
        self.inner.p_signal_semaphores = signal_semaphores.as_ptr();
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsSubmitInfo>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*const T>::cast(next);
            // Splice the existing chain onto the end of `next`'s own chain,
            // then install `next` as the new head; write order matters.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> SubmitInfo {
        self.inner
    }
}
// C-layout mirror of VkDisplayPropertiesKHR; field order is ABI and must not
// be changed. Typically an output struct filled in by the driver.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayPropertiesKHR.html>"]
pub struct DisplayPropertiesKHR {
    pub display: DisplayKHR,
    // NUL-terminated C string owned elsewhere; may be null.
    pub display_name: *const c_char,
    pub physical_dimensions: Extent2D,
    pub physical_resolution: Extent2D,
    pub supported_transforms: SurfaceTransformFlagsKHR,
    pub plane_reorder_possible: Bool32,
    pub persistent_content: Bool32,
}
impl ::std::default::Default for DisplayPropertiesKHR {
    #[inline]
    fn default() -> Self {
        Self {
            display: DisplayKHR::default(),
            display_name: ::std::ptr::null(),
            physical_dimensions: Extent2D::default(),
            physical_resolution: Extent2D::default(),
            supported_transforms: SurfaceTransformFlagsKHR::default(),
            plane_reorder_possible: Bool32::default(),
            persistent_content: Bool32::default(),
        }
    }
}
impl DisplayPropertiesKHR {
    // Entry point of the fluent builder, starting from `Self::default()`.
    pub fn builder<'a>() -> DisplayPropertiesKHRBuilder<'a> {
        DisplayPropertiesKHRBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper; `'a` ties the borrowed `display_name` pointer to its
// source CStr.
#[repr(transparent)]
pub struct DisplayPropertiesKHRBuilder<'a> {
    inner: DisplayPropertiesKHR,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DisplayPropertiesKHRBuilder<'a> {
    type Target = DisplayPropertiesKHR;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for DisplayPropertiesKHRBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> DisplayPropertiesKHRBuilder<'a> {
    #[inline]
    pub fn display(mut self, display: DisplayKHR) -> Self {
        self.inner.display = display;
        self
    }
    // Stores the CStr's raw pointer; it is only guaranteed valid for `'a`
    // (a lifetime that `build()` discards).
    #[inline]
    pub fn display_name(mut self, display_name: &'a ::std::ffi::CStr) -> Self {
        self.inner.display_name = display_name.as_ptr();
        self
    }
    #[inline]
    pub fn physical_dimensions(mut self, physical_dimensions: Extent2D) -> Self {
        self.inner.physical_dimensions = physical_dimensions;
        self
    }
    #[inline]
    pub fn physical_resolution(mut self, physical_resolution: Extent2D) -> Self {
        self.inner.physical_resolution = physical_resolution;
        self
    }
    #[inline]
    pub fn supported_transforms(mut self, supported_transforms: SurfaceTransformFlagsKHR) -> Self {
        self.inner.supported_transforms = supported_transforms;
        self
    }
    // `bool` is converted to the FFI Bool32 representation via `Into`.
    #[inline]
    pub fn plane_reorder_possible(mut self, plane_reorder_possible: bool) -> Self {
        self.inner.plane_reorder_possible = plane_reorder_possible.into();
        self
    }
    #[inline]
    pub fn persistent_content(mut self, persistent_content: bool) -> Self {
        self.inner.persistent_content = persistent_content.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> DisplayPropertiesKHR {
        self.inner
    }
}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayPlanePropertiesKHR.html>"]
+pub struct DisplayPlanePropertiesKHR {
+ pub current_display: DisplayKHR,
+ pub current_stack_index: u32,
+}
+impl DisplayPlanePropertiesKHR {
+ pub fn builder<'a>() -> DisplayPlanePropertiesKHRBuilder<'a> {
+ DisplayPlanePropertiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DisplayPlanePropertiesKHRBuilder<'a> {
+ inner: DisplayPlanePropertiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DisplayPlanePropertiesKHRBuilder<'a> {
+ type Target = DisplayPlanePropertiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DisplayPlanePropertiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DisplayPlanePropertiesKHRBuilder<'a> {
+ #[inline]
+ pub fn current_display(mut self, current_display: DisplayKHR) -> Self {
+ self.inner.current_display = current_display;
+ self
+ }
+ #[inline]
+ pub fn current_stack_index(mut self, current_stack_index: u32) -> Self {
+ self.inner.current_stack_index = current_stack_index;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DisplayPlanePropertiesKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayModeParametersKHR.html>"]
+pub struct DisplayModeParametersKHR {
+ pub visible_region: Extent2D,
+ pub refresh_rate: u32,
+}
+impl DisplayModeParametersKHR {
+ pub fn builder<'a>() -> DisplayModeParametersKHRBuilder<'a> {
+ DisplayModeParametersKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DisplayModeParametersKHRBuilder<'a> {
+ inner: DisplayModeParametersKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DisplayModeParametersKHRBuilder<'a> {
+ type Target = DisplayModeParametersKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DisplayModeParametersKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DisplayModeParametersKHRBuilder<'a> {
+ #[inline]
+ pub fn visible_region(mut self, visible_region: Extent2D) -> Self {
+ self.inner.visible_region = visible_region;
+ self
+ }
+ #[inline]
+ pub fn refresh_rate(mut self, refresh_rate: u32) -> Self {
+ self.inner.refresh_rate = refresh_rate;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DisplayModeParametersKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayModePropertiesKHR.html>"]
+pub struct DisplayModePropertiesKHR {
+ pub display_mode: DisplayModeKHR,
+ pub parameters: DisplayModeParametersKHR,
+}
+impl DisplayModePropertiesKHR {
+ pub fn builder<'a>() -> DisplayModePropertiesKHRBuilder<'a> {
+ DisplayModePropertiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DisplayModePropertiesKHRBuilder<'a> {
+ inner: DisplayModePropertiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DisplayModePropertiesKHRBuilder<'a> {
+ type Target = DisplayModePropertiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DisplayModePropertiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DisplayModePropertiesKHRBuilder<'a> {
+ #[inline]
+ pub fn display_mode(mut self, display_mode: DisplayModeKHR) -> Self {
+ self.inner.display_mode = display_mode;
+ self
+ }
+ #[inline]
+ pub fn parameters(mut self, parameters: DisplayModeParametersKHR) -> Self {
+ self.inner.parameters = parameters;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DisplayModePropertiesKHR {
+ self.inner
+ }
+}
// C-layout mirror of VkDisplayModeCreateInfoKHR; field order is ABI and must
// not be changed.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayModeCreateInfoKHR.html>"]
pub struct DisplayModeCreateInfoKHR {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: DisplayModeCreateFlagsKHR,
    pub parameters: DisplayModeParametersKHR,
}
impl ::std::default::Default for DisplayModeCreateInfoKHR {
    #[inline]
    fn default() -> Self {
        Self {
            // `s_type` comes from the TaggedStructure impl below; the chain
            // starts empty.
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: DisplayModeCreateFlagsKHR::default(),
            parameters: DisplayModeParametersKHR::default(),
        }
    }
}
unsafe impl TaggedStructure for DisplayModeCreateInfoKHR {
    const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_MODE_CREATE_INFO_KHR;
}
impl DisplayModeCreateInfoKHR {
    // Entry point of the fluent builder, starting from `Self::default()`.
    pub fn builder<'a>() -> DisplayModeCreateInfoKHRBuilder<'a> {
        DisplayModeCreateInfoKHRBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper over the raw struct. No extension structs are defined
// for this type, so the builder exposes no push_next method.
#[repr(transparent)]
pub struct DisplayModeCreateInfoKHRBuilder<'a> {
    inner: DisplayModeCreateInfoKHR,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DisplayModeCreateInfoKHRBuilder<'a> {
    type Target = DisplayModeCreateInfoKHR;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for DisplayModeCreateInfoKHRBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> DisplayModeCreateInfoKHRBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: DisplayModeCreateFlagsKHR) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn parameters(mut self, parameters: DisplayModeParametersKHR) -> Self {
        self.inner.parameters = parameters;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> DisplayModeCreateInfoKHR {
        self.inner
    }
}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayPlaneCapabilitiesKHR.html>"]
+pub struct DisplayPlaneCapabilitiesKHR {
+ pub supported_alpha: DisplayPlaneAlphaFlagsKHR,
+ pub min_src_position: Offset2D,
+ pub max_src_position: Offset2D,
+ pub min_src_extent: Extent2D,
+ pub max_src_extent: Extent2D,
+ pub min_dst_position: Offset2D,
+ pub max_dst_position: Offset2D,
+ pub min_dst_extent: Extent2D,
+ pub max_dst_extent: Extent2D,
+}
+// Builder entry point for the generated DisplayPlaneCapabilitiesKHR binding:
+// starts from Default and overwrites individual fields via chained setters.
+impl DisplayPlaneCapabilitiesKHR {
+ pub fn builder<'a>() -> DisplayPlaneCapabilitiesKHRBuilder<'a> {
+ DisplayPlaneCapabilitiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): the builder has the exact layout of the wrapped struct,
+// so (via Deref below) a reference to the builder can be handed to Vulkan
+// entry points directly — see the doc comment on `build`.
+#[repr(transparent)]
+pub struct DisplayPlaneCapabilitiesKHRBuilder<'a> {
+ inner: DisplayPlaneCapabilitiesKHR,
+ // Carries the 'a lifetime; no borrowed data is stored in this particular
+ // builder — presumably kept for uniformity with builders that do borrow.
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DisplayPlaneCapabilitiesKHRBuilder<'a> {
+ type Target = DisplayPlaneCapabilitiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DisplayPlaneCapabilitiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// One by-value setter per struct field; each consumes and returns the builder.
+impl<'a> DisplayPlaneCapabilitiesKHRBuilder<'a> {
+ #[inline]
+ pub fn supported_alpha(mut self, supported_alpha: DisplayPlaneAlphaFlagsKHR) -> Self {
+ self.inner.supported_alpha = supported_alpha;
+ self
+ }
+ #[inline]
+ pub fn min_src_position(mut self, min_src_position: Offset2D) -> Self {
+ self.inner.min_src_position = min_src_position;
+ self
+ }
+ #[inline]
+ pub fn max_src_position(mut self, max_src_position: Offset2D) -> Self {
+ self.inner.max_src_position = max_src_position;
+ self
+ }
+ #[inline]
+ pub fn min_src_extent(mut self, min_src_extent: Extent2D) -> Self {
+ self.inner.min_src_extent = min_src_extent;
+ self
+ }
+ #[inline]
+ pub fn max_src_extent(mut self, max_src_extent: Extent2D) -> Self {
+ self.inner.max_src_extent = max_src_extent;
+ self
+ }
+ #[inline]
+ pub fn min_dst_position(mut self, min_dst_position: Offset2D) -> Self {
+ self.inner.min_dst_position = min_dst_position;
+ self
+ }
+ #[inline]
+ pub fn max_dst_position(mut self, max_dst_position: Offset2D) -> Self {
+ self.inner.max_dst_position = max_dst_position;
+ self
+ }
+ #[inline]
+ pub fn min_dst_extent(mut self, min_dst_extent: Extent2D) -> Self {
+ self.inner.min_dst_extent = min_dst_extent;
+ self
+ }
+ #[inline]
+ pub fn max_dst_extent(mut self, max_dst_extent: Extent2D) -> Self {
+ self.inner.max_dst_extent = max_dst_extent;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DisplayPlaneCapabilitiesKHR {
+ self.inner
+ }
+}
+// C-ABI-compatible binding struct for VkDisplaySurfaceCreateInfoKHR; see the
+// Khronos man page linked in the #[doc] attribute below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplaySurfaceCreateInfoKHR.html>"]
+pub struct DisplaySurfaceCreateInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: DisplaySurfaceCreateFlagsKHR,
+ pub display_mode: DisplayModeKHR,
+ pub plane_index: u32,
+ pub plane_stack_index: u32,
+ pub transform: SurfaceTransformFlagsKHR,
+ pub global_alpha: f32,
+ pub alpha_mode: DisplayPlaneAlphaFlagsKHR,
+ pub image_extent: Extent2D,
+}
+// Default pre-fills s_type with the tag from the TaggedStructure impl below
+// and nulls p_next, so a default value is already correctly tagged.
+impl ::std::default::Default for DisplaySurfaceCreateInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: DisplaySurfaceCreateFlagsKHR::default(),
+ display_mode: DisplayModeKHR::default(),
+ plane_index: u32::default(),
+ plane_stack_index: u32::default(),
+ transform: SurfaceTransformFlagsKHR::default(),
+ global_alpha: f32::default(),
+ alpha_mode: DisplayPlaneAlphaFlagsKHR::default(),
+ image_extent: Extent2D::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DisplaySurfaceCreateInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_SURFACE_CREATE_INFO_KHR;
+}
+impl DisplaySurfaceCreateInfoKHR {
+ pub fn builder<'a>() -> DisplaySurfaceCreateInfoKHRBuilder<'a> {
+ DisplaySurfaceCreateInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Layout-transparent builder; &Builder can stand in for &struct via Deref.
+#[repr(transparent)]
+pub struct DisplaySurfaceCreateInfoKHRBuilder<'a> {
+ inner: DisplaySurfaceCreateInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DisplaySurfaceCreateInfoKHRBuilder<'a> {
+ type Target = DisplaySurfaceCreateInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DisplaySurfaceCreateInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DisplaySurfaceCreateInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: DisplaySurfaceCreateFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn display_mode(mut self, display_mode: DisplayModeKHR) -> Self {
+ self.inner.display_mode = display_mode;
+ self
+ }
+ #[inline]
+ pub fn plane_index(mut self, plane_index: u32) -> Self {
+ self.inner.plane_index = plane_index;
+ self
+ }
+ #[inline]
+ pub fn plane_stack_index(mut self, plane_stack_index: u32) -> Self {
+ self.inner.plane_stack_index = plane_stack_index;
+ self
+ }
+ #[inline]
+ pub fn transform(mut self, transform: SurfaceTransformFlagsKHR) -> Self {
+ self.inner.transform = transform;
+ self
+ }
+ #[inline]
+ pub fn global_alpha(mut self, global_alpha: f32) -> Self {
+ self.inner.global_alpha = global_alpha;
+ self
+ }
+ #[inline]
+ pub fn alpha_mode(mut self, alpha_mode: DisplayPlaneAlphaFlagsKHR) -> Self {
+ self.inner.alpha_mode = alpha_mode;
+ self
+ }
+ #[inline]
+ pub fn image_extent(mut self, image_extent: Extent2D) -> Self {
+ self.inner.image_extent = image_extent;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DisplaySurfaceCreateInfoKHR {
+ self.inner
+ }
+}
+// C-ABI-compatible binding struct for VkDisplayPresentInfoKHR; see the
+// Khronos man page linked in the #[doc] attribute below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayPresentInfoKHR.html>"]
+pub struct DisplayPresentInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src_rect: Rect2D,
+ pub dst_rect: Rect2D,
+ pub persistent: Bool32,
+}
+// Default pre-fills s_type from the TaggedStructure impl and nulls p_next.
+impl ::std::default::Default for DisplayPresentInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src_rect: Rect2D::default(),
+ dst_rect: Rect2D::default(),
+ persistent: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DisplayPresentInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_PRESENT_INFO_KHR;
+}
+impl DisplayPresentInfoKHR {
+ pub fn builder<'a>() -> DisplayPresentInfoKHRBuilder<'a> {
+ DisplayPresentInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Layout-transparent builder; &Builder can stand in for &struct via Deref.
+#[repr(transparent)]
+pub struct DisplayPresentInfoKHRBuilder<'a> {
+ inner: DisplayPresentInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls declaring that this struct (and its builder) may extend
+// PresentInfoKHR, as the trait name indicates — presumably consumed by the
+// p_next-chaining machinery elsewhere in this file; confirm against push_next.
+unsafe impl ExtendsPresentInfoKHR for DisplayPresentInfoKHRBuilder<'_> {}
+unsafe impl ExtendsPresentInfoKHR for DisplayPresentInfoKHR {}
+impl<'a> ::std::ops::Deref for DisplayPresentInfoKHRBuilder<'a> {
+ type Target = DisplayPresentInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DisplayPresentInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DisplayPresentInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn src_rect(mut self, src_rect: Rect2D) -> Self {
+ self.inner.src_rect = src_rect;
+ self
+ }
+ #[inline]
+ pub fn dst_rect(mut self, dst_rect: Rect2D) -> Self {
+ self.inner.dst_rect = dst_rect;
+ self
+ }
+ // Rust bool is converted into the FFI Bool32 field here.
+ #[inline]
+ pub fn persistent(mut self, persistent: bool) -> Self {
+ self.inner.persistent = persistent.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DisplayPresentInfoKHR {
+ self.inner
+ }
+}
+// C-ABI-compatible binding struct for VkSurfaceCapabilitiesKHR; see the
+// Khronos man page linked in the #[doc] attribute below. No s_type/p_next —
+// this struct derives Default directly.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfaceCapabilitiesKHR.html>"]
+pub struct SurfaceCapabilitiesKHR {
+ pub min_image_count: u32,
+ pub max_image_count: u32,
+ pub current_extent: Extent2D,
+ pub min_image_extent: Extent2D,
+ pub max_image_extent: Extent2D,
+ pub max_image_array_layers: u32,
+ pub supported_transforms: SurfaceTransformFlagsKHR,
+ pub current_transform: SurfaceTransformFlagsKHR,
+ pub supported_composite_alpha: CompositeAlphaFlagsKHR,
+ pub supported_usage_flags: ImageUsageFlags,
+}
+impl SurfaceCapabilitiesKHR {
+ pub fn builder<'a>() -> SurfaceCapabilitiesKHRBuilder<'a> {
+ SurfaceCapabilitiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Layout-transparent builder; &Builder can stand in for &struct via Deref.
+#[repr(transparent)]
+pub struct SurfaceCapabilitiesKHRBuilder<'a> {
+ inner: SurfaceCapabilitiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SurfaceCapabilitiesKHRBuilder<'a> {
+ type Target = SurfaceCapabilitiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SurfaceCapabilitiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// One by-value setter per struct field; each consumes and returns the builder.
+impl<'a> SurfaceCapabilitiesKHRBuilder<'a> {
+ #[inline]
+ pub fn min_image_count(mut self, min_image_count: u32) -> Self {
+ self.inner.min_image_count = min_image_count;
+ self
+ }
+ #[inline]
+ pub fn max_image_count(mut self, max_image_count: u32) -> Self {
+ self.inner.max_image_count = max_image_count;
+ self
+ }
+ #[inline]
+ pub fn current_extent(mut self, current_extent: Extent2D) -> Self {
+ self.inner.current_extent = current_extent;
+ self
+ }
+ #[inline]
+ pub fn min_image_extent(mut self, min_image_extent: Extent2D) -> Self {
+ self.inner.min_image_extent = min_image_extent;
+ self
+ }
+ #[inline]
+ pub fn max_image_extent(mut self, max_image_extent: Extent2D) -> Self {
+ self.inner.max_image_extent = max_image_extent;
+ self
+ }
+ #[inline]
+ pub fn max_image_array_layers(mut self, max_image_array_layers: u32) -> Self {
+ self.inner.max_image_array_layers = max_image_array_layers;
+ self
+ }
+ #[inline]
+ pub fn supported_transforms(mut self, supported_transforms: SurfaceTransformFlagsKHR) -> Self {
+ self.inner.supported_transforms = supported_transforms;
+ self
+ }
+ #[inline]
+ pub fn current_transform(mut self, current_transform: SurfaceTransformFlagsKHR) -> Self {
+ self.inner.current_transform = current_transform;
+ self
+ }
+ #[inline]
+ pub fn supported_composite_alpha(
+ mut self,
+ supported_composite_alpha: CompositeAlphaFlagsKHR,
+ ) -> Self {
+ self.inner.supported_composite_alpha = supported_composite_alpha;
+ self
+ }
+ #[inline]
+ pub fn supported_usage_flags(mut self, supported_usage_flags: ImageUsageFlags) -> Self {
+ self.inner.supported_usage_flags = supported_usage_flags;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SurfaceCapabilitiesKHR {
+ self.inner
+ }
+}
+// C-ABI-compatible binding struct for VkAndroidSurfaceCreateInfoKHR; see the
+// Khronos man page linked in the #[doc] attribute below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAndroidSurfaceCreateInfoKHR.html>"]
+pub struct AndroidSurfaceCreateInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: AndroidSurfaceCreateFlagsKHR,
+ pub window: *mut ANativeWindow,
+}
+// Default pre-fills s_type from the TaggedStructure impl and nulls pointers.
+impl ::std::default::Default for AndroidSurfaceCreateInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: AndroidSurfaceCreateFlagsKHR::default(),
+ window: ::std::ptr::null_mut(),
+ }
+ }
+}
+unsafe impl TaggedStructure for AndroidSurfaceCreateInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::ANDROID_SURFACE_CREATE_INFO_KHR;
+}
+impl AndroidSurfaceCreateInfoKHR {
+ pub fn builder<'a>() -> AndroidSurfaceCreateInfoKHRBuilder<'a> {
+ AndroidSurfaceCreateInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Layout-transparent builder; &Builder can stand in for &struct via Deref.
+#[repr(transparent)]
+pub struct AndroidSurfaceCreateInfoKHRBuilder<'a> {
+ inner: AndroidSurfaceCreateInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AndroidSurfaceCreateInfoKHRBuilder<'a> {
+ type Target = AndroidSurfaceCreateInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for AndroidSurfaceCreateInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> AndroidSurfaceCreateInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: AndroidSurfaceCreateFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ // Raw pointer is stored as-is; the caller must keep the window alive.
+ #[inline]
+ pub fn window(mut self, window: *mut ANativeWindow) -> Self {
+ self.inner.window = window;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> AndroidSurfaceCreateInfoKHR {
+ self.inner
+ }
+}
+// C-ABI-compatible binding struct for VkViSurfaceCreateInfoNN; see the
+// Khronos man page linked in the #[doc] attribute below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkViSurfaceCreateInfoNN.html>"]
+pub struct ViSurfaceCreateInfoNN {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: ViSurfaceCreateFlagsNN,
+ pub window: *mut c_void,
+}
+// Default pre-fills s_type from the TaggedStructure impl and nulls pointers.
+impl ::std::default::Default for ViSurfaceCreateInfoNN {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: ViSurfaceCreateFlagsNN::default(),
+ window: ::std::ptr::null_mut(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ViSurfaceCreateInfoNN {
+ const STRUCTURE_TYPE: StructureType = StructureType::VI_SURFACE_CREATE_INFO_NN;
+}
+impl ViSurfaceCreateInfoNN {
+ pub fn builder<'a>() -> ViSurfaceCreateInfoNNBuilder<'a> {
+ ViSurfaceCreateInfoNNBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Layout-transparent builder; &Builder can stand in for &struct via Deref.
+#[repr(transparent)]
+pub struct ViSurfaceCreateInfoNNBuilder<'a> {
+ inner: ViSurfaceCreateInfoNN,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ViSurfaceCreateInfoNNBuilder<'a> {
+ type Target = ViSurfaceCreateInfoNN;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ViSurfaceCreateInfoNNBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ViSurfaceCreateInfoNNBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: ViSurfaceCreateFlagsNN) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ // Untyped platform window handle; stored as-is.
+ #[inline]
+ pub fn window(mut self, window: *mut c_void) -> Self {
+ self.inner.window = window;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ViSurfaceCreateInfoNN {
+ self.inner
+ }
+}
+// C-ABI-compatible binding struct for VkWaylandSurfaceCreateInfoKHR; see the
+// Khronos man page linked in the #[doc] attribute below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkWaylandSurfaceCreateInfoKHR.html>"]
+pub struct WaylandSurfaceCreateInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: WaylandSurfaceCreateFlagsKHR,
+ pub display: *mut wl_display,
+ pub surface: *mut wl_surface,
+}
+// Default pre-fills s_type from the TaggedStructure impl and nulls pointers.
+impl ::std::default::Default for WaylandSurfaceCreateInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: WaylandSurfaceCreateFlagsKHR::default(),
+ display: ::std::ptr::null_mut(),
+ surface: ::std::ptr::null_mut(),
+ }
+ }
+}
+unsafe impl TaggedStructure for WaylandSurfaceCreateInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::WAYLAND_SURFACE_CREATE_INFO_KHR;
+}
+impl WaylandSurfaceCreateInfoKHR {
+ pub fn builder<'a>() -> WaylandSurfaceCreateInfoKHRBuilder<'a> {
+ WaylandSurfaceCreateInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Layout-transparent builder; &Builder can stand in for &struct via Deref.
+#[repr(transparent)]
+pub struct WaylandSurfaceCreateInfoKHRBuilder<'a> {
+ inner: WaylandSurfaceCreateInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for WaylandSurfaceCreateInfoKHRBuilder<'a> {
+ type Target = WaylandSurfaceCreateInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for WaylandSurfaceCreateInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> WaylandSurfaceCreateInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: WaylandSurfaceCreateFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ // Raw compositor handles stored as-is; the caller keeps them alive.
+ #[inline]
+ pub fn display(mut self, display: *mut wl_display) -> Self {
+ self.inner.display = display;
+ self
+ }
+ #[inline]
+ pub fn surface(mut self, surface: *mut wl_surface) -> Self {
+ self.inner.surface = surface;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> WaylandSurfaceCreateInfoKHR {
+ self.inner
+ }
+}
+// C-ABI-compatible binding struct for VkWin32SurfaceCreateInfoKHR; see the
+// Khronos man page linked in the #[doc] attribute below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkWin32SurfaceCreateInfoKHR.html>"]
+pub struct Win32SurfaceCreateInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: Win32SurfaceCreateFlagsKHR,
+ pub hinstance: HINSTANCE,
+ pub hwnd: HWND,
+}
+impl ::std::default::Default for Win32SurfaceCreateInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: Win32SurfaceCreateFlagsKHR::default(),
+ // SAFETY note (review): the Win32 handles default to an all-zero bit
+ // pattern as a null-handle stand-in — confirm HINSTANCE/HWND as
+ // declared in this crate permit a zeroed value.
+ hinstance: unsafe { ::std::mem::zeroed() },
+ hwnd: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for Win32SurfaceCreateInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::WIN32_SURFACE_CREATE_INFO_KHR;
+}
+impl Win32SurfaceCreateInfoKHR {
+ pub fn builder<'a>() -> Win32SurfaceCreateInfoKHRBuilder<'a> {
+ Win32SurfaceCreateInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Layout-transparent builder; &Builder can stand in for &struct via Deref.
+#[repr(transparent)]
+pub struct Win32SurfaceCreateInfoKHRBuilder<'a> {
+ inner: Win32SurfaceCreateInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for Win32SurfaceCreateInfoKHRBuilder<'a> {
+ type Target = Win32SurfaceCreateInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for Win32SurfaceCreateInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> Win32SurfaceCreateInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: Win32SurfaceCreateFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn hinstance(mut self, hinstance: HINSTANCE) -> Self {
+ self.inner.hinstance = hinstance;
+ self
+ }
+ #[inline]
+ pub fn hwnd(mut self, hwnd: HWND) -> Self {
+ self.inner.hwnd = hwnd;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> Win32SurfaceCreateInfoKHR {
+ self.inner
+ }
+}
+// C-ABI-compatible binding struct for VkXlibSurfaceCreateInfoKHR; see the
+// Khronos man page linked in the #[doc] attribute below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkXlibSurfaceCreateInfoKHR.html>"]
+pub struct XlibSurfaceCreateInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: XlibSurfaceCreateFlagsKHR,
+ pub dpy: *mut Display,
+ pub window: Window,
+}
+// Default pre-fills s_type from the TaggedStructure impl and nulls pointers.
+impl ::std::default::Default for XlibSurfaceCreateInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: XlibSurfaceCreateFlagsKHR::default(),
+ dpy: ::std::ptr::null_mut(),
+ window: Window::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for XlibSurfaceCreateInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::XLIB_SURFACE_CREATE_INFO_KHR;
+}
+impl XlibSurfaceCreateInfoKHR {
+ pub fn builder<'a>() -> XlibSurfaceCreateInfoKHRBuilder<'a> {
+ XlibSurfaceCreateInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Layout-transparent builder; &Builder can stand in for &struct via Deref.
+#[repr(transparent)]
+pub struct XlibSurfaceCreateInfoKHRBuilder<'a> {
+ inner: XlibSurfaceCreateInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for XlibSurfaceCreateInfoKHRBuilder<'a> {
+ type Target = XlibSurfaceCreateInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for XlibSurfaceCreateInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> XlibSurfaceCreateInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: XlibSurfaceCreateFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn dpy(mut self, dpy: *mut Display) -> Self {
+ self.inner.dpy = dpy;
+ self
+ }
+ #[inline]
+ pub fn window(mut self, window: Window) -> Self {
+ self.inner.window = window;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> XlibSurfaceCreateInfoKHR {
+ self.inner
+ }
+}
+// C-ABI-compatible binding struct for VkXcbSurfaceCreateInfoKHR; see the
+// Khronos man page linked in the #[doc] attribute below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkXcbSurfaceCreateInfoKHR.html>"]
+pub struct XcbSurfaceCreateInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: XcbSurfaceCreateFlagsKHR,
+ pub connection: *mut xcb_connection_t,
+ pub window: xcb_window_t,
+}
+// Default pre-fills s_type from the TaggedStructure impl and nulls pointers.
+impl ::std::default::Default for XcbSurfaceCreateInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: XcbSurfaceCreateFlagsKHR::default(),
+ connection: ::std::ptr::null_mut(),
+ window: xcb_window_t::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for XcbSurfaceCreateInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::XCB_SURFACE_CREATE_INFO_KHR;
+}
+impl XcbSurfaceCreateInfoKHR {
+ pub fn builder<'a>() -> XcbSurfaceCreateInfoKHRBuilder<'a> {
+ XcbSurfaceCreateInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Layout-transparent builder; &Builder can stand in for &struct via Deref.
+#[repr(transparent)]
+pub struct XcbSurfaceCreateInfoKHRBuilder<'a> {
+ inner: XcbSurfaceCreateInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for XcbSurfaceCreateInfoKHRBuilder<'a> {
+ type Target = XcbSurfaceCreateInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for XcbSurfaceCreateInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> XcbSurfaceCreateInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: XcbSurfaceCreateFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn connection(mut self, connection: *mut xcb_connection_t) -> Self {
+ self.inner.connection = connection;
+ self
+ }
+ #[inline]
+ pub fn window(mut self, window: xcb_window_t) -> Self {
+ self.inner.window = window;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> XcbSurfaceCreateInfoKHR {
+ self.inner
+ }
+}
+// C-ABI-compatible binding struct for VkDirectFBSurfaceCreateInfoEXT; see the
+// Khronos man page linked in the #[doc] attribute below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDirectFBSurfaceCreateInfoEXT.html>"]
+pub struct DirectFBSurfaceCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: DirectFBSurfaceCreateFlagsEXT,
+ pub dfb: *mut IDirectFB,
+ pub surface: *mut IDirectFBSurface,
+}
+// Default pre-fills s_type from the TaggedStructure impl and nulls pointers.
+impl ::std::default::Default for DirectFBSurfaceCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: DirectFBSurfaceCreateFlagsEXT::default(),
+ dfb: ::std::ptr::null_mut(),
+ surface: ::std::ptr::null_mut(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DirectFBSurfaceCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::DIRECTFB_SURFACE_CREATE_INFO_EXT;
+}
+impl DirectFBSurfaceCreateInfoEXT {
+ pub fn builder<'a>() -> DirectFBSurfaceCreateInfoEXTBuilder<'a> {
+ DirectFBSurfaceCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Layout-transparent builder; &Builder can stand in for &struct via Deref.
+#[repr(transparent)]
+pub struct DirectFBSurfaceCreateInfoEXTBuilder<'a> {
+ inner: DirectFBSurfaceCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DirectFBSurfaceCreateInfoEXTBuilder<'a> {
+ type Target = DirectFBSurfaceCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DirectFBSurfaceCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DirectFBSurfaceCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: DirectFBSurfaceCreateFlagsEXT) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn dfb(mut self, dfb: *mut IDirectFB) -> Self {
+ self.inner.dfb = dfb;
+ self
+ }
+ #[inline]
+ pub fn surface(mut self, surface: *mut IDirectFBSurface) -> Self {
+ self.inner.surface = surface;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DirectFBSurfaceCreateInfoEXT {
+ self.inner
+ }
+}
+// C-ABI-compatible binding struct for VkImagePipeSurfaceCreateInfoFUCHSIA; see
+// the Khronos man page linked in the #[doc] attribute below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImagePipeSurfaceCreateInfoFUCHSIA.html>"]
+pub struct ImagePipeSurfaceCreateInfoFUCHSIA {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: ImagePipeSurfaceCreateFlagsFUCHSIA,
+ pub image_pipe_handle: zx_handle_t,
+}
+// Default pre-fills s_type from the TaggedStructure impl and nulls p_next.
+impl ::std::default::Default for ImagePipeSurfaceCreateInfoFUCHSIA {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: ImagePipeSurfaceCreateFlagsFUCHSIA::default(),
+ image_pipe_handle: zx_handle_t::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImagePipeSurfaceCreateInfoFUCHSIA {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA;
+}
+impl ImagePipeSurfaceCreateInfoFUCHSIA {
+ pub fn builder<'a>() -> ImagePipeSurfaceCreateInfoFUCHSIABuilder<'a> {
+ ImagePipeSurfaceCreateInfoFUCHSIABuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Layout-transparent builder; &Builder can stand in for &struct via Deref.
+#[repr(transparent)]
+pub struct ImagePipeSurfaceCreateInfoFUCHSIABuilder<'a> {
+ inner: ImagePipeSurfaceCreateInfoFUCHSIA,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImagePipeSurfaceCreateInfoFUCHSIABuilder<'a> {
+ type Target = ImagePipeSurfaceCreateInfoFUCHSIA;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImagePipeSurfaceCreateInfoFUCHSIABuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImagePipeSurfaceCreateInfoFUCHSIABuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: ImagePipeSurfaceCreateFlagsFUCHSIA) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn image_pipe_handle(mut self, image_pipe_handle: zx_handle_t) -> Self {
+ self.inner.image_pipe_handle = image_pipe_handle;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImagePipeSurfaceCreateInfoFUCHSIA {
+ self.inner
+ }
+}
+// C-ABI-compatible binding struct for VkStreamDescriptorSurfaceCreateInfoGGP;
+// see the Khronos man page linked in the #[doc] attribute below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkStreamDescriptorSurfaceCreateInfoGGP.html>"]
+pub struct StreamDescriptorSurfaceCreateInfoGGP {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: StreamDescriptorSurfaceCreateFlagsGGP,
+ pub stream_descriptor: GgpStreamDescriptor,
+}
+// Default pre-fills s_type from the TaggedStructure impl and nulls p_next.
+impl ::std::default::Default for StreamDescriptorSurfaceCreateInfoGGP {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: StreamDescriptorSurfaceCreateFlagsGGP::default(),
+ stream_descriptor: GgpStreamDescriptor::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for StreamDescriptorSurfaceCreateInfoGGP {
+ const STRUCTURE_TYPE: StructureType = StructureType::STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP;
+}
+impl StreamDescriptorSurfaceCreateInfoGGP {
+ pub fn builder<'a>() -> StreamDescriptorSurfaceCreateInfoGGPBuilder<'a> {
+ StreamDescriptorSurfaceCreateInfoGGPBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Layout-transparent builder; &Builder can stand in for &struct via Deref.
+#[repr(transparent)]
+pub struct StreamDescriptorSurfaceCreateInfoGGPBuilder<'a> {
+ inner: StreamDescriptorSurfaceCreateInfoGGP,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for StreamDescriptorSurfaceCreateInfoGGPBuilder<'a> {
+ type Target = StreamDescriptorSurfaceCreateInfoGGP;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for StreamDescriptorSurfaceCreateInfoGGPBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> StreamDescriptorSurfaceCreateInfoGGPBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: StreamDescriptorSurfaceCreateFlagsGGP) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn stream_descriptor(mut self, stream_descriptor: GgpStreamDescriptor) -> Self {
+ self.inner.stream_descriptor = stream_descriptor;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> StreamDescriptorSurfaceCreateInfoGGP {
+ self.inner
+ }
+}
+// C-ABI-compatible binding struct for VkScreenSurfaceCreateInfoQNX; see the
+// Khronos man page linked in the #[doc] attribute below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkScreenSurfaceCreateInfoQNX.html>"]
+pub struct ScreenSurfaceCreateInfoQNX {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: ScreenSurfaceCreateFlagsQNX,
+ pub context: *mut _screen_context,
+ pub window: *mut _screen_window,
+}
+// Default pre-fills s_type from the TaggedStructure impl and nulls pointers.
+impl ::std::default::Default for ScreenSurfaceCreateInfoQNX {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: ScreenSurfaceCreateFlagsQNX::default(),
+ context: ::std::ptr::null_mut(),
+ window: ::std::ptr::null_mut(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ScreenSurfaceCreateInfoQNX {
+ const STRUCTURE_TYPE: StructureType = StructureType::SCREEN_SURFACE_CREATE_INFO_QNX;
+}
+impl ScreenSurfaceCreateInfoQNX {
+ pub fn builder<'a>() -> ScreenSurfaceCreateInfoQNXBuilder<'a> {
+ ScreenSurfaceCreateInfoQNXBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Layout-transparent builder; &Builder can stand in for &struct via Deref.
+#[repr(transparent)]
+pub struct ScreenSurfaceCreateInfoQNXBuilder<'a> {
+ inner: ScreenSurfaceCreateInfoQNX,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ScreenSurfaceCreateInfoQNXBuilder<'a> {
+ type Target = ScreenSurfaceCreateInfoQNX;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ScreenSurfaceCreateInfoQNXBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ScreenSurfaceCreateInfoQNXBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: ScreenSurfaceCreateFlagsQNX) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ // Unlike the other platform builders above, these setters take &'a mut and
+ // let it coerce to the raw pointer field, so the builder's 'a lifetime ties
+ // the borrow's validity to the builder (discarded only by `build`).
+ #[inline]
+ pub fn context(mut self, context: &'a mut _screen_context) -> Self {
+ self.inner.context = context;
+ self
+ }
+ #[inline]
+ pub fn window(mut self, window: &'a mut _screen_window) -> Self {
+ self.inner.window = window;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ScreenSurfaceCreateInfoQNX {
+ self.inner
+ }
+}
+// Generated binding for VkSurfaceFormatKHR: a pixel format / color space pair
+// reported by surface capability queries. Plain-old-data — no s_type/p_next,
+// so it can derive Default/PartialEq/Eq/Hash directly.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default, PartialEq, Eq, Hash)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfaceFormatKHR.html>"]
+pub struct SurfaceFormatKHR {
+ pub format: Format,
+ pub color_space: ColorSpaceKHR,
+}
+impl SurfaceFormatKHR {
+ pub fn builder<'a>() -> SurfaceFormatKHRBuilder<'a> {
+ SurfaceFormatKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Builder exists for API uniformity; no borrowed data here, so 'a is unused.
+#[repr(transparent)]
+pub struct SurfaceFormatKHRBuilder<'a> {
+ inner: SurfaceFormatKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SurfaceFormatKHRBuilder<'a> {
+ type Target = SurfaceFormatKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SurfaceFormatKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SurfaceFormatKHRBuilder<'a> {
+ #[inline]
+ pub fn format(mut self, format: Format) -> Self {
+ self.inner.format = format;
+ self
+ }
+ #[inline]
+ pub fn color_space(mut self, color_space: ColorSpaceKHR) -> Self {
+ self.inner.color_space = color_space;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SurfaceFormatKHR {
+ self.inner
+ }
+}
+// Generated binding for VkSwapchainCreateInfoKHR: everything needed to create a
+// swapchain for a surface. repr(C) — layout must match the C struct exactly.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSwapchainCreateInfoKHR.html>"]
+pub struct SwapchainCreateInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: SwapchainCreateFlagsKHR,
+ pub surface: SurfaceKHR,
+ pub min_image_count: u32,
+ pub image_format: Format,
+ pub image_color_space: ColorSpaceKHR,
+ pub image_extent: Extent2D,
+ pub image_array_layers: u32,
+ pub image_usage: ImageUsageFlags,
+ pub image_sharing_mode: SharingMode,
+ pub queue_family_index_count: u32,
+ pub p_queue_family_indices: *const u32,
+ pub pre_transform: SurfaceTransformFlagsKHR,
+ pub composite_alpha: CompositeAlphaFlagsKHR,
+ pub present_mode: PresentModeKHR,
+ pub clipped: Bool32,
+ pub old_swapchain: SwapchainKHR,
+}
+// Default presets s_type to this struct's tag, nulls the pointer/count pair,
+// and zero-initializes handles (old_swapchain defaults to the null handle).
+impl ::std::default::Default for SwapchainCreateInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: SwapchainCreateFlagsKHR::default(),
+ surface: SurfaceKHR::default(),
+ min_image_count: u32::default(),
+ image_format: Format::default(),
+ image_color_space: ColorSpaceKHR::default(),
+ image_extent: Extent2D::default(),
+ image_array_layers: u32::default(),
+ image_usage: ImageUsageFlags::default(),
+ image_sharing_mode: SharingMode::default(),
+ queue_family_index_count: u32::default(),
+ p_queue_family_indices: ::std::ptr::null(),
+ pre_transform: SurfaceTransformFlagsKHR::default(),
+ composite_alpha: CompositeAlphaFlagsKHR::default(),
+ present_mode: PresentModeKHR::default(),
+ clipped: Bool32::default(),
+ old_swapchain: SwapchainKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SwapchainCreateInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_CREATE_INFO_KHR;
+}
+impl SwapchainCreateInfoKHR {
+ pub fn builder<'a>() -> SwapchainCreateInfoKHRBuilder<'a> {
+ SwapchainCreateInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Builder wrapper; PhantomData<&'a ()> ties borrowed slices and pushed p_next
+// extension structs to 'a, until build() erases the lifetime.
+#[repr(transparent)]
+pub struct SwapchainCreateInfoKHRBuilder<'a> {
+ inner: SwapchainCreateInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait: only structs implementing this may be pushed onto this
+// struct's p_next chain via push_next.
+pub unsafe trait ExtendsSwapchainCreateInfoKHR {}
+impl<'a> ::std::ops::Deref for SwapchainCreateInfoKHRBuilder<'a> {
+ type Target = SwapchainCreateInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SwapchainCreateInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SwapchainCreateInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: SwapchainCreateFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn surface(mut self, surface: SurfaceKHR) -> Self {
+ self.inner.surface = surface;
+ self
+ }
+ #[inline]
+ pub fn min_image_count(mut self, min_image_count: u32) -> Self {
+ self.inner.min_image_count = min_image_count;
+ self
+ }
+ #[inline]
+ pub fn image_format(mut self, image_format: Format) -> Self {
+ self.inner.image_format = image_format;
+ self
+ }
+ #[inline]
+ pub fn image_color_space(mut self, image_color_space: ColorSpaceKHR) -> Self {
+ self.inner.image_color_space = image_color_space;
+ self
+ }
+ #[inline]
+ pub fn image_extent(mut self, image_extent: Extent2D) -> Self {
+ self.inner.image_extent = image_extent;
+ self
+ }
+ #[inline]
+ pub fn image_array_layers(mut self, image_array_layers: u32) -> Self {
+ self.inner.image_array_layers = image_array_layers;
+ self
+ }
+ #[inline]
+ pub fn image_usage(mut self, image_usage: ImageUsageFlags) -> Self {
+ self.inner.image_usage = image_usage;
+ self
+ }
+ #[inline]
+ pub fn image_sharing_mode(mut self, image_sharing_mode: SharingMode) -> Self {
+ self.inner.image_sharing_mode = image_sharing_mode;
+ self
+ }
+ // Slice setter: writes the count and pointer fields together so they can
+ // never disagree.
+ #[inline]
+ pub fn queue_family_indices(mut self, queue_family_indices: &'a [u32]) -> Self {
+ self.inner.queue_family_index_count = queue_family_indices.len() as _;
+ self.inner.p_queue_family_indices = queue_family_indices.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn pre_transform(mut self, pre_transform: SurfaceTransformFlagsKHR) -> Self {
+ self.inner.pre_transform = pre_transform;
+ self
+ }
+ #[inline]
+ pub fn composite_alpha(mut self, composite_alpha: CompositeAlphaFlagsKHR) -> Self {
+ self.inner.composite_alpha = composite_alpha;
+ self
+ }
+ #[inline]
+ pub fn present_mode(mut self, present_mode: PresentModeKHR) -> Self {
+ self.inner.present_mode = present_mode;
+ self
+ }
+ // Takes a Rust bool and converts to the FFI Bool32 representation.
+ #[inline]
+ pub fn clipped(mut self, clipped: bool) -> Self {
+ self.inner.clipped = clipped.into();
+ self
+ }
+ #[inline]
+ pub fn old_swapchain(mut self, old_swapchain: SwapchainKHR) -> Self {
+ self.inner.old_swapchain = old_swapchain;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsSwapchainCreateInfoKHR>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ // Splice `next`'s own sub-chain in: its last link points at the old
+ // head, then `next` becomes the new head of the p_next chain.
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SwapchainCreateInfoKHR {
+ self.inner
+ }
+}
+// Generated binding for VkPresentInfoKHR: parameters for queue presentation.
+// Note the single swapchain_count field is the shared length of p_swapchains,
+// p_image_indices and p_results.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPresentInfoKHR.html>"]
+pub struct PresentInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub wait_semaphore_count: u32,
+ pub p_wait_semaphores: *const Semaphore,
+ pub swapchain_count: u32,
+ pub p_swapchains: *const SwapchainKHR,
+ pub p_image_indices: *const u32,
+ pub p_results: *mut Result,
+}
+impl ::std::default::Default for PresentInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ wait_semaphore_count: u32::default(),
+ p_wait_semaphores: ::std::ptr::null(),
+ swapchain_count: u32::default(),
+ p_swapchains: ::std::ptr::null(),
+ p_image_indices: ::std::ptr::null(),
+ p_results: ::std::ptr::null_mut(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PresentInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::PRESENT_INFO_KHR;
+}
+impl PresentInfoKHR {
+ pub fn builder<'a>() -> PresentInfoKHRBuilder<'a> {
+ PresentInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PresentInfoKHRBuilder<'a> {
+ inner: PresentInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsPresentInfoKHR {}
+impl<'a> ::std::ops::Deref for PresentInfoKHRBuilder<'a> {
+ type Target = PresentInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PresentInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PresentInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn wait_semaphores(mut self, wait_semaphores: &'a [Semaphore]) -> Self {
+ self.inner.wait_semaphore_count = wait_semaphores.len() as _;
+ self.inner.p_wait_semaphores = wait_semaphores.as_ptr();
+ self
+ }
+ // The next three setters all write swapchain_count: it is the one length
+ // field shared by the swapchains, image-indices and results arrays, so the
+ // last setter called wins (callers must pass equal-length slices).
+ #[inline]
+ pub fn swapchains(mut self, swapchains: &'a [SwapchainKHR]) -> Self {
+ self.inner.swapchain_count = swapchains.len() as _;
+ self.inner.p_swapchains = swapchains.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn image_indices(mut self, image_indices: &'a [u32]) -> Self {
+ self.inner.swapchain_count = image_indices.len() as _;
+ self.inner.p_image_indices = image_indices.as_ptr();
+ self
+ }
+ // Output slice: the driver writes one Result per swapchain into it.
+ #[inline]
+ pub fn results(mut self, results: &'a mut [Result]) -> Self {
+ self.inner.swapchain_count = results.len() as _;
+ self.inner.p_results = results.as_mut_ptr();
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsPresentInfoKHR>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PresentInfoKHR {
+ self.inner
+ }
+}
+// Generated binding for VkDebugReportCallbackCreateInfoEXT: registers a debug
+// report callback plus opaque user data. Unlike most structs here, Debug is
+// hand-written (below) rather than derived, because the fn-pointer field has
+// no useful derived Debug.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugReportCallbackCreateInfoEXT.html>"]
+pub struct DebugReportCallbackCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: DebugReportFlagsEXT,
+ pub pfn_callback: PFN_vkDebugReportCallbackEXT,
+ pub p_user_data: *mut c_void,
+}
+// Manual Debug: the optional callback fn pointer is rendered as a raw pointer
+// so the output is stable and readable.
+#[cfg(feature = "debug")]
+impl fmt::Debug for DebugReportCallbackCreateInfoEXT {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("DebugReportCallbackCreateInfoEXT")
+ .field("s_type", &self.s_type)
+ .field("p_next", &self.p_next)
+ .field("flags", &self.flags)
+ .field("pfn_callback", &(self.pfn_callback.map(|x| x as *const ())))
+ .field("p_user_data", &self.p_user_data)
+ .finish()
+ }
+}
+impl ::std::default::Default for DebugReportCallbackCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: DebugReportFlagsEXT::default(),
+ pfn_callback: PFN_vkDebugReportCallbackEXT::default(),
+ p_user_data: ::std::ptr::null_mut(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DebugReportCallbackCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;
+}
+impl DebugReportCallbackCreateInfoEXT {
+ pub fn builder<'a>() -> DebugReportCallbackCreateInfoEXTBuilder<'a> {
+ DebugReportCallbackCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DebugReportCallbackCreateInfoEXTBuilder<'a> {
+ inner: DebugReportCallbackCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// This struct (and its builder) may be chained onto InstanceCreateInfo.
+unsafe impl ExtendsInstanceCreateInfo for DebugReportCallbackCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsInstanceCreateInfo for DebugReportCallbackCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for DebugReportCallbackCreateInfoEXTBuilder<'a> {
+ type Target = DebugReportCallbackCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DebugReportCallbackCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DebugReportCallbackCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: DebugReportFlagsEXT) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn pfn_callback(mut self, pfn_callback: PFN_vkDebugReportCallbackEXT) -> Self {
+ self.inner.pfn_callback = pfn_callback;
+ self
+ }
+ // Raw pointer passed straight through; caller is responsible for validity.
+ #[inline]
+ pub fn user_data(mut self, user_data: *mut c_void) -> Self {
+ self.inner.p_user_data = user_data;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DebugReportCallbackCreateInfoEXT {
+ self.inner
+ }
+}
+// Generated binding for VkValidationFlagsEXT: lists validation checks to
+// disable; chains onto InstanceCreateInfo.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkValidationFlagsEXT.html>"]
+pub struct ValidationFlagsEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub disabled_validation_check_count: u32,
+ pub p_disabled_validation_checks: *const ValidationCheckEXT,
+}
+impl ::std::default::Default for ValidationFlagsEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ disabled_validation_check_count: u32::default(),
+ p_disabled_validation_checks: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ValidationFlagsEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::VALIDATION_FLAGS_EXT;
+}
+impl ValidationFlagsEXT {
+ pub fn builder<'a>() -> ValidationFlagsEXTBuilder<'a> {
+ ValidationFlagsEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ValidationFlagsEXTBuilder<'a> {
+ inner: ValidationFlagsEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsInstanceCreateInfo for ValidationFlagsEXTBuilder<'_> {}
+unsafe impl ExtendsInstanceCreateInfo for ValidationFlagsEXT {}
+impl<'a> ::std::ops::Deref for ValidationFlagsEXTBuilder<'a> {
+ type Target = ValidationFlagsEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ValidationFlagsEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ValidationFlagsEXTBuilder<'a> {
+ // Slice setter keeps the count and pointer fields in sync.
+ #[inline]
+ pub fn disabled_validation_checks(
+ mut self,
+ disabled_validation_checks: &'a [ValidationCheckEXT],
+ ) -> Self {
+ self.inner.disabled_validation_check_count = disabled_validation_checks.len() as _;
+ self.inner.p_disabled_validation_checks = disabled_validation_checks.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ValidationFlagsEXT {
+ self.inner
+ }
+}
+// Generated binding for VkValidationFeaturesEXT: enables/disables specific
+// validation-layer features; chains onto InstanceCreateInfo.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkValidationFeaturesEXT.html>"]
+pub struct ValidationFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub enabled_validation_feature_count: u32,
+ pub p_enabled_validation_features: *const ValidationFeatureEnableEXT,
+ pub disabled_validation_feature_count: u32,
+ pub p_disabled_validation_features: *const ValidationFeatureDisableEXT,
+}
+impl ::std::default::Default for ValidationFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ enabled_validation_feature_count: u32::default(),
+ p_enabled_validation_features: ::std::ptr::null(),
+ disabled_validation_feature_count: u32::default(),
+ p_disabled_validation_features: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ValidationFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::VALIDATION_FEATURES_EXT;
+}
+impl ValidationFeaturesEXT {
+ pub fn builder<'a>() -> ValidationFeaturesEXTBuilder<'a> {
+ ValidationFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ValidationFeaturesEXTBuilder<'a> {
+ inner: ValidationFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsInstanceCreateInfo for ValidationFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsInstanceCreateInfo for ValidationFeaturesEXT {}
+impl<'a> ::std::ops::Deref for ValidationFeaturesEXTBuilder<'a> {
+ type Target = ValidationFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ValidationFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ValidationFeaturesEXTBuilder<'a> {
+ // Each slice setter keeps its own count/pointer pair in sync; the enabled
+ // and disabled lists are independent.
+ #[inline]
+ pub fn enabled_validation_features(
+ mut self,
+ enabled_validation_features: &'a [ValidationFeatureEnableEXT],
+ ) -> Self {
+ self.inner.enabled_validation_feature_count = enabled_validation_features.len() as _;
+ self.inner.p_enabled_validation_features = enabled_validation_features.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn disabled_validation_features(
+ mut self,
+ disabled_validation_features: &'a [ValidationFeatureDisableEXT],
+ ) -> Self {
+ self.inner.disabled_validation_feature_count = disabled_validation_features.len() as _;
+ self.inner.p_disabled_validation_features = disabled_validation_features.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ValidationFeaturesEXT {
+ self.inner
+ }
+}
+// Generated binding for VkPipelineRasterizationStateRasterizationOrderAMD:
+// selects the AMD rasterization order; chains onto
+// PipelineRasterizationStateCreateInfo.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRasterizationStateRasterizationOrderAMD.html>"]
+pub struct PipelineRasterizationStateRasterizationOrderAMD {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub rasterization_order: RasterizationOrderAMD,
+}
+impl ::std::default::Default for PipelineRasterizationStateRasterizationOrderAMD {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ rasterization_order: RasterizationOrderAMD::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineRasterizationStateRasterizationOrderAMD {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD;
+}
+impl PipelineRasterizationStateRasterizationOrderAMD {
+ pub fn builder<'a>() -> PipelineRasterizationStateRasterizationOrderAMDBuilder<'a> {
+ PipelineRasterizationStateRasterizationOrderAMDBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineRasterizationStateRasterizationOrderAMDBuilder<'a> {
+ inner: PipelineRasterizationStateRasterizationOrderAMD,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPipelineRasterizationStateCreateInfo
+ for PipelineRasterizationStateRasterizationOrderAMDBuilder<'_>
+{
+}
+unsafe impl ExtendsPipelineRasterizationStateCreateInfo
+ for PipelineRasterizationStateRasterizationOrderAMD
+{
+}
+impl<'a> ::std::ops::Deref for PipelineRasterizationStateRasterizationOrderAMDBuilder<'a> {
+ type Target = PipelineRasterizationStateRasterizationOrderAMD;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineRasterizationStateRasterizationOrderAMDBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineRasterizationStateRasterizationOrderAMDBuilder<'a> {
+ #[inline]
+ pub fn rasterization_order(mut self, rasterization_order: RasterizationOrderAMD) -> Self {
+ self.inner.rasterization_order = rasterization_order;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineRasterizationStateRasterizationOrderAMD {
+ self.inner
+ }
+}
+// Generated binding for VkDebugMarkerObjectNameInfoEXT: attaches a debug name
+// to an object handle (identified by a raw u64 handle plus its object type).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugMarkerObjectNameInfoEXT.html>"]
+pub struct DebugMarkerObjectNameInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub object_type: DebugReportObjectTypeEXT,
+ pub object: u64,
+ pub p_object_name: *const c_char,
+}
+impl ::std::default::Default for DebugMarkerObjectNameInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ object_type: DebugReportObjectTypeEXT::default(),
+ object: u64::default(),
+ p_object_name: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DebugMarkerObjectNameInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_MARKER_OBJECT_NAME_INFO_EXT;
+}
+impl DebugMarkerObjectNameInfoEXT {
+ pub fn builder<'a>() -> DebugMarkerObjectNameInfoEXTBuilder<'a> {
+ DebugMarkerObjectNameInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DebugMarkerObjectNameInfoEXTBuilder<'a> {
+ inner: DebugMarkerObjectNameInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DebugMarkerObjectNameInfoEXTBuilder<'a> {
+ type Target = DebugMarkerObjectNameInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DebugMarkerObjectNameInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DebugMarkerObjectNameInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn object_type(mut self, object_type: DebugReportObjectTypeEXT) -> Self {
+ self.inner.object_type = object_type;
+ self
+ }
+ #[inline]
+ pub fn object(mut self, object: u64) -> Self {
+ self.inner.object = object;
+ self
+ }
+ // Takes a CStr so the stored pointer is guaranteed NUL-terminated for 'a.
+ #[inline]
+ pub fn object_name(mut self, object_name: &'a ::std::ffi::CStr) -> Self {
+ self.inner.p_object_name = object_name.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DebugMarkerObjectNameInfoEXT {
+ self.inner
+ }
+}
+// Generated binding for VkDebugMarkerObjectTagInfoEXT: attaches an arbitrary
+// binary tag (name + byte payload) to an object handle.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugMarkerObjectTagInfoEXT.html>"]
+pub struct DebugMarkerObjectTagInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub object_type: DebugReportObjectTypeEXT,
+ pub object: u64,
+ pub tag_name: u64,
+ pub tag_size: usize,
+ pub p_tag: *const c_void,
+}
+impl ::std::default::Default for DebugMarkerObjectTagInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ object_type: DebugReportObjectTypeEXT::default(),
+ object: u64::default(),
+ tag_name: u64::default(),
+ tag_size: usize::default(),
+ p_tag: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DebugMarkerObjectTagInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_MARKER_OBJECT_TAG_INFO_EXT;
+}
+impl DebugMarkerObjectTagInfoEXT {
+ pub fn builder<'a>() -> DebugMarkerObjectTagInfoEXTBuilder<'a> {
+ DebugMarkerObjectTagInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DebugMarkerObjectTagInfoEXTBuilder<'a> {
+ inner: DebugMarkerObjectTagInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DebugMarkerObjectTagInfoEXTBuilder<'a> {
+ type Target = DebugMarkerObjectTagInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DebugMarkerObjectTagInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DebugMarkerObjectTagInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn object_type(mut self, object_type: DebugReportObjectTypeEXT) -> Self {
+ self.inner.object_type = object_type;
+ self
+ }
+ #[inline]
+ pub fn object(mut self, object: u64) -> Self {
+ self.inner.object = object;
+ self
+ }
+ #[inline]
+ pub fn tag_name(mut self, tag_name: u64) -> Self {
+ self.inner.tag_name = tag_name;
+ self
+ }
+ // Byte-slice setter: writes tag_size and p_tag together so they stay in sync.
+ #[inline]
+ pub fn tag(mut self, tag: &'a [u8]) -> Self {
+ self.inner.tag_size = tag.len();
+ self.inner.p_tag = tag.as_ptr().cast();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DebugMarkerObjectTagInfoEXT {
+ self.inner
+ }
+}
+// Generated binding for VkDebugMarkerMarkerInfoEXT: a named, colored marker
+// region for command-buffer debug annotation.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugMarkerMarkerInfoEXT.html>"]
+pub struct DebugMarkerMarkerInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub p_marker_name: *const c_char,
+ pub color: [f32; 4],
+}
+impl ::std::default::Default for DebugMarkerMarkerInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ p_marker_name: ::std::ptr::null(),
+ // All-zero f32x4 (generator emits mem::zeroed for array defaults;
+ // zero bits are a valid 0.0 for f32).
+ color: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for DebugMarkerMarkerInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_MARKER_MARKER_INFO_EXT;
+}
+impl DebugMarkerMarkerInfoEXT {
+ pub fn builder<'a>() -> DebugMarkerMarkerInfoEXTBuilder<'a> {
+ DebugMarkerMarkerInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DebugMarkerMarkerInfoEXTBuilder<'a> {
+ inner: DebugMarkerMarkerInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DebugMarkerMarkerInfoEXTBuilder<'a> {
+ type Target = DebugMarkerMarkerInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DebugMarkerMarkerInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DebugMarkerMarkerInfoEXTBuilder<'a> {
+ // Takes a CStr so the stored pointer is guaranteed NUL-terminated for 'a.
+ #[inline]
+ pub fn marker_name(mut self, marker_name: &'a ::std::ffi::CStr) -> Self {
+ self.inner.p_marker_name = marker_name.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn color(mut self, color: [f32; 4]) -> Self {
+ self.inner.color = color;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DebugMarkerMarkerInfoEXT {
+ self.inner
+ }
+}
+// Generated binding for VkDedicatedAllocationImageCreateInfoNV: flags an image
+// as requiring a dedicated allocation; chains onto ImageCreateInfo.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDedicatedAllocationImageCreateInfoNV.html>"]
+pub struct DedicatedAllocationImageCreateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub dedicated_allocation: Bool32,
+}
+impl ::std::default::Default for DedicatedAllocationImageCreateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ dedicated_allocation: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DedicatedAllocationImageCreateInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV;
+}
+impl DedicatedAllocationImageCreateInfoNV {
+ pub fn builder<'a>() -> DedicatedAllocationImageCreateInfoNVBuilder<'a> {
+ DedicatedAllocationImageCreateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DedicatedAllocationImageCreateInfoNVBuilder<'a> {
+ inner: DedicatedAllocationImageCreateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsImageCreateInfo for DedicatedAllocationImageCreateInfoNVBuilder<'_> {}
+unsafe impl ExtendsImageCreateInfo for DedicatedAllocationImageCreateInfoNV {}
+impl<'a> ::std::ops::Deref for DedicatedAllocationImageCreateInfoNVBuilder<'a> {
+ type Target = DedicatedAllocationImageCreateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DedicatedAllocationImageCreateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DedicatedAllocationImageCreateInfoNVBuilder<'a> {
+ // Rust bool converted to the FFI Bool32 representation.
+ #[inline]
+ pub fn dedicated_allocation(mut self, dedicated_allocation: bool) -> Self {
+ self.inner.dedicated_allocation = dedicated_allocation.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DedicatedAllocationImageCreateInfoNV {
+ self.inner
+ }
+}
+// Generated binding for VkDedicatedAllocationBufferCreateInfoNV: flags a buffer
+// as requiring a dedicated allocation; chains onto BufferCreateInfo.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDedicatedAllocationBufferCreateInfoNV.html>"]
+pub struct DedicatedAllocationBufferCreateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub dedicated_allocation: Bool32,
+}
+impl ::std::default::Default for DedicatedAllocationBufferCreateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ dedicated_allocation: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DedicatedAllocationBufferCreateInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV;
+}
+impl DedicatedAllocationBufferCreateInfoNV {
+ pub fn builder<'a>() -> DedicatedAllocationBufferCreateInfoNVBuilder<'a> {
+ DedicatedAllocationBufferCreateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DedicatedAllocationBufferCreateInfoNVBuilder<'a> {
+ inner: DedicatedAllocationBufferCreateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsBufferCreateInfo for DedicatedAllocationBufferCreateInfoNVBuilder<'_> {}
+unsafe impl ExtendsBufferCreateInfo for DedicatedAllocationBufferCreateInfoNV {}
+impl<'a> ::std::ops::Deref for DedicatedAllocationBufferCreateInfoNVBuilder<'a> {
+ type Target = DedicatedAllocationBufferCreateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DedicatedAllocationBufferCreateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DedicatedAllocationBufferCreateInfoNVBuilder<'a> {
+ // Rust bool converted to the FFI Bool32 representation.
+ #[inline]
+ pub fn dedicated_allocation(mut self, dedicated_allocation: bool) -> Self {
+ self.inner.dedicated_allocation = dedicated_allocation.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DedicatedAllocationBufferCreateInfoNV {
+ self.inner
+ }
+}
+// VkDedicatedAllocationMemoryAllocateInfoNV: names the image or buffer that a
+// dedicated memory allocation is bound to (standard s_type/p_next header).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDedicatedAllocationMemoryAllocateInfoNV.html>"]
+pub struct DedicatedAllocationMemoryAllocateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub image: Image,
+ pub buffer: Buffer,
+}
+impl ::std::default::Default for DedicatedAllocationMemoryAllocateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ image: Image::default(),
+ buffer: Buffer::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DedicatedAllocationMemoryAllocateInfoNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV;
+}
+impl DedicatedAllocationMemoryAllocateInfoNV {
+ pub fn builder<'a>() -> DedicatedAllocationMemoryAllocateInfoNVBuilder<'a> {
+ DedicatedAllocationMemoryAllocateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DedicatedAllocationMemoryAllocateInfoNVBuilder<'a> {
+ inner: DedicatedAllocationMemoryAllocateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of MemoryAllocateInfo.
+unsafe impl ExtendsMemoryAllocateInfo for DedicatedAllocationMemoryAllocateInfoNVBuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for DedicatedAllocationMemoryAllocateInfoNV {}
+impl<'a> ::std::ops::Deref for DedicatedAllocationMemoryAllocateInfoNVBuilder<'a> {
+ type Target = DedicatedAllocationMemoryAllocateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DedicatedAllocationMemoryAllocateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DedicatedAllocationMemoryAllocateInfoNVBuilder<'a> {
+ // NOTE(review): the API surface lets both image and buffer be set; the Vulkan
+ // spec presumably requires at most one to be a valid handle — not enforced here.
+ #[inline]
+ pub fn image(mut self, image: Image) -> Self {
+ self.inner.image = image;
+ self
+ }
+ #[inline]
+ pub fn buffer(mut self, buffer: Buffer) -> Self {
+ self.inner.buffer = buffer;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DedicatedAllocationMemoryAllocateInfoNV {
+ self.inner
+ }
+}
+// VkExternalImageFormatPropertiesNV: plain output struct (no s_type/p_next
+// header, so plain #[derive(Default)] suffices and no Extends* impls exist).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalImageFormatPropertiesNV.html>"]
+pub struct ExternalImageFormatPropertiesNV {
+ pub image_format_properties: ImageFormatProperties,
+ pub external_memory_features: ExternalMemoryFeatureFlagsNV,
+ pub export_from_imported_handle_types: ExternalMemoryHandleTypeFlagsNV,
+ pub compatible_handle_types: ExternalMemoryHandleTypeFlagsNV,
+}
+impl ExternalImageFormatPropertiesNV {
+ pub fn builder<'a>() -> ExternalImageFormatPropertiesNVBuilder<'a> {
+ ExternalImageFormatPropertiesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExternalImageFormatPropertiesNVBuilder<'a> {
+ inner: ExternalImageFormatPropertiesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ExternalImageFormatPropertiesNVBuilder<'a> {
+ type Target = ExternalImageFormatPropertiesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExternalImageFormatPropertiesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Field-by-field setters; each consumes and returns the builder for chaining.
+impl<'a> ExternalImageFormatPropertiesNVBuilder<'a> {
+ #[inline]
+ pub fn image_format_properties(
+ mut self,
+ image_format_properties: ImageFormatProperties,
+ ) -> Self {
+ self.inner.image_format_properties = image_format_properties;
+ self
+ }
+ #[inline]
+ pub fn external_memory_features(
+ mut self,
+ external_memory_features: ExternalMemoryFeatureFlagsNV,
+ ) -> Self {
+ self.inner.external_memory_features = external_memory_features;
+ self
+ }
+ #[inline]
+ pub fn export_from_imported_handle_types(
+ mut self,
+ export_from_imported_handle_types: ExternalMemoryHandleTypeFlagsNV,
+ ) -> Self {
+ self.inner.export_from_imported_handle_types = export_from_imported_handle_types;
+ self
+ }
+ #[inline]
+ pub fn compatible_handle_types(
+ mut self,
+ compatible_handle_types: ExternalMemoryHandleTypeFlagsNV,
+ ) -> Self {
+ self.inner.compatible_handle_types = compatible_handle_types;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExternalImageFormatPropertiesNV {
+ self.inner
+ }
+}
+// VkExternalMemoryImageCreateInfoNV: standard s_type/p_next header plus the
+// handle-type flags for externally backed images.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalMemoryImageCreateInfoNV.html>"]
+pub struct ExternalMemoryImageCreateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub handle_types: ExternalMemoryHandleTypeFlagsNV,
+}
+impl ::std::default::Default for ExternalMemoryImageCreateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ handle_types: ExternalMemoryHandleTypeFlagsNV::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ExternalMemoryImageCreateInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV;
+}
+impl ExternalMemoryImageCreateInfoNV {
+ pub fn builder<'a>() -> ExternalMemoryImageCreateInfoNVBuilder<'a> {
+ ExternalMemoryImageCreateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExternalMemoryImageCreateInfoNVBuilder<'a> {
+ inner: ExternalMemoryImageCreateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of ImageCreateInfo.
+unsafe impl ExtendsImageCreateInfo for ExternalMemoryImageCreateInfoNVBuilder<'_> {}
+unsafe impl ExtendsImageCreateInfo for ExternalMemoryImageCreateInfoNV {}
+impl<'a> ::std::ops::Deref for ExternalMemoryImageCreateInfoNVBuilder<'a> {
+ type Target = ExternalMemoryImageCreateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExternalMemoryImageCreateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExternalMemoryImageCreateInfoNVBuilder<'a> {
+ #[inline]
+ pub fn handle_types(mut self, handle_types: ExternalMemoryHandleTypeFlagsNV) -> Self {
+ self.inner.handle_types = handle_types;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExternalMemoryImageCreateInfoNV {
+ self.inner
+ }
+}
+// VkExportMemoryAllocateInfoNV: standard s_type/p_next header plus exportable
+// handle-type flags for a memory allocation.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportMemoryAllocateInfoNV.html>"]
+pub struct ExportMemoryAllocateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub handle_types: ExternalMemoryHandleTypeFlagsNV,
+}
+impl ::std::default::Default for ExportMemoryAllocateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ handle_types: ExternalMemoryHandleTypeFlagsNV::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ExportMemoryAllocateInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_MEMORY_ALLOCATE_INFO_NV;
+}
+impl ExportMemoryAllocateInfoNV {
+ pub fn builder<'a>() -> ExportMemoryAllocateInfoNVBuilder<'a> {
+ ExportMemoryAllocateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExportMemoryAllocateInfoNVBuilder<'a> {
+ inner: ExportMemoryAllocateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of MemoryAllocateInfo.
+unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryAllocateInfoNVBuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryAllocateInfoNV {}
+impl<'a> ::std::ops::Deref for ExportMemoryAllocateInfoNVBuilder<'a> {
+ type Target = ExportMemoryAllocateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExportMemoryAllocateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExportMemoryAllocateInfoNVBuilder<'a> {
+ #[inline]
+ pub fn handle_types(mut self, handle_types: ExternalMemoryHandleTypeFlagsNV) -> Self {
+ self.inner.handle_types = handle_types;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExportMemoryAllocateInfoNV {
+ self.inner
+ }
+}
+// VkImportMemoryWin32HandleInfoNV: imports an externally created Win32 handle
+// into a memory allocation (standard s_type/p_next header).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportMemoryWin32HandleInfoNV.html>"]
+pub struct ImportMemoryWin32HandleInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub handle_type: ExternalMemoryHandleTypeFlagsNV,
+ pub handle: HANDLE,
+}
+impl ::std::default::Default for ImportMemoryWin32HandleInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ handle_type: ExternalMemoryHandleTypeFlagsNV::default(),
+ // SAFETY: all-zero bytes; presumably HANDLE is a raw Win32 handle type
+ // for which zero is the conventional null value — TODO confirm typedef.
+ handle: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for ImportMemoryWin32HandleInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_MEMORY_WIN32_HANDLE_INFO_NV;
+}
+impl ImportMemoryWin32HandleInfoNV {
+ pub fn builder<'a>() -> ImportMemoryWin32HandleInfoNVBuilder<'a> {
+ ImportMemoryWin32HandleInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImportMemoryWin32HandleInfoNVBuilder<'a> {
+ inner: ImportMemoryWin32HandleInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of MemoryAllocateInfo.
+unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryWin32HandleInfoNVBuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryWin32HandleInfoNV {}
+impl<'a> ::std::ops::Deref for ImportMemoryWin32HandleInfoNVBuilder<'a> {
+ type Target = ImportMemoryWin32HandleInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImportMemoryWin32HandleInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImportMemoryWin32HandleInfoNVBuilder<'a> {
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlagsNV) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[inline]
+ pub fn handle(mut self, handle: HANDLE) -> Self {
+ self.inner.handle = handle;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImportMemoryWin32HandleInfoNV {
+ self.inner
+ }
+}
+// VkExportMemoryWin32HandleInfoNV: Win32 security attributes and access mask
+// used when exporting a memory allocation as a Win32 handle.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportMemoryWin32HandleInfoNV.html>"]
+pub struct ExportMemoryWin32HandleInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub p_attributes: *const SECURITY_ATTRIBUTES,
+ pub dw_access: DWORD,
+}
+impl ::std::default::Default for ExportMemoryWin32HandleInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ p_attributes: ::std::ptr::null(),
+ dw_access: DWORD::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ExportMemoryWin32HandleInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_MEMORY_WIN32_HANDLE_INFO_NV;
+}
+impl ExportMemoryWin32HandleInfoNV {
+ pub fn builder<'a>() -> ExportMemoryWin32HandleInfoNVBuilder<'a> {
+ ExportMemoryWin32HandleInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExportMemoryWin32HandleInfoNVBuilder<'a> {
+ inner: ExportMemoryWin32HandleInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of MemoryAllocateInfo.
+unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryWin32HandleInfoNVBuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryWin32HandleInfoNV {}
+impl<'a> ::std::ops::Deref for ExportMemoryWin32HandleInfoNVBuilder<'a> {
+ type Target = ExportMemoryWin32HandleInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExportMemoryWin32HandleInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExportMemoryWin32HandleInfoNVBuilder<'a> {
+ #[inline]
+ pub fn attributes(mut self, attributes: &'a SECURITY_ATTRIBUTES) -> Self {
+ // Stores a raw pointer to the borrow; the builder's 'a lifetime is what
+ // keeps the referenced SECURITY_ATTRIBUTES alive until build()/use.
+ self.inner.p_attributes = attributes;
+ self
+ }
+ #[inline]
+ pub fn dw_access(mut self, dw_access: DWORD) -> Self {
+ self.inner.dw_access = dw_access;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExportMemoryWin32HandleInfoNV {
+ self.inner
+ }
+}
+// VkWin32KeyedMutexAcquireReleaseInfoNV: parallel count+pointer arrays for
+// keyed-mutex acquire (syncs/keys/timeouts) and release (syncs/keys) sets.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkWin32KeyedMutexAcquireReleaseInfoNV.html>"]
+pub struct Win32KeyedMutexAcquireReleaseInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub acquire_count: u32,
+ pub p_acquire_syncs: *const DeviceMemory,
+ pub p_acquire_keys: *const u64,
+ pub p_acquire_timeout_milliseconds: *const u32,
+ pub release_count: u32,
+ pub p_release_syncs: *const DeviceMemory,
+ pub p_release_keys: *const u64,
+}
+impl ::std::default::Default for Win32KeyedMutexAcquireReleaseInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ acquire_count: u32::default(),
+ p_acquire_syncs: ::std::ptr::null(),
+ p_acquire_keys: ::std::ptr::null(),
+ p_acquire_timeout_milliseconds: ::std::ptr::null(),
+ release_count: u32::default(),
+ p_release_syncs: ::std::ptr::null(),
+ p_release_keys: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for Win32KeyedMutexAcquireReleaseInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV;
+}
+impl Win32KeyedMutexAcquireReleaseInfoNV {
+ pub fn builder<'a>() -> Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
+ Win32KeyedMutexAcquireReleaseInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
+ inner: Win32KeyedMutexAcquireReleaseInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of both SubmitInfo and SubmitInfo2.
+unsafe impl ExtendsSubmitInfo for Win32KeyedMutexAcquireReleaseInfoNVBuilder<'_> {}
+unsafe impl ExtendsSubmitInfo for Win32KeyedMutexAcquireReleaseInfoNV {}
+unsafe impl ExtendsSubmitInfo2 for Win32KeyedMutexAcquireReleaseInfoNVBuilder<'_> {}
+unsafe impl ExtendsSubmitInfo2 for Win32KeyedMutexAcquireReleaseInfoNV {}
+impl<'a> ::std::ops::Deref for Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
+ type Target = Win32KeyedMutexAcquireReleaseInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> Win32KeyedMutexAcquireReleaseInfoNVBuilder<'a> {
+ // NOTE(review): acquire_syncs, acquire_keys and acquire_timeout_milliseconds
+ // each overwrite the shared acquire_count (last setter wins), so callers
+ // presumably must pass equal-length slices; same for the two release setters.
+ #[inline]
+ pub fn acquire_syncs(mut self, acquire_syncs: &'a [DeviceMemory]) -> Self {
+ self.inner.acquire_count = acquire_syncs.len() as _;
+ self.inner.p_acquire_syncs = acquire_syncs.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn acquire_keys(mut self, acquire_keys: &'a [u64]) -> Self {
+ self.inner.acquire_count = acquire_keys.len() as _;
+ self.inner.p_acquire_keys = acquire_keys.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn acquire_timeout_milliseconds(mut self, acquire_timeout_milliseconds: &'a [u32]) -> Self {
+ self.inner.acquire_count = acquire_timeout_milliseconds.len() as _;
+ self.inner.p_acquire_timeout_milliseconds = acquire_timeout_milliseconds.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn release_syncs(mut self, release_syncs: &'a [DeviceMemory]) -> Self {
+ self.inner.release_count = release_syncs.len() as _;
+ self.inner.p_release_syncs = release_syncs.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn release_keys(mut self, release_keys: &'a [u64]) -> Self {
+ self.inner.release_count = release_keys.len() as _;
+ self.inner.p_release_keys = release_keys.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> Win32KeyedMutexAcquireReleaseInfoNV {
+ self.inner
+ }
+}
+// VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV: feature-query struct.
+// p_next is *mut here (feature structs are written by the implementation).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV.html>"]
+pub struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub device_generated_commands: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceDeviceGeneratedCommandsFeaturesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ device_generated_commands: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceDeviceGeneratedCommandsFeaturesNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV;
+}
+impl PhysicalDeviceDeviceGeneratedCommandsFeaturesNV {
+ pub fn builder<'a>() -> PhysicalDeviceDeviceGeneratedCommandsFeaturesNVBuilder<'a> {
+ PhysicalDeviceDeviceGeneratedCommandsFeaturesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNVBuilder<'a> {
+ inner: PhysicalDeviceDeviceGeneratedCommandsFeaturesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chains into PhysicalDeviceFeatures2 (query) and
+// DeviceCreateInfo (enable-at-device-creation).
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceDeviceGeneratedCommandsFeaturesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDeviceGeneratedCommandsFeaturesNV {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDeviceGeneratedCommandsFeaturesNVBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDeviceGeneratedCommandsFeaturesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceDeviceGeneratedCommandsFeaturesNVBuilder<'a> {
+ type Target = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceDeviceGeneratedCommandsFeaturesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceDeviceGeneratedCommandsFeaturesNVBuilder<'a> {
+ #[inline]
+ pub fn device_generated_commands(mut self, device_generated_commands: bool) -> Self {
+ self.inner.device_generated_commands = device_generated_commands.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceDeviceGeneratedCommandsFeaturesNV {
+ self.inner
+ }
+}
+// VkDevicePrivateDataCreateInfo: reserves private-data slots at device
+// creation (standard s_type/p_next header plus a request count).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDevicePrivateDataCreateInfo.html>"]
+pub struct DevicePrivateDataCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub private_data_slot_request_count: u32,
+}
+impl ::std::default::Default for DevicePrivateDataCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ private_data_slot_request_count: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DevicePrivateDataCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_PRIVATE_DATA_CREATE_INFO;
+}
+impl DevicePrivateDataCreateInfo {
+ pub fn builder<'a>() -> DevicePrivateDataCreateInfoBuilder<'a> {
+ DevicePrivateDataCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DevicePrivateDataCreateInfoBuilder<'a> {
+ inner: DevicePrivateDataCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of DeviceCreateInfo.
+unsafe impl ExtendsDeviceCreateInfo for DevicePrivateDataCreateInfoBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for DevicePrivateDataCreateInfo {}
+impl<'a> ::std::ops::Deref for DevicePrivateDataCreateInfoBuilder<'a> {
+ type Target = DevicePrivateDataCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DevicePrivateDataCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DevicePrivateDataCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn private_data_slot_request_count(mut self, private_data_slot_request_count: u32) -> Self {
+ self.inner.private_data_slot_request_count = private_data_slot_request_count;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DevicePrivateDataCreateInfo {
+ self.inner
+ }
+}
+// VkPrivateDataSlotCreateInfo: creation parameters for a private-data slot
+// (standard s_type/p_next header plus a flags field; no Extends* impls).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPrivateDataSlotCreateInfo.html>"]
+pub struct PrivateDataSlotCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: PrivateDataSlotCreateFlags,
+}
+impl ::std::default::Default for PrivateDataSlotCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: PrivateDataSlotCreateFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PrivateDataSlotCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::PRIVATE_DATA_SLOT_CREATE_INFO;
+}
+impl PrivateDataSlotCreateInfo {
+ pub fn builder<'a>() -> PrivateDataSlotCreateInfoBuilder<'a> {
+ PrivateDataSlotCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PrivateDataSlotCreateInfoBuilder<'a> {
+ inner: PrivateDataSlotCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PrivateDataSlotCreateInfoBuilder<'a> {
+ type Target = PrivateDataSlotCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PrivateDataSlotCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PrivateDataSlotCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: PrivateDataSlotCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PrivateDataSlotCreateInfo {
+ self.inner
+ }
+}
+// VkPhysicalDevicePrivateDataFeatures: feature-query struct; p_next is *mut
+// because the implementation writes through the chain during queries.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePrivateDataFeatures.html>"]
+pub struct PhysicalDevicePrivateDataFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub private_data: Bool32,
+}
+impl ::std::default::Default for PhysicalDevicePrivateDataFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ private_data: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDevicePrivateDataFeatures {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES;
+}
+impl PhysicalDevicePrivateDataFeatures {
+ pub fn builder<'a>() -> PhysicalDevicePrivateDataFeaturesBuilder<'a> {
+ PhysicalDevicePrivateDataFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDevicePrivateDataFeaturesBuilder<'a> {
+ inner: PhysicalDevicePrivateDataFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chains into PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePrivateDataFeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePrivateDataFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePrivateDataFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePrivateDataFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePrivateDataFeaturesBuilder<'a> {
+ type Target = PhysicalDevicePrivateDataFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePrivateDataFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDevicePrivateDataFeaturesBuilder<'a> {
+ #[inline]
+ pub fn private_data(mut self, private_data: bool) -> Self {
+ self.inner.private_data = private_data.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDevicePrivateDataFeatures {
+ self.inner
+ }
+}
+// VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV: implementation limits
+// for device-generated commands; filled in by the driver via a properties
+// query (hence *mut p_next). Builder setters continue below this block.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV.html>"]
+pub struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub max_graphics_shader_group_count: u32,
+ pub max_indirect_sequence_count: u32,
+ pub max_indirect_commands_token_count: u32,
+ pub max_indirect_commands_stream_count: u32,
+ pub max_indirect_commands_token_offset: u32,
+ pub max_indirect_commands_stream_stride: u32,
+ pub min_sequences_count_buffer_offset_alignment: u32,
+ pub min_sequences_index_buffer_offset_alignment: u32,
+ pub min_indirect_commands_buffer_offset_alignment: u32,
+}
+impl ::std::default::Default for PhysicalDeviceDeviceGeneratedCommandsPropertiesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ max_graphics_shader_group_count: u32::default(),
+ max_indirect_sequence_count: u32::default(),
+ max_indirect_commands_token_count: u32::default(),
+ max_indirect_commands_stream_count: u32::default(),
+ max_indirect_commands_token_offset: u32::default(),
+ max_indirect_commands_stream_stride: u32::default(),
+ min_sequences_count_buffer_offset_alignment: u32::default(),
+ min_sequences_index_buffer_offset_alignment: u32::default(),
+ min_indirect_commands_buffer_offset_alignment: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceDeviceGeneratedCommandsPropertiesNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV;
+}
+impl PhysicalDeviceDeviceGeneratedCommandsPropertiesNV {
+ pub fn builder<'a>() -> PhysicalDeviceDeviceGeneratedCommandsPropertiesNVBuilder<'a> {
+ PhysicalDeviceDeviceGeneratedCommandsPropertiesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNVBuilder<'a> {
+ inner: PhysicalDeviceDeviceGeneratedCommandsPropertiesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chains into PhysicalDeviceProperties2 only (read-only limits;
+// properties structs are not DeviceCreateInfo extensions).
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceDeviceGeneratedCommandsPropertiesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDeviceGeneratedCommandsPropertiesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceDeviceGeneratedCommandsPropertiesNVBuilder<'a> {
+ type Target = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceDeviceGeneratedCommandsPropertiesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceDeviceGeneratedCommandsPropertiesNVBuilder<'a> {
+ #[inline]
+ pub fn max_graphics_shader_group_count(mut self, max_graphics_shader_group_count: u32) -> Self {
+ self.inner.max_graphics_shader_group_count = max_graphics_shader_group_count;
+ self
+ }
+ #[inline]
+ pub fn max_indirect_sequence_count(mut self, max_indirect_sequence_count: u32) -> Self {
+ self.inner.max_indirect_sequence_count = max_indirect_sequence_count;
+ self
+ }
+ #[inline]
+ pub fn max_indirect_commands_token_count(
+ mut self,
+ max_indirect_commands_token_count: u32,
+ ) -> Self {
+ self.inner.max_indirect_commands_token_count = max_indirect_commands_token_count;
+ self
+ }
+ #[inline]
+ pub fn max_indirect_commands_stream_count(
+ mut self,
+ max_indirect_commands_stream_count: u32,
+ ) -> Self {
+ self.inner.max_indirect_commands_stream_count = max_indirect_commands_stream_count;
+ self
+ }
+ #[inline]
+ pub fn max_indirect_commands_token_offset(
+ mut self,
+ max_indirect_commands_token_offset: u32,
+ ) -> Self {
+ self.inner.max_indirect_commands_token_offset = max_indirect_commands_token_offset;
+ self
+ }
+ #[inline]
+ pub fn max_indirect_commands_stream_stride(
+ mut self,
+ max_indirect_commands_stream_stride: u32,
+ ) -> Self {
+ self.inner.max_indirect_commands_stream_stride = max_indirect_commands_stream_stride;
+ self
+ }
+ #[inline]
+ pub fn min_sequences_count_buffer_offset_alignment(
+ mut self,
+ min_sequences_count_buffer_offset_alignment: u32,
+ ) -> Self {
+ self.inner.min_sequences_count_buffer_offset_alignment =
+ min_sequences_count_buffer_offset_alignment;
+ self
+ }
+ #[inline]
+ pub fn min_sequences_index_buffer_offset_alignment(
+ mut self,
+ min_sequences_index_buffer_offset_alignment: u32,
+ ) -> Self {
+ self.inner.min_sequences_index_buffer_offset_alignment =
+ min_sequences_index_buffer_offset_alignment;
+ self
+ }
+ #[inline]
+ pub fn min_indirect_commands_buffer_offset_alignment(
+ mut self,
+ min_indirect_commands_buffer_offset_alignment: u32,
+ ) -> Self {
+ self.inner.min_indirect_commands_buffer_offset_alignment =
+ min_indirect_commands_buffer_offset_alignment;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceDeviceGeneratedCommandsPropertiesNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMultiDrawPropertiesEXT.html>"]
+pub struct PhysicalDeviceMultiDrawPropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub max_multi_draw_count: u32,
+}
+impl ::std::default::Default for PhysicalDeviceMultiDrawPropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ max_multi_draw_count: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceMultiDrawPropertiesEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT;
+}
+impl PhysicalDeviceMultiDrawPropertiesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceMultiDrawPropertiesEXTBuilder<'a> {
+ PhysicalDeviceMultiDrawPropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceMultiDrawPropertiesEXTBuilder<'a> {
+ inner: PhysicalDeviceMultiDrawPropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMultiDrawPropertiesEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMultiDrawPropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceMultiDrawPropertiesEXTBuilder<'a> {
+ type Target = PhysicalDeviceMultiDrawPropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceMultiDrawPropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceMultiDrawPropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn max_multi_draw_count(mut self, max_multi_draw_count: u32) -> Self {
+ self.inner.max_multi_draw_count = max_multi_draw_count;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceMultiDrawPropertiesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkGraphicsShaderGroupCreateInfoNV.html>"]
+pub struct GraphicsShaderGroupCreateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub stage_count: u32,
+ pub p_stages: *const PipelineShaderStageCreateInfo,
+ pub p_vertex_input_state: *const PipelineVertexInputStateCreateInfo,
+ pub p_tessellation_state: *const PipelineTessellationStateCreateInfo,
+}
+impl ::std::default::Default for GraphicsShaderGroupCreateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ stage_count: u32::default(),
+ p_stages: ::std::ptr::null(),
+ p_vertex_input_state: ::std::ptr::null(),
+ p_tessellation_state: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for GraphicsShaderGroupCreateInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::GRAPHICS_SHADER_GROUP_CREATE_INFO_NV;
+}
+impl GraphicsShaderGroupCreateInfoNV {
+ pub fn builder<'a>() -> GraphicsShaderGroupCreateInfoNVBuilder<'a> {
+ GraphicsShaderGroupCreateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct GraphicsShaderGroupCreateInfoNVBuilder<'a> {
+ inner: GraphicsShaderGroupCreateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for GraphicsShaderGroupCreateInfoNVBuilder<'a> {
+ type Target = GraphicsShaderGroupCreateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for GraphicsShaderGroupCreateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> GraphicsShaderGroupCreateInfoNVBuilder<'a> {
+ #[inline]
+ pub fn stages(mut self, stages: &'a [PipelineShaderStageCreateInfo]) -> Self {
+ self.inner.stage_count = stages.len() as _;
+ self.inner.p_stages = stages.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn vertex_input_state(
+ mut self,
+ vertex_input_state: &'a PipelineVertexInputStateCreateInfo,
+ ) -> Self {
+ self.inner.p_vertex_input_state = vertex_input_state;
+ self
+ }
+ #[inline]
+ pub fn tessellation_state(
+ mut self,
+ tessellation_state: &'a PipelineTessellationStateCreateInfo,
+ ) -> Self {
+ self.inner.p_tessellation_state = tessellation_state;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> GraphicsShaderGroupCreateInfoNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkGraphicsPipelineShaderGroupsCreateInfoNV.html>"]
+pub struct GraphicsPipelineShaderGroupsCreateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub group_count: u32,
+ pub p_groups: *const GraphicsShaderGroupCreateInfoNV,
+ pub pipeline_count: u32,
+ pub p_pipelines: *const Pipeline,
+}
+impl ::std::default::Default for GraphicsPipelineShaderGroupsCreateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ group_count: u32::default(),
+ p_groups: ::std::ptr::null(),
+ pipeline_count: u32::default(),
+ p_pipelines: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for GraphicsPipelineShaderGroupsCreateInfoNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV;
+}
+impl GraphicsPipelineShaderGroupsCreateInfoNV {
+ pub fn builder<'a>() -> GraphicsPipelineShaderGroupsCreateInfoNVBuilder<'a> {
+ GraphicsPipelineShaderGroupsCreateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct GraphicsPipelineShaderGroupsCreateInfoNVBuilder<'a> {
+ inner: GraphicsPipelineShaderGroupsCreateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsGraphicsPipelineCreateInfo
+ for GraphicsPipelineShaderGroupsCreateInfoNVBuilder<'_>
+{
+}
+unsafe impl ExtendsGraphicsPipelineCreateInfo for GraphicsPipelineShaderGroupsCreateInfoNV {}
+impl<'a> ::std::ops::Deref for GraphicsPipelineShaderGroupsCreateInfoNVBuilder<'a> {
+ type Target = GraphicsPipelineShaderGroupsCreateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for GraphicsPipelineShaderGroupsCreateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> GraphicsPipelineShaderGroupsCreateInfoNVBuilder<'a> {
+ #[inline]
+ pub fn groups(mut self, groups: &'a [GraphicsShaderGroupCreateInfoNV]) -> Self {
+ self.inner.group_count = groups.len() as _;
+ self.inner.p_groups = groups.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn pipelines(mut self, pipelines: &'a [Pipeline]) -> Self {
+ self.inner.pipeline_count = pipelines.len() as _;
+ self.inner.p_pipelines = pipelines.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> GraphicsPipelineShaderGroupsCreateInfoNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBindShaderGroupIndirectCommandNV.html>"]
+pub struct BindShaderGroupIndirectCommandNV {
+ pub group_index: u32,
+}
+impl BindShaderGroupIndirectCommandNV {
+ pub fn builder<'a>() -> BindShaderGroupIndirectCommandNVBuilder<'a> {
+ BindShaderGroupIndirectCommandNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct BindShaderGroupIndirectCommandNVBuilder<'a> {
+ inner: BindShaderGroupIndirectCommandNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BindShaderGroupIndirectCommandNVBuilder<'a> {
+ type Target = BindShaderGroupIndirectCommandNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BindShaderGroupIndirectCommandNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BindShaderGroupIndirectCommandNVBuilder<'a> {
+ #[inline]
+ pub fn group_index(mut self, group_index: u32) -> Self {
+ self.inner.group_index = group_index;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BindShaderGroupIndirectCommandNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBindIndexBufferIndirectCommandNV.html>"]
+pub struct BindIndexBufferIndirectCommandNV {
+ pub buffer_address: DeviceAddress,
+ pub size: u32,
+ pub index_type: IndexType,
+}
+impl BindIndexBufferIndirectCommandNV {
+ pub fn builder<'a>() -> BindIndexBufferIndirectCommandNVBuilder<'a> {
+ BindIndexBufferIndirectCommandNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct BindIndexBufferIndirectCommandNVBuilder<'a> {
+ inner: BindIndexBufferIndirectCommandNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BindIndexBufferIndirectCommandNVBuilder<'a> {
+ type Target = BindIndexBufferIndirectCommandNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BindIndexBufferIndirectCommandNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BindIndexBufferIndirectCommandNVBuilder<'a> {
+ #[inline]
+ pub fn buffer_address(mut self, buffer_address: DeviceAddress) -> Self {
+ self.inner.buffer_address = buffer_address;
+ self
+ }
+ #[inline]
+ pub fn size(mut self, size: u32) -> Self {
+ self.inner.size = size;
+ self
+ }
+ #[inline]
+ pub fn index_type(mut self, index_type: IndexType) -> Self {
+ self.inner.index_type = index_type;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BindIndexBufferIndirectCommandNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBindVertexBufferIndirectCommandNV.html>"]
+pub struct BindVertexBufferIndirectCommandNV {
+ pub buffer_address: DeviceAddress,
+ pub size: u32,
+ pub stride: u32,
+}
+impl BindVertexBufferIndirectCommandNV {
+ pub fn builder<'a>() -> BindVertexBufferIndirectCommandNVBuilder<'a> {
+ BindVertexBufferIndirectCommandNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct BindVertexBufferIndirectCommandNVBuilder<'a> {
+ inner: BindVertexBufferIndirectCommandNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BindVertexBufferIndirectCommandNVBuilder<'a> {
+ type Target = BindVertexBufferIndirectCommandNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BindVertexBufferIndirectCommandNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BindVertexBufferIndirectCommandNVBuilder<'a> {
+ #[inline]
+ pub fn buffer_address(mut self, buffer_address: DeviceAddress) -> Self {
+ self.inner.buffer_address = buffer_address;
+ self
+ }
+ #[inline]
+ pub fn size(mut self, size: u32) -> Self {
+ self.inner.size = size;
+ self
+ }
+ #[inline]
+ pub fn stride(mut self, stride: u32) -> Self {
+ self.inner.stride = stride;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BindVertexBufferIndirectCommandNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSetStateFlagsIndirectCommandNV.html>"]
+pub struct SetStateFlagsIndirectCommandNV {
+ pub data: u32,
+}
+impl SetStateFlagsIndirectCommandNV {
+ pub fn builder<'a>() -> SetStateFlagsIndirectCommandNVBuilder<'a> {
+ SetStateFlagsIndirectCommandNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SetStateFlagsIndirectCommandNVBuilder<'a> {
+ inner: SetStateFlagsIndirectCommandNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SetStateFlagsIndirectCommandNVBuilder<'a> {
+ type Target = SetStateFlagsIndirectCommandNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SetStateFlagsIndirectCommandNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SetStateFlagsIndirectCommandNVBuilder<'a> {
+ #[inline]
+ pub fn data(mut self, data: u32) -> Self {
+ self.inner.data = data;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SetStateFlagsIndirectCommandNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkIndirectCommandsStreamNV.html>"]
+pub struct IndirectCommandsStreamNV {
+ pub buffer: Buffer,
+ pub offset: DeviceSize,
+}
+impl IndirectCommandsStreamNV {
+ pub fn builder<'a>() -> IndirectCommandsStreamNVBuilder<'a> {
+ IndirectCommandsStreamNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct IndirectCommandsStreamNVBuilder<'a> {
+ inner: IndirectCommandsStreamNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for IndirectCommandsStreamNVBuilder<'a> {
+ type Target = IndirectCommandsStreamNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for IndirectCommandsStreamNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> IndirectCommandsStreamNVBuilder<'a> {
+ #[inline]
+ pub fn buffer(mut self, buffer: Buffer) -> Self {
+ self.inner.buffer = buffer;
+ self
+ }
+ #[inline]
+ pub fn offset(mut self, offset: DeviceSize) -> Self {
+ self.inner.offset = offset;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> IndirectCommandsStreamNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkIndirectCommandsLayoutTokenNV.html>"]
+pub struct IndirectCommandsLayoutTokenNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub token_type: IndirectCommandsTokenTypeNV,
+ pub stream: u32,
+ pub offset: u32,
+ pub vertex_binding_unit: u32,
+ pub vertex_dynamic_stride: Bool32,
+ pub pushconstant_pipeline_layout: PipelineLayout,
+ pub pushconstant_shader_stage_flags: ShaderStageFlags,
+ pub pushconstant_offset: u32,
+ pub pushconstant_size: u32,
+ pub indirect_state_flags: IndirectStateFlagsNV,
+ pub index_type_count: u32,
+ pub p_index_types: *const IndexType,
+ pub p_index_type_values: *const u32,
+}
+impl ::std::default::Default for IndirectCommandsLayoutTokenNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ token_type: IndirectCommandsTokenTypeNV::default(),
+ stream: u32::default(),
+ offset: u32::default(),
+ vertex_binding_unit: u32::default(),
+ vertex_dynamic_stride: Bool32::default(),
+ pushconstant_pipeline_layout: PipelineLayout::default(),
+ pushconstant_shader_stage_flags: ShaderStageFlags::default(),
+ pushconstant_offset: u32::default(),
+ pushconstant_size: u32::default(),
+ indirect_state_flags: IndirectStateFlagsNV::default(),
+ index_type_count: u32::default(),
+ p_index_types: ::std::ptr::null(),
+ p_index_type_values: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for IndirectCommandsLayoutTokenNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::INDIRECT_COMMANDS_LAYOUT_TOKEN_NV;
+}
+impl IndirectCommandsLayoutTokenNV {
+ pub fn builder<'a>() -> IndirectCommandsLayoutTokenNVBuilder<'a> {
+ IndirectCommandsLayoutTokenNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct IndirectCommandsLayoutTokenNVBuilder<'a> {
+ inner: IndirectCommandsLayoutTokenNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for IndirectCommandsLayoutTokenNVBuilder<'a> {
+ type Target = IndirectCommandsLayoutTokenNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for IndirectCommandsLayoutTokenNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> IndirectCommandsLayoutTokenNVBuilder<'a> {
+ #[inline]
+ pub fn token_type(mut self, token_type: IndirectCommandsTokenTypeNV) -> Self {
+ self.inner.token_type = token_type;
+ self
+ }
+ #[inline]
+ pub fn stream(mut self, stream: u32) -> Self {
+ self.inner.stream = stream;
+ self
+ }
+ #[inline]
+ pub fn offset(mut self, offset: u32) -> Self {
+ self.inner.offset = offset;
+ self
+ }
+ #[inline]
+ pub fn vertex_binding_unit(mut self, vertex_binding_unit: u32) -> Self {
+ self.inner.vertex_binding_unit = vertex_binding_unit;
+ self
+ }
+ #[inline]
+ pub fn vertex_dynamic_stride(mut self, vertex_dynamic_stride: bool) -> Self {
+ self.inner.vertex_dynamic_stride = vertex_dynamic_stride.into();
+ self
+ }
+ #[inline]
+ pub fn pushconstant_pipeline_layout(
+ mut self,
+ pushconstant_pipeline_layout: PipelineLayout,
+ ) -> Self {
+ self.inner.pushconstant_pipeline_layout = pushconstant_pipeline_layout;
+ self
+ }
+ #[inline]
+ pub fn pushconstant_shader_stage_flags(
+ mut self,
+ pushconstant_shader_stage_flags: ShaderStageFlags,
+ ) -> Self {
+ self.inner.pushconstant_shader_stage_flags = pushconstant_shader_stage_flags;
+ self
+ }
+ #[inline]
+ pub fn pushconstant_offset(mut self, pushconstant_offset: u32) -> Self {
+ self.inner.pushconstant_offset = pushconstant_offset;
+ self
+ }
+ #[inline]
+ pub fn pushconstant_size(mut self, pushconstant_size: u32) -> Self {
+ self.inner.pushconstant_size = pushconstant_size;
+ self
+ }
+ #[inline]
+ pub fn indirect_state_flags(mut self, indirect_state_flags: IndirectStateFlagsNV) -> Self {
+ self.inner.indirect_state_flags = indirect_state_flags;
+ self
+ }
+ #[inline]
+ pub fn index_types(mut self, index_types: &'a [IndexType]) -> Self {
+ self.inner.index_type_count = index_types.len() as _;
+ self.inner.p_index_types = index_types.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn index_type_values(mut self, index_type_values: &'a [u32]) -> Self {
+ self.inner.index_type_count = index_type_values.len() as _;
+ self.inner.p_index_type_values = index_type_values.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> IndirectCommandsLayoutTokenNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkIndirectCommandsLayoutCreateInfoNV.html>"]
+pub struct IndirectCommandsLayoutCreateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: IndirectCommandsLayoutUsageFlagsNV,
+ pub pipeline_bind_point: PipelineBindPoint,
+ pub token_count: u32,
+ pub p_tokens: *const IndirectCommandsLayoutTokenNV,
+ pub stream_count: u32,
+ pub p_stream_strides: *const u32,
+}
+impl ::std::default::Default for IndirectCommandsLayoutCreateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: IndirectCommandsLayoutUsageFlagsNV::default(),
+ pipeline_bind_point: PipelineBindPoint::default(),
+ token_count: u32::default(),
+ p_tokens: ::std::ptr::null(),
+ stream_count: u32::default(),
+ p_stream_strides: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for IndirectCommandsLayoutCreateInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV;
+}
+impl IndirectCommandsLayoutCreateInfoNV {
+ pub fn builder<'a>() -> IndirectCommandsLayoutCreateInfoNVBuilder<'a> {
+ IndirectCommandsLayoutCreateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct IndirectCommandsLayoutCreateInfoNVBuilder<'a> {
+ inner: IndirectCommandsLayoutCreateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for IndirectCommandsLayoutCreateInfoNVBuilder<'a> {
+ type Target = IndirectCommandsLayoutCreateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for IndirectCommandsLayoutCreateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> IndirectCommandsLayoutCreateInfoNVBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: IndirectCommandsLayoutUsageFlagsNV) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn pipeline_bind_point(mut self, pipeline_bind_point: PipelineBindPoint) -> Self {
+ self.inner.pipeline_bind_point = pipeline_bind_point;
+ self
+ }
+ #[inline]
+ pub fn tokens(mut self, tokens: &'a [IndirectCommandsLayoutTokenNV]) -> Self {
+ self.inner.token_count = tokens.len() as _;
+ self.inner.p_tokens = tokens.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn stream_strides(mut self, stream_strides: &'a [u32]) -> Self {
+ self.inner.stream_count = stream_strides.len() as _;
+ self.inner.p_stream_strides = stream_strides.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> IndirectCommandsLayoutCreateInfoNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkGeneratedCommandsInfoNV.html>"]
+pub struct GeneratedCommandsInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub pipeline_bind_point: PipelineBindPoint,
+ pub pipeline: Pipeline,
+ pub indirect_commands_layout: IndirectCommandsLayoutNV,
+ pub stream_count: u32,
+ pub p_streams: *const IndirectCommandsStreamNV,
+ pub sequences_count: u32,
+ pub preprocess_buffer: Buffer,
+ pub preprocess_offset: DeviceSize,
+ pub preprocess_size: DeviceSize,
+ pub sequences_count_buffer: Buffer,
+ pub sequences_count_offset: DeviceSize,
+ pub sequences_index_buffer: Buffer,
+ pub sequences_index_offset: DeviceSize,
+}
+impl ::std::default::Default for GeneratedCommandsInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ pipeline_bind_point: PipelineBindPoint::default(),
+ pipeline: Pipeline::default(),
+ indirect_commands_layout: IndirectCommandsLayoutNV::default(),
+ stream_count: u32::default(),
+ p_streams: ::std::ptr::null(),
+ sequences_count: u32::default(),
+ preprocess_buffer: Buffer::default(),
+ preprocess_offset: DeviceSize::default(),
+ preprocess_size: DeviceSize::default(),
+ sequences_count_buffer: Buffer::default(),
+ sequences_count_offset: DeviceSize::default(),
+ sequences_index_buffer: Buffer::default(),
+ sequences_index_offset: DeviceSize::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for GeneratedCommandsInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::GENERATED_COMMANDS_INFO_NV;
+}
+impl GeneratedCommandsInfoNV {
+ pub fn builder<'a>() -> GeneratedCommandsInfoNVBuilder<'a> {
+ GeneratedCommandsInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct GeneratedCommandsInfoNVBuilder<'a> {
+ inner: GeneratedCommandsInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for GeneratedCommandsInfoNVBuilder<'a> {
+ type Target = GeneratedCommandsInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for GeneratedCommandsInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> GeneratedCommandsInfoNVBuilder<'a> {
+ #[inline]
+ pub fn pipeline_bind_point(mut self, pipeline_bind_point: PipelineBindPoint) -> Self {
+ self.inner.pipeline_bind_point = pipeline_bind_point;
+ self
+ }
+ #[inline]
+ pub fn pipeline(mut self, pipeline: Pipeline) -> Self {
+ self.inner.pipeline = pipeline;
+ self
+ }
+ #[inline]
+ pub fn indirect_commands_layout(
+ mut self,
+ indirect_commands_layout: IndirectCommandsLayoutNV,
+ ) -> Self {
+ self.inner.indirect_commands_layout = indirect_commands_layout;
+ self
+ }
+ #[inline]
+ pub fn streams(mut self, streams: &'a [IndirectCommandsStreamNV]) -> Self {
+ self.inner.stream_count = streams.len() as _;
+ self.inner.p_streams = streams.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn sequences_count(mut self, sequences_count: u32) -> Self {
+ self.inner.sequences_count = sequences_count;
+ self
+ }
+ #[inline]
+ pub fn preprocess_buffer(mut self, preprocess_buffer: Buffer) -> Self {
+ self.inner.preprocess_buffer = preprocess_buffer;
+ self
+ }
+ #[inline]
+ pub fn preprocess_offset(mut self, preprocess_offset: DeviceSize) -> Self {
+ self.inner.preprocess_offset = preprocess_offset;
+ self
+ }
+ #[inline]
+ pub fn preprocess_size(mut self, preprocess_size: DeviceSize) -> Self {
+ self.inner.preprocess_size = preprocess_size;
+ self
+ }
+ #[inline]
+ pub fn sequences_count_buffer(mut self, sequences_count_buffer: Buffer) -> Self {
+ self.inner.sequences_count_buffer = sequences_count_buffer;
+ self
+ }
+ #[inline]
+ pub fn sequences_count_offset(mut self, sequences_count_offset: DeviceSize) -> Self {
+ self.inner.sequences_count_offset = sequences_count_offset;
+ self
+ }
+ #[inline]
+ pub fn sequences_index_buffer(mut self, sequences_index_buffer: Buffer) -> Self {
+ self.inner.sequences_index_buffer = sequences_index_buffer;
+ self
+ }
+ #[inline]
+ pub fn sequences_index_offset(mut self, sequences_index_offset: DeviceSize) -> Self {
+ self.inner.sequences_index_offset = sequences_index_offset;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> GeneratedCommandsInfoNV {
+ self.inner
+ }
+}
// VkGeneratedCommandsMemoryRequirementsInfoNV plus its typed builder.
// #[repr(C)]: field order and types must match the Vulkan C struct exactly.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkGeneratedCommandsMemoryRequirementsInfoNV.html>"]
pub struct GeneratedCommandsMemoryRequirementsInfoNV {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub pipeline_bind_point: PipelineBindPoint,
    pub pipeline: Pipeline,
    pub indirect_commands_layout: IndirectCommandsLayoutNV,
    pub max_sequences_count: u32,
}
impl ::std::default::Default for GeneratedCommandsMemoryRequirementsInfoNV {
    #[inline]
    fn default() -> Self {
        Self {
            // s_type is pre-tagged so a default value is already usable in API calls.
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            pipeline_bind_point: PipelineBindPoint::default(),
            pipeline: Pipeline::default(),
            indirect_commands_layout: IndirectCommandsLayoutNV::default(),
            max_sequences_count: u32::default(),
        }
    }
}
unsafe impl TaggedStructure for GeneratedCommandsMemoryRequirementsInfoNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV;
}
impl GeneratedCommandsMemoryRequirementsInfoNV {
    // Entry point for the builder pattern; starts from Default (tagged s_type).
    pub fn builder<'a>() -> GeneratedCommandsMemoryRequirementsInfoNVBuilder<'a> {
        GeneratedCommandsMemoryRequirementsInfoNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): the builder is layout-identical to the wrapped struct, so
// &builder can be passed where the raw struct pointer is expected. The 'a
// lifetime exists only via PhantomData to tie borrowed chain data to the builder.
#[repr(transparent)]
pub struct GeneratedCommandsMemoryRequirementsInfoNVBuilder<'a> {
    inner: GeneratedCommandsMemoryRequirementsInfoNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for GeneratedCommandsMemoryRequirementsInfoNVBuilder<'a> {
    type Target = GeneratedCommandsMemoryRequirementsInfoNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for GeneratedCommandsMemoryRequirementsInfoNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> GeneratedCommandsMemoryRequirementsInfoNVBuilder<'a> {
    #[inline]
    pub fn pipeline_bind_point(mut self, pipeline_bind_point: PipelineBindPoint) -> Self {
        self.inner.pipeline_bind_point = pipeline_bind_point;
        self
    }
    #[inline]
    pub fn pipeline(mut self, pipeline: Pipeline) -> Self {
        self.inner.pipeline = pipeline;
        self
    }
    #[inline]
    pub fn indirect_commands_layout(
        mut self,
        indirect_commands_layout: IndirectCommandsLayoutNV,
    ) -> Self {
        self.inner.indirect_commands_layout = indirect_commands_layout;
        self
    }
    #[inline]
    pub fn max_sequences_count(mut self, max_sequences_count: u32) -> Self {
        self.inner.max_sequences_count = max_sequences_count;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> GeneratedCommandsMemoryRequirementsInfoNV {
        self.inner
    }
}
// VkPhysicalDeviceFeatures2 plus its typed builder. Also declares the marker
// trait `ExtendsPhysicalDeviceFeatures2` used to gate `push_next` below.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFeatures2.html>"]
pub struct PhysicalDeviceFeatures2 {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub features: PhysicalDeviceFeatures,
}
impl ::std::default::Default for PhysicalDeviceFeatures2 {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            features: PhysicalDeviceFeatures::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceFeatures2 {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FEATURES_2;
}
impl PhysicalDeviceFeatures2 {
    pub fn builder<'a>() -> PhysicalDeviceFeatures2Builder<'a> {
        PhysicalDeviceFeatures2Builder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDeviceFeatures2Builder<'a> {
    inner: PhysicalDeviceFeatures2,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// This struct (and its builder) may be chained onto VkDeviceCreateInfo.
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFeatures2Builder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFeatures2 {}
// Marker for extension structs that are valid members of this struct's p_next chain.
pub unsafe trait ExtendsPhysicalDeviceFeatures2 {}
impl<'a> ::std::ops::Deref for PhysicalDeviceFeatures2Builder<'a> {
    type Target = PhysicalDeviceFeatures2;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceFeatures2Builder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceFeatures2Builder<'a> {
    #[inline]
    pub fn features(mut self, features: PhysicalDeviceFeatures) -> Self {
        self.inner.features = features;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsPhysicalDeviceFeatures2>(mut self, next: &'a mut T) -> Self {
        unsafe {
            // Splice the whole chain headed by `next` in front of the current one:
            // ptr_chain_iter (defined elsewhere in this file; presumably walks a
            // p_next chain — confirm there) yields `next`'s chain, `.last()` is its
            // tail, which is pointed at the old head before `next` becomes the head.
            let next_ptr = <*mut T>::cast(next);
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceFeatures2 {
        self.inner
    }
}
// VkPhysicalDeviceProperties2 plus its typed builder and the marker trait
// `ExtendsPhysicalDeviceProperties2` gating which structs may join its p_next chain.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceProperties2.html>"]
pub struct PhysicalDeviceProperties2 {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub properties: PhysicalDeviceProperties,
}
impl ::std::default::Default for PhysicalDeviceProperties2 {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            properties: PhysicalDeviceProperties::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceProperties2 {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PROPERTIES_2;
}
impl PhysicalDeviceProperties2 {
    pub fn builder<'a>() -> PhysicalDeviceProperties2Builder<'a> {
        PhysicalDeviceProperties2Builder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDeviceProperties2Builder<'a> {
    inner: PhysicalDeviceProperties2,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker for extension structs that are valid members of this struct's p_next chain.
pub unsafe trait ExtendsPhysicalDeviceProperties2 {}
impl<'a> ::std::ops::Deref for PhysicalDeviceProperties2Builder<'a> {
    type Target = PhysicalDeviceProperties2;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceProperties2Builder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceProperties2Builder<'a> {
    #[inline]
    pub fn properties(mut self, properties: PhysicalDeviceProperties) -> Self {
        self.inner.properties = properties;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsPhysicalDeviceProperties2>(mut self, next: &'a mut T) -> Self {
        unsafe {
            // Splice `next`'s entire chain between this struct and its current
            // chain head (tail of `next`'s chain -> old head; `next` -> new head).
            let next_ptr = <*mut T>::cast(next);
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceProperties2 {
        self.inner
    }
}
// VkFormatProperties2 plus its typed builder and the `ExtendsFormatProperties2`
// marker trait gating its p_next chain.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFormatProperties2.html>"]
pub struct FormatProperties2 {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub format_properties: FormatProperties,
}
impl ::std::default::Default for FormatProperties2 {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            format_properties: FormatProperties::default(),
        }
    }
}
unsafe impl TaggedStructure for FormatProperties2 {
    const STRUCTURE_TYPE: StructureType = StructureType::FORMAT_PROPERTIES_2;
}
impl FormatProperties2 {
    pub fn builder<'a>() -> FormatProperties2Builder<'a> {
        FormatProperties2Builder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct FormatProperties2Builder<'a> {
    inner: FormatProperties2,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker for extension structs that are valid members of this struct's p_next chain.
pub unsafe trait ExtendsFormatProperties2 {}
impl<'a> ::std::ops::Deref for FormatProperties2Builder<'a> {
    type Target = FormatProperties2;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for FormatProperties2Builder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> FormatProperties2Builder<'a> {
    #[inline]
    pub fn format_properties(mut self, format_properties: FormatProperties) -> Self {
        self.inner.format_properties = format_properties;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsFormatProperties2>(mut self, next: &'a mut T) -> Self {
        unsafe {
            // Splice `next`'s entire chain between this struct and its current
            // chain head (tail of `next`'s chain -> old head; `next` -> new head).
            let next_ptr = <*mut T>::cast(next);
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> FormatProperties2 {
        self.inner
    }
}
// VkImageFormatProperties2 plus its typed builder and the
// `ExtendsImageFormatProperties2` marker trait gating its p_next chain.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageFormatProperties2.html>"]
pub struct ImageFormatProperties2 {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub image_format_properties: ImageFormatProperties,
}
impl ::std::default::Default for ImageFormatProperties2 {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            image_format_properties: ImageFormatProperties::default(),
        }
    }
}
unsafe impl TaggedStructure for ImageFormatProperties2 {
    const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_FORMAT_PROPERTIES_2;
}
impl ImageFormatProperties2 {
    pub fn builder<'a>() -> ImageFormatProperties2Builder<'a> {
        ImageFormatProperties2Builder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct ImageFormatProperties2Builder<'a> {
    inner: ImageFormatProperties2,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker for extension structs that are valid members of this struct's p_next chain.
pub unsafe trait ExtendsImageFormatProperties2 {}
impl<'a> ::std::ops::Deref for ImageFormatProperties2Builder<'a> {
    type Target = ImageFormatProperties2;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ImageFormatProperties2Builder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> ImageFormatProperties2Builder<'a> {
    #[inline]
    pub fn image_format_properties(
        mut self,
        image_format_properties: ImageFormatProperties,
    ) -> Self {
        self.inner.image_format_properties = image_format_properties;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsImageFormatProperties2>(mut self, next: &'a mut T) -> Self {
        unsafe {
            // Splice `next`'s entire chain between this struct and its current
            // chain head (tail of `next`'s chain -> old head; `next` -> new head).
            let next_ptr = <*mut T>::cast(next);
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ImageFormatProperties2 {
        self.inner
    }
}
// VkPhysicalDeviceImageFormatInfo2 (an input/query struct, hence the *const
// p_next) plus its typed builder and the `ExtendsPhysicalDeviceImageFormatInfo2`
// marker trait gating its p_next chain.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceImageFormatInfo2.html>"]
pub struct PhysicalDeviceImageFormatInfo2 {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub format: Format,
    pub ty: ImageType,
    pub tiling: ImageTiling,
    pub usage: ImageUsageFlags,
    pub flags: ImageCreateFlags,
}
impl ::std::default::Default for PhysicalDeviceImageFormatInfo2 {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            format: Format::default(),
            ty: ImageType::default(),
            tiling: ImageTiling::default(),
            usage: ImageUsageFlags::default(),
            flags: ImageCreateFlags::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceImageFormatInfo2 {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
}
impl PhysicalDeviceImageFormatInfo2 {
    pub fn builder<'a>() -> PhysicalDeviceImageFormatInfo2Builder<'a> {
        PhysicalDeviceImageFormatInfo2Builder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDeviceImageFormatInfo2Builder<'a> {
    inner: PhysicalDeviceImageFormatInfo2,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker for extension structs that are valid members of this struct's p_next chain.
pub unsafe trait ExtendsPhysicalDeviceImageFormatInfo2 {}
impl<'a> ::std::ops::Deref for PhysicalDeviceImageFormatInfo2Builder<'a> {
    type Target = PhysicalDeviceImageFormatInfo2;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceImageFormatInfo2Builder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceImageFormatInfo2Builder<'a> {
    #[inline]
    pub fn format(mut self, format: Format) -> Self {
        self.inner.format = format;
        self
    }
    #[inline]
    pub fn ty(mut self, ty: ImageType) -> Self {
        self.inner.ty = ty;
        self
    }
    #[inline]
    pub fn tiling(mut self, tiling: ImageTiling) -> Self {
        self.inner.tiling = tiling;
        self
    }
    #[inline]
    pub fn usage(mut self, usage: ImageUsageFlags) -> Self {
        self.inner.usage = usage;
        self
    }
    #[inline]
    pub fn flags(mut self, flags: ImageCreateFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsPhysicalDeviceImageFormatInfo2>(mut self, next: &'a mut T) -> Self {
        unsafe {
            // Same chain splice as the *mut variants, but cast through *const
            // because this struct's p_next is a *const pointer.
            let next_ptr = <*const T>::cast(next);
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceImageFormatInfo2 {
        self.inner
    }
}
// VkQueueFamilyProperties2 plus its typed builder and the
// `ExtendsQueueFamilyProperties2` marker trait gating its p_next chain.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueueFamilyProperties2.html>"]
pub struct QueueFamilyProperties2 {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub queue_family_properties: QueueFamilyProperties,
}
impl ::std::default::Default for QueueFamilyProperties2 {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            queue_family_properties: QueueFamilyProperties::default(),
        }
    }
}
unsafe impl TaggedStructure for QueueFamilyProperties2 {
    const STRUCTURE_TYPE: StructureType = StructureType::QUEUE_FAMILY_PROPERTIES_2;
}
impl QueueFamilyProperties2 {
    pub fn builder<'a>() -> QueueFamilyProperties2Builder<'a> {
        QueueFamilyProperties2Builder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct QueueFamilyProperties2Builder<'a> {
    inner: QueueFamilyProperties2,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker for extension structs that are valid members of this struct's p_next chain.
pub unsafe trait ExtendsQueueFamilyProperties2 {}
impl<'a> ::std::ops::Deref for QueueFamilyProperties2Builder<'a> {
    type Target = QueueFamilyProperties2;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for QueueFamilyProperties2Builder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> QueueFamilyProperties2Builder<'a> {
    #[inline]
    pub fn queue_family_properties(
        mut self,
        queue_family_properties: QueueFamilyProperties,
    ) -> Self {
        self.inner.queue_family_properties = queue_family_properties;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsQueueFamilyProperties2>(mut self, next: &'a mut T) -> Self {
        unsafe {
            // Splice `next`'s entire chain between this struct and its current
            // chain head (tail of `next`'s chain -> old head; `next` -> new head).
            let next_ptr = <*mut T>::cast(next);
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> QueueFamilyProperties2 {
        self.inner
    }
}
// VkPhysicalDeviceMemoryProperties2 plus its typed builder and the
// `ExtendsPhysicalDeviceMemoryProperties2` marker trait gating its p_next chain.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMemoryProperties2.html>"]
pub struct PhysicalDeviceMemoryProperties2 {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub memory_properties: PhysicalDeviceMemoryProperties,
}
impl ::std::default::Default for PhysicalDeviceMemoryProperties2 {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            memory_properties: PhysicalDeviceMemoryProperties::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceMemoryProperties2 {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
}
impl PhysicalDeviceMemoryProperties2 {
    pub fn builder<'a>() -> PhysicalDeviceMemoryProperties2Builder<'a> {
        PhysicalDeviceMemoryProperties2Builder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDeviceMemoryProperties2Builder<'a> {
    inner: PhysicalDeviceMemoryProperties2,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker for extension structs that are valid members of this struct's p_next chain.
pub unsafe trait ExtendsPhysicalDeviceMemoryProperties2 {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMemoryProperties2Builder<'a> {
    type Target = PhysicalDeviceMemoryProperties2;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMemoryProperties2Builder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceMemoryProperties2Builder<'a> {
    #[inline]
    pub fn memory_properties(mut self, memory_properties: PhysicalDeviceMemoryProperties) -> Self {
        self.inner.memory_properties = memory_properties;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsPhysicalDeviceMemoryProperties2>(mut self, next: &'a mut T) -> Self {
        unsafe {
            // Splice `next`'s entire chain between this struct and its current
            // chain head (tail of `next`'s chain -> old head; `next` -> new head).
            let next_ptr = <*mut T>::cast(next);
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceMemoryProperties2 {
        self.inner
    }
}
// VkSparseImageFormatProperties2 plus its typed builder. No `Extends*` marker
// trait is declared here, so this builder exposes no `push_next` method.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSparseImageFormatProperties2.html>"]
pub struct SparseImageFormatProperties2 {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub properties: SparseImageFormatProperties,
}
impl ::std::default::Default for SparseImageFormatProperties2 {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            properties: SparseImageFormatProperties::default(),
        }
    }
}
unsafe impl TaggedStructure for SparseImageFormatProperties2 {
    const STRUCTURE_TYPE: StructureType = StructureType::SPARSE_IMAGE_FORMAT_PROPERTIES_2;
}
impl SparseImageFormatProperties2 {
    pub fn builder<'a>() -> SparseImageFormatProperties2Builder<'a> {
        SparseImageFormatProperties2Builder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct SparseImageFormatProperties2Builder<'a> {
    inner: SparseImageFormatProperties2,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SparseImageFormatProperties2Builder<'a> {
    type Target = SparseImageFormatProperties2;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for SparseImageFormatProperties2Builder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> SparseImageFormatProperties2Builder<'a> {
    #[inline]
    pub fn properties(mut self, properties: SparseImageFormatProperties) -> Self {
        self.inner.properties = properties;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> SparseImageFormatProperties2 {
        self.inner
    }
}
// VkPhysicalDeviceSparseImageFormatInfo2 (input struct, *const p_next) plus its
// typed builder. No `Extends*` trait here, hence no `push_next` method.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceSparseImageFormatInfo2.html>"]
pub struct PhysicalDeviceSparseImageFormatInfo2 {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub format: Format,
    pub ty: ImageType,
    pub samples: SampleCountFlags,
    pub usage: ImageUsageFlags,
    pub tiling: ImageTiling,
}
impl ::std::default::Default for PhysicalDeviceSparseImageFormatInfo2 {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            format: Format::default(),
            ty: ImageType::default(),
            samples: SampleCountFlags::default(),
            usage: ImageUsageFlags::default(),
            tiling: ImageTiling::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceSparseImageFormatInfo2 {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2;
}
impl PhysicalDeviceSparseImageFormatInfo2 {
    pub fn builder<'a>() -> PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
        PhysicalDeviceSparseImageFormatInfo2Builder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
    inner: PhysicalDeviceSparseImageFormatInfo2,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
    type Target = PhysicalDeviceSparseImageFormatInfo2;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceSparseImageFormatInfo2Builder<'a> {
    #[inline]
    pub fn format(mut self, format: Format) -> Self {
        self.inner.format = format;
        self
    }
    #[inline]
    pub fn ty(mut self, ty: ImageType) -> Self {
        self.inner.ty = ty;
        self
    }
    #[inline]
    pub fn samples(mut self, samples: SampleCountFlags) -> Self {
        self.inner.samples = samples;
        self
    }
    #[inline]
    pub fn usage(mut self, usage: ImageUsageFlags) -> Self {
        self.inner.usage = usage;
        self
    }
    #[inline]
    pub fn tiling(mut self, tiling: ImageTiling) -> Self {
        self.inner.tiling = tiling;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceSparseImageFormatInfo2 {
        self.inner
    }
}
// VkPhysicalDevicePushDescriptorPropertiesKHR plus its typed builder. This
// struct is itself a chain member: it extends VkPhysicalDeviceProperties2.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePushDescriptorPropertiesKHR.html>"]
pub struct PhysicalDevicePushDescriptorPropertiesKHR {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub max_push_descriptors: u32,
}
impl ::std::default::Default for PhysicalDevicePushDescriptorPropertiesKHR {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            max_push_descriptors: u32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDevicePushDescriptorPropertiesKHR {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR;
}
impl PhysicalDevicePushDescriptorPropertiesKHR {
    pub fn builder<'a>() -> PhysicalDevicePushDescriptorPropertiesKHRBuilder<'a> {
        PhysicalDevicePushDescriptorPropertiesKHRBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDevicePushDescriptorPropertiesKHRBuilder<'a> {
    inner: PhysicalDevicePushDescriptorPropertiesKHR,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Both the struct and its builder may be pushed onto a
// PhysicalDeviceProperties2 p_next chain.
unsafe impl ExtendsPhysicalDeviceProperties2
    for PhysicalDevicePushDescriptorPropertiesKHRBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePushDescriptorPropertiesKHR {}
impl<'a> ::std::ops::Deref for PhysicalDevicePushDescriptorPropertiesKHRBuilder<'a> {
    type Target = PhysicalDevicePushDescriptorPropertiesKHR;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDevicePushDescriptorPropertiesKHRBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDevicePushDescriptorPropertiesKHRBuilder<'a> {
    #[inline]
    pub fn max_push_descriptors(mut self, max_push_descriptors: u32) -> Self {
        self.inner.max_push_descriptors = max_push_descriptors;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDevicePushDescriptorPropertiesKHR {
        self.inner
    }
}
// VkConformanceVersion: four plain version bytes, with a matching builder.
// #[repr(C)]: layout must mirror the Vulkan C struct.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkConformanceVersion.html>"]
pub struct ConformanceVersion {
    pub major: u8,
    pub minor: u8,
    pub subminor: u8,
    pub patch: u8,
}
impl ConformanceVersion {
    /// Starts a builder over a zero-initialized `ConformanceVersion`.
    pub fn builder<'a>() -> ConformanceVersionBuilder<'a> {
        ConformanceVersionBuilder {
            inner: ConformanceVersion::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
/// Layout-transparent builder; a reference to it can stand in for a reference
/// to the wrapped `ConformanceVersion` (see the `Deref` impl below).
#[repr(transparent)]
pub struct ConformanceVersionBuilder<'a> {
    inner: ConformanceVersion,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ConformanceVersionBuilder<'a> {
    type Target = ConformanceVersion;
    fn deref(&self) -> &ConformanceVersion {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ConformanceVersionBuilder<'a> {
    fn deref_mut(&mut self) -> &mut ConformanceVersion {
        &mut self.inner
    }
}
impl<'a> ConformanceVersionBuilder<'a> {
    /// Sets the major version byte.
    #[inline]
    pub fn major(mut self, value: u8) -> Self {
        self.inner.major = value;
        self
    }
    /// Sets the minor version byte.
    #[inline]
    pub fn minor(mut self, value: u8) -> Self {
        self.inner.minor = value;
        self
    }
    /// Sets the subminor version byte.
    #[inline]
    pub fn subminor(mut self, value: u8) -> Self {
        self.inner.subminor = value;
        self
    }
    /// Sets the patch version byte.
    #[inline]
    pub fn patch(mut self, value: u8) -> Self {
        self.inner.patch = value;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ConformanceVersion {
        self.inner
    }
}
// VkPhysicalDeviceDriverProperties: driver identity reported by the
// implementation; chains onto VkPhysicalDeviceProperties2.
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDriverProperties.html>"]
pub struct PhysicalDeviceDriverProperties {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub driver_id: DriverId,
    pub driver_name: [c_char; MAX_DRIVER_NAME_SIZE],
    pub driver_info: [c_char; MAX_DRIVER_INFO_SIZE],
    pub conformance_version: ConformanceVersion,
}
// Hand-rolled Debug (instead of derive) so the c_char arrays print as C strings.
#[cfg(feature = "debug")]
impl fmt::Debug for PhysicalDeviceDriverProperties {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("PhysicalDeviceDriverProperties")
            .field("s_type", &self.s_type)
            .field("p_next", &self.p_next)
            .field("driver_id", &self.driver_id)
            // NOTE(review): CStr::from_ptr requires a NUL terminator within the
            // array; Default zeroes the buffers, and presumably the driver fills
            // them NUL-terminated per the Vulkan spec — a non-terminated value
            // would read past the array. Inherited from the bindings generator.
            .field("driver_name", &unsafe {
                ::std::ffi::CStr::from_ptr(self.driver_name.as_ptr())
            })
            .field("driver_info", &unsafe {
                ::std::ffi::CStr::from_ptr(self.driver_info.as_ptr())
            })
            .field("conformance_version", &self.conformance_version)
            .finish()
    }
}
impl ::std::default::Default for PhysicalDeviceDriverProperties {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            driver_id: DriverId::default(),
            // Zeroed c_char arrays double as empty NUL-terminated strings.
            driver_name: unsafe { ::std::mem::zeroed() },
            driver_info: unsafe { ::std::mem::zeroed() },
            conformance_version: ConformanceVersion::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceDriverProperties {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DRIVER_PROPERTIES;
}
impl PhysicalDeviceDriverProperties {
    pub fn builder<'a>() -> PhysicalDeviceDriverPropertiesBuilder<'a> {
        PhysicalDeviceDriverPropertiesBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDeviceDriverPropertiesBuilder<'a> {
    inner: PhysicalDeviceDriverProperties,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Both forms may be pushed onto a PhysicalDeviceProperties2 p_next chain.
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDriverPropertiesBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDriverProperties {}
impl<'a> ::std::ops::Deref for PhysicalDeviceDriverPropertiesBuilder<'a> {
    type Target = PhysicalDeviceDriverProperties;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceDriverPropertiesBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
+impl<'a> PhysicalDeviceDriverPropertiesBuilder<'a> {
+ #[inline]
+ pub fn driver_id(mut self, driver_id: DriverId) -> Self {
+ self.inner.driver_id = driver_id;
+ self
+ }
+ #[inline]
+ pub fn driver_name(mut self, driver_name: [c_char; MAX_DRIVER_NAME_SIZE]) -> Self {
+ self.inner.driver_name = driver_name;
+ self
+ }
+ #[inline]
+ pub fn driver_info(mut self, driver_info: [c_char; MAX_DRIVER_INFO_SIZE]) -> Self {
+ self.inner.driver_info = driver_info;
+ self
+ }
+ #[inline]
+ pub fn conformance_version(mut self, conformance_version: ConformanceVersion) -> Self {
+ self.inner.conformance_version = conformance_version;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceDriverProperties {
+ self.inner
+ }
+}
+// Generated binding for VkPresentRegionsKHR (VK_KHR_incremental_present): per-swapchain
+// dirty-region hints attached to a present operation. Mirrors the C layout; do not edit by hand.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPresentRegionsKHR.html>"]
+pub struct PresentRegionsKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ // swapchain_count and p_regions form a (len, ptr) pair; keep them consistent
+ // (the builder's regions() setter assigns both from one slice).
+ pub swapchain_count: u32,
+ pub p_regions: *const PresentRegionKHR,
+}
+impl ::std::default::Default for PresentRegionsKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ swapchain_count: u32::default(),
+ p_regions: ::std::ptr::null(),
+ }
+ }
+}
+// Associates the structure with its VkStructureType tag (used by Default above).
+unsafe impl TaggedStructure for PresentRegionsKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::PRESENT_REGIONS_KHR;
+}
+impl PresentRegionsKHR {
+ pub fn builder<'a>() -> PresentRegionsKHRBuilder<'a> {
+ PresentRegionsKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; 'a keeps the regions slice borrowed for the builder's lifetime.
+#[repr(transparent)]
+pub struct PresentRegionsKHRBuilder<'a> {
+ inner: PresentRegionsKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may be chained into PresentInfoKHR's p_next.
+unsafe impl ExtendsPresentInfoKHR for PresentRegionsKHRBuilder<'_> {}
+unsafe impl ExtendsPresentInfoKHR for PresentRegionsKHR {}
+impl<'a> ::std::ops::Deref for PresentRegionsKHRBuilder<'a> {
+ type Target = PresentRegionsKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PresentRegionsKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PresentRegionsKHRBuilder<'a> {
+ // Sets count and pointer together from one slice, so they can never disagree.
+ #[inline]
+ pub fn regions(mut self, regions: &'a [PresentRegionKHR]) -> Self {
+ self.inner.swapchain_count = regions.len() as _;
+ self.inner.p_regions = regions.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PresentRegionsKHR {
+ self.inner
+ }
+}
+// Generated binding for VkPresentRegionKHR: the rectangle list for a single swapchain.
+// Plain sub-structure — no s_type/p_next, so it is never chained and has no TaggedStructure impl.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPresentRegionKHR.html>"]
+pub struct PresentRegionKHR {
+ // (len, ptr) pair; the builder's rectangles() setter assigns both from one slice.
+ pub rectangle_count: u32,
+ pub p_rectangles: *const RectLayerKHR,
+}
+impl ::std::default::Default for PresentRegionKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ rectangle_count: u32::default(),
+ p_rectangles: ::std::ptr::null(),
+ }
+ }
+}
+impl PresentRegionKHR {
+ pub fn builder<'a>() -> PresentRegionKHRBuilder<'a> {
+ PresentRegionKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; 'a keeps the rectangles slice borrowed for the builder's lifetime.
+#[repr(transparent)]
+pub struct PresentRegionKHRBuilder<'a> {
+ inner: PresentRegionKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PresentRegionKHRBuilder<'a> {
+ type Target = PresentRegionKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PresentRegionKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PresentRegionKHRBuilder<'a> {
+ // Sets count and pointer together from one slice, so they can never disagree.
+ #[inline]
+ pub fn rectangles(mut self, rectangles: &'a [RectLayerKHR]) -> Self {
+ self.inner.rectangle_count = rectangles.len() as _;
+ self.inner.p_rectangles = rectangles.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PresentRegionKHR {
+ self.inner
+ }
+}
+// Generated binding for VkRectLayerKHR: a dirty rectangle (offset + extent) on one array layer.
+// Pure POD with no pointers, so Default can simply be derived.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRectLayerKHR.html>"]
+pub struct RectLayerKHR {
+ pub offset: Offset2D,
+ pub extent: Extent2D,
+ pub layer: u32,
+}
+impl RectLayerKHR {
+ pub fn builder<'a>() -> RectLayerKHRBuilder<'a> {
+ RectLayerKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; the lifetime is unused here (no borrowed fields) but kept for
+// uniformity with every other generated builder.
+#[repr(transparent)]
+pub struct RectLayerKHRBuilder<'a> {
+ inner: RectLayerKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for RectLayerKHRBuilder<'a> {
+ type Target = RectLayerKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for RectLayerKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// By-value setters, one per field; each returns Self to allow chaining.
+impl<'a> RectLayerKHRBuilder<'a> {
+ #[inline]
+ pub fn offset(mut self, offset: Offset2D) -> Self {
+ self.inner.offset = offset;
+ self
+ }
+ #[inline]
+ pub fn extent(mut self, extent: Extent2D) -> Self {
+ self.inner.extent = extent;
+ self
+ }
+ #[inline]
+ pub fn layer(mut self, layer: u32) -> Self {
+ self.inner.layer = layer;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> RectLayerKHR {
+ self.inner
+ }
+}
+// Generated binding for VkPhysicalDeviceVariablePointersFeatures: two Bool32 feature toggles.
+// Mirrors the C layout; do not edit by hand.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceVariablePointersFeatures.html>"]
+pub struct PhysicalDeviceVariablePointersFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub variable_pointers_storage_buffer: Bool32,
+ pub variable_pointers: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceVariablePointersFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ variable_pointers_storage_buffer: Bool32::default(),
+ variable_pointers: Bool32::default(),
+ }
+ }
+}
+// Associates the structure with its VkStructureType tag (used by Default above).
+unsafe impl TaggedStructure for PhysicalDeviceVariablePointersFeatures {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES;
+}
+impl PhysicalDeviceVariablePointersFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceVariablePointersFeaturesBuilder<'a> {
+ PhysicalDeviceVariablePointersFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceVariablePointersFeaturesBuilder<'a> {
+ inner: PhysicalDeviceVariablePointersFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: feature structs may be chained both when querying (PhysicalDeviceFeatures2)
+// and when enabling at device creation (DeviceCreateInfo).
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVariablePointersFeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVariablePointersFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVariablePointersFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVariablePointersFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceVariablePointersFeaturesBuilder<'a> {
+ type Target = PhysicalDeviceVariablePointersFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceVariablePointersFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Setters take Rust bool and convert to the FFI Bool32 via .into().
+impl<'a> PhysicalDeviceVariablePointersFeaturesBuilder<'a> {
+ #[inline]
+ pub fn variable_pointers_storage_buffer(
+ mut self,
+ variable_pointers_storage_buffer: bool,
+ ) -> Self {
+ self.inner.variable_pointers_storage_buffer = variable_pointers_storage_buffer.into();
+ self
+ }
+ #[inline]
+ pub fn variable_pointers(mut self, variable_pointers: bool) -> Self {
+ self.inner.variable_pointers = variable_pointers.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceVariablePointersFeatures {
+ self.inner
+ }
+}
+// Generated binding for VkExternalMemoryProperties: capability flags describing an external
+// memory handle type. Plain POD (no s_type/p_next), embedded in the *Properties structs below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalMemoryProperties.html>"]
+pub struct ExternalMemoryProperties {
+ pub external_memory_features: ExternalMemoryFeatureFlags,
+ pub export_from_imported_handle_types: ExternalMemoryHandleTypeFlags,
+ pub compatible_handle_types: ExternalMemoryHandleTypeFlags,
+}
+impl ExternalMemoryProperties {
+ pub fn builder<'a>() -> ExternalMemoryPropertiesBuilder<'a> {
+ ExternalMemoryPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; lifetime unused (no borrowed fields) but kept for builder uniformity.
+#[repr(transparent)]
+pub struct ExternalMemoryPropertiesBuilder<'a> {
+ inner: ExternalMemoryProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ExternalMemoryPropertiesBuilder<'a> {
+ type Target = ExternalMemoryProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExternalMemoryPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// By-value setters, one per field; each returns Self to allow chaining.
+impl<'a> ExternalMemoryPropertiesBuilder<'a> {
+ #[inline]
+ pub fn external_memory_features(
+ mut self,
+ external_memory_features: ExternalMemoryFeatureFlags,
+ ) -> Self {
+ self.inner.external_memory_features = external_memory_features;
+ self
+ }
+ #[inline]
+ pub fn export_from_imported_handle_types(
+ mut self,
+ export_from_imported_handle_types: ExternalMemoryHandleTypeFlags,
+ ) -> Self {
+ self.inner.export_from_imported_handle_types = export_from_imported_handle_types;
+ self
+ }
+ #[inline]
+ pub fn compatible_handle_types(
+ mut self,
+ compatible_handle_types: ExternalMemoryHandleTypeFlags,
+ ) -> Self {
+ self.inner.compatible_handle_types = compatible_handle_types;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExternalMemoryProperties {
+ self.inner
+ }
+}
+// Generated binding for VkPhysicalDeviceExternalImageFormatInfo: input struct selecting which
+// external handle type to query image-format support for. Mirrors the C layout; do not edit by hand.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceExternalImageFormatInfo.html>"]
+pub struct PhysicalDeviceExternalImageFormatInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub handle_type: ExternalMemoryHandleTypeFlags,
+}
+impl ::std::default::Default for PhysicalDeviceExternalImageFormatInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ handle_type: ExternalMemoryHandleTypeFlags::default(),
+ }
+ }
+}
+// Associates the structure with its VkStructureType tag (used by Default above).
+unsafe impl TaggedStructure for PhysicalDeviceExternalImageFormatInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
+}
+impl PhysicalDeviceExternalImageFormatInfo {
+ pub fn builder<'a>() -> PhysicalDeviceExternalImageFormatInfoBuilder<'a> {
+ PhysicalDeviceExternalImageFormatInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceExternalImageFormatInfoBuilder<'a> {
+ inner: PhysicalDeviceExternalImageFormatInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may be chained into PhysicalDeviceImageFormatInfo2's p_next.
+unsafe impl ExtendsPhysicalDeviceImageFormatInfo2
+ for PhysicalDeviceExternalImageFormatInfoBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for PhysicalDeviceExternalImageFormatInfo {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceExternalImageFormatInfoBuilder<'a> {
+ type Target = PhysicalDeviceExternalImageFormatInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceExternalImageFormatInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceExternalImageFormatInfoBuilder<'a> {
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceExternalImageFormatInfo {
+ self.inner
+ }
+}
+// Generated binding for VkExternalImageFormatProperties: output struct (note *mut p_next)
+// carrying the external-memory capabilities for a queried image format. Do not edit by hand.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalImageFormatProperties.html>"]
+pub struct ExternalImageFormatProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub external_memory_properties: ExternalMemoryProperties,
+}
+impl ::std::default::Default for ExternalImageFormatProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ external_memory_properties: ExternalMemoryProperties::default(),
+ }
+ }
+}
+// Associates the structure with its VkStructureType tag (used by Default above).
+unsafe impl TaggedStructure for ExternalImageFormatProperties {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_IMAGE_FORMAT_PROPERTIES;
+}
+impl ExternalImageFormatProperties {
+ pub fn builder<'a>() -> ExternalImageFormatPropertiesBuilder<'a> {
+ ExternalImageFormatPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExternalImageFormatPropertiesBuilder<'a> {
+ inner: ExternalImageFormatProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may be chained into ImageFormatProperties2's p_next.
+unsafe impl ExtendsImageFormatProperties2 for ExternalImageFormatPropertiesBuilder<'_> {}
+unsafe impl ExtendsImageFormatProperties2 for ExternalImageFormatProperties {}
+impl<'a> ::std::ops::Deref for ExternalImageFormatPropertiesBuilder<'a> {
+ type Target = ExternalImageFormatProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExternalImageFormatPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExternalImageFormatPropertiesBuilder<'a> {
+ #[inline]
+ pub fn external_memory_properties(
+ mut self,
+ external_memory_properties: ExternalMemoryProperties,
+ ) -> Self {
+ self.inner.external_memory_properties = external_memory_properties;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExternalImageFormatProperties {
+ self.inner
+ }
+}
+// Generated binding for VkPhysicalDeviceExternalBufferInfo: input struct describing a buffer
+// (flags/usage) plus the external handle type to query. Used directly as a call parameter,
+// hence no Extends* marker impls. Do not edit by hand.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceExternalBufferInfo.html>"]
+pub struct PhysicalDeviceExternalBufferInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: BufferCreateFlags,
+ pub usage: BufferUsageFlags,
+ pub handle_type: ExternalMemoryHandleTypeFlags,
+}
+impl ::std::default::Default for PhysicalDeviceExternalBufferInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: BufferCreateFlags::default(),
+ usage: BufferUsageFlags::default(),
+ handle_type: ExternalMemoryHandleTypeFlags::default(),
+ }
+ }
+}
+// Associates the structure with its VkStructureType tag (used by Default above).
+unsafe impl TaggedStructure for PhysicalDeviceExternalBufferInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO;
+}
+impl PhysicalDeviceExternalBufferInfo {
+ pub fn builder<'a>() -> PhysicalDeviceExternalBufferInfoBuilder<'a> {
+ PhysicalDeviceExternalBufferInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceExternalBufferInfoBuilder<'a> {
+ inner: PhysicalDeviceExternalBufferInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PhysicalDeviceExternalBufferInfoBuilder<'a> {
+ type Target = PhysicalDeviceExternalBufferInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceExternalBufferInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// By-value setters, one per field; each returns Self to allow chaining.
+impl<'a> PhysicalDeviceExternalBufferInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: BufferCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn usage(mut self, usage: BufferUsageFlags) -> Self {
+ self.inner.usage = usage;
+ self
+ }
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceExternalBufferInfo {
+ self.inner
+ }
+}
+// Generated binding for VkExternalBufferProperties: output struct (note *mut p_next) carrying
+// the external-memory capabilities for a queried buffer configuration. Do not edit by hand.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalBufferProperties.html>"]
+pub struct ExternalBufferProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub external_memory_properties: ExternalMemoryProperties,
+}
+impl ::std::default::Default for ExternalBufferProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ external_memory_properties: ExternalMemoryProperties::default(),
+ }
+ }
+}
+// Associates the structure with its VkStructureType tag (used by Default above).
+unsafe impl TaggedStructure for ExternalBufferProperties {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_BUFFER_PROPERTIES;
+}
+impl ExternalBufferProperties {
+ pub fn builder<'a>() -> ExternalBufferPropertiesBuilder<'a> {
+ ExternalBufferPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExternalBufferPropertiesBuilder<'a> {
+ inner: ExternalBufferProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ExternalBufferPropertiesBuilder<'a> {
+ type Target = ExternalBufferProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExternalBufferPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExternalBufferPropertiesBuilder<'a> {
+ #[inline]
+ pub fn external_memory_properties(
+ mut self,
+ external_memory_properties: ExternalMemoryProperties,
+ ) -> Self {
+ self.inner.external_memory_properties = external_memory_properties;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExternalBufferProperties {
+ self.inner
+ }
+}
+// Generated binding for VkPhysicalDeviceIDProperties: device/driver UUIDs and the Windows LUID
+// used to match devices across APIs. Output struct filled by the driver. Do not edit by hand.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceIDProperties.html>"]
+pub struct PhysicalDeviceIDProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub device_uuid: [u8; UUID_SIZE],
+ pub driver_uuid: [u8; UUID_SIZE],
+ pub device_luid: [u8; LUID_SIZE],
+ pub device_node_mask: u32,
+ // Bool32 flag; device_luid is only meaningful when this is non-zero.
+ pub device_luid_valid: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceIDProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ // Zeroing plain u8 arrays is well-defined; yields all-zero IDs.
+ device_uuid: unsafe { ::std::mem::zeroed() },
+ driver_uuid: unsafe { ::std::mem::zeroed() },
+ device_luid: unsafe { ::std::mem::zeroed() },
+ device_node_mask: u32::default(),
+ device_luid_valid: Bool32::default(),
+ }
+ }
+}
+// Associates the structure with its VkStructureType tag (used by Default above).
+unsafe impl TaggedStructure for PhysicalDeviceIDProperties {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_ID_PROPERTIES;
+}
+impl PhysicalDeviceIDProperties {
+ pub fn builder<'a>() -> PhysicalDeviceIDPropertiesBuilder<'a> {
+ PhysicalDeviceIDPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceIDPropertiesBuilder<'a> {
+ inner: PhysicalDeviceIDProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may be chained into PhysicalDeviceProperties2's p_next.
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceIDPropertiesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceIDProperties {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceIDPropertiesBuilder<'a> {
+ type Target = PhysicalDeviceIDProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceIDPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// By-value setters, one per field; device_luid_valid takes a Rust bool and converts via .into().
+impl<'a> PhysicalDeviceIDPropertiesBuilder<'a> {
+ #[inline]
+ pub fn device_uuid(mut self, device_uuid: [u8; UUID_SIZE]) -> Self {
+ self.inner.device_uuid = device_uuid;
+ self
+ }
+ #[inline]
+ pub fn driver_uuid(mut self, driver_uuid: [u8; UUID_SIZE]) -> Self {
+ self.inner.driver_uuid = driver_uuid;
+ self
+ }
+ #[inline]
+ pub fn device_luid(mut self, device_luid: [u8; LUID_SIZE]) -> Self {
+ self.inner.device_luid = device_luid;
+ self
+ }
+ #[inline]
+ pub fn device_node_mask(mut self, device_node_mask: u32) -> Self {
+ self.inner.device_node_mask = device_node_mask;
+ self
+ }
+ #[inline]
+ pub fn device_luid_valid(mut self, device_luid_valid: bool) -> Self {
+ self.inner.device_luid_valid = device_luid_valid.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceIDProperties {
+ self.inner
+ }
+}
+// Generated binding for VkExternalMemoryImageCreateInfo: declares which external handle types
+// an image may be exported to / imported from. Mirrors the C layout; do not edit by hand.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalMemoryImageCreateInfo.html>"]
+pub struct ExternalMemoryImageCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub handle_types: ExternalMemoryHandleTypeFlags,
+}
+impl ::std::default::Default for ExternalMemoryImageCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ handle_types: ExternalMemoryHandleTypeFlags::default(),
+ }
+ }
+}
+// Associates the structure with its VkStructureType tag (used by Default above).
+unsafe impl TaggedStructure for ExternalMemoryImageCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
+}
+impl ExternalMemoryImageCreateInfo {
+ pub fn builder<'a>() -> ExternalMemoryImageCreateInfoBuilder<'a> {
+ ExternalMemoryImageCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExternalMemoryImageCreateInfoBuilder<'a> {
+ inner: ExternalMemoryImageCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may be chained into ImageCreateInfo's p_next.
+unsafe impl ExtendsImageCreateInfo for ExternalMemoryImageCreateInfoBuilder<'_> {}
+unsafe impl ExtendsImageCreateInfo for ExternalMemoryImageCreateInfo {}
+impl<'a> ::std::ops::Deref for ExternalMemoryImageCreateInfoBuilder<'a> {
+ type Target = ExternalMemoryImageCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExternalMemoryImageCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExternalMemoryImageCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn handle_types(mut self, handle_types: ExternalMemoryHandleTypeFlags) -> Self {
+ self.inner.handle_types = handle_types;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExternalMemoryImageCreateInfo {
+ self.inner
+ }
+}
+// Generated binding for VkExternalMemoryBufferCreateInfo: buffer-side counterpart of
+// ExternalMemoryImageCreateInfo. Mirrors the C layout; do not edit by hand.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalMemoryBufferCreateInfo.html>"]
+pub struct ExternalMemoryBufferCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub handle_types: ExternalMemoryHandleTypeFlags,
+}
+impl ::std::default::Default for ExternalMemoryBufferCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ handle_types: ExternalMemoryHandleTypeFlags::default(),
+ }
+ }
+}
+// Associates the structure with its VkStructureType tag (used by Default above).
+unsafe impl TaggedStructure for ExternalMemoryBufferCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_MEMORY_BUFFER_CREATE_INFO;
+}
+impl ExternalMemoryBufferCreateInfo {
+ pub fn builder<'a>() -> ExternalMemoryBufferCreateInfoBuilder<'a> {
+ ExternalMemoryBufferCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExternalMemoryBufferCreateInfoBuilder<'a> {
+ inner: ExternalMemoryBufferCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may be chained into BufferCreateInfo's p_next.
+unsafe impl ExtendsBufferCreateInfo for ExternalMemoryBufferCreateInfoBuilder<'_> {}
+unsafe impl ExtendsBufferCreateInfo for ExternalMemoryBufferCreateInfo {}
+impl<'a> ::std::ops::Deref for ExternalMemoryBufferCreateInfoBuilder<'a> {
+ type Target = ExternalMemoryBufferCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExternalMemoryBufferCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExternalMemoryBufferCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn handle_types(mut self, handle_types: ExternalMemoryHandleTypeFlags) -> Self {
+ self.inner.handle_types = handle_types;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExternalMemoryBufferCreateInfo {
+ self.inner
+ }
+}
+// Generated binding for VkExportMemoryAllocateInfo: declares which handle types a memory
+// allocation may later be exported as. Mirrors the C layout; do not edit by hand.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportMemoryAllocateInfo.html>"]
+pub struct ExportMemoryAllocateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub handle_types: ExternalMemoryHandleTypeFlags,
+}
+impl ::std::default::Default for ExportMemoryAllocateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ handle_types: ExternalMemoryHandleTypeFlags::default(),
+ }
+ }
+}
+// Associates the structure with its VkStructureType tag (used by Default above).
+unsafe impl TaggedStructure for ExportMemoryAllocateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_MEMORY_ALLOCATE_INFO;
+}
+impl ExportMemoryAllocateInfo {
+ pub fn builder<'a>() -> ExportMemoryAllocateInfoBuilder<'a> {
+ ExportMemoryAllocateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExportMemoryAllocateInfoBuilder<'a> {
+ inner: ExportMemoryAllocateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may be chained into MemoryAllocateInfo's p_next.
+unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryAllocateInfoBuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryAllocateInfo {}
+impl<'a> ::std::ops::Deref for ExportMemoryAllocateInfoBuilder<'a> {
+ type Target = ExportMemoryAllocateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExportMemoryAllocateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExportMemoryAllocateInfoBuilder<'a> {
+ #[inline]
+ pub fn handle_types(mut self, handle_types: ExternalMemoryHandleTypeFlags) -> Self {
+ self.inner.handle_types = handle_types;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExportMemoryAllocateInfo {
+ self.inner
+ }
+}
+// Generated binding for VkImportMemoryWin32HandleInfoKHR (VK_KHR_external_memory_win32):
+// imports an existing Win32 handle (or named object) as device memory. Do not edit by hand.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportMemoryWin32HandleInfoKHR.html>"]
+pub struct ImportMemoryWin32HandleInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub handle_type: ExternalMemoryHandleTypeFlags,
+ pub handle: HANDLE,
+ // NOTE(review): name appears to be the alternative to handle (named object); per the
+ // extension, at most one of the two should be set — confirm against the spec when using.
+ pub name: LPCWSTR,
+}
+impl ::std::default::Default for ImportMemoryWin32HandleInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ handle_type: ExternalMemoryHandleTypeFlags::default(),
+ // zeroed() yields null for these Win32 pointer typedefs — sound, but presumably means
+ // "unset"; callers must fill one of handle/name before use.
+ handle: unsafe { ::std::mem::zeroed() },
+ name: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+// Associates the structure with its VkStructureType tag (used by Default above).
+unsafe impl TaggedStructure for ImportMemoryWin32HandleInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR;
+}
+impl ImportMemoryWin32HandleInfoKHR {
+ pub fn builder<'a>() -> ImportMemoryWin32HandleInfoKHRBuilder<'a> {
+ ImportMemoryWin32HandleInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImportMemoryWin32HandleInfoKHRBuilder<'a> {
+ inner: ImportMemoryWin32HandleInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may be chained into MemoryAllocateInfo's p_next.
+unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryWin32HandleInfoKHRBuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryWin32HandleInfoKHR {}
+impl<'a> ::std::ops::Deref for ImportMemoryWin32HandleInfoKHRBuilder<'a> {
+ type Target = ImportMemoryWin32HandleInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImportMemoryWin32HandleInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// By-value setters, one per field; each returns Self to allow chaining.
+impl<'a> ImportMemoryWin32HandleInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[inline]
+ pub fn handle(mut self, handle: HANDLE) -> Self {
+ self.inner.handle = handle;
+ self
+ }
+ #[inline]
+ pub fn name(mut self, name: LPCWSTR) -> Self {
+ self.inner.name = name;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImportMemoryWin32HandleInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportMemoryWin32HandleInfoKHR.html>"]
+// C-layout mirror of the Vulkan struct linked above (machine-generated bindings).
+pub struct ExportMemoryWin32HandleInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub p_attributes: *const SECURITY_ATTRIBUTES,
+    pub dw_access: DWORD,
+    pub name: LPCWSTR,
+}
+impl ::std::default::Default for ExportMemoryWin32HandleInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            p_attributes: ::std::ptr::null(),
+            dw_access: DWORD::default(),
+            // LPCWSTR is a raw pointer type without a Default impl; zeroed == null.
+            name: unsafe { ::std::mem::zeroed() },
+        }
+    }
+}
+unsafe impl TaggedStructure for ExportMemoryWin32HandleInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR;
+}
+impl ExportMemoryWin32HandleInfoKHR {
+    pub fn builder<'a>() -> ExportMemoryWin32HandleInfoKHRBuilder<'a> {
+        ExportMemoryWin32HandleInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+// repr(transparent): a &Builder may be passed wherever a &struct is expected.
+// `'a` ties the `attributes` borrow to the builder.
+pub struct ExportMemoryWin32HandleInfoKHRBuilder<'a> {
+    inner: ExportMemoryWin32HandleInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid `p_next` extension of `MemoryAllocateInfo` (builder and raw struct).
+unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryWin32HandleInfoKHRBuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for ExportMemoryWin32HandleInfoKHR {}
+impl<'a> ::std::ops::Deref for ExportMemoryWin32HandleInfoKHRBuilder<'a> {
+    type Target = ExportMemoryWin32HandleInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for ExportMemoryWin32HandleInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> ExportMemoryWin32HandleInfoKHRBuilder<'a> {
+    #[inline]
+    // Stores a borrowed pointer; the reference must outlive `'a`.
+    pub fn attributes(mut self, attributes: &'a SECURITY_ATTRIBUTES) -> Self {
+        self.inner.p_attributes = attributes;
+        self
+    }
+    #[inline]
+    pub fn dw_access(mut self, dw_access: DWORD) -> Self {
+        self.inner.dw_access = dw_access;
+        self
+    }
+    #[inline]
+    pub fn name(mut self, name: LPCWSTR) -> Self {
+        self.inner.name = name;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> ExportMemoryWin32HandleInfoKHR {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportMemoryZirconHandleInfoFUCHSIA.html>"]
+// C-layout mirror of the Vulkan struct linked above (machine-generated bindings).
+pub struct ImportMemoryZirconHandleInfoFUCHSIA {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub handle_type: ExternalMemoryHandleTypeFlags,
+    pub handle: zx_handle_t,
+}
+impl ::std::default::Default for ImportMemoryZirconHandleInfoFUCHSIA {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            handle_type: ExternalMemoryHandleTypeFlags::default(),
+            handle: zx_handle_t::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for ImportMemoryZirconHandleInfoFUCHSIA {
+    const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA;
+}
+impl ImportMemoryZirconHandleInfoFUCHSIA {
+    pub fn builder<'a>() -> ImportMemoryZirconHandleInfoFUCHSIABuilder<'a> {
+        ImportMemoryZirconHandleInfoFUCHSIABuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+// repr(transparent): a &Builder may be passed wherever a &struct is expected.
+pub struct ImportMemoryZirconHandleInfoFUCHSIABuilder<'a> {
+    inner: ImportMemoryZirconHandleInfoFUCHSIA,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid `p_next` extension of `MemoryAllocateInfo` (builder and raw struct).
+unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryZirconHandleInfoFUCHSIABuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryZirconHandleInfoFUCHSIA {}
+impl<'a> ::std::ops::Deref for ImportMemoryZirconHandleInfoFUCHSIABuilder<'a> {
+    type Target = ImportMemoryZirconHandleInfoFUCHSIA;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for ImportMemoryZirconHandleInfoFUCHSIABuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> ImportMemoryZirconHandleInfoFUCHSIABuilder<'a> {
+    #[inline]
+    pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self {
+        self.inner.handle_type = handle_type;
+        self
+    }
+    #[inline]
+    pub fn handle(mut self, handle: zx_handle_t) -> Self {
+        self.inner.handle = handle;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> ImportMemoryZirconHandleInfoFUCHSIA {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryZirconHandlePropertiesFUCHSIA.html>"]
+// C-layout mirror of the Vulkan struct linked above. `p_next` is `*mut` here
+// (presumably an output struct filled in by the implementation — see spec link).
+pub struct MemoryZirconHandlePropertiesFUCHSIA {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub memory_type_bits: u32,
+}
+impl ::std::default::Default for MemoryZirconHandlePropertiesFUCHSIA {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            memory_type_bits: u32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for MemoryZirconHandlePropertiesFUCHSIA {
+    const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA;
+}
+impl MemoryZirconHandlePropertiesFUCHSIA {
+    pub fn builder<'a>() -> MemoryZirconHandlePropertiesFUCHSIABuilder<'a> {
+        MemoryZirconHandlePropertiesFUCHSIABuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+// repr(transparent): a &Builder may be passed wherever a &struct is expected.
+// No Extends* impls: this struct is not chained into another struct's p_next.
+pub struct MemoryZirconHandlePropertiesFUCHSIABuilder<'a> {
+    inner: MemoryZirconHandlePropertiesFUCHSIA,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MemoryZirconHandlePropertiesFUCHSIABuilder<'a> {
+    type Target = MemoryZirconHandlePropertiesFUCHSIA;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for MemoryZirconHandlePropertiesFUCHSIABuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> MemoryZirconHandlePropertiesFUCHSIABuilder<'a> {
+    #[inline]
+    pub fn memory_type_bits(mut self, memory_type_bits: u32) -> Self {
+        self.inner.memory_type_bits = memory_type_bits;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> MemoryZirconHandlePropertiesFUCHSIA {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryGetZirconHandleInfoFUCHSIA.html>"]
+// C-layout mirror of the Vulkan struct linked above (machine-generated bindings).
+pub struct MemoryGetZirconHandleInfoFUCHSIA {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub memory: DeviceMemory,
+    pub handle_type: ExternalMemoryHandleTypeFlags,
+}
+impl ::std::default::Default for MemoryGetZirconHandleInfoFUCHSIA {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            memory: DeviceMemory::default(),
+            handle_type: ExternalMemoryHandleTypeFlags::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for MemoryGetZirconHandleInfoFUCHSIA {
+    const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA;
+}
+impl MemoryGetZirconHandleInfoFUCHSIA {
+    pub fn builder<'a>() -> MemoryGetZirconHandleInfoFUCHSIABuilder<'a> {
+        MemoryGetZirconHandleInfoFUCHSIABuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+// repr(transparent): a &Builder may be passed wherever a &struct is expected.
+pub struct MemoryGetZirconHandleInfoFUCHSIABuilder<'a> {
+    inner: MemoryGetZirconHandleInfoFUCHSIA,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MemoryGetZirconHandleInfoFUCHSIABuilder<'a> {
+    type Target = MemoryGetZirconHandleInfoFUCHSIA;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for MemoryGetZirconHandleInfoFUCHSIABuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> MemoryGetZirconHandleInfoFUCHSIABuilder<'a> {
+    #[inline]
+    pub fn memory(mut self, memory: DeviceMemory) -> Self {
+        self.inner.memory = memory;
+        self
+    }
+    #[inline]
+    pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self {
+        self.inner.handle_type = handle_type;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> MemoryGetZirconHandleInfoFUCHSIA {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryWin32HandlePropertiesKHR.html>"]
+// C-layout mirror of the Vulkan struct linked above. `p_next` is `*mut` here
+// (presumably an output struct filled in by the implementation — see spec link).
+pub struct MemoryWin32HandlePropertiesKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub memory_type_bits: u32,
+}
+impl ::std::default::Default for MemoryWin32HandlePropertiesKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            memory_type_bits: u32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for MemoryWin32HandlePropertiesKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_WIN32_HANDLE_PROPERTIES_KHR;
+}
+impl MemoryWin32HandlePropertiesKHR {
+    pub fn builder<'a>() -> MemoryWin32HandlePropertiesKHRBuilder<'a> {
+        MemoryWin32HandlePropertiesKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+// repr(transparent): a &Builder may be passed wherever a &struct is expected.
+pub struct MemoryWin32HandlePropertiesKHRBuilder<'a> {
+    inner: MemoryWin32HandlePropertiesKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MemoryWin32HandlePropertiesKHRBuilder<'a> {
+    type Target = MemoryWin32HandlePropertiesKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for MemoryWin32HandlePropertiesKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> MemoryWin32HandlePropertiesKHRBuilder<'a> {
+    #[inline]
+    pub fn memory_type_bits(mut self, memory_type_bits: u32) -> Self {
+        self.inner.memory_type_bits = memory_type_bits;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> MemoryWin32HandlePropertiesKHR {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryGetWin32HandleInfoKHR.html>"]
+// C-layout mirror of the Vulkan struct linked above (machine-generated bindings).
+pub struct MemoryGetWin32HandleInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub memory: DeviceMemory,
+    pub handle_type: ExternalMemoryHandleTypeFlags,
+}
+impl ::std::default::Default for MemoryGetWin32HandleInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            memory: DeviceMemory::default(),
+            handle_type: ExternalMemoryHandleTypeFlags::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for MemoryGetWin32HandleInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_GET_WIN32_HANDLE_INFO_KHR;
+}
+impl MemoryGetWin32HandleInfoKHR {
+    pub fn builder<'a>() -> MemoryGetWin32HandleInfoKHRBuilder<'a> {
+        MemoryGetWin32HandleInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+// repr(transparent): a &Builder may be passed wherever a &struct is expected.
+pub struct MemoryGetWin32HandleInfoKHRBuilder<'a> {
+    inner: MemoryGetWin32HandleInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MemoryGetWin32HandleInfoKHRBuilder<'a> {
+    type Target = MemoryGetWin32HandleInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for MemoryGetWin32HandleInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> MemoryGetWin32HandleInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn memory(mut self, memory: DeviceMemory) -> Self {
+        self.inner.memory = memory;
+        self
+    }
+    #[inline]
+    pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self {
+        self.inner.handle_type = handle_type;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> MemoryGetWin32HandleInfoKHR {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportMemoryFdInfoKHR.html>"]
+// C-layout mirror of the Vulkan struct linked above (machine-generated bindings).
+pub struct ImportMemoryFdInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub handle_type: ExternalMemoryHandleTypeFlags,
+    pub fd: c_int,
+}
+impl ::std::default::Default for ImportMemoryFdInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            handle_type: ExternalMemoryHandleTypeFlags::default(),
+            fd: c_int::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for ImportMemoryFdInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_MEMORY_FD_INFO_KHR;
+}
+impl ImportMemoryFdInfoKHR {
+    pub fn builder<'a>() -> ImportMemoryFdInfoKHRBuilder<'a> {
+        ImportMemoryFdInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+// repr(transparent): a &Builder may be passed wherever a &struct is expected.
+pub struct ImportMemoryFdInfoKHRBuilder<'a> {
+    inner: ImportMemoryFdInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid `p_next` extension of `MemoryAllocateInfo` (builder and raw struct).
+unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryFdInfoKHRBuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryFdInfoKHR {}
+impl<'a> ::std::ops::Deref for ImportMemoryFdInfoKHRBuilder<'a> {
+    type Target = ImportMemoryFdInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for ImportMemoryFdInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> ImportMemoryFdInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self {
+        self.inner.handle_type = handle_type;
+        self
+    }
+    #[inline]
+    pub fn fd(mut self, fd: c_int) -> Self {
+        self.inner.fd = fd;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> ImportMemoryFdInfoKHR {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryFdPropertiesKHR.html>"]
+// C-layout mirror of the Vulkan struct linked above. `p_next` is `*mut` here
+// (presumably an output struct filled in by the implementation — see spec link).
+pub struct MemoryFdPropertiesKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub memory_type_bits: u32,
+}
+impl ::std::default::Default for MemoryFdPropertiesKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            memory_type_bits: u32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for MemoryFdPropertiesKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_FD_PROPERTIES_KHR;
+}
+impl MemoryFdPropertiesKHR {
+    pub fn builder<'a>() -> MemoryFdPropertiesKHRBuilder<'a> {
+        MemoryFdPropertiesKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+// repr(transparent): a &Builder may be passed wherever a &struct is expected.
+pub struct MemoryFdPropertiesKHRBuilder<'a> {
+    inner: MemoryFdPropertiesKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MemoryFdPropertiesKHRBuilder<'a> {
+    type Target = MemoryFdPropertiesKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for MemoryFdPropertiesKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> MemoryFdPropertiesKHRBuilder<'a> {
+    #[inline]
+    pub fn memory_type_bits(mut self, memory_type_bits: u32) -> Self {
+        self.inner.memory_type_bits = memory_type_bits;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> MemoryFdPropertiesKHR {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryGetFdInfoKHR.html>"]
+// C-layout mirror of the Vulkan struct linked above (machine-generated bindings).
+pub struct MemoryGetFdInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub memory: DeviceMemory,
+    pub handle_type: ExternalMemoryHandleTypeFlags,
+}
+impl ::std::default::Default for MemoryGetFdInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            memory: DeviceMemory::default(),
+            handle_type: ExternalMemoryHandleTypeFlags::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for MemoryGetFdInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_GET_FD_INFO_KHR;
+}
+impl MemoryGetFdInfoKHR {
+    pub fn builder<'a>() -> MemoryGetFdInfoKHRBuilder<'a> {
+        MemoryGetFdInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+// repr(transparent): a &Builder may be passed wherever a &struct is expected.
+pub struct MemoryGetFdInfoKHRBuilder<'a> {
+    inner: MemoryGetFdInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MemoryGetFdInfoKHRBuilder<'a> {
+    type Target = MemoryGetFdInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for MemoryGetFdInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> MemoryGetFdInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn memory(mut self, memory: DeviceMemory) -> Self {
+        self.inner.memory = memory;
+        self
+    }
+    #[inline]
+    pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self {
+        self.inner.handle_type = handle_type;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> MemoryGetFdInfoKHR {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkWin32KeyedMutexAcquireReleaseInfoKHR.html>"]
+// C-layout mirror of the Vulkan struct linked above. Each count/pointer pair
+// describes a parallel array (acquire_count governs the three p_acquire_*
+// arrays; release_count governs the two p_release_* arrays).
+pub struct Win32KeyedMutexAcquireReleaseInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub acquire_count: u32,
+    pub p_acquire_syncs: *const DeviceMemory,
+    pub p_acquire_keys: *const u64,
+    pub p_acquire_timeouts: *const u32,
+    pub release_count: u32,
+    pub p_release_syncs: *const DeviceMemory,
+    pub p_release_keys: *const u64,
+}
+impl ::std::default::Default for Win32KeyedMutexAcquireReleaseInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            acquire_count: u32::default(),
+            p_acquire_syncs: ::std::ptr::null(),
+            p_acquire_keys: ::std::ptr::null(),
+            p_acquire_timeouts: ::std::ptr::null(),
+            release_count: u32::default(),
+            p_release_syncs: ::std::ptr::null(),
+            p_release_keys: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for Win32KeyedMutexAcquireReleaseInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR;
+}
+impl Win32KeyedMutexAcquireReleaseInfoKHR {
+    pub fn builder<'a>() -> Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> {
+        Win32KeyedMutexAcquireReleaseInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+// repr(transparent): a &Builder may be passed wherever a &struct is expected.
+// `'a` ties the borrowed slices stored below to the builder.
+pub struct Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> {
+    inner: Win32KeyedMutexAcquireReleaseInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid `p_next` extension of both `SubmitInfo` and `SubmitInfo2`.
+unsafe impl ExtendsSubmitInfo for Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'_> {}
+unsafe impl ExtendsSubmitInfo for Win32KeyedMutexAcquireReleaseInfoKHR {}
+unsafe impl ExtendsSubmitInfo2 for Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'_> {}
+unsafe impl ExtendsSubmitInfo2 for Win32KeyedMutexAcquireReleaseInfoKHR {}
+impl<'a> ::std::ops::Deref for Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> {
+    type Target = Win32KeyedMutexAcquireReleaseInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> Win32KeyedMutexAcquireReleaseInfoKHRBuilder<'a> {
+    // NOTE(review): each acquire_* setter overwrites `acquire_count` with its
+    // own slice length, so all three acquire slices must be the same length
+    // (likewise the two release slices for `release_count`).
+    #[inline]
+    pub fn acquire_syncs(mut self, acquire_syncs: &'a [DeviceMemory]) -> Self {
+        self.inner.acquire_count = acquire_syncs.len() as _;
+        self.inner.p_acquire_syncs = acquire_syncs.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn acquire_keys(mut self, acquire_keys: &'a [u64]) -> Self {
+        self.inner.acquire_count = acquire_keys.len() as _;
+        self.inner.p_acquire_keys = acquire_keys.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn acquire_timeouts(mut self, acquire_timeouts: &'a [u32]) -> Self {
+        self.inner.acquire_count = acquire_timeouts.len() as _;
+        self.inner.p_acquire_timeouts = acquire_timeouts.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn release_syncs(mut self, release_syncs: &'a [DeviceMemory]) -> Self {
+        self.inner.release_count = release_syncs.len() as _;
+        self.inner.p_release_syncs = release_syncs.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn release_keys(mut self, release_keys: &'a [u64]) -> Self {
+        self.inner.release_count = release_keys.len() as _;
+        self.inner.p_release_keys = release_keys.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> Win32KeyedMutexAcquireReleaseInfoKHR {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceExternalSemaphoreInfo.html>"]
+// C-layout mirror of the Vulkan struct linked above (machine-generated bindings).
+pub struct PhysicalDeviceExternalSemaphoreInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub handle_type: ExternalSemaphoreHandleTypeFlags,
+}
+impl ::std::default::Default for PhysicalDeviceExternalSemaphoreInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            handle_type: ExternalSemaphoreHandleTypeFlags::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceExternalSemaphoreInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
+}
+impl PhysicalDeviceExternalSemaphoreInfo {
+    pub fn builder<'a>() -> PhysicalDeviceExternalSemaphoreInfoBuilder<'a> {
+        PhysicalDeviceExternalSemaphoreInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+// repr(transparent): a &Builder may be passed wherever a &struct is expected.
+pub struct PhysicalDeviceExternalSemaphoreInfoBuilder<'a> {
+    inner: PhysicalDeviceExternalSemaphoreInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait: implementors are valid `p_next` extensions of
+// `PhysicalDeviceExternalSemaphoreInfo` (see `push_next` below).
+pub unsafe trait ExtendsPhysicalDeviceExternalSemaphoreInfo {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceExternalSemaphoreInfoBuilder<'a> {
+    type Target = PhysicalDeviceExternalSemaphoreInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceExternalSemaphoreInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceExternalSemaphoreInfoBuilder<'a> {
+    #[inline]
+    pub fn handle_type(mut self, handle_type: ExternalSemaphoreHandleTypeFlags) -> Self {
+        self.inner.handle_type = handle_type;
+        self
+    }
+    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+    #[doc = r" valid extension structs can be pushed into the chain."]
+    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+    #[doc = r" chain will look like `A -> D -> B -> C`."]
+    pub fn push_next<T: ExtendsPhysicalDeviceExternalSemaphoreInfo>(
+        mut self,
+        next: &'a mut T,
+    ) -> Self {
+        // SAFETY-relevant splice: walk `next`'s own chain to its last element,
+        // hook the existing chain onto that tail, then make `next` the head.
+        // The `'a` bound keeps the borrowed extension alive with the builder.
+        unsafe {
+            let next_ptr = <*const T>::cast(next);
+            let last_next = ptr_chain_iter(next).last().unwrap();
+            (*last_next).p_next = self.inner.p_next as _;
+            self.inner.p_next = next_ptr;
+        }
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceExternalSemaphoreInfo {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalSemaphoreProperties.html>"]
+// C-layout mirror of the Vulkan struct linked above. `p_next` is `*mut` here
+// (presumably an output struct filled in by the implementation — see spec link).
+pub struct ExternalSemaphoreProperties {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub export_from_imported_handle_types: ExternalSemaphoreHandleTypeFlags,
+    pub compatible_handle_types: ExternalSemaphoreHandleTypeFlags,
+    pub external_semaphore_features: ExternalSemaphoreFeatureFlags,
+}
+impl ::std::default::Default for ExternalSemaphoreProperties {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            export_from_imported_handle_types: ExternalSemaphoreHandleTypeFlags::default(),
+            compatible_handle_types: ExternalSemaphoreHandleTypeFlags::default(),
+            external_semaphore_features: ExternalSemaphoreFeatureFlags::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for ExternalSemaphoreProperties {
+    const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_SEMAPHORE_PROPERTIES;
+}
+impl ExternalSemaphoreProperties {
+    pub fn builder<'a>() -> ExternalSemaphorePropertiesBuilder<'a> {
+        ExternalSemaphorePropertiesBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+// repr(transparent): a &Builder may be passed wherever a &struct is expected.
+pub struct ExternalSemaphorePropertiesBuilder<'a> {
+    inner: ExternalSemaphoreProperties,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ExternalSemaphorePropertiesBuilder<'a> {
+    type Target = ExternalSemaphoreProperties;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for ExternalSemaphorePropertiesBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> ExternalSemaphorePropertiesBuilder<'a> {
+    #[inline]
+    pub fn export_from_imported_handle_types(
+        mut self,
+        export_from_imported_handle_types: ExternalSemaphoreHandleTypeFlags,
+    ) -> Self {
+        self.inner.export_from_imported_handle_types = export_from_imported_handle_types;
+        self
+    }
+    #[inline]
+    pub fn compatible_handle_types(
+        mut self,
+        compatible_handle_types: ExternalSemaphoreHandleTypeFlags,
+    ) -> Self {
+        self.inner.compatible_handle_types = compatible_handle_types;
+        self
+    }
+    #[inline]
+    pub fn external_semaphore_features(
+        mut self,
+        external_semaphore_features: ExternalSemaphoreFeatureFlags,
+    ) -> Self {
+        self.inner.external_semaphore_features = external_semaphore_features;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> ExternalSemaphoreProperties {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportSemaphoreCreateInfo.html>"]
+// C-layout mirror of the Vulkan struct linked above (machine-generated bindings).
+pub struct ExportSemaphoreCreateInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub handle_types: ExternalSemaphoreHandleTypeFlags,
+}
+impl ::std::default::Default for ExportSemaphoreCreateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            handle_types: ExternalSemaphoreHandleTypeFlags::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for ExportSemaphoreCreateInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_SEMAPHORE_CREATE_INFO;
+}
+impl ExportSemaphoreCreateInfo {
+    pub fn builder<'a>() -> ExportSemaphoreCreateInfoBuilder<'a> {
+        ExportSemaphoreCreateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+// repr(transparent): a &Builder may be passed wherever a &struct is expected.
+pub struct ExportSemaphoreCreateInfoBuilder<'a> {
+    inner: ExportSemaphoreCreateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid `p_next` extension of `SemaphoreCreateInfo` (builder and raw struct).
+unsafe impl ExtendsSemaphoreCreateInfo for ExportSemaphoreCreateInfoBuilder<'_> {}
+unsafe impl ExtendsSemaphoreCreateInfo for ExportSemaphoreCreateInfo {}
+impl<'a> ::std::ops::Deref for ExportSemaphoreCreateInfoBuilder<'a> {
+    type Target = ExportSemaphoreCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for ExportSemaphoreCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> ExportSemaphoreCreateInfoBuilder<'a> {
+    #[inline]
+    pub fn handle_types(mut self, handle_types: ExternalSemaphoreHandleTypeFlags) -> Self {
+        self.inner.handle_types = handle_types;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> ExportSemaphoreCreateInfo {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportSemaphoreWin32HandleInfoKHR.html>"]
+pub struct ImportSemaphoreWin32HandleInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub semaphore: Semaphore,
+ pub flags: SemaphoreImportFlags,
+ pub handle_type: ExternalSemaphoreHandleTypeFlags,
+ pub handle: HANDLE,
+ pub name: LPCWSTR,
+}
+impl ::std::default::Default for ImportSemaphoreWin32HandleInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ semaphore: Semaphore::default(),
+ flags: SemaphoreImportFlags::default(),
+ handle_type: ExternalSemaphoreHandleTypeFlags::default(),
+ handle: unsafe { ::std::mem::zeroed() },
+ name: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for ImportSemaphoreWin32HandleInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR;
+}
+impl ImportSemaphoreWin32HandleInfoKHR {
+ pub fn builder<'a>() -> ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
+ ImportSemaphoreWin32HandleInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
+ inner: ImportSemaphoreWin32HandleInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
+ type Target = ImportSemaphoreWin32HandleInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImportSemaphoreWin32HandleInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn semaphore(mut self, semaphore: Semaphore) -> Self {
+ self.inner.semaphore = semaphore;
+ self
+ }
+ #[inline]
+ pub fn flags(mut self, flags: SemaphoreImportFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalSemaphoreHandleTypeFlags) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[inline]
+ pub fn handle(mut self, handle: HANDLE) -> Self {
+ self.inner.handle = handle;
+ self
+ }
+ #[inline]
+ pub fn name(mut self, name: LPCWSTR) -> Self {
+ self.inner.name = name;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImportSemaphoreWin32HandleInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportSemaphoreWin32HandleInfoKHR.html>"]
+pub struct ExportSemaphoreWin32HandleInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub p_attributes: *const SECURITY_ATTRIBUTES,
+ pub dw_access: DWORD,
+ pub name: LPCWSTR,
+}
+impl ::std::default::Default for ExportSemaphoreWin32HandleInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ p_attributes: ::std::ptr::null(),
+ dw_access: DWORD::default(),
+ name: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for ExportSemaphoreWin32HandleInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR;
+}
+impl ExportSemaphoreWin32HandleInfoKHR {
+ pub fn builder<'a>() -> ExportSemaphoreWin32HandleInfoKHRBuilder<'a> {
+ ExportSemaphoreWin32HandleInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExportSemaphoreWin32HandleInfoKHRBuilder<'a> {
+ inner: ExportSemaphoreWin32HandleInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsSemaphoreCreateInfo for ExportSemaphoreWin32HandleInfoKHRBuilder<'_> {}
+unsafe impl ExtendsSemaphoreCreateInfo for ExportSemaphoreWin32HandleInfoKHR {}
+impl<'a> ::std::ops::Deref for ExportSemaphoreWin32HandleInfoKHRBuilder<'a> {
+ type Target = ExportSemaphoreWin32HandleInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExportSemaphoreWin32HandleInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExportSemaphoreWin32HandleInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn attributes(mut self, attributes: &'a SECURITY_ATTRIBUTES) -> Self {
+ self.inner.p_attributes = attributes;
+ self
+ }
+ #[inline]
+ pub fn dw_access(mut self, dw_access: DWORD) -> Self {
+ self.inner.dw_access = dw_access;
+ self
+ }
+ #[inline]
+ pub fn name(mut self, name: LPCWSTR) -> Self {
+ self.inner.name = name;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExportSemaphoreWin32HandleInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkD3D12FenceSubmitInfoKHR.html>"]
+pub struct D3D12FenceSubmitInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub wait_semaphore_values_count: u32,
+ pub p_wait_semaphore_values: *const u64,
+ pub signal_semaphore_values_count: u32,
+ pub p_signal_semaphore_values: *const u64,
+}
+impl ::std::default::Default for D3D12FenceSubmitInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ wait_semaphore_values_count: u32::default(),
+ p_wait_semaphore_values: ::std::ptr::null(),
+ signal_semaphore_values_count: u32::default(),
+ p_signal_semaphore_values: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for D3D12FenceSubmitInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::D3D12_FENCE_SUBMIT_INFO_KHR;
+}
+impl D3D12FenceSubmitInfoKHR {
+ pub fn builder<'a>() -> D3D12FenceSubmitInfoKHRBuilder<'a> {
+ D3D12FenceSubmitInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct D3D12FenceSubmitInfoKHRBuilder<'a> {
+ inner: D3D12FenceSubmitInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsSubmitInfo for D3D12FenceSubmitInfoKHRBuilder<'_> {}
+unsafe impl ExtendsSubmitInfo for D3D12FenceSubmitInfoKHR {}
+impl<'a> ::std::ops::Deref for D3D12FenceSubmitInfoKHRBuilder<'a> {
+ type Target = D3D12FenceSubmitInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for D3D12FenceSubmitInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> D3D12FenceSubmitInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn wait_semaphore_values(mut self, wait_semaphore_values: &'a [u64]) -> Self {
+ self.inner.wait_semaphore_values_count = wait_semaphore_values.len() as _;
+ self.inner.p_wait_semaphore_values = wait_semaphore_values.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn signal_semaphore_values(mut self, signal_semaphore_values: &'a [u64]) -> Self {
+ self.inner.signal_semaphore_values_count = signal_semaphore_values.len() as _;
+ self.inner.p_signal_semaphore_values = signal_semaphore_values.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> D3D12FenceSubmitInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSemaphoreGetWin32HandleInfoKHR.html>"]
+pub struct SemaphoreGetWin32HandleInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub semaphore: Semaphore,
+ pub handle_type: ExternalSemaphoreHandleTypeFlags,
+}
+impl ::std::default::Default for SemaphoreGetWin32HandleInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ semaphore: Semaphore::default(),
+ handle_type: ExternalSemaphoreHandleTypeFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SemaphoreGetWin32HandleInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR;
+}
+impl SemaphoreGetWin32HandleInfoKHR {
+ pub fn builder<'a>() -> SemaphoreGetWin32HandleInfoKHRBuilder<'a> {
+ SemaphoreGetWin32HandleInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SemaphoreGetWin32HandleInfoKHRBuilder<'a> {
+ inner: SemaphoreGetWin32HandleInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SemaphoreGetWin32HandleInfoKHRBuilder<'a> {
+ type Target = SemaphoreGetWin32HandleInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SemaphoreGetWin32HandleInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SemaphoreGetWin32HandleInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn semaphore(mut self, semaphore: Semaphore) -> Self {
+ self.inner.semaphore = semaphore;
+ self
+ }
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalSemaphoreHandleTypeFlags) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SemaphoreGetWin32HandleInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportSemaphoreFdInfoKHR.html>"]
+pub struct ImportSemaphoreFdInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub semaphore: Semaphore,
+ pub flags: SemaphoreImportFlags,
+ pub handle_type: ExternalSemaphoreHandleTypeFlags,
+ pub fd: c_int,
+}
+impl ::std::default::Default for ImportSemaphoreFdInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ semaphore: Semaphore::default(),
+ flags: SemaphoreImportFlags::default(),
+ handle_type: ExternalSemaphoreHandleTypeFlags::default(),
+ fd: c_int::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImportSemaphoreFdInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_SEMAPHORE_FD_INFO_KHR;
+}
+impl ImportSemaphoreFdInfoKHR {
+ pub fn builder<'a>() -> ImportSemaphoreFdInfoKHRBuilder<'a> {
+ ImportSemaphoreFdInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImportSemaphoreFdInfoKHRBuilder<'a> {
+ inner: ImportSemaphoreFdInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImportSemaphoreFdInfoKHRBuilder<'a> {
+ type Target = ImportSemaphoreFdInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImportSemaphoreFdInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImportSemaphoreFdInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn semaphore(mut self, semaphore: Semaphore) -> Self {
+ self.inner.semaphore = semaphore;
+ self
+ }
+ #[inline]
+ pub fn flags(mut self, flags: SemaphoreImportFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalSemaphoreHandleTypeFlags) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[inline]
+ pub fn fd(mut self, fd: c_int) -> Self {
+ self.inner.fd = fd;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImportSemaphoreFdInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSemaphoreGetFdInfoKHR.html>"]
+pub struct SemaphoreGetFdInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub semaphore: Semaphore,
+ pub handle_type: ExternalSemaphoreHandleTypeFlags,
+}
+impl ::std::default::Default for SemaphoreGetFdInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ semaphore: Semaphore::default(),
+ handle_type: ExternalSemaphoreHandleTypeFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SemaphoreGetFdInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_GET_FD_INFO_KHR;
+}
+impl SemaphoreGetFdInfoKHR {
+ pub fn builder<'a>() -> SemaphoreGetFdInfoKHRBuilder<'a> {
+ SemaphoreGetFdInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SemaphoreGetFdInfoKHRBuilder<'a> {
+ inner: SemaphoreGetFdInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SemaphoreGetFdInfoKHRBuilder<'a> {
+ type Target = SemaphoreGetFdInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SemaphoreGetFdInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SemaphoreGetFdInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn semaphore(mut self, semaphore: Semaphore) -> Self {
+ self.inner.semaphore = semaphore;
+ self
+ }
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalSemaphoreHandleTypeFlags) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SemaphoreGetFdInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportSemaphoreZirconHandleInfoFUCHSIA.html>"]
+pub struct ImportSemaphoreZirconHandleInfoFUCHSIA {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub semaphore: Semaphore,
+ pub flags: SemaphoreImportFlags,
+ pub handle_type: ExternalSemaphoreHandleTypeFlags,
+ pub zircon_handle: zx_handle_t,
+}
+impl ::std::default::Default for ImportSemaphoreZirconHandleInfoFUCHSIA {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ semaphore: Semaphore::default(),
+ flags: SemaphoreImportFlags::default(),
+ handle_type: ExternalSemaphoreHandleTypeFlags::default(),
+ zircon_handle: zx_handle_t::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImportSemaphoreZirconHandleInfoFUCHSIA {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA;
+}
+impl ImportSemaphoreZirconHandleInfoFUCHSIA {
+ pub fn builder<'a>() -> ImportSemaphoreZirconHandleInfoFUCHSIABuilder<'a> {
+ ImportSemaphoreZirconHandleInfoFUCHSIABuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImportSemaphoreZirconHandleInfoFUCHSIABuilder<'a> {
+ inner: ImportSemaphoreZirconHandleInfoFUCHSIA,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImportSemaphoreZirconHandleInfoFUCHSIABuilder<'a> {
+ type Target = ImportSemaphoreZirconHandleInfoFUCHSIA;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImportSemaphoreZirconHandleInfoFUCHSIABuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImportSemaphoreZirconHandleInfoFUCHSIABuilder<'a> {
+ #[inline]
+ pub fn semaphore(mut self, semaphore: Semaphore) -> Self {
+ self.inner.semaphore = semaphore;
+ self
+ }
+ #[inline]
+ pub fn flags(mut self, flags: SemaphoreImportFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalSemaphoreHandleTypeFlags) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[inline]
+ pub fn zircon_handle(mut self, zircon_handle: zx_handle_t) -> Self {
+ self.inner.zircon_handle = zircon_handle;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImportSemaphoreZirconHandleInfoFUCHSIA {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSemaphoreGetZirconHandleInfoFUCHSIA.html>"]
+pub struct SemaphoreGetZirconHandleInfoFUCHSIA {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub semaphore: Semaphore,
+ pub handle_type: ExternalSemaphoreHandleTypeFlags,
+}
+impl ::std::default::Default for SemaphoreGetZirconHandleInfoFUCHSIA {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ semaphore: Semaphore::default(),
+ handle_type: ExternalSemaphoreHandleTypeFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SemaphoreGetZirconHandleInfoFUCHSIA {
+ const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA;
+}
+impl SemaphoreGetZirconHandleInfoFUCHSIA {
+ pub fn builder<'a>() -> SemaphoreGetZirconHandleInfoFUCHSIABuilder<'a> {
+ SemaphoreGetZirconHandleInfoFUCHSIABuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SemaphoreGetZirconHandleInfoFUCHSIABuilder<'a> {
+ inner: SemaphoreGetZirconHandleInfoFUCHSIA,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SemaphoreGetZirconHandleInfoFUCHSIABuilder<'a> {
+ type Target = SemaphoreGetZirconHandleInfoFUCHSIA;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SemaphoreGetZirconHandleInfoFUCHSIABuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SemaphoreGetZirconHandleInfoFUCHSIABuilder<'a> {
+ #[inline]
+ pub fn semaphore(mut self, semaphore: Semaphore) -> Self {
+ self.inner.semaphore = semaphore;
+ self
+ }
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalSemaphoreHandleTypeFlags) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SemaphoreGetZirconHandleInfoFUCHSIA {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceExternalFenceInfo.html>"]
+pub struct PhysicalDeviceExternalFenceInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub handle_type: ExternalFenceHandleTypeFlags,
+}
+impl ::std::default::Default for PhysicalDeviceExternalFenceInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ handle_type: ExternalFenceHandleTypeFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceExternalFenceInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO;
+}
+impl PhysicalDeviceExternalFenceInfo {
+ pub fn builder<'a>() -> PhysicalDeviceExternalFenceInfoBuilder<'a> {
+ PhysicalDeviceExternalFenceInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceExternalFenceInfoBuilder<'a> {
+ inner: PhysicalDeviceExternalFenceInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PhysicalDeviceExternalFenceInfoBuilder<'a> {
+ type Target = PhysicalDeviceExternalFenceInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceExternalFenceInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceExternalFenceInfoBuilder<'a> {
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalFenceHandleTypeFlags) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceExternalFenceInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalFenceProperties.html>"]
+pub struct ExternalFenceProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub export_from_imported_handle_types: ExternalFenceHandleTypeFlags,
+ pub compatible_handle_types: ExternalFenceHandleTypeFlags,
+ pub external_fence_features: ExternalFenceFeatureFlags,
+}
+impl ::std::default::Default for ExternalFenceProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ export_from_imported_handle_types: ExternalFenceHandleTypeFlags::default(),
+ compatible_handle_types: ExternalFenceHandleTypeFlags::default(),
+ external_fence_features: ExternalFenceFeatureFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ExternalFenceProperties {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_FENCE_PROPERTIES;
+}
+impl ExternalFenceProperties {
+ pub fn builder<'a>() -> ExternalFencePropertiesBuilder<'a> {
+ ExternalFencePropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExternalFencePropertiesBuilder<'a> {
+ inner: ExternalFenceProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ExternalFencePropertiesBuilder<'a> {
+ type Target = ExternalFenceProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExternalFencePropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExternalFencePropertiesBuilder<'a> {
+ #[inline]
+ pub fn export_from_imported_handle_types(
+ mut self,
+ export_from_imported_handle_types: ExternalFenceHandleTypeFlags,
+ ) -> Self {
+ self.inner.export_from_imported_handle_types = export_from_imported_handle_types;
+ self
+ }
+ #[inline]
+ pub fn compatible_handle_types(
+ mut self,
+ compatible_handle_types: ExternalFenceHandleTypeFlags,
+ ) -> Self {
+ self.inner.compatible_handle_types = compatible_handle_types;
+ self
+ }
+ #[inline]
+ pub fn external_fence_features(
+ mut self,
+ external_fence_features: ExternalFenceFeatureFlags,
+ ) -> Self {
+ self.inner.external_fence_features = external_fence_features;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExternalFenceProperties {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportFenceCreateInfo.html>"]
+pub struct ExportFenceCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub handle_types: ExternalFenceHandleTypeFlags,
+}
+impl ::std::default::Default for ExportFenceCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ handle_types: ExternalFenceHandleTypeFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ExportFenceCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_FENCE_CREATE_INFO;
+}
+impl ExportFenceCreateInfo {
+ pub fn builder<'a>() -> ExportFenceCreateInfoBuilder<'a> {
+ ExportFenceCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExportFenceCreateInfoBuilder<'a> {
+ inner: ExportFenceCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsFenceCreateInfo for ExportFenceCreateInfoBuilder<'_> {}
+unsafe impl ExtendsFenceCreateInfo for ExportFenceCreateInfo {}
+impl<'a> ::std::ops::Deref for ExportFenceCreateInfoBuilder<'a> {
+ type Target = ExportFenceCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExportFenceCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExportFenceCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn handle_types(mut self, handle_types: ExternalFenceHandleTypeFlags) -> Self {
+ self.inner.handle_types = handle_types;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExportFenceCreateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportFenceWin32HandleInfoKHR.html>"]
+pub struct ImportFenceWin32HandleInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub fence: Fence,
+ pub flags: FenceImportFlags,
+ pub handle_type: ExternalFenceHandleTypeFlags,
+ pub handle: HANDLE,
+ pub name: LPCWSTR,
+}
+impl ::std::default::Default for ImportFenceWin32HandleInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ fence: Fence::default(),
+ flags: FenceImportFlags::default(),
+ handle_type: ExternalFenceHandleTypeFlags::default(),
+ handle: unsafe { ::std::mem::zeroed() },
+ name: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for ImportFenceWin32HandleInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_FENCE_WIN32_HANDLE_INFO_KHR;
+}
+impl ImportFenceWin32HandleInfoKHR {
+ pub fn builder<'a>() -> ImportFenceWin32HandleInfoKHRBuilder<'a> {
+ ImportFenceWin32HandleInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImportFenceWin32HandleInfoKHRBuilder<'a> {
+ inner: ImportFenceWin32HandleInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImportFenceWin32HandleInfoKHRBuilder<'a> {
+ type Target = ImportFenceWin32HandleInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImportFenceWin32HandleInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImportFenceWin32HandleInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn fence(mut self, fence: Fence) -> Self {
+ self.inner.fence = fence;
+ self
+ }
+ #[inline]
+ pub fn flags(mut self, flags: FenceImportFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalFenceHandleTypeFlags) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[inline]
+ pub fn handle(mut self, handle: HANDLE) -> Self {
+ self.inner.handle = handle;
+ self
+ }
+ #[inline]
+ pub fn name(mut self, name: LPCWSTR) -> Self {
+ self.inner.name = name;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImportFenceWin32HandleInfoKHR {
+ self.inner
+ }
+}
+// Machine-generated FFI mirror of VkExportFenceWin32HandleInfoKHR (spec link in the
+// `#[doc]` attribute below); `#[repr(C)]` keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportFenceWin32HandleInfoKHR.html>"]
+pub struct ExportFenceWin32HandleInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub p_attributes: *const SECURITY_ATTRIBUTES,
+ pub dw_access: DWORD,
+ pub name: LPCWSTR,
+}
+// `Default` presets `s_type` to this struct's tag and nulls all pointer fields.
+impl ::std::default::Default for ExportFenceWin32HandleInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ p_attributes: ::std::ptr::null(),
+ dw_access: DWORD::default(),
+ // LPCWSTR is a raw wide-string pointer with no `Default` impl; all-zero
+ // bytes give a null pointer here.
+ name: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+// Associates the VkStructureType tag written into `s_type`.
+unsafe impl TaggedStructure for ExportFenceWin32HandleInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_FENCE_WIN32_HANDLE_INFO_KHR;
+}
+impl ExportFenceWin32HandleInfoKHR {
+ pub fn builder<'a>() -> ExportFenceWin32HandleInfoKHRBuilder<'a> {
+ ExportFenceWin32HandleInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; `'a` (held via `PhantomData`) ties the borrowed
+// `SECURITY_ATTRIBUTES` passed to `attributes()` to the builder's lifetime.
+#[repr(transparent)]
+pub struct ExportFenceWin32HandleInfoKHRBuilder<'a> {
+ inner: ExportFenceWin32HandleInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marks this struct as a legal `p_next` extension of VkFenceCreateInfo.
+unsafe impl ExtendsFenceCreateInfo for ExportFenceWin32HandleInfoKHRBuilder<'_> {}
+unsafe impl ExtendsFenceCreateInfo for ExportFenceWin32HandleInfoKHR {}
+impl<'a> ::std::ops::Deref for ExportFenceWin32HandleInfoKHRBuilder<'a> {
+ type Target = ExportFenceWin32HandleInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExportFenceWin32HandleInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExportFenceWin32HandleInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn attributes(mut self, attributes: &'a SECURITY_ATTRIBUTES) -> Self {
+ self.inner.p_attributes = attributes;
+ self
+ }
+ #[inline]
+ pub fn dw_access(mut self, dw_access: DWORD) -> Self {
+ self.inner.dw_access = dw_access;
+ self
+ }
+ #[inline]
+ pub fn name(mut self, name: LPCWSTR) -> Self {
+ self.inner.name = name;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExportFenceWin32HandleInfoKHR {
+ self.inner
+ }
+}
+// Machine-generated FFI mirror of VkFenceGetWin32HandleInfoKHR (spec link in the
+// `#[doc]` attribute below); `#[repr(C)]` keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFenceGetWin32HandleInfoKHR.html>"]
+pub struct FenceGetWin32HandleInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub fence: Fence,
+ pub handle_type: ExternalFenceHandleTypeFlags,
+}
+// `Default` presets `s_type` to this struct's tag and nulls `p_next`.
+impl ::std::default::Default for FenceGetWin32HandleInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ fence: Fence::default(),
+ handle_type: ExternalFenceHandleTypeFlags::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into `s_type`.
+unsafe impl TaggedStructure for FenceGetWin32HandleInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::FENCE_GET_WIN32_HANDLE_INFO_KHR;
+}
+impl FenceGetWin32HandleInfoKHR {
+ pub fn builder<'a>() -> FenceGetWin32HandleInfoKHRBuilder<'a> {
+ FenceGetWin32HandleInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; `'a` is unused by these by-value setters and is kept
+// only for uniformity with other generated builders.
+#[repr(transparent)]
+pub struct FenceGetWin32HandleInfoKHRBuilder<'a> {
+ inner: FenceGetWin32HandleInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for FenceGetWin32HandleInfoKHRBuilder<'a> {
+ type Target = FenceGetWin32HandleInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for FenceGetWin32HandleInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> FenceGetWin32HandleInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn fence(mut self, fence: Fence) -> Self {
+ self.inner.fence = fence;
+ self
+ }
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalFenceHandleTypeFlags) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> FenceGetWin32HandleInfoKHR {
+ self.inner
+ }
+}
+// Machine-generated FFI mirror of VkImportFenceFdInfoKHR (spec link in the
+// `#[doc]` attribute below); `#[repr(C)]` keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportFenceFdInfoKHR.html>"]
+pub struct ImportFenceFdInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub fence: Fence,
+ pub flags: FenceImportFlags,
+ pub handle_type: ExternalFenceHandleTypeFlags,
+ pub fd: c_int,
+}
+// `Default` presets `s_type` to this struct's tag and nulls `p_next`.
+impl ::std::default::Default for ImportFenceFdInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ fence: Fence::default(),
+ flags: FenceImportFlags::default(),
+ handle_type: ExternalFenceHandleTypeFlags::default(),
+ fd: c_int::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into `s_type`.
+unsafe impl TaggedStructure for ImportFenceFdInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_FENCE_FD_INFO_KHR;
+}
+impl ImportFenceFdInfoKHR {
+ pub fn builder<'a>() -> ImportFenceFdInfoKHRBuilder<'a> {
+ ImportFenceFdInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; `'a` is unused by these by-value setters and is kept
+// only for uniformity with other generated builders.
+#[repr(transparent)]
+pub struct ImportFenceFdInfoKHRBuilder<'a> {
+ inner: ImportFenceFdInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImportFenceFdInfoKHRBuilder<'a> {
+ type Target = ImportFenceFdInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImportFenceFdInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImportFenceFdInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn fence(mut self, fence: Fence) -> Self {
+ self.inner.fence = fence;
+ self
+ }
+ #[inline]
+ pub fn flags(mut self, flags: FenceImportFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalFenceHandleTypeFlags) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[inline]
+ pub fn fd(mut self, fd: c_int) -> Self {
+ self.inner.fd = fd;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImportFenceFdInfoKHR {
+ self.inner
+ }
+}
+// Machine-generated FFI mirror of VkFenceGetFdInfoKHR (spec link in the
+// `#[doc]` attribute below); `#[repr(C)]` keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFenceGetFdInfoKHR.html>"]
+pub struct FenceGetFdInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub fence: Fence,
+ pub handle_type: ExternalFenceHandleTypeFlags,
+}
+// `Default` presets `s_type` to this struct's tag and nulls `p_next`.
+impl ::std::default::Default for FenceGetFdInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ fence: Fence::default(),
+ handle_type: ExternalFenceHandleTypeFlags::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into `s_type`.
+unsafe impl TaggedStructure for FenceGetFdInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::FENCE_GET_FD_INFO_KHR;
+}
+impl FenceGetFdInfoKHR {
+ pub fn builder<'a>() -> FenceGetFdInfoKHRBuilder<'a> {
+ FenceGetFdInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; `'a` is unused by these by-value setters and is kept
+// only for uniformity with other generated builders.
+#[repr(transparent)]
+pub struct FenceGetFdInfoKHRBuilder<'a> {
+ inner: FenceGetFdInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for FenceGetFdInfoKHRBuilder<'a> {
+ type Target = FenceGetFdInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for FenceGetFdInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> FenceGetFdInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn fence(mut self, fence: Fence) -> Self {
+ self.inner.fence = fence;
+ self
+ }
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalFenceHandleTypeFlags) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> FenceGetFdInfoKHR {
+ self.inner
+ }
+}
+// Machine-generated FFI mirror of VkPhysicalDeviceMultiviewFeatures (spec link in the
+// `#[doc]` attribute below); `p_next` is `*mut` because the implementation may write
+// through the chain when this struct is queried via PhysicalDeviceFeatures2.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMultiviewFeatures.html>"]
+pub struct PhysicalDeviceMultiviewFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub multiview: Bool32,
+ pub multiview_geometry_shader: Bool32,
+ pub multiview_tessellation_shader: Bool32,
+}
+// `Default` presets `s_type` to this struct's tag and nulls `p_next`.
+impl ::std::default::Default for PhysicalDeviceMultiviewFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ multiview: Bool32::default(),
+ multiview_geometry_shader: Bool32::default(),
+ multiview_tessellation_shader: Bool32::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into `s_type`.
+unsafe impl TaggedStructure for PhysicalDeviceMultiviewFeatures {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MULTIVIEW_FEATURES;
+}
+impl PhysicalDeviceMultiviewFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceMultiviewFeaturesBuilder<'a> {
+ PhysicalDeviceMultiviewFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; `'a` is unused by these by-value setters and is kept
+// only for uniformity with other generated builders.
+#[repr(transparent)]
+pub struct PhysicalDeviceMultiviewFeaturesBuilder<'a> {
+ inner: PhysicalDeviceMultiviewFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Legal `p_next` extension of PhysicalDeviceFeatures2 and of DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMultiviewFeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMultiviewFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMultiviewFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMultiviewFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceMultiviewFeaturesBuilder<'a> {
+ type Target = PhysicalDeviceMultiviewFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceMultiviewFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Setters accept Rust `bool` and convert to the C-style `Bool32` via `.into()`.
+impl<'a> PhysicalDeviceMultiviewFeaturesBuilder<'a> {
+ #[inline]
+ pub fn multiview(mut self, multiview: bool) -> Self {
+ self.inner.multiview = multiview.into();
+ self
+ }
+ #[inline]
+ pub fn multiview_geometry_shader(mut self, multiview_geometry_shader: bool) -> Self {
+ self.inner.multiview_geometry_shader = multiview_geometry_shader.into();
+ self
+ }
+ #[inline]
+ pub fn multiview_tessellation_shader(mut self, multiview_tessellation_shader: bool) -> Self {
+ self.inner.multiview_tessellation_shader = multiview_tessellation_shader.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceMultiviewFeatures {
+ self.inner
+ }
+}
+// Machine-generated FFI mirror of VkPhysicalDeviceMultiviewProperties (spec link in the
+// `#[doc]` attribute below); `p_next` is `*mut` because the implementation fills this
+// struct when chained into PhysicalDeviceProperties2.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMultiviewProperties.html>"]
+pub struct PhysicalDeviceMultiviewProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub max_multiview_view_count: u32,
+ pub max_multiview_instance_index: u32,
+}
+// `Default` presets `s_type` to this struct's tag and nulls `p_next`.
+impl ::std::default::Default for PhysicalDeviceMultiviewProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ max_multiview_view_count: u32::default(),
+ max_multiview_instance_index: u32::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into `s_type`.
+unsafe impl TaggedStructure for PhysicalDeviceMultiviewProperties {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES;
+}
+impl PhysicalDeviceMultiviewProperties {
+ pub fn builder<'a>() -> PhysicalDeviceMultiviewPropertiesBuilder<'a> {
+ PhysicalDeviceMultiviewPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; `'a` is unused by these by-value setters and is kept
+// only for uniformity with other generated builders.
+#[repr(transparent)]
+pub struct PhysicalDeviceMultiviewPropertiesBuilder<'a> {
+ inner: PhysicalDeviceMultiviewProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Legal `p_next` extension of PhysicalDeviceProperties2.
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMultiviewPropertiesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMultiviewProperties {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceMultiviewPropertiesBuilder<'a> {
+ type Target = PhysicalDeviceMultiviewProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceMultiviewPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceMultiviewPropertiesBuilder<'a> {
+ #[inline]
+ pub fn max_multiview_view_count(mut self, max_multiview_view_count: u32) -> Self {
+ self.inner.max_multiview_view_count = max_multiview_view_count;
+ self
+ }
+ #[inline]
+ pub fn max_multiview_instance_index(mut self, max_multiview_instance_index: u32) -> Self {
+ self.inner.max_multiview_instance_index = max_multiview_instance_index;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceMultiviewProperties {
+ self.inner
+ }
+}
+// Machine-generated FFI mirror of VkRenderPassMultiviewCreateInfo (spec link in the
+// `#[doc]` attribute below); `#[repr(C)]` keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPassMultiviewCreateInfo.html>"]
+pub struct RenderPassMultiviewCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub subpass_count: u32,
+ pub p_view_masks: *const u32,
+ pub dependency_count: u32,
+ pub p_view_offsets: *const i32,
+ pub correlation_mask_count: u32,
+ pub p_correlation_masks: *const u32,
+}
+// `Default` presets `s_type` to this struct's tag, nulls all array pointers and
+// zeroes their counts.
+impl ::std::default::Default for RenderPassMultiviewCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ subpass_count: u32::default(),
+ p_view_masks: ::std::ptr::null(),
+ dependency_count: u32::default(),
+ p_view_offsets: ::std::ptr::null(),
+ correlation_mask_count: u32::default(),
+ p_correlation_masks: ::std::ptr::null(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into `s_type`.
+unsafe impl TaggedStructure for RenderPassMultiviewCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_MULTIVIEW_CREATE_INFO;
+}
+impl RenderPassMultiviewCreateInfo {
+ pub fn builder<'a>() -> RenderPassMultiviewCreateInfoBuilder<'a> {
+ RenderPassMultiviewCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; `'a` (held via `PhantomData`) ties the borrowed slices
+// passed to the setters below to the builder's lifetime.
+#[repr(transparent)]
+pub struct RenderPassMultiviewCreateInfoBuilder<'a> {
+ inner: RenderPassMultiviewCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Legal `p_next` extension of RenderPassCreateInfo.
+unsafe impl ExtendsRenderPassCreateInfo for RenderPassMultiviewCreateInfoBuilder<'_> {}
+unsafe impl ExtendsRenderPassCreateInfo for RenderPassMultiviewCreateInfo {}
+impl<'a> ::std::ops::Deref for RenderPassMultiviewCreateInfoBuilder<'a> {
+ type Target = RenderPassMultiviewCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for RenderPassMultiviewCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Each slice setter writes both the pointer and its paired count field, so the
+// two can never get out of sync. Note `view_masks` drives `subpass_count` and
+// `view_offsets` drives `dependency_count`, per the Vulkan struct definition.
+impl<'a> RenderPassMultiviewCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn view_masks(mut self, view_masks: &'a [u32]) -> Self {
+ self.inner.subpass_count = view_masks.len() as _;
+ self.inner.p_view_masks = view_masks.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn view_offsets(mut self, view_offsets: &'a [i32]) -> Self {
+ self.inner.dependency_count = view_offsets.len() as _;
+ self.inner.p_view_offsets = view_offsets.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn correlation_masks(mut self, correlation_masks: &'a [u32]) -> Self {
+ self.inner.correlation_mask_count = correlation_masks.len() as _;
+ self.inner.p_correlation_masks = correlation_masks.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> RenderPassMultiviewCreateInfo {
+ self.inner
+ }
+}
+// Machine-generated FFI mirror of VkSurfaceCapabilities2EXT (spec link in the
+// `#[doc]` attribute below). `p_next` is `*mut` — this struct appears to be
+// filled in by the implementation when queried; NOTE(review): confirm against
+// the surface-capabilities query entry point.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfaceCapabilities2EXT.html>"]
+pub struct SurfaceCapabilities2EXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub min_image_count: u32,
+ pub max_image_count: u32,
+ pub current_extent: Extent2D,
+ pub min_image_extent: Extent2D,
+ pub max_image_extent: Extent2D,
+ pub max_image_array_layers: u32,
+ pub supported_transforms: SurfaceTransformFlagsKHR,
+ pub current_transform: SurfaceTransformFlagsKHR,
+ pub supported_composite_alpha: CompositeAlphaFlagsKHR,
+ pub supported_usage_flags: ImageUsageFlags,
+ pub supported_surface_counters: SurfaceCounterFlagsEXT,
+}
+// `Default` presets `s_type` to this struct's tag and nulls `p_next`.
+impl ::std::default::Default for SurfaceCapabilities2EXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ min_image_count: u32::default(),
+ max_image_count: u32::default(),
+ current_extent: Extent2D::default(),
+ min_image_extent: Extent2D::default(),
+ max_image_extent: Extent2D::default(),
+ max_image_array_layers: u32::default(),
+ supported_transforms: SurfaceTransformFlagsKHR::default(),
+ current_transform: SurfaceTransformFlagsKHR::default(),
+ supported_composite_alpha: CompositeAlphaFlagsKHR::default(),
+ supported_usage_flags: ImageUsageFlags::default(),
+ supported_surface_counters: SurfaceCounterFlagsEXT::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into `s_type`.
+unsafe impl TaggedStructure for SurfaceCapabilities2EXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_CAPABILITIES_2_EXT;
+}
+impl SurfaceCapabilities2EXT {
+ pub fn builder<'a>() -> SurfaceCapabilities2EXTBuilder<'a> {
+ SurfaceCapabilities2EXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; `'a` is unused by these by-value setters and is kept
+// only for uniformity with other generated builders.
+#[repr(transparent)]
+pub struct SurfaceCapabilities2EXTBuilder<'a> {
+ inner: SurfaceCapabilities2EXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SurfaceCapabilities2EXTBuilder<'a> {
+ type Target = SurfaceCapabilities2EXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SurfaceCapabilities2EXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SurfaceCapabilities2EXTBuilder<'a> {
+ #[inline]
+ pub fn min_image_count(mut self, min_image_count: u32) -> Self {
+ self.inner.min_image_count = min_image_count;
+ self
+ }
+ #[inline]
+ pub fn max_image_count(mut self, max_image_count: u32) -> Self {
+ self.inner.max_image_count = max_image_count;
+ self
+ }
+ #[inline]
+ pub fn current_extent(mut self, current_extent: Extent2D) -> Self {
+ self.inner.current_extent = current_extent;
+ self
+ }
+ #[inline]
+ pub fn min_image_extent(mut self, min_image_extent: Extent2D) -> Self {
+ self.inner.min_image_extent = min_image_extent;
+ self
+ }
+ #[inline]
+ pub fn max_image_extent(mut self, max_image_extent: Extent2D) -> Self {
+ self.inner.max_image_extent = max_image_extent;
+ self
+ }
+ #[inline]
+ pub fn max_image_array_layers(mut self, max_image_array_layers: u32) -> Self {
+ self.inner.max_image_array_layers = max_image_array_layers;
+ self
+ }
+ #[inline]
+ pub fn supported_transforms(mut self, supported_transforms: SurfaceTransformFlagsKHR) -> Self {
+ self.inner.supported_transforms = supported_transforms;
+ self
+ }
+ #[inline]
+ pub fn current_transform(mut self, current_transform: SurfaceTransformFlagsKHR) -> Self {
+ self.inner.current_transform = current_transform;
+ self
+ }
+ #[inline]
+ pub fn supported_composite_alpha(
+ mut self,
+ supported_composite_alpha: CompositeAlphaFlagsKHR,
+ ) -> Self {
+ self.inner.supported_composite_alpha = supported_composite_alpha;
+ self
+ }
+ #[inline]
+ pub fn supported_usage_flags(mut self, supported_usage_flags: ImageUsageFlags) -> Self {
+ self.inner.supported_usage_flags = supported_usage_flags;
+ self
+ }
+ #[inline]
+ pub fn supported_surface_counters(
+ mut self,
+ supported_surface_counters: SurfaceCounterFlagsEXT,
+ ) -> Self {
+ self.inner.supported_surface_counters = supported_surface_counters;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SurfaceCapabilities2EXT {
+ self.inner
+ }
+}
+// Machine-generated FFI mirror of VkDisplayPowerInfoEXT (spec link in the
+// `#[doc]` attribute below); `#[repr(C)]` keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayPowerInfoEXT.html>"]
+pub struct DisplayPowerInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub power_state: DisplayPowerStateEXT,
+}
+// `Default` presets `s_type` to this struct's tag and nulls `p_next`.
+impl ::std::default::Default for DisplayPowerInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ power_state: DisplayPowerStateEXT::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into `s_type`.
+unsafe impl TaggedStructure for DisplayPowerInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_POWER_INFO_EXT;
+}
+impl DisplayPowerInfoEXT {
+ pub fn builder<'a>() -> DisplayPowerInfoEXTBuilder<'a> {
+ DisplayPowerInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; `'a` is unused by the single by-value setter and is kept
+// only for uniformity with other generated builders.
+#[repr(transparent)]
+pub struct DisplayPowerInfoEXTBuilder<'a> {
+ inner: DisplayPowerInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DisplayPowerInfoEXTBuilder<'a> {
+ type Target = DisplayPowerInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DisplayPowerInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DisplayPowerInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn power_state(mut self, power_state: DisplayPowerStateEXT) -> Self {
+ self.inner.power_state = power_state;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DisplayPowerInfoEXT {
+ self.inner
+ }
+}
+// Machine-generated FFI mirror of VkDeviceEventInfoEXT (spec link in the
+// `#[doc]` attribute below); `#[repr(C)]` keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceEventInfoEXT.html>"]
+pub struct DeviceEventInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub device_event: DeviceEventTypeEXT,
+}
+// `Default` presets `s_type` to this struct's tag and nulls `p_next`.
+impl ::std::default::Default for DeviceEventInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ device_event: DeviceEventTypeEXT::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into `s_type`.
+unsafe impl TaggedStructure for DeviceEventInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_EVENT_INFO_EXT;
+}
+impl DeviceEventInfoEXT {
+ pub fn builder<'a>() -> DeviceEventInfoEXTBuilder<'a> {
+ DeviceEventInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; `'a` is unused by the single by-value setter and is kept
+// only for uniformity with other generated builders.
+#[repr(transparent)]
+pub struct DeviceEventInfoEXTBuilder<'a> {
+ inner: DeviceEventInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DeviceEventInfoEXTBuilder<'a> {
+ type Target = DeviceEventInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceEventInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceEventInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn device_event(mut self, device_event: DeviceEventTypeEXT) -> Self {
+ self.inner.device_event = device_event;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceEventInfoEXT {
+ self.inner
+ }
+}
+// Machine-generated FFI mirror of VkDisplayEventInfoEXT (spec link in the
+// `#[doc]` attribute below); `#[repr(C)]` keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayEventInfoEXT.html>"]
+pub struct DisplayEventInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub display_event: DisplayEventTypeEXT,
+}
+// `Default` presets `s_type` to this struct's tag and nulls `p_next`.
+impl ::std::default::Default for DisplayEventInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ display_event: DisplayEventTypeEXT::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into `s_type`.
+unsafe impl TaggedStructure for DisplayEventInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_EVENT_INFO_EXT;
+}
+impl DisplayEventInfoEXT {
+ pub fn builder<'a>() -> DisplayEventInfoEXTBuilder<'a> {
+ DisplayEventInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; `'a` is unused by the single by-value setter and is kept
+// only for uniformity with other generated builders.
+#[repr(transparent)]
+pub struct DisplayEventInfoEXTBuilder<'a> {
+ inner: DisplayEventInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DisplayEventInfoEXTBuilder<'a> {
+ type Target = DisplayEventInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DisplayEventInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DisplayEventInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn display_event(mut self, display_event: DisplayEventTypeEXT) -> Self {
+ self.inner.display_event = display_event;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DisplayEventInfoEXT {
+ self.inner
+ }
+}
+// Machine-generated FFI mirror of VkSwapchainCounterCreateInfoEXT (spec link in the
+// `#[doc]` attribute below); `#[repr(C)]` keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSwapchainCounterCreateInfoEXT.html>"]
+pub struct SwapchainCounterCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub surface_counters: SurfaceCounterFlagsEXT,
+}
+// `Default` presets `s_type` to this struct's tag and nulls `p_next`.
+impl ::std::default::Default for SwapchainCounterCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ surface_counters: SurfaceCounterFlagsEXT::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into `s_type`.
+unsafe impl TaggedStructure for SwapchainCounterCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_COUNTER_CREATE_INFO_EXT;
+}
+impl SwapchainCounterCreateInfoEXT {
+ pub fn builder<'a>() -> SwapchainCounterCreateInfoEXTBuilder<'a> {
+ SwapchainCounterCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; `'a` is unused by the single by-value setter and is kept
+// only for uniformity with other generated builders.
+#[repr(transparent)]
+pub struct SwapchainCounterCreateInfoEXTBuilder<'a> {
+ inner: SwapchainCounterCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Legal `p_next` extension of SwapchainCreateInfoKHR.
+unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainCounterCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainCounterCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for SwapchainCounterCreateInfoEXTBuilder<'a> {
+ type Target = SwapchainCounterCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SwapchainCounterCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SwapchainCounterCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn surface_counters(mut self, surface_counters: SurfaceCounterFlagsEXT) -> Self {
+ self.inner.surface_counters = surface_counters;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SwapchainCounterCreateInfoEXT {
+ self.inner
+ }
+}
+// Machine-generated FFI mirror of VkPhysicalDeviceGroupProperties (spec link in the
+// `#[doc]` attribute below). Holds a fixed-size array of MAX_DEVICE_GROUP_SIZE
+// handles; only the first `physical_device_count` entries are meaningful.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceGroupProperties.html>"]
+pub struct PhysicalDeviceGroupProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub physical_device_count: u32,
+ pub physical_devices: [PhysicalDevice; MAX_DEVICE_GROUP_SIZE],
+ pub subset_allocation: Bool32,
+}
+// `Default` presets `s_type` to this struct's tag and nulls `p_next`.
+impl ::std::default::Default for PhysicalDeviceGroupProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ physical_device_count: u32::default(),
+ // The handle array has no `Default` impl for this length; all-zero
+ // bytes yield null PhysicalDevice handles.
+ physical_devices: unsafe { ::std::mem::zeroed() },
+ subset_allocation: Bool32::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into `s_type`.
+unsafe impl TaggedStructure for PhysicalDeviceGroupProperties {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_GROUP_PROPERTIES;
+}
+impl PhysicalDeviceGroupProperties {
+ pub fn builder<'a>() -> PhysicalDeviceGroupPropertiesBuilder<'a> {
+ PhysicalDeviceGroupPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; `'a` is unused by these by-value setters and is kept
+// only for uniformity with other generated builders.
+#[repr(transparent)]
+pub struct PhysicalDeviceGroupPropertiesBuilder<'a> {
+ inner: PhysicalDeviceGroupProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PhysicalDeviceGroupPropertiesBuilder<'a> {
+ type Target = PhysicalDeviceGroupProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceGroupPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceGroupPropertiesBuilder<'a> {
+ #[inline]
+ pub fn physical_device_count(mut self, physical_device_count: u32) -> Self {
+ self.inner.physical_device_count = physical_device_count;
+ self
+ }
+ #[inline]
+ pub fn physical_devices(
+ mut self,
+ physical_devices: [PhysicalDevice; MAX_DEVICE_GROUP_SIZE],
+ ) -> Self {
+ self.inner.physical_devices = physical_devices;
+ self
+ }
+ #[inline]
+ pub fn subset_allocation(mut self, subset_allocation: bool) -> Self {
+ // Rust `bool` is converted to the C-style `Bool32` via `.into()`.
+ self.inner.subset_allocation = subset_allocation.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceGroupProperties {
+ self.inner
+ }
+}
+// Machine-generated FFI mirror of VkMemoryAllocateFlagsInfo (spec link in the
+// `#[doc]` attribute below); `#[repr(C)]` keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryAllocateFlagsInfo.html>"]
+pub struct MemoryAllocateFlagsInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: MemoryAllocateFlags,
+ pub device_mask: u32,
+}
+// `Default` presets `s_type` to this struct's tag and nulls `p_next`.
+impl ::std::default::Default for MemoryAllocateFlagsInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: MemoryAllocateFlags::default(),
+ device_mask: u32::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into `s_type`.
+unsafe impl TaggedStructure for MemoryAllocateFlagsInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_ALLOCATE_FLAGS_INFO;
+}
+impl MemoryAllocateFlagsInfo {
+ pub fn builder<'a>() -> MemoryAllocateFlagsInfoBuilder<'a> {
+ MemoryAllocateFlagsInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; `'a` is unused by these by-value setters and is kept
+// only for uniformity with other generated builders.
+#[repr(transparent)]
+pub struct MemoryAllocateFlagsInfoBuilder<'a> {
+ inner: MemoryAllocateFlagsInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Legal `p_next` extension of MemoryAllocateInfo.
+unsafe impl ExtendsMemoryAllocateInfo for MemoryAllocateFlagsInfoBuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for MemoryAllocateFlagsInfo {}
+impl<'a> ::std::ops::Deref for MemoryAllocateFlagsInfoBuilder<'a> {
+ type Target = MemoryAllocateFlagsInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MemoryAllocateFlagsInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MemoryAllocateFlagsInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: MemoryAllocateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn device_mask(mut self, device_mask: u32) -> Self {
+ self.inner.device_mask = device_mask;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MemoryAllocateFlagsInfo {
+ self.inner
+ }
+}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBindBufferMemoryInfo.html>"]
pub struct BindBufferMemoryInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub buffer: Buffer,
    pub memory: DeviceMemory,
    pub memory_offset: DeviceSize,
}
impl ::std::default::Default for BindBufferMemoryInfo {
    #[inline]
    fn default() -> Self {
        // `s_type` is pre-set so a default value is already correctly tagged;
        // the remaining fields are the types' zero/null defaults.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            buffer: Buffer::default(),
            memory: DeviceMemory::default(),
            memory_offset: DeviceSize::default(),
        }
    }
}
unsafe impl TaggedStructure for BindBufferMemoryInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::BIND_BUFFER_MEMORY_INFO;
}
impl BindBufferMemoryInfo {
    /// Creates a builder wrapping a default-initialized [`BindBufferMemoryInfo`].
    pub fn builder<'a>() -> BindBufferMemoryInfoBuilder<'a> {
        BindBufferMemoryInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// `repr(transparent)`: the builder has the same layout as the wrapped struct,
// so (via `Deref`) references to it can stand in for the raw Vulkan struct.
#[repr(transparent)]
pub struct BindBufferMemoryInfoBuilder<'a> {
    inner: BindBufferMemoryInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
/// Marker for extension structs that may be pushed onto a [`BindBufferMemoryInfo`]
/// `p_next` chain. Unsafe to implement: `push_next` reinterprets implementors as
/// chainable Vulkan structs — TODO confirm exact contract against `ptr_chain_iter`.
pub unsafe trait ExtendsBindBufferMemoryInfo {}
impl<'a> ::std::ops::Deref for BindBufferMemoryInfoBuilder<'a> {
    type Target = BindBufferMemoryInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for BindBufferMemoryInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> BindBufferMemoryInfoBuilder<'a> {
    #[inline]
    pub fn buffer(mut self, buffer: Buffer) -> Self {
        self.inner.buffer = buffer;
        self
    }
    #[inline]
    pub fn memory(mut self, memory: DeviceMemory) -> Self {
        self.inner.memory = memory;
        self
    }
    #[inline]
    pub fn memory_offset(mut self, memory_offset: DeviceSize) -> Self {
        self.inner.memory_offset = memory_offset;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsBindBufferMemoryInfo>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*const T>::cast(next);
            // `ptr_chain_iter` appears to yield `next` plus everything already
            // chained behind it; `last()` is then the tail of the pushed chain.
            // The iterator always yields `next` itself, so `unwrap` cannot fail.
            let last_next = ptr_chain_iter(next).last().unwrap();
            // Splice: the pushed chain's tail points at the old head, and the
            // root's `p_next` now points at `next`.
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> BindBufferMemoryInfo {
        self.inner
    }
}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBindBufferMemoryDeviceGroupInfo.html>"]
+pub struct BindBufferMemoryDeviceGroupInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub device_index_count: u32,
+ pub p_device_indices: *const u32,
+}
+impl ::std::default::Default for BindBufferMemoryDeviceGroupInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ device_index_count: u32::default(),
+ p_device_indices: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for BindBufferMemoryDeviceGroupInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO;
+}
+impl BindBufferMemoryDeviceGroupInfo {
+ pub fn builder<'a>() -> BindBufferMemoryDeviceGroupInfoBuilder<'a> {
+ BindBufferMemoryDeviceGroupInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct BindBufferMemoryDeviceGroupInfoBuilder<'a> {
+ inner: BindBufferMemoryDeviceGroupInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsBindBufferMemoryInfo for BindBufferMemoryDeviceGroupInfoBuilder<'_> {}
+unsafe impl ExtendsBindBufferMemoryInfo for BindBufferMemoryDeviceGroupInfo {}
+impl<'a> ::std::ops::Deref for BindBufferMemoryDeviceGroupInfoBuilder<'a> {
+ type Target = BindBufferMemoryDeviceGroupInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BindBufferMemoryDeviceGroupInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BindBufferMemoryDeviceGroupInfoBuilder<'a> {
+ #[inline]
+ pub fn device_indices(mut self, device_indices: &'a [u32]) -> Self {
+ self.inner.device_index_count = device_indices.len() as _;
+ self.inner.p_device_indices = device_indices.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BindBufferMemoryDeviceGroupInfo {
+ self.inner
+ }
+}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBindImageMemoryInfo.html>"]
pub struct BindImageMemoryInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub image: Image,
    pub memory: DeviceMemory,
    pub memory_offset: DeviceSize,
}
impl ::std::default::Default for BindImageMemoryInfo {
    #[inline]
    fn default() -> Self {
        // `s_type` is pre-set so a default value is already correctly tagged;
        // the remaining fields are the types' zero/null defaults.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            image: Image::default(),
            memory: DeviceMemory::default(),
            memory_offset: DeviceSize::default(),
        }
    }
}
unsafe impl TaggedStructure for BindImageMemoryInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::BIND_IMAGE_MEMORY_INFO;
}
impl BindImageMemoryInfo {
    /// Creates a builder wrapping a default-initialized [`BindImageMemoryInfo`].
    pub fn builder<'a>() -> BindImageMemoryInfoBuilder<'a> {
        BindImageMemoryInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// `repr(transparent)`: the builder has the same layout as the wrapped struct,
// so (via `Deref`) references to it can stand in for the raw Vulkan struct.
#[repr(transparent)]
pub struct BindImageMemoryInfoBuilder<'a> {
    inner: BindImageMemoryInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
/// Marker for extension structs that may be pushed onto a [`BindImageMemoryInfo`]
/// `p_next` chain. Unsafe to implement: `push_next` reinterprets implementors as
/// chainable Vulkan structs — TODO confirm exact contract against `ptr_chain_iter`.
pub unsafe trait ExtendsBindImageMemoryInfo {}
impl<'a> ::std::ops::Deref for BindImageMemoryInfoBuilder<'a> {
    type Target = BindImageMemoryInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for BindImageMemoryInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> BindImageMemoryInfoBuilder<'a> {
    #[inline]
    pub fn image(mut self, image: Image) -> Self {
        self.inner.image = image;
        self
    }
    #[inline]
    pub fn memory(mut self, memory: DeviceMemory) -> Self {
        self.inner.memory = memory;
        self
    }
    #[inline]
    pub fn memory_offset(mut self, memory_offset: DeviceSize) -> Self {
        self.inner.memory_offset = memory_offset;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsBindImageMemoryInfo>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*const T>::cast(next);
            // `ptr_chain_iter` appears to yield `next` plus everything already
            // chained behind it; `last()` is then the tail of the pushed chain.
            // The iterator always yields `next` itself, so `unwrap` cannot fail.
            let last_next = ptr_chain_iter(next).last().unwrap();
            // Splice: the pushed chain's tail points at the old head, and the
            // root's `p_next` now points at `next`.
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> BindImageMemoryInfo {
        self.inner
    }
}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBindImageMemoryDeviceGroupInfo.html>"]
+pub struct BindImageMemoryDeviceGroupInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub device_index_count: u32,
+ pub p_device_indices: *const u32,
+ pub split_instance_bind_region_count: u32,
+ pub p_split_instance_bind_regions: *const Rect2D,
+}
+impl ::std::default::Default for BindImageMemoryDeviceGroupInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ device_index_count: u32::default(),
+ p_device_indices: ::std::ptr::null(),
+ split_instance_bind_region_count: u32::default(),
+ p_split_instance_bind_regions: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for BindImageMemoryDeviceGroupInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO;
+}
+impl BindImageMemoryDeviceGroupInfo {
+ pub fn builder<'a>() -> BindImageMemoryDeviceGroupInfoBuilder<'a> {
+ BindImageMemoryDeviceGroupInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct BindImageMemoryDeviceGroupInfoBuilder<'a> {
+ inner: BindImageMemoryDeviceGroupInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsBindImageMemoryInfo for BindImageMemoryDeviceGroupInfoBuilder<'_> {}
+unsafe impl ExtendsBindImageMemoryInfo for BindImageMemoryDeviceGroupInfo {}
+impl<'a> ::std::ops::Deref for BindImageMemoryDeviceGroupInfoBuilder<'a> {
+ type Target = BindImageMemoryDeviceGroupInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BindImageMemoryDeviceGroupInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BindImageMemoryDeviceGroupInfoBuilder<'a> {
+ #[inline]
+ pub fn device_indices(mut self, device_indices: &'a [u32]) -> Self {
+ self.inner.device_index_count = device_indices.len() as _;
+ self.inner.p_device_indices = device_indices.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn split_instance_bind_regions(
+ mut self,
+ split_instance_bind_regions: &'a [Rect2D],
+ ) -> Self {
+ self.inner.split_instance_bind_region_count = split_instance_bind_regions.len() as _;
+ self.inner.p_split_instance_bind_regions = split_instance_bind_regions.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BindImageMemoryDeviceGroupInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceGroupRenderPassBeginInfo.html>"]
+pub struct DeviceGroupRenderPassBeginInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub device_mask: u32,
+ pub device_render_area_count: u32,
+ pub p_device_render_areas: *const Rect2D,
+}
+impl ::std::default::Default for DeviceGroupRenderPassBeginInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ device_mask: u32::default(),
+ device_render_area_count: u32::default(),
+ p_device_render_areas: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DeviceGroupRenderPassBeginInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_RENDER_PASS_BEGIN_INFO;
+}
+impl DeviceGroupRenderPassBeginInfo {
+ pub fn builder<'a>() -> DeviceGroupRenderPassBeginInfoBuilder<'a> {
+ DeviceGroupRenderPassBeginInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DeviceGroupRenderPassBeginInfoBuilder<'a> {
+ inner: DeviceGroupRenderPassBeginInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsRenderPassBeginInfo for DeviceGroupRenderPassBeginInfoBuilder<'_> {}
+unsafe impl ExtendsRenderPassBeginInfo for DeviceGroupRenderPassBeginInfo {}
+unsafe impl ExtendsRenderingInfo for DeviceGroupRenderPassBeginInfoBuilder<'_> {}
+unsafe impl ExtendsRenderingInfo for DeviceGroupRenderPassBeginInfo {}
+impl<'a> ::std::ops::Deref for DeviceGroupRenderPassBeginInfoBuilder<'a> {
+ type Target = DeviceGroupRenderPassBeginInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceGroupRenderPassBeginInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceGroupRenderPassBeginInfoBuilder<'a> {
+ #[inline]
+ pub fn device_mask(mut self, device_mask: u32) -> Self {
+ self.inner.device_mask = device_mask;
+ self
+ }
+ #[inline]
+ pub fn device_render_areas(mut self, device_render_areas: &'a [Rect2D]) -> Self {
+ self.inner.device_render_area_count = device_render_areas.len() as _;
+ self.inner.p_device_render_areas = device_render_areas.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceGroupRenderPassBeginInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceGroupCommandBufferBeginInfo.html>"]
+pub struct DeviceGroupCommandBufferBeginInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub device_mask: u32,
+}
+impl ::std::default::Default for DeviceGroupCommandBufferBeginInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ device_mask: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DeviceGroupCommandBufferBeginInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO;
+}
+impl DeviceGroupCommandBufferBeginInfo {
+ pub fn builder<'a>() -> DeviceGroupCommandBufferBeginInfoBuilder<'a> {
+ DeviceGroupCommandBufferBeginInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DeviceGroupCommandBufferBeginInfoBuilder<'a> {
+ inner: DeviceGroupCommandBufferBeginInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsCommandBufferBeginInfo for DeviceGroupCommandBufferBeginInfoBuilder<'_> {}
+unsafe impl ExtendsCommandBufferBeginInfo for DeviceGroupCommandBufferBeginInfo {}
+impl<'a> ::std::ops::Deref for DeviceGroupCommandBufferBeginInfoBuilder<'a> {
+ type Target = DeviceGroupCommandBufferBeginInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceGroupCommandBufferBeginInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceGroupCommandBufferBeginInfoBuilder<'a> {
+ #[inline]
+ pub fn device_mask(mut self, device_mask: u32) -> Self {
+ self.inner.device_mask = device_mask;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceGroupCommandBufferBeginInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceGroupSubmitInfo.html>"]
+pub struct DeviceGroupSubmitInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub wait_semaphore_count: u32,
+ pub p_wait_semaphore_device_indices: *const u32,
+ pub command_buffer_count: u32,
+ pub p_command_buffer_device_masks: *const u32,
+ pub signal_semaphore_count: u32,
+ pub p_signal_semaphore_device_indices: *const u32,
+}
+impl ::std::default::Default for DeviceGroupSubmitInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ wait_semaphore_count: u32::default(),
+ p_wait_semaphore_device_indices: ::std::ptr::null(),
+ command_buffer_count: u32::default(),
+ p_command_buffer_device_masks: ::std::ptr::null(),
+ signal_semaphore_count: u32::default(),
+ p_signal_semaphore_device_indices: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DeviceGroupSubmitInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_SUBMIT_INFO;
+}
+impl DeviceGroupSubmitInfo {
+ pub fn builder<'a>() -> DeviceGroupSubmitInfoBuilder<'a> {
+ DeviceGroupSubmitInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DeviceGroupSubmitInfoBuilder<'a> {
+ inner: DeviceGroupSubmitInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsSubmitInfo for DeviceGroupSubmitInfoBuilder<'_> {}
+unsafe impl ExtendsSubmitInfo for DeviceGroupSubmitInfo {}
+impl<'a> ::std::ops::Deref for DeviceGroupSubmitInfoBuilder<'a> {
+ type Target = DeviceGroupSubmitInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceGroupSubmitInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceGroupSubmitInfoBuilder<'a> {
+ #[inline]
+ pub fn wait_semaphore_device_indices(
+ mut self,
+ wait_semaphore_device_indices: &'a [u32],
+ ) -> Self {
+ self.inner.wait_semaphore_count = wait_semaphore_device_indices.len() as _;
+ self.inner.p_wait_semaphore_device_indices = wait_semaphore_device_indices.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn command_buffer_device_masks(mut self, command_buffer_device_masks: &'a [u32]) -> Self {
+ self.inner.command_buffer_count = command_buffer_device_masks.len() as _;
+ self.inner.p_command_buffer_device_masks = command_buffer_device_masks.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn signal_semaphore_device_indices(
+ mut self,
+ signal_semaphore_device_indices: &'a [u32],
+ ) -> Self {
+ self.inner.signal_semaphore_count = signal_semaphore_device_indices.len() as _;
+ self.inner.p_signal_semaphore_device_indices = signal_semaphore_device_indices.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceGroupSubmitInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceGroupBindSparseInfo.html>"]
+pub struct DeviceGroupBindSparseInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub resource_device_index: u32,
+ pub memory_device_index: u32,
+}
+impl ::std::default::Default for DeviceGroupBindSparseInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ resource_device_index: u32::default(),
+ memory_device_index: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DeviceGroupBindSparseInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_BIND_SPARSE_INFO;
+}
+impl DeviceGroupBindSparseInfo {
+ pub fn builder<'a>() -> DeviceGroupBindSparseInfoBuilder<'a> {
+ DeviceGroupBindSparseInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DeviceGroupBindSparseInfoBuilder<'a> {
+ inner: DeviceGroupBindSparseInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsBindSparseInfo for DeviceGroupBindSparseInfoBuilder<'_> {}
+unsafe impl ExtendsBindSparseInfo for DeviceGroupBindSparseInfo {}
+impl<'a> ::std::ops::Deref for DeviceGroupBindSparseInfoBuilder<'a> {
+ type Target = DeviceGroupBindSparseInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceGroupBindSparseInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceGroupBindSparseInfoBuilder<'a> {
+ #[inline]
+ pub fn resource_device_index(mut self, resource_device_index: u32) -> Self {
+ self.inner.resource_device_index = resource_device_index;
+ self
+ }
+ #[inline]
+ pub fn memory_device_index(mut self, memory_device_index: u32) -> Self {
+ self.inner.memory_device_index = memory_device_index;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceGroupBindSparseInfo {
+ self.inner
+ }
+}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceGroupPresentCapabilitiesKHR.html>"]
pub struct DeviceGroupPresentCapabilitiesKHR {
    pub s_type: StructureType,
    // NOTE(review): `*mut` (unlike the `*const c_void` chains on the neighboring
    // input structs) — presumably because this struct is written by the
    // implementation as a query output; confirm against the spec link above.
    pub p_next: *mut c_void,
    pub present_mask: [u32; MAX_DEVICE_GROUP_SIZE],
    pub modes: DeviceGroupPresentModeFlagsKHR,
}
impl ::std::default::Default for DeviceGroupPresentCapabilitiesKHR {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            // SAFETY: an all-zero bit pattern is a valid value for a plain
            // `[u32; MAX_DEVICE_GROUP_SIZE]` array.
            present_mask: unsafe { ::std::mem::zeroed() },
            modes: DeviceGroupPresentModeFlagsKHR::default(),
        }
    }
}
unsafe impl TaggedStructure for DeviceGroupPresentCapabilitiesKHR {
    const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_PRESENT_CAPABILITIES_KHR;
}
impl DeviceGroupPresentCapabilitiesKHR {
    /// Creates a builder wrapping a default-initialized struct.
    pub fn builder<'a>() -> DeviceGroupPresentCapabilitiesKHRBuilder<'a> {
        DeviceGroupPresentCapabilitiesKHRBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// `repr(transparent)`: the builder has the same layout as the wrapped struct,
// so (via `Deref`) references to it can stand in for the raw Vulkan struct.
#[repr(transparent)]
pub struct DeviceGroupPresentCapabilitiesKHRBuilder<'a> {
    inner: DeviceGroupPresentCapabilitiesKHR,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DeviceGroupPresentCapabilitiesKHRBuilder<'a> {
    type Target = DeviceGroupPresentCapabilitiesKHR;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for DeviceGroupPresentCapabilitiesKHRBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> DeviceGroupPresentCapabilitiesKHRBuilder<'a> {
    #[inline]
    pub fn present_mask(mut self, present_mask: [u32; MAX_DEVICE_GROUP_SIZE]) -> Self {
        self.inner.present_mask = present_mask;
        self
    }
    #[inline]
    pub fn modes(mut self, modes: DeviceGroupPresentModeFlagsKHR) -> Self {
        self.inner.modes = modes;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> DeviceGroupPresentCapabilitiesKHR {
        self.inner
    }
}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageSwapchainCreateInfoKHR.html>"]
+pub struct ImageSwapchainCreateInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub swapchain: SwapchainKHR,
+}
+impl ::std::default::Default for ImageSwapchainCreateInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ swapchain: SwapchainKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImageSwapchainCreateInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_SWAPCHAIN_CREATE_INFO_KHR;
+}
+impl ImageSwapchainCreateInfoKHR {
+ pub fn builder<'a>() -> ImageSwapchainCreateInfoKHRBuilder<'a> {
+ ImageSwapchainCreateInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImageSwapchainCreateInfoKHRBuilder<'a> {
+ inner: ImageSwapchainCreateInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsImageCreateInfo for ImageSwapchainCreateInfoKHRBuilder<'_> {}
+unsafe impl ExtendsImageCreateInfo for ImageSwapchainCreateInfoKHR {}
+impl<'a> ::std::ops::Deref for ImageSwapchainCreateInfoKHRBuilder<'a> {
+ type Target = ImageSwapchainCreateInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageSwapchainCreateInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImageSwapchainCreateInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn swapchain(mut self, swapchain: SwapchainKHR) -> Self {
+ self.inner.swapchain = swapchain;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageSwapchainCreateInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBindImageMemorySwapchainInfoKHR.html>"]
+pub struct BindImageMemorySwapchainInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub swapchain: SwapchainKHR,
+ pub image_index: u32,
+}
+impl ::std::default::Default for BindImageMemorySwapchainInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ swapchain: SwapchainKHR::default(),
+ image_index: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for BindImageMemorySwapchainInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR;
+}
+impl BindImageMemorySwapchainInfoKHR {
+ pub fn builder<'a>() -> BindImageMemorySwapchainInfoKHRBuilder<'a> {
+ BindImageMemorySwapchainInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct BindImageMemorySwapchainInfoKHRBuilder<'a> {
+ inner: BindImageMemorySwapchainInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsBindImageMemoryInfo for BindImageMemorySwapchainInfoKHRBuilder<'_> {}
+unsafe impl ExtendsBindImageMemoryInfo for BindImageMemorySwapchainInfoKHR {}
+impl<'a> ::std::ops::Deref for BindImageMemorySwapchainInfoKHRBuilder<'a> {
+ type Target = BindImageMemorySwapchainInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BindImageMemorySwapchainInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BindImageMemorySwapchainInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn swapchain(mut self, swapchain: SwapchainKHR) -> Self {
+ self.inner.swapchain = swapchain;
+ self
+ }
+ #[inline]
+ pub fn image_index(mut self, image_index: u32) -> Self {
+ self.inner.image_index = image_index;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BindImageMemorySwapchainInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAcquireNextImageInfoKHR.html>"]
+pub struct AcquireNextImageInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub swapchain: SwapchainKHR,
+ pub timeout: u64,
+ pub semaphore: Semaphore,
+ pub fence: Fence,
+ pub device_mask: u32,
+}
+impl ::std::default::Default for AcquireNextImageInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ swapchain: SwapchainKHR::default(),
+ timeout: u64::default(),
+ semaphore: Semaphore::default(),
+ fence: Fence::default(),
+ device_mask: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for AcquireNextImageInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::ACQUIRE_NEXT_IMAGE_INFO_KHR;
+}
+impl AcquireNextImageInfoKHR {
+ pub fn builder<'a>() -> AcquireNextImageInfoKHRBuilder<'a> {
+ AcquireNextImageInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct AcquireNextImageInfoKHRBuilder<'a> {
+ inner: AcquireNextImageInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AcquireNextImageInfoKHRBuilder<'a> {
+ type Target = AcquireNextImageInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for AcquireNextImageInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> AcquireNextImageInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn swapchain(mut self, swapchain: SwapchainKHR) -> Self {
+ self.inner.swapchain = swapchain;
+ self
+ }
+ #[inline]
+ pub fn timeout(mut self, timeout: u64) -> Self {
+ self.inner.timeout = timeout;
+ self
+ }
+ #[inline]
+ pub fn semaphore(mut self, semaphore: Semaphore) -> Self {
+ self.inner.semaphore = semaphore;
+ self
+ }
+ #[inline]
+ pub fn fence(mut self, fence: Fence) -> Self {
+ self.inner.fence = fence;
+ self
+ }
+ #[inline]
+ pub fn device_mask(mut self, device_mask: u32) -> Self {
+ self.inner.device_mask = device_mask;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> AcquireNextImageInfoKHR {
+ self.inner
+ }
+}
+// FFI mirror of VkDeviceGroupPresentInfoKHR; `Default` pre-fills `s_type` and
+// leaves `p_next` null.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceGroupPresentInfoKHR.html>"]
+pub struct DeviceGroupPresentInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub swapchain_count: u32,
+    pub p_device_masks: *const u32,
+    pub mode: DeviceGroupPresentModeFlagsKHR,
+}
+impl ::std::default::Default for DeviceGroupPresentInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            swapchain_count: u32::default(),
+            p_device_masks: ::std::ptr::null(),
+            mode: DeviceGroupPresentModeFlagsKHR::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for DeviceGroupPresentInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_PRESENT_INFO_KHR;
+}
+impl DeviceGroupPresentInfoKHR {
+    pub fn builder<'a>() -> DeviceGroupPresentInfoKHRBuilder<'a> {
+        DeviceGroupPresentInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct DeviceGroupPresentInfoKHRBuilder<'a> {
+    inner: DeviceGroupPresentInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: this struct may be chained into `PresentInfoKHR::p_next`.
+unsafe impl ExtendsPresentInfoKHR for DeviceGroupPresentInfoKHRBuilder<'_> {}
+unsafe impl ExtendsPresentInfoKHR for DeviceGroupPresentInfoKHR {}
+impl<'a> ::std::ops::Deref for DeviceGroupPresentInfoKHRBuilder<'a> {
+    type Target = DeviceGroupPresentInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DeviceGroupPresentInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DeviceGroupPresentInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn device_masks(mut self, device_masks: &'a [u32]) -> Self {
+        // `swapchain_count` is derived from the slice length; the 'a lifetime
+        // keeps the borrowed array alive while `p_device_masks` points at it.
+        self.inner.swapchain_count = device_masks.len() as _;
+        self.inner.p_device_masks = device_masks.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn mode(mut self, mode: DeviceGroupPresentModeFlagsKHR) -> Self {
+        self.inner.mode = mode;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DeviceGroupPresentInfoKHR {
+        self.inner
+    }
+}
+// FFI mirror of VkDeviceGroupDeviceCreateInfo; `Default` pre-fills `s_type`.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceGroupDeviceCreateInfo.html>"]
+pub struct DeviceGroupDeviceCreateInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub physical_device_count: u32,
+    pub p_physical_devices: *const PhysicalDevice,
+}
+impl ::std::default::Default for DeviceGroupDeviceCreateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            physical_device_count: u32::default(),
+            p_physical_devices: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for DeviceGroupDeviceCreateInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_DEVICE_CREATE_INFO;
+}
+impl DeviceGroupDeviceCreateInfo {
+    pub fn builder<'a>() -> DeviceGroupDeviceCreateInfoBuilder<'a> {
+        DeviceGroupDeviceCreateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct DeviceGroupDeviceCreateInfoBuilder<'a> {
+    inner: DeviceGroupDeviceCreateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: this struct may be chained into `DeviceCreateInfo::p_next`.
+unsafe impl ExtendsDeviceCreateInfo for DeviceGroupDeviceCreateInfoBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for DeviceGroupDeviceCreateInfo {}
+impl<'a> ::std::ops::Deref for DeviceGroupDeviceCreateInfoBuilder<'a> {
+    type Target = DeviceGroupDeviceCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DeviceGroupDeviceCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DeviceGroupDeviceCreateInfoBuilder<'a> {
+    #[inline]
+    pub fn physical_devices(mut self, physical_devices: &'a [PhysicalDevice]) -> Self {
+        // Count field is derived from the slice; 'a keeps the borrow alive.
+        self.inner.physical_device_count = physical_devices.len() as _;
+        self.inner.p_physical_devices = physical_devices.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DeviceGroupDeviceCreateInfo {
+        self.inner
+    }
+}
+// FFI mirror of VkDeviceGroupSwapchainCreateInfoKHR; `Default` pre-fills `s_type`.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceGroupSwapchainCreateInfoKHR.html>"]
+pub struct DeviceGroupSwapchainCreateInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub modes: DeviceGroupPresentModeFlagsKHR,
+}
+impl ::std::default::Default for DeviceGroupSwapchainCreateInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            modes: DeviceGroupPresentModeFlagsKHR::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for DeviceGroupSwapchainCreateInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR;
+}
+impl DeviceGroupSwapchainCreateInfoKHR {
+    pub fn builder<'a>() -> DeviceGroupSwapchainCreateInfoKHRBuilder<'a> {
+        DeviceGroupSwapchainCreateInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct DeviceGroupSwapchainCreateInfoKHRBuilder<'a> {
+    inner: DeviceGroupSwapchainCreateInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: this struct may be chained into `SwapchainCreateInfoKHR::p_next`.
+unsafe impl ExtendsSwapchainCreateInfoKHR for DeviceGroupSwapchainCreateInfoKHRBuilder<'_> {}
+unsafe impl ExtendsSwapchainCreateInfoKHR for DeviceGroupSwapchainCreateInfoKHR {}
+impl<'a> ::std::ops::Deref for DeviceGroupSwapchainCreateInfoKHRBuilder<'a> {
+    type Target = DeviceGroupSwapchainCreateInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DeviceGroupSwapchainCreateInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DeviceGroupSwapchainCreateInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn modes(mut self, modes: DeviceGroupPresentModeFlagsKHR) -> Self {
+        self.inner.modes = modes;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DeviceGroupSwapchainCreateInfoKHR {
+        self.inner
+    }
+}
+// Plain #[repr(C)] mirror of VkDescriptorUpdateTemplateEntry — no s_type/p_next,
+// so plain `#[derive(Default)]` suffices.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorUpdateTemplateEntry.html>"]
+pub struct DescriptorUpdateTemplateEntry {
+    pub dst_binding: u32,
+    pub dst_array_element: u32,
+    pub descriptor_count: u32,
+    pub descriptor_type: DescriptorType,
+    pub offset: usize,
+    pub stride: usize,
+}
+impl DescriptorUpdateTemplateEntry {
+    pub fn builder<'a>() -> DescriptorUpdateTemplateEntryBuilder<'a> {
+        DescriptorUpdateTemplateEntryBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct DescriptorUpdateTemplateEntryBuilder<'a> {
+    inner: DescriptorUpdateTemplateEntry,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DescriptorUpdateTemplateEntryBuilder<'a> {
+    type Target = DescriptorUpdateTemplateEntry;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DescriptorUpdateTemplateEntryBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters; each consumes and returns the builder.
+impl<'a> DescriptorUpdateTemplateEntryBuilder<'a> {
+    #[inline]
+    pub fn dst_binding(mut self, dst_binding: u32) -> Self {
+        self.inner.dst_binding = dst_binding;
+        self
+    }
+    #[inline]
+    pub fn dst_array_element(mut self, dst_array_element: u32) -> Self {
+        self.inner.dst_array_element = dst_array_element;
+        self
+    }
+    #[inline]
+    pub fn descriptor_count(mut self, descriptor_count: u32) -> Self {
+        self.inner.descriptor_count = descriptor_count;
+        self
+    }
+    #[inline]
+    pub fn descriptor_type(mut self, descriptor_type: DescriptorType) -> Self {
+        self.inner.descriptor_type = descriptor_type;
+        self
+    }
+    #[inline]
+    pub fn offset(mut self, offset: usize) -> Self {
+        self.inner.offset = offset;
+        self
+    }
+    #[inline]
+    pub fn stride(mut self, stride: usize) -> Self {
+        self.inner.stride = stride;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DescriptorUpdateTemplateEntry {
+        self.inner
+    }
+}
+// FFI mirror of VkDescriptorUpdateTemplateCreateInfo; `Default` pre-fills `s_type`.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorUpdateTemplateCreateInfo.html>"]
+pub struct DescriptorUpdateTemplateCreateInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub flags: DescriptorUpdateTemplateCreateFlags,
+    pub descriptor_update_entry_count: u32,
+    pub p_descriptor_update_entries: *const DescriptorUpdateTemplateEntry,
+    pub template_type: DescriptorUpdateTemplateType,
+    pub descriptor_set_layout: DescriptorSetLayout,
+    pub pipeline_bind_point: PipelineBindPoint,
+    pub pipeline_layout: PipelineLayout,
+    pub set: u32,
+}
+impl ::std::default::Default for DescriptorUpdateTemplateCreateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            flags: DescriptorUpdateTemplateCreateFlags::default(),
+            descriptor_update_entry_count: u32::default(),
+            p_descriptor_update_entries: ::std::ptr::null(),
+            template_type: DescriptorUpdateTemplateType::default(),
+            descriptor_set_layout: DescriptorSetLayout::default(),
+            pipeline_bind_point: PipelineBindPoint::default(),
+            pipeline_layout: PipelineLayout::default(),
+            set: u32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for DescriptorUpdateTemplateCreateInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO;
+}
+impl DescriptorUpdateTemplateCreateInfo {
+    pub fn builder<'a>() -> DescriptorUpdateTemplateCreateInfoBuilder<'a> {
+        DescriptorUpdateTemplateCreateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct DescriptorUpdateTemplateCreateInfoBuilder<'a> {
+    inner: DescriptorUpdateTemplateCreateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DescriptorUpdateTemplateCreateInfoBuilder<'a> {
+    type Target = DescriptorUpdateTemplateCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DescriptorUpdateTemplateCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DescriptorUpdateTemplateCreateInfoBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: DescriptorUpdateTemplateCreateFlags) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn descriptor_update_entries(
+        mut self,
+        descriptor_update_entries: &'a [DescriptorUpdateTemplateEntry],
+    ) -> Self {
+        // Count field is derived from the slice; 'a keeps the borrow alive.
+        self.inner.descriptor_update_entry_count = descriptor_update_entries.len() as _;
+        self.inner.p_descriptor_update_entries = descriptor_update_entries.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn template_type(mut self, template_type: DescriptorUpdateTemplateType) -> Self {
+        self.inner.template_type = template_type;
+        self
+    }
+    #[inline]
+    pub fn descriptor_set_layout(mut self, descriptor_set_layout: DescriptorSetLayout) -> Self {
+        self.inner.descriptor_set_layout = descriptor_set_layout;
+        self
+    }
+    #[inline]
+    pub fn pipeline_bind_point(mut self, pipeline_bind_point: PipelineBindPoint) -> Self {
+        self.inner.pipeline_bind_point = pipeline_bind_point;
+        self
+    }
+    #[inline]
+    pub fn pipeline_layout(mut self, pipeline_layout: PipelineLayout) -> Self {
+        self.inner.pipeline_layout = pipeline_layout;
+        self
+    }
+    #[inline]
+    pub fn set(mut self, set: u32) -> Self {
+        self.inner.set = set;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DescriptorUpdateTemplateCreateInfo {
+        self.inner
+    }
+}
+// Plain #[repr(C)] mirror of VkXYColorEXT (chromaticity coordinate pair);
+// no s_type/p_next, so plain `#[derive(Default)]` suffices.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkXYColorEXT.html>"]
+pub struct XYColorEXT {
+    pub x: f32,
+    pub y: f32,
+}
+impl XYColorEXT {
+    pub fn builder<'a>() -> XYColorEXTBuilder<'a> {
+        XYColorEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct XYColorEXTBuilder<'a> {
+    inner: XYColorEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for XYColorEXTBuilder<'a> {
+    type Target = XYColorEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for XYColorEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> XYColorEXTBuilder<'a> {
+    #[inline]
+    pub fn x(mut self, x: f32) -> Self {
+        self.inner.x = x;
+        self
+    }
+    #[inline]
+    pub fn y(mut self, y: f32) -> Self {
+        self.inner.y = y;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> XYColorEXT {
+        self.inner
+    }
+}
+// FFI mirror of VkPhysicalDevicePresentIdFeaturesKHR. `p_next` is *mut because
+// feature structs are written by the driver during feature queries.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePresentIdFeaturesKHR.html>"]
+pub struct PhysicalDevicePresentIdFeaturesKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub present_id: Bool32,
+}
+impl ::std::default::Default for PhysicalDevicePresentIdFeaturesKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            present_id: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDevicePresentIdFeaturesKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR;
+}
+impl PhysicalDevicePresentIdFeaturesKHR {
+    pub fn builder<'a>() -> PhysicalDevicePresentIdFeaturesKHRBuilder<'a> {
+        PhysicalDevicePresentIdFeaturesKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDevicePresentIdFeaturesKHRBuilder<'a> {
+    inner: PhysicalDevicePresentIdFeaturesKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chainable into both `PhysicalDeviceFeatures2` and
+// `DeviceCreateInfo` p_next lists.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePresentIdFeaturesKHRBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePresentIdFeaturesKHR {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePresentIdFeaturesKHRBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePresentIdFeaturesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePresentIdFeaturesKHRBuilder<'a> {
+    type Target = PhysicalDevicePresentIdFeaturesKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePresentIdFeaturesKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDevicePresentIdFeaturesKHRBuilder<'a> {
+    #[inline]
+    pub fn present_id(mut self, present_id: bool) -> Self {
+        // Rust bool is converted to Vulkan's 32-bit Bool32 via Into.
+        self.inner.present_id = present_id.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDevicePresentIdFeaturesKHR {
+        self.inner
+    }
+}
+// FFI mirror of VkPresentIdKHR; `Default` pre-fills `s_type`.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPresentIdKHR.html>"]
+pub struct PresentIdKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub swapchain_count: u32,
+    pub p_present_ids: *const u64,
+}
+impl ::std::default::Default for PresentIdKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            swapchain_count: u32::default(),
+            p_present_ids: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PresentIdKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::PRESENT_ID_KHR;
+}
+impl PresentIdKHR {
+    pub fn builder<'a>() -> PresentIdKHRBuilder<'a> {
+        PresentIdKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PresentIdKHRBuilder<'a> {
+    inner: PresentIdKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: this struct may be chained into `PresentInfoKHR::p_next`.
+unsafe impl ExtendsPresentInfoKHR for PresentIdKHRBuilder<'_> {}
+unsafe impl ExtendsPresentInfoKHR for PresentIdKHR {}
+impl<'a> ::std::ops::Deref for PresentIdKHRBuilder<'a> {
+    type Target = PresentIdKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PresentIdKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PresentIdKHRBuilder<'a> {
+    #[inline]
+    pub fn present_ids(mut self, present_ids: &'a [u64]) -> Self {
+        // `swapchain_count` is derived from the slice; 'a keeps the borrow alive.
+        self.inner.swapchain_count = present_ids.len() as _;
+        self.inner.p_present_ids = present_ids.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PresentIdKHR {
+        self.inner
+    }
+}
+// FFI mirror of VkPhysicalDevicePresentWaitFeaturesKHR. `p_next` is *mut because
+// feature structs are written by the driver during feature queries.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePresentWaitFeaturesKHR.html>"]
+pub struct PhysicalDevicePresentWaitFeaturesKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub present_wait: Bool32,
+}
+impl ::std::default::Default for PhysicalDevicePresentWaitFeaturesKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            present_wait: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDevicePresentWaitFeaturesKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR;
+}
+impl PhysicalDevicePresentWaitFeaturesKHR {
+    pub fn builder<'a>() -> PhysicalDevicePresentWaitFeaturesKHRBuilder<'a> {
+        PhysicalDevicePresentWaitFeaturesKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDevicePresentWaitFeaturesKHRBuilder<'a> {
+    inner: PhysicalDevicePresentWaitFeaturesKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chainable into both `PhysicalDeviceFeatures2` and
+// `DeviceCreateInfo` p_next lists.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePresentWaitFeaturesKHRBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePresentWaitFeaturesKHR {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePresentWaitFeaturesKHRBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePresentWaitFeaturesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePresentWaitFeaturesKHRBuilder<'a> {
+    type Target = PhysicalDevicePresentWaitFeaturesKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePresentWaitFeaturesKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDevicePresentWaitFeaturesKHRBuilder<'a> {
+    #[inline]
+    pub fn present_wait(mut self, present_wait: bool) -> Self {
+        // Rust bool is converted to Vulkan's 32-bit Bool32 via Into.
+        self.inner.present_wait = present_wait.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDevicePresentWaitFeaturesKHR {
+        self.inner
+    }
+}
+// FFI mirror of VkHdrMetadataEXT (SMPTE 2086 / CTA 861.3 HDR mastering metadata);
+// `Default` pre-fills `s_type`.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkHdrMetadataEXT.html>"]
+pub struct HdrMetadataEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub display_primary_red: XYColorEXT,
+    pub display_primary_green: XYColorEXT,
+    pub display_primary_blue: XYColorEXT,
+    pub white_point: XYColorEXT,
+    pub max_luminance: f32,
+    pub min_luminance: f32,
+    pub max_content_light_level: f32,
+    pub max_frame_average_light_level: f32,
+}
+impl ::std::default::Default for HdrMetadataEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            display_primary_red: XYColorEXT::default(),
+            display_primary_green: XYColorEXT::default(),
+            display_primary_blue: XYColorEXT::default(),
+            white_point: XYColorEXT::default(),
+            max_luminance: f32::default(),
+            min_luminance: f32::default(),
+            max_content_light_level: f32::default(),
+            max_frame_average_light_level: f32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for HdrMetadataEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::HDR_METADATA_EXT;
+}
+impl HdrMetadataEXT {
+    pub fn builder<'a>() -> HdrMetadataEXTBuilder<'a> {
+        HdrMetadataEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct HdrMetadataEXTBuilder<'a> {
+    inner: HdrMetadataEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for HdrMetadataEXTBuilder<'a> {
+    type Target = HdrMetadataEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for HdrMetadataEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters; each consumes and returns the builder.
+impl<'a> HdrMetadataEXTBuilder<'a> {
+    #[inline]
+    pub fn display_primary_red(mut self, display_primary_red: XYColorEXT) -> Self {
+        self.inner.display_primary_red = display_primary_red;
+        self
+    }
+    #[inline]
+    pub fn display_primary_green(mut self, display_primary_green: XYColorEXT) -> Self {
+        self.inner.display_primary_green = display_primary_green;
+        self
+    }
+    #[inline]
+    pub fn display_primary_blue(mut self, display_primary_blue: XYColorEXT) -> Self {
+        self.inner.display_primary_blue = display_primary_blue;
+        self
+    }
+    #[inline]
+    pub fn white_point(mut self, white_point: XYColorEXT) -> Self {
+        self.inner.white_point = white_point;
+        self
+    }
+    #[inline]
+    pub fn max_luminance(mut self, max_luminance: f32) -> Self {
+        self.inner.max_luminance = max_luminance;
+        self
+    }
+    #[inline]
+    pub fn min_luminance(mut self, min_luminance: f32) -> Self {
+        self.inner.min_luminance = min_luminance;
+        self
+    }
+    #[inline]
+    pub fn max_content_light_level(mut self, max_content_light_level: f32) -> Self {
+        self.inner.max_content_light_level = max_content_light_level;
+        self
+    }
+    #[inline]
+    pub fn max_frame_average_light_level(mut self, max_frame_average_light_level: f32) -> Self {
+        self.inner.max_frame_average_light_level = max_frame_average_light_level;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> HdrMetadataEXT {
+        self.inner
+    }
+}
+// FFI mirror of VkDisplayNativeHdrSurfaceCapabilitiesAMD. `p_next` is *mut
+// because capability structs are written by the driver during queries.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayNativeHdrSurfaceCapabilitiesAMD.html>"]
+pub struct DisplayNativeHdrSurfaceCapabilitiesAMD {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub local_dimming_support: Bool32,
+}
+impl ::std::default::Default for DisplayNativeHdrSurfaceCapabilitiesAMD {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            local_dimming_support: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for DisplayNativeHdrSurfaceCapabilitiesAMD {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD;
+}
+impl DisplayNativeHdrSurfaceCapabilitiesAMD {
+    pub fn builder<'a>() -> DisplayNativeHdrSurfaceCapabilitiesAMDBuilder<'a> {
+        DisplayNativeHdrSurfaceCapabilitiesAMDBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct DisplayNativeHdrSurfaceCapabilitiesAMDBuilder<'a> {
+    inner: DisplayNativeHdrSurfaceCapabilitiesAMD,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: this struct may be chained into `SurfaceCapabilities2KHR::p_next`.
+unsafe impl ExtendsSurfaceCapabilities2KHR for DisplayNativeHdrSurfaceCapabilitiesAMDBuilder<'_> {}
+unsafe impl ExtendsSurfaceCapabilities2KHR for DisplayNativeHdrSurfaceCapabilitiesAMD {}
+impl<'a> ::std::ops::Deref for DisplayNativeHdrSurfaceCapabilitiesAMDBuilder<'a> {
+    type Target = DisplayNativeHdrSurfaceCapabilitiesAMD;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DisplayNativeHdrSurfaceCapabilitiesAMDBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DisplayNativeHdrSurfaceCapabilitiesAMDBuilder<'a> {
+    #[inline]
+    pub fn local_dimming_support(mut self, local_dimming_support: bool) -> Self {
+        // Rust bool is converted to Vulkan's 32-bit Bool32 via Into.
+        self.inner.local_dimming_support = local_dimming_support.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DisplayNativeHdrSurfaceCapabilitiesAMD {
+        self.inner
+    }
+}
+// FFI mirror of VkSwapchainDisplayNativeHdrCreateInfoAMD; `Default` pre-fills `s_type`.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSwapchainDisplayNativeHdrCreateInfoAMD.html>"]
+pub struct SwapchainDisplayNativeHdrCreateInfoAMD {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub local_dimming_enable: Bool32,
+}
+impl ::std::default::Default for SwapchainDisplayNativeHdrCreateInfoAMD {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            local_dimming_enable: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for SwapchainDisplayNativeHdrCreateInfoAMD {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD;
+}
+impl SwapchainDisplayNativeHdrCreateInfoAMD {
+    pub fn builder<'a>() -> SwapchainDisplayNativeHdrCreateInfoAMDBuilder<'a> {
+        SwapchainDisplayNativeHdrCreateInfoAMDBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct SwapchainDisplayNativeHdrCreateInfoAMDBuilder<'a> {
+    inner: SwapchainDisplayNativeHdrCreateInfoAMD,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: this struct may be chained into `SwapchainCreateInfoKHR::p_next`.
+unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainDisplayNativeHdrCreateInfoAMDBuilder<'_> {}
+unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainDisplayNativeHdrCreateInfoAMD {}
+impl<'a> ::std::ops::Deref for SwapchainDisplayNativeHdrCreateInfoAMDBuilder<'a> {
+    type Target = SwapchainDisplayNativeHdrCreateInfoAMD;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for SwapchainDisplayNativeHdrCreateInfoAMDBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> SwapchainDisplayNativeHdrCreateInfoAMDBuilder<'a> {
+    #[inline]
+    pub fn local_dimming_enable(mut self, local_dimming_enable: bool) -> Self {
+        // Rust bool is converted to Vulkan's 32-bit Bool32 via Into.
+        self.inner.local_dimming_enable = local_dimming_enable.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> SwapchainDisplayNativeHdrCreateInfoAMD {
+        self.inner
+    }
+}
+// Plain #[repr(C)] mirror of VkRefreshCycleDurationGOOGLE — single u64 field,
+// no s_type/p_next, so plain `#[derive(Default)]` suffices.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRefreshCycleDurationGOOGLE.html>"]
+pub struct RefreshCycleDurationGOOGLE {
+    pub refresh_duration: u64,
+}
+impl RefreshCycleDurationGOOGLE {
+    pub fn builder<'a>() -> RefreshCycleDurationGOOGLEBuilder<'a> {
+        RefreshCycleDurationGOOGLEBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct RefreshCycleDurationGOOGLEBuilder<'a> {
+    inner: RefreshCycleDurationGOOGLE,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for RefreshCycleDurationGOOGLEBuilder<'a> {
+    type Target = RefreshCycleDurationGOOGLE;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for RefreshCycleDurationGOOGLEBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> RefreshCycleDurationGOOGLEBuilder<'a> {
+    #[inline]
+    pub fn refresh_duration(mut self, refresh_duration: u64) -> Self {
+        self.inner.refresh_duration = refresh_duration;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> RefreshCycleDurationGOOGLE {
+        self.inner
+    }
+}
+// Plain #[repr(C)] mirror of VkPastPresentationTimingGOOGLE — no s_type/p_next,
+// so plain `#[derive(Default)]` suffices.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPastPresentationTimingGOOGLE.html>"]
+pub struct PastPresentationTimingGOOGLE {
+    pub present_id: u32,
+    pub desired_present_time: u64,
+    pub actual_present_time: u64,
+    pub earliest_present_time: u64,
+    pub present_margin: u64,
+}
+impl PastPresentationTimingGOOGLE {
+    pub fn builder<'a>() -> PastPresentationTimingGOOGLEBuilder<'a> {
+        PastPresentationTimingGOOGLEBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PastPresentationTimingGOOGLEBuilder<'a> {
+    inner: PastPresentationTimingGOOGLE,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PastPresentationTimingGOOGLEBuilder<'a> {
+    type Target = PastPresentationTimingGOOGLE;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PastPresentationTimingGOOGLEBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters; each consumes and returns the builder.
+impl<'a> PastPresentationTimingGOOGLEBuilder<'a> {
+    #[inline]
+    pub fn present_id(mut self, present_id: u32) -> Self {
+        self.inner.present_id = present_id;
+        self
+    }
+    #[inline]
+    pub fn desired_present_time(mut self, desired_present_time: u64) -> Self {
+        self.inner.desired_present_time = desired_present_time;
+        self
+    }
+    #[inline]
+    pub fn actual_present_time(mut self, actual_present_time: u64) -> Self {
+        self.inner.actual_present_time = actual_present_time;
+        self
+    }
+    #[inline]
+    pub fn earliest_present_time(mut self, earliest_present_time: u64) -> Self {
+        self.inner.earliest_present_time = earliest_present_time;
+        self
+    }
+    #[inline]
+    pub fn present_margin(mut self, present_margin: u64) -> Self {
+        self.inner.present_margin = present_margin;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PastPresentationTimingGOOGLE {
+        self.inner
+    }
+}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPresentTimesInfoGOOGLE.html>"]
// FFI mirror of VkPresentTimesInfoGOOGLE. `swapchain_count` is the element
// count of the array addressed by `p_times`; the builder keeps them in sync.
pub struct PresentTimesInfoGOOGLE {
    pub s_type: StructureType,
    // Extension-chain (p_next) pointer; null when nothing is chained.
    pub p_next: *const c_void,
    pub swapchain_count: u32,
    pub p_times: *const PresentTimeGOOGLE,
}
// Zero/null-initializes every field except `s_type`, which is pre-tagged so
// the value is immediately identifiable in a p_next chain.
impl ::std::default::Default for PresentTimesInfoGOOGLE {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            swapchain_count: u32::default(),
            p_times: ::std::ptr::null(),
        }
    }
}
// Associates the struct with its VkStructureType tag (used for `s_type` above).
unsafe impl TaggedStructure for PresentTimesInfoGOOGLE {
    const STRUCTURE_TYPE: StructureType = StructureType::PRESENT_TIMES_INFO_GOOGLE;
}
impl PresentTimesInfoGOOGLE {
    // Starts a builder over a default-initialized struct; `'a` tracks any
    // borrowed slices stored through the builder.
    pub fn builder<'a>() -> PresentTimesInfoGOOGLEBuilder<'a> {
        PresentTimesInfoGOOGLEBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): identical layout to the wrapped struct, so `&Builder`
// can be used wherever `&PresentTimesInfoGOOGLE` is expected (via Deref).
#[repr(transparent)]
pub struct PresentTimesInfoGOOGLEBuilder<'a> {
    inner: PresentTimesInfoGOOGLE,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marks this struct (and its builder) as a legal p_next extension of
// VkPresentInfoKHR.
unsafe impl ExtendsPresentInfoKHR for PresentTimesInfoGOOGLEBuilder<'_> {}
unsafe impl ExtendsPresentInfoKHR for PresentTimesInfoGOOGLE {}
impl<'a> ::std::ops::Deref for PresentTimesInfoGOOGLEBuilder<'a> {
    type Target = PresentTimesInfoGOOGLE;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PresentTimesInfoGOOGLEBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PresentTimesInfoGOOGLEBuilder<'a> {
    // Stores the slice's pointer and length together; the borrow is tied to
    // the builder's `'a`, so the slice must outlive the builder.
    #[inline]
    pub fn times(mut self, times: &'a [PresentTimeGOOGLE]) -> Self {
        self.inner.swapchain_count = times.len() as _;
        self.inner.p_times = times.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PresentTimesInfoGOOGLE {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
/// The requested presentation time for one swapchain image
/// (`VK_GOOGLE_display_timing`).
///
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPresentTimeGOOGLE.html>
pub struct PresentTimeGOOGLE {
    pub present_id: u32,
    pub desired_present_time: u64,
}
impl PresentTimeGOOGLE {
    /// Creates a lifetime-tagged builder over a zero-initialized value.
    pub fn builder<'a>() -> PresentTimeGOOGLEBuilder<'a> {
        PresentTimeGOOGLEBuilder {
            inner: Default::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
/// Builder for [`PresentTimeGOOGLE`].
///
/// `repr(transparent)` keeps the builder layout-identical to the wrapped
/// struct, so builder references can be passed to Vulkan functions via `Deref`.
#[repr(transparent)]
pub struct PresentTimeGOOGLEBuilder<'a> {
    inner: PresentTimeGOOGLE,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PresentTimeGOOGLEBuilder<'a> {
    type Target = PresentTimeGOOGLE;
    fn deref(&self) -> &Self::Target {
        let Self { inner, .. } = self;
        inner
    }
}
impl<'a> ::std::ops::DerefMut for PresentTimeGOOGLEBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        let Self { inner, .. } = self;
        inner
    }
}
impl<'a> PresentTimeGOOGLEBuilder<'a> {
    /// Sets the application-chosen id for this present operation.
    #[inline]
    pub fn present_id(mut self, present_id: u32) -> Self {
        self.inner = PresentTimeGOOGLE {
            present_id,
            ..self.inner
        };
        self
    }
    /// Sets the time at which the application wants the image shown.
    #[inline]
    pub fn desired_present_time(mut self, desired_present_time: u64) -> Self {
        self.inner = PresentTimeGOOGLE {
            desired_present_time,
            ..self.inner
        };
        self
    }
    /// Unwraps the builder, **discarding** its lifetime information. Only call
    /// this when a plain struct is required; thanks to `Deref`, references to
    /// builders can usually be passed to Vulkan functions directly.
    pub fn build(self) -> PresentTimeGOOGLE {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkIOSSurfaceCreateInfoMVK.html>"]
// FFI mirror of VkIOSSurfaceCreateInfoMVK (MoltenVK iOS surface creation).
pub struct IOSSurfaceCreateInfoMVK {
    pub s_type: StructureType,
    // Extension-chain (p_next) pointer; null when nothing is chained.
    pub p_next: *const c_void,
    pub flags: IOSSurfaceCreateFlagsMVK,
    // Opaque pointer — presumably the platform view object; see the spec link.
    pub p_view: *const c_void,
}
// Zero/null-initializes every field except `s_type`, which is pre-tagged.
impl ::std::default::Default for IOSSurfaceCreateInfoMVK {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: IOSSurfaceCreateFlagsMVK::default(),
            p_view: ::std::ptr::null(),
        }
    }
}
// Associates the struct with its VkStructureType tag (used for `s_type`).
unsafe impl TaggedStructure for IOSSurfaceCreateInfoMVK {
    const STRUCTURE_TYPE: StructureType = StructureType::IOS_SURFACE_CREATE_INFO_MVK;
}
impl IOSSurfaceCreateInfoMVK {
    // Starts a builder over a default-initialized struct.
    pub fn builder<'a>() -> IOSSurfaceCreateInfoMVKBuilder<'a> {
        IOSSurfaceCreateInfoMVKBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): identical layout to the wrapped struct (Deref passthrough).
#[repr(transparent)]
pub struct IOSSurfaceCreateInfoMVKBuilder<'a> {
    inner: IOSSurfaceCreateInfoMVK,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for IOSSurfaceCreateInfoMVKBuilder<'a> {
    type Target = IOSSurfaceCreateInfoMVK;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for IOSSurfaceCreateInfoMVKBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> IOSSurfaceCreateInfoMVKBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: IOSSurfaceCreateFlagsMVK) -> Self {
        self.inner.flags = flags;
        self
    }
    // NOTE(review): raw-pointer setter — the builder's `'a` lifetime cannot
    // track a raw pointer, so the caller must keep the pointee alive.
    #[inline]
    pub fn view(mut self, view: *const c_void) -> Self {
        self.inner.p_view = view;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> IOSSurfaceCreateInfoMVK {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMacOSSurfaceCreateInfoMVK.html>"]
// FFI mirror of VkMacOSSurfaceCreateInfoMVK (MoltenVK macOS surface creation).
pub struct MacOSSurfaceCreateInfoMVK {
    pub s_type: StructureType,
    // Extension-chain (p_next) pointer; null when nothing is chained.
    pub p_next: *const c_void,
    pub flags: MacOSSurfaceCreateFlagsMVK,
    // Opaque pointer — presumably the platform view object; see the spec link.
    pub p_view: *const c_void,
}
// Zero/null-initializes every field except `s_type`, which is pre-tagged.
impl ::std::default::Default for MacOSSurfaceCreateInfoMVK {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: MacOSSurfaceCreateFlagsMVK::default(),
            p_view: ::std::ptr::null(),
        }
    }
}
// Associates the struct with its VkStructureType tag (used for `s_type`).
unsafe impl TaggedStructure for MacOSSurfaceCreateInfoMVK {
    const STRUCTURE_TYPE: StructureType = StructureType::MACOS_SURFACE_CREATE_INFO_MVK;
}
impl MacOSSurfaceCreateInfoMVK {
    // Starts a builder over a default-initialized struct.
    pub fn builder<'a>() -> MacOSSurfaceCreateInfoMVKBuilder<'a> {
        MacOSSurfaceCreateInfoMVKBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): identical layout to the wrapped struct (Deref passthrough).
#[repr(transparent)]
pub struct MacOSSurfaceCreateInfoMVKBuilder<'a> {
    inner: MacOSSurfaceCreateInfoMVK,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for MacOSSurfaceCreateInfoMVKBuilder<'a> {
    type Target = MacOSSurfaceCreateInfoMVK;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for MacOSSurfaceCreateInfoMVKBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> MacOSSurfaceCreateInfoMVKBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: MacOSSurfaceCreateFlagsMVK) -> Self {
        self.inner.flags = flags;
        self
    }
    // NOTE(review): raw-pointer setter — the builder's `'a` lifetime cannot
    // track a raw pointer, so the caller must keep the pointee alive.
    #[inline]
    pub fn view(mut self, view: *const c_void) -> Self {
        self.inner.p_view = view;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> MacOSSurfaceCreateInfoMVK {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMetalSurfaceCreateInfoEXT.html>"]
// FFI mirror of VkMetalSurfaceCreateInfoEXT (VK_EXT_metal_surface).
pub struct MetalSurfaceCreateInfoEXT {
    pub s_type: StructureType,
    // Extension-chain (p_next) pointer; null when nothing is chained.
    pub p_next: *const c_void,
    pub flags: MetalSurfaceCreateFlagsEXT,
    // Raw pointer to the CAMetalLayer the surface is created from.
    pub p_layer: *const CAMetalLayer,
}
// Zero/null-initializes every field except `s_type`, which is pre-tagged.
impl ::std::default::Default for MetalSurfaceCreateInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: MetalSurfaceCreateFlagsEXT::default(),
            p_layer: ::std::ptr::null(),
        }
    }
}
// Associates the struct with its VkStructureType tag (used for `s_type`).
unsafe impl TaggedStructure for MetalSurfaceCreateInfoEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::METAL_SURFACE_CREATE_INFO_EXT;
}
impl MetalSurfaceCreateInfoEXT {
    // Starts a builder over a default-initialized struct.
    pub fn builder<'a>() -> MetalSurfaceCreateInfoEXTBuilder<'a> {
        MetalSurfaceCreateInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): identical layout to the wrapped struct (Deref passthrough).
#[repr(transparent)]
pub struct MetalSurfaceCreateInfoEXTBuilder<'a> {
    inner: MetalSurfaceCreateInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for MetalSurfaceCreateInfoEXTBuilder<'a> {
    type Target = MetalSurfaceCreateInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for MetalSurfaceCreateInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> MetalSurfaceCreateInfoEXTBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: MetalSurfaceCreateFlagsEXT) -> Self {
        self.inner.flags = flags;
        self
    }
    // NOTE(review): raw-pointer setter — the builder's `'a` lifetime cannot
    // track a raw pointer, so the caller must keep the layer alive.
    #[inline]
    pub fn layer(mut self, layer: *const CAMetalLayer) -> Self {
        self.inner.p_layer = layer;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> MetalSurfaceCreateInfoEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
/// A pair of viewport W-scaling coefficients (`VK_NV_clip_space_w_scaling`).
///
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkViewportWScalingNV.html>
pub struct ViewportWScalingNV {
    pub xcoeff: f32,
    pub ycoeff: f32,
}
impl ViewportWScalingNV {
    /// Creates a lifetime-tagged builder over a zero-initialized value.
    pub fn builder<'a>() -> ViewportWScalingNVBuilder<'a> {
        ViewportWScalingNVBuilder {
            inner: Default::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
/// Builder for [`ViewportWScalingNV`].
///
/// `repr(transparent)` keeps the builder layout-identical to the wrapped
/// struct, so builder references can be passed to Vulkan functions via `Deref`.
#[repr(transparent)]
pub struct ViewportWScalingNVBuilder<'a> {
    inner: ViewportWScalingNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ViewportWScalingNVBuilder<'a> {
    type Target = ViewportWScalingNV;
    fn deref(&self) -> &Self::Target {
        let Self { inner, .. } = self;
        inner
    }
}
impl<'a> ::std::ops::DerefMut for ViewportWScalingNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        let Self { inner, .. } = self;
        inner
    }
}
impl<'a> ViewportWScalingNVBuilder<'a> {
    /// Sets the X scaling coefficient.
    #[inline]
    pub fn xcoeff(mut self, xcoeff: f32) -> Self {
        self.inner = ViewportWScalingNV {
            xcoeff,
            ..self.inner
        };
        self
    }
    /// Sets the Y scaling coefficient.
    #[inline]
    pub fn ycoeff(mut self, ycoeff: f32) -> Self {
        self.inner = ViewportWScalingNV {
            ycoeff,
            ..self.inner
        };
        self
    }
    /// Unwraps the builder, **discarding** its lifetime information. Only call
    /// this when a plain struct is required; thanks to `Deref`, references to
    /// builders can usually be passed to Vulkan functions directly.
    pub fn build(self) -> ViewportWScalingNV {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineViewportWScalingStateCreateInfoNV.html>"]
// FFI mirror of VkPipelineViewportWScalingStateCreateInfoNV. `viewport_count`
// is the length of the array addressed by `p_viewport_w_scalings`; the
// builder keeps the two in sync.
pub struct PipelineViewportWScalingStateCreateInfoNV {
    pub s_type: StructureType,
    // Extension-chain (p_next) pointer; null when nothing is chained.
    pub p_next: *const c_void,
    pub viewport_w_scaling_enable: Bool32,
    pub viewport_count: u32,
    pub p_viewport_w_scalings: *const ViewportWScalingNV,
}
// Zero/null-initializes every field except `s_type`, which is pre-tagged.
impl ::std::default::Default for PipelineViewportWScalingStateCreateInfoNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            viewport_w_scaling_enable: Bool32::default(),
            viewport_count: u32::default(),
            p_viewport_w_scalings: ::std::ptr::null(),
        }
    }
}
// Associates the struct with its VkStructureType tag (used for `s_type`).
unsafe impl TaggedStructure for PipelineViewportWScalingStateCreateInfoNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV;
}
impl PipelineViewportWScalingStateCreateInfoNV {
    // Starts a builder over a default-initialized struct; `'a` tracks any
    // borrowed slices stored through the builder.
    pub fn builder<'a>() -> PipelineViewportWScalingStateCreateInfoNVBuilder<'a> {
        PipelineViewportWScalingStateCreateInfoNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): identical layout to the wrapped struct (Deref passthrough).
#[repr(transparent)]
pub struct PipelineViewportWScalingStateCreateInfoNVBuilder<'a> {
    inner: PipelineViewportWScalingStateCreateInfoNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marks this struct (and its builder) as a legal p_next extension of
// VkPipelineViewportStateCreateInfo.
unsafe impl ExtendsPipelineViewportStateCreateInfo
    for PipelineViewportWScalingStateCreateInfoNVBuilder<'_>
{
}
unsafe impl ExtendsPipelineViewportStateCreateInfo for PipelineViewportWScalingStateCreateInfoNV {}
impl<'a> ::std::ops::Deref for PipelineViewportWScalingStateCreateInfoNVBuilder<'a> {
    type Target = PipelineViewportWScalingStateCreateInfoNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PipelineViewportWScalingStateCreateInfoNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PipelineViewportWScalingStateCreateInfoNVBuilder<'a> {
    // Converts the Rust bool into the FFI Bool32 representation.
    #[inline]
    pub fn viewport_w_scaling_enable(mut self, viewport_w_scaling_enable: bool) -> Self {
        self.inner.viewport_w_scaling_enable = viewport_w_scaling_enable.into();
        self
    }
    // Stores the slice's pointer and length together; the borrow is tied to
    // the builder's `'a`, so the slice must outlive the builder.
    #[inline]
    pub fn viewport_w_scalings(mut self, viewport_w_scalings: &'a [ViewportWScalingNV]) -> Self {
        self.inner.viewport_count = viewport_w_scalings.len() as _;
        self.inner.p_viewport_w_scalings = viewport_w_scalings.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PipelineViewportWScalingStateCreateInfoNV {
        self.inner
    }
}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkViewportSwizzleNV.html>"]
+pub struct ViewportSwizzleNV {
+ pub x: ViewportCoordinateSwizzleNV,
+ pub y: ViewportCoordinateSwizzleNV,
+ pub z: ViewportCoordinateSwizzleNV,
+ pub w: ViewportCoordinateSwizzleNV,
+}
+impl ViewportSwizzleNV {
+ pub fn builder<'a>() -> ViewportSwizzleNVBuilder<'a> {
+ ViewportSwizzleNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ViewportSwizzleNVBuilder<'a> {
+ inner: ViewportSwizzleNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ViewportSwizzleNVBuilder<'a> {
+ type Target = ViewportSwizzleNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ViewportSwizzleNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ViewportSwizzleNVBuilder<'a> {
+ #[inline]
+ pub fn x(mut self, x: ViewportCoordinateSwizzleNV) -> Self {
+ self.inner.x = x;
+ self
+ }
+ #[inline]
+ pub fn y(mut self, y: ViewportCoordinateSwizzleNV) -> Self {
+ self.inner.y = y;
+ self
+ }
+ #[inline]
+ pub fn z(mut self, z: ViewportCoordinateSwizzleNV) -> Self {
+ self.inner.z = z;
+ self
+ }
+ #[inline]
+ pub fn w(mut self, w: ViewportCoordinateSwizzleNV) -> Self {
+ self.inner.w = w;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ViewportSwizzleNV {
+ self.inner
+ }
+}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineViewportSwizzleStateCreateInfoNV.html>"]
// FFI mirror of VkPipelineViewportSwizzleStateCreateInfoNV. `viewport_count`
// is the length of the array addressed by `p_viewport_swizzles`; the builder
// keeps the two in sync.
pub struct PipelineViewportSwizzleStateCreateInfoNV {
    pub s_type: StructureType,
    // Extension-chain (p_next) pointer; null when nothing is chained.
    pub p_next: *const c_void,
    pub flags: PipelineViewportSwizzleStateCreateFlagsNV,
    pub viewport_count: u32,
    pub p_viewport_swizzles: *const ViewportSwizzleNV,
}
// Zero/null-initializes every field except `s_type`, which is pre-tagged.
impl ::std::default::Default for PipelineViewportSwizzleStateCreateInfoNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: PipelineViewportSwizzleStateCreateFlagsNV::default(),
            viewport_count: u32::default(),
            p_viewport_swizzles: ::std::ptr::null(),
        }
    }
}
// Associates the struct with its VkStructureType tag (used for `s_type`).
unsafe impl TaggedStructure for PipelineViewportSwizzleStateCreateInfoNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV;
}
impl PipelineViewportSwizzleStateCreateInfoNV {
    // Starts a builder over a default-initialized struct; `'a` tracks any
    // borrowed slices stored through the builder.
    pub fn builder<'a>() -> PipelineViewportSwizzleStateCreateInfoNVBuilder<'a> {
        PipelineViewportSwizzleStateCreateInfoNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): identical layout to the wrapped struct (Deref passthrough).
#[repr(transparent)]
pub struct PipelineViewportSwizzleStateCreateInfoNVBuilder<'a> {
    inner: PipelineViewportSwizzleStateCreateInfoNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marks this struct (and its builder) as a legal p_next extension of
// VkPipelineViewportStateCreateInfo.
unsafe impl ExtendsPipelineViewportStateCreateInfo
    for PipelineViewportSwizzleStateCreateInfoNVBuilder<'_>
{
}
unsafe impl ExtendsPipelineViewportStateCreateInfo for PipelineViewportSwizzleStateCreateInfoNV {}
impl<'a> ::std::ops::Deref for PipelineViewportSwizzleStateCreateInfoNVBuilder<'a> {
    type Target = PipelineViewportSwizzleStateCreateInfoNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PipelineViewportSwizzleStateCreateInfoNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PipelineViewportSwizzleStateCreateInfoNVBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: PipelineViewportSwizzleStateCreateFlagsNV) -> Self {
        self.inner.flags = flags;
        self
    }
    // Stores the slice's pointer and length together; the borrow is tied to
    // the builder's `'a`, so the slice must outlive the builder.
    #[inline]
    pub fn viewport_swizzles(mut self, viewport_swizzles: &'a [ViewportSwizzleNV]) -> Self {
        self.inner.viewport_count = viewport_swizzles.len() as _;
        self.inner.p_viewport_swizzles = viewport_swizzles.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PipelineViewportSwizzleStateCreateInfoNV {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDiscardRectanglePropertiesEXT.html>"]
// FFI mirror of VkPhysicalDeviceDiscardRectanglePropertiesEXT.
pub struct PhysicalDeviceDiscardRectanglePropertiesEXT {
    pub s_type: StructureType,
    // NOTE(review): `p_next` is `*mut` here (unlike the create-info structs),
    // consistent with a properties struct that gets written during a query.
    pub p_next: *mut c_void,
    pub max_discard_rectangles: u32,
}
// Zero/null-initializes every field except `s_type`, which is pre-tagged.
impl ::std::default::Default for PhysicalDeviceDiscardRectanglePropertiesEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            max_discard_rectangles: u32::default(),
        }
    }
}
// Associates the struct with its VkStructureType tag (used for `s_type`).
unsafe impl TaggedStructure for PhysicalDeviceDiscardRectanglePropertiesEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT;
}
impl PhysicalDeviceDiscardRectanglePropertiesEXT {
    // Starts a builder over a default-initialized struct.
    pub fn builder<'a>() -> PhysicalDeviceDiscardRectanglePropertiesEXTBuilder<'a> {
        PhysicalDeviceDiscardRectanglePropertiesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): identical layout to the wrapped struct (Deref passthrough).
#[repr(transparent)]
pub struct PhysicalDeviceDiscardRectanglePropertiesEXTBuilder<'a> {
    inner: PhysicalDeviceDiscardRectanglePropertiesEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marks this struct (and its builder) as a legal p_next extension of
// VkPhysicalDeviceProperties2.
unsafe impl ExtendsPhysicalDeviceProperties2
    for PhysicalDeviceDiscardRectanglePropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDiscardRectanglePropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceDiscardRectanglePropertiesEXTBuilder<'a> {
    type Target = PhysicalDeviceDiscardRectanglePropertiesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceDiscardRectanglePropertiesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceDiscardRectanglePropertiesEXTBuilder<'a> {
    #[inline]
    pub fn max_discard_rectangles(mut self, max_discard_rectangles: u32) -> Self {
        self.inner.max_discard_rectangles = max_discard_rectangles;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceDiscardRectanglePropertiesEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineDiscardRectangleStateCreateInfoEXT.html>"]
// FFI mirror of VkPipelineDiscardRectangleStateCreateInfoEXT.
// `discard_rectangle_count` is the length of the array addressed by
// `p_discard_rectangles`; the builder keeps the two in sync.
pub struct PipelineDiscardRectangleStateCreateInfoEXT {
    pub s_type: StructureType,
    // Extension-chain (p_next) pointer; null when nothing is chained.
    pub p_next: *const c_void,
    pub flags: PipelineDiscardRectangleStateCreateFlagsEXT,
    pub discard_rectangle_mode: DiscardRectangleModeEXT,
    pub discard_rectangle_count: u32,
    pub p_discard_rectangles: *const Rect2D,
}
// Zero/null-initializes every field except `s_type`, which is pre-tagged.
impl ::std::default::Default for PipelineDiscardRectangleStateCreateInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: PipelineDiscardRectangleStateCreateFlagsEXT::default(),
            discard_rectangle_mode: DiscardRectangleModeEXT::default(),
            discard_rectangle_count: u32::default(),
            p_discard_rectangles: ::std::ptr::null(),
        }
    }
}
// Associates the struct with its VkStructureType tag (used for `s_type`).
unsafe impl TaggedStructure for PipelineDiscardRectangleStateCreateInfoEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT;
}
impl PipelineDiscardRectangleStateCreateInfoEXT {
    // Starts a builder over a default-initialized struct; `'a` tracks any
    // borrowed slices stored through the builder.
    pub fn builder<'a>() -> PipelineDiscardRectangleStateCreateInfoEXTBuilder<'a> {
        PipelineDiscardRectangleStateCreateInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): identical layout to the wrapped struct (Deref passthrough).
#[repr(transparent)]
pub struct PipelineDiscardRectangleStateCreateInfoEXTBuilder<'a> {
    inner: PipelineDiscardRectangleStateCreateInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marks this struct (and its builder) as a legal p_next extension of
// VkGraphicsPipelineCreateInfo.
unsafe impl ExtendsGraphicsPipelineCreateInfo
    for PipelineDiscardRectangleStateCreateInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineDiscardRectangleStateCreateInfoEXT {}
impl<'a> ::std::ops::Deref for PipelineDiscardRectangleStateCreateInfoEXTBuilder<'a> {
    type Target = PipelineDiscardRectangleStateCreateInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PipelineDiscardRectangleStateCreateInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PipelineDiscardRectangleStateCreateInfoEXTBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: PipelineDiscardRectangleStateCreateFlagsEXT) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn discard_rectangle_mode(
        mut self,
        discard_rectangle_mode: DiscardRectangleModeEXT,
    ) -> Self {
        self.inner.discard_rectangle_mode = discard_rectangle_mode;
        self
    }
    // Stores the slice's pointer and length together; the borrow is tied to
    // the builder's `'a`, so the slice must outlive the builder.
    #[inline]
    pub fn discard_rectangles(mut self, discard_rectangles: &'a [Rect2D]) -> Self {
        self.inner.discard_rectangle_count = discard_rectangles.len() as _;
        self.inner.p_discard_rectangles = discard_rectangles.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PipelineDiscardRectangleStateCreateInfoEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX.html>"]
// FFI mirror of VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX.
pub struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
    pub s_type: StructureType,
    // NOTE(review): `p_next` is `*mut` here (unlike the create-info structs),
    // consistent with a properties struct that gets written during a query.
    pub p_next: *mut c_void,
    pub per_view_position_all_components: Bool32,
}
// Zero/null-initializes every field except `s_type`, which is pre-tagged.
impl ::std::default::Default for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            per_view_position_all_components: Bool32::default(),
        }
    }
}
// Associates the struct with its VkStructureType tag (used for `s_type`).
unsafe impl TaggedStructure for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX;
}
impl PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
    // Starts a builder over a default-initialized struct.
    pub fn builder<'a>() -> PhysicalDeviceMultiviewPerViewAttributesPropertiesNVXBuilder<'a> {
        PhysicalDeviceMultiviewPerViewAttributesPropertiesNVXBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): identical layout to the wrapped struct (Deref passthrough).
#[repr(transparent)]
pub struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVXBuilder<'a> {
    inner: PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marks this struct (and its builder) as a legal p_next extension of
// VkPhysicalDeviceProperties2.
unsafe impl ExtendsPhysicalDeviceProperties2
    for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVXBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2
    for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
{
}
impl<'a> ::std::ops::Deref for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVXBuilder<'a> {
    type Target = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMultiviewPerViewAttributesPropertiesNVXBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceMultiviewPerViewAttributesPropertiesNVXBuilder<'a> {
    // Converts the Rust bool into the FFI Bool32 representation.
    #[inline]
    pub fn per_view_position_all_components(
        mut self,
        per_view_position_all_components: bool,
    ) -> Self {
        self.inner.per_view_position_all_components = per_view_position_all_components.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
        self.inner
    }
}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkInputAttachmentAspectReference.html>"]
+pub struct InputAttachmentAspectReference {
+ pub subpass: u32,
+ pub input_attachment_index: u32,
+ pub aspect_mask: ImageAspectFlags,
+}
+impl InputAttachmentAspectReference {
+ pub fn builder<'a>() -> InputAttachmentAspectReferenceBuilder<'a> {
+ InputAttachmentAspectReferenceBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct InputAttachmentAspectReferenceBuilder<'a> {
+ inner: InputAttachmentAspectReference,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for InputAttachmentAspectReferenceBuilder<'a> {
+ type Target = InputAttachmentAspectReference;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for InputAttachmentAspectReferenceBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> InputAttachmentAspectReferenceBuilder<'a> {
+ #[inline]
+ pub fn subpass(mut self, subpass: u32) -> Self {
+ self.inner.subpass = subpass;
+ self
+ }
+ #[inline]
+ pub fn input_attachment_index(mut self, input_attachment_index: u32) -> Self {
+ self.inner.input_attachment_index = input_attachment_index;
+ self
+ }
+ #[inline]
+ pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> Self {
+ self.inner.aspect_mask = aspect_mask;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> InputAttachmentAspectReference {
+ self.inner
+ }
+}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPassInputAttachmentAspectCreateInfo.html>"]
// FFI mirror of VkRenderPassInputAttachmentAspectCreateInfo.
// `aspect_reference_count` is the length of the array addressed by
// `p_aspect_references`; the builder keeps the two in sync.
pub struct RenderPassInputAttachmentAspectCreateInfo {
    pub s_type: StructureType,
    // Extension-chain (p_next) pointer; null when nothing is chained.
    pub p_next: *const c_void,
    pub aspect_reference_count: u32,
    pub p_aspect_references: *const InputAttachmentAspectReference,
}
// Zero/null-initializes every field except `s_type`, which is pre-tagged.
impl ::std::default::Default for RenderPassInputAttachmentAspectCreateInfo {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            aspect_reference_count: u32::default(),
            p_aspect_references: ::std::ptr::null(),
        }
    }
}
// Associates the struct with its VkStructureType tag (used for `s_type`).
unsafe impl TaggedStructure for RenderPassInputAttachmentAspectCreateInfo {
    const STRUCTURE_TYPE: StructureType =
        StructureType::RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO;
}
impl RenderPassInputAttachmentAspectCreateInfo {
    // Starts a builder over a default-initialized struct; `'a` tracks any
    // borrowed slices stored through the builder.
    pub fn builder<'a>() -> RenderPassInputAttachmentAspectCreateInfoBuilder<'a> {
        RenderPassInputAttachmentAspectCreateInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): identical layout to the wrapped struct (Deref passthrough).
#[repr(transparent)]
pub struct RenderPassInputAttachmentAspectCreateInfoBuilder<'a> {
    inner: RenderPassInputAttachmentAspectCreateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marks this struct (and its builder) as a legal p_next extension of
// VkRenderPassCreateInfo.
unsafe impl ExtendsRenderPassCreateInfo for RenderPassInputAttachmentAspectCreateInfoBuilder<'_> {}
unsafe impl ExtendsRenderPassCreateInfo for RenderPassInputAttachmentAspectCreateInfo {}
impl<'a> ::std::ops::Deref for RenderPassInputAttachmentAspectCreateInfoBuilder<'a> {
    type Target = RenderPassInputAttachmentAspectCreateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for RenderPassInputAttachmentAspectCreateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> RenderPassInputAttachmentAspectCreateInfoBuilder<'a> {
    // Stores the slice's pointer and length together; the borrow is tied to
    // the builder's `'a`, so the slice must outlive the builder.
    #[inline]
    pub fn aspect_references(
        mut self,
        aspect_references: &'a [InputAttachmentAspectReference],
    ) -> Self {
        self.inner.aspect_reference_count = aspect_references.len() as _;
        self.inner.p_aspect_references = aspect_references.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> RenderPassInputAttachmentAspectCreateInfo {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceSurfaceInfo2KHR.html>"]
pub struct PhysicalDeviceSurfaceInfo2KHR {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub surface: SurfaceKHR,
}
impl ::std::default::Default for PhysicalDeviceSurfaceInfo2KHR {
    #[inline]
    fn default() -> Self {
        // Pre-tagged with the right s_type; p_next null, handle default.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            surface: SurfaceKHR::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceSurfaceInfo2KHR {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SURFACE_INFO_2_KHR;
}
impl PhysicalDeviceSurfaceInfo2KHR {
    /// Entry point for the lifetime-tracking builder.
    pub fn builder<'a>() -> PhysicalDeviceSurfaceInfo2KHRBuilder<'a> {
        PhysicalDeviceSurfaceInfo2KHRBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): builder is layout-identical to the raw struct.
#[repr(transparent)]
pub struct PhysicalDeviceSurfaceInfo2KHRBuilder<'a> {
    inner: PhysicalDeviceSurfaceInfo2KHR,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait for structs allowed on this struct's p_next chain (see
// `push_next` below, which is bounded on it).
pub unsafe trait ExtendsPhysicalDeviceSurfaceInfo2KHR {}
impl<'a> ::std::ops::Deref for PhysicalDeviceSurfaceInfo2KHRBuilder<'a> {
    type Target = PhysicalDeviceSurfaceInfo2KHR;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceSurfaceInfo2KHRBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceSurfaceInfo2KHRBuilder<'a> {
    #[inline]
    pub fn surface(mut self, surface: SurfaceKHR) -> Self {
        self.inner.surface = surface;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsPhysicalDeviceSurfaceInfo2KHR>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*const T>::cast(next);
            // Walk to the tail of `next`'s own chain, hang the current chain
            // off it, then install `next` as the new head after `self`.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceSurfaceInfo2KHR {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfaceCapabilities2KHR.html>"]
pub struct SurfaceCapabilities2KHR {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub surface_capabilities: SurfaceCapabilitiesKHR,
}
impl ::std::default::Default for SurfaceCapabilities2KHR {
    #[inline]
    fn default() -> Self {
        // Output struct (p_next is *mut): tag set, chain null, payload default.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            surface_capabilities: SurfaceCapabilitiesKHR::default(),
        }
    }
}
unsafe impl TaggedStructure for SurfaceCapabilities2KHR {
    const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_CAPABILITIES_2_KHR;
}
impl SurfaceCapabilities2KHR {
    /// Entry point for the lifetime-tracking builder.
    pub fn builder<'a>() -> SurfaceCapabilities2KHRBuilder<'a> {
        SurfaceCapabilities2KHRBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): builder is layout-identical to the raw struct.
#[repr(transparent)]
pub struct SurfaceCapabilities2KHRBuilder<'a> {
    inner: SurfaceCapabilities2KHR,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait for structs allowed on this struct's p_next chain (bound used
// by `push_next` below).
pub unsafe trait ExtendsSurfaceCapabilities2KHR {}
impl<'a> ::std::ops::Deref for SurfaceCapabilities2KHRBuilder<'a> {
    type Target = SurfaceCapabilities2KHR;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for SurfaceCapabilities2KHRBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> SurfaceCapabilities2KHRBuilder<'a> {
    #[inline]
    pub fn surface_capabilities(mut self, surface_capabilities: SurfaceCapabilitiesKHR) -> Self {
        self.inner.surface_capabilities = surface_capabilities;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsSurfaceCapabilities2KHR>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*mut T>::cast(next);
            // Splice: tail of `next`'s chain takes over the current chain,
            // then `next` becomes the new head after `self`.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> SurfaceCapabilities2KHR {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfaceFormat2KHR.html>"]
pub struct SurfaceFormat2KHR {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub surface_format: SurfaceFormatKHR,
}
impl ::std::default::Default for SurfaceFormat2KHR {
    #[inline]
    fn default() -> Self {
        // Output struct (p_next is *mut): tag set, chain null, payload default.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            surface_format: SurfaceFormatKHR::default(),
        }
    }
}
unsafe impl TaggedStructure for SurfaceFormat2KHR {
    const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_FORMAT_2_KHR;
}
impl SurfaceFormat2KHR {
    /// Entry point for the lifetime-tracking builder.
    pub fn builder<'a>() -> SurfaceFormat2KHRBuilder<'a> {
        SurfaceFormat2KHRBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): builder is layout-identical to the raw struct.
#[repr(transparent)]
pub struct SurfaceFormat2KHRBuilder<'a> {
    inner: SurfaceFormat2KHR,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait for structs allowed on this struct's p_next chain (bound used
// by `push_next` below).
pub unsafe trait ExtendsSurfaceFormat2KHR {}
impl<'a> ::std::ops::Deref for SurfaceFormat2KHRBuilder<'a> {
    type Target = SurfaceFormat2KHR;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for SurfaceFormat2KHRBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> SurfaceFormat2KHRBuilder<'a> {
    #[inline]
    pub fn surface_format(mut self, surface_format: SurfaceFormatKHR) -> Self {
        self.inner.surface_format = surface_format;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsSurfaceFormat2KHR>(mut self, next: &'a mut T) -> Self {
        unsafe {
            let next_ptr = <*mut T>::cast(next);
            // Splice: tail of `next`'s chain takes over the current chain,
            // then `next` becomes the new head after `self`.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> SurfaceFormat2KHR {
        self.inner
    }
}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayProperties2KHR.html>"]
+pub struct DisplayProperties2KHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub display_properties: DisplayPropertiesKHR,
+}
+impl ::std::default::Default for DisplayProperties2KHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ display_properties: DisplayPropertiesKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DisplayProperties2KHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_PROPERTIES_2_KHR;
+}
+impl DisplayProperties2KHR {
+ pub fn builder<'a>() -> DisplayProperties2KHRBuilder<'a> {
+ DisplayProperties2KHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DisplayProperties2KHRBuilder<'a> {
+ inner: DisplayProperties2KHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DisplayProperties2KHRBuilder<'a> {
+ type Target = DisplayProperties2KHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DisplayProperties2KHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DisplayProperties2KHRBuilder<'a> {
+ #[inline]
+ pub fn display_properties(mut self, display_properties: DisplayPropertiesKHR) -> Self {
+ self.inner.display_properties = display_properties;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DisplayProperties2KHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayPlaneProperties2KHR.html>"]
+pub struct DisplayPlaneProperties2KHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub display_plane_properties: DisplayPlanePropertiesKHR,
+}
+impl ::std::default::Default for DisplayPlaneProperties2KHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ display_plane_properties: DisplayPlanePropertiesKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DisplayPlaneProperties2KHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_PLANE_PROPERTIES_2_KHR;
+}
+impl DisplayPlaneProperties2KHR {
+ pub fn builder<'a>() -> DisplayPlaneProperties2KHRBuilder<'a> {
+ DisplayPlaneProperties2KHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DisplayPlaneProperties2KHRBuilder<'a> {
+ inner: DisplayPlaneProperties2KHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DisplayPlaneProperties2KHRBuilder<'a> {
+ type Target = DisplayPlaneProperties2KHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DisplayPlaneProperties2KHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DisplayPlaneProperties2KHRBuilder<'a> {
+ #[inline]
+ pub fn display_plane_properties(
+ mut self,
+ display_plane_properties: DisplayPlanePropertiesKHR,
+ ) -> Self {
+ self.inner.display_plane_properties = display_plane_properties;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DisplayPlaneProperties2KHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayModeProperties2KHR.html>"]
+pub struct DisplayModeProperties2KHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub display_mode_properties: DisplayModePropertiesKHR,
+}
+impl ::std::default::Default for DisplayModeProperties2KHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ display_mode_properties: DisplayModePropertiesKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DisplayModeProperties2KHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_MODE_PROPERTIES_2_KHR;
+}
+impl DisplayModeProperties2KHR {
+ pub fn builder<'a>() -> DisplayModeProperties2KHRBuilder<'a> {
+ DisplayModeProperties2KHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DisplayModeProperties2KHRBuilder<'a> {
+ inner: DisplayModeProperties2KHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DisplayModeProperties2KHRBuilder<'a> {
+ type Target = DisplayModeProperties2KHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DisplayModeProperties2KHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DisplayModeProperties2KHRBuilder<'a> {
+ #[inline]
+ pub fn display_mode_properties(
+ mut self,
+ display_mode_properties: DisplayModePropertiesKHR,
+ ) -> Self {
+ self.inner.display_mode_properties = display_mode_properties;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DisplayModeProperties2KHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayPlaneInfo2KHR.html>"]
+pub struct DisplayPlaneInfo2KHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub mode: DisplayModeKHR,
+ pub plane_index: u32,
+}
+impl ::std::default::Default for DisplayPlaneInfo2KHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ mode: DisplayModeKHR::default(),
+ plane_index: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DisplayPlaneInfo2KHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_PLANE_INFO_2_KHR;
+}
+impl DisplayPlaneInfo2KHR {
+ pub fn builder<'a>() -> DisplayPlaneInfo2KHRBuilder<'a> {
+ DisplayPlaneInfo2KHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DisplayPlaneInfo2KHRBuilder<'a> {
+ inner: DisplayPlaneInfo2KHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DisplayPlaneInfo2KHRBuilder<'a> {
+ type Target = DisplayPlaneInfo2KHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DisplayPlaneInfo2KHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DisplayPlaneInfo2KHRBuilder<'a> {
+ #[inline]
+ pub fn mode(mut self, mode: DisplayModeKHR) -> Self {
+ self.inner.mode = mode;
+ self
+ }
+ #[inline]
+ pub fn plane_index(mut self, plane_index: u32) -> Self {
+ self.inner.plane_index = plane_index;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DisplayPlaneInfo2KHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayPlaneCapabilities2KHR.html>"]
+pub struct DisplayPlaneCapabilities2KHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub capabilities: DisplayPlaneCapabilitiesKHR,
+}
+impl ::std::default::Default for DisplayPlaneCapabilities2KHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ capabilities: DisplayPlaneCapabilitiesKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DisplayPlaneCapabilities2KHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::DISPLAY_PLANE_CAPABILITIES_2_KHR;
+}
+impl DisplayPlaneCapabilities2KHR {
+ pub fn builder<'a>() -> DisplayPlaneCapabilities2KHRBuilder<'a> {
+ DisplayPlaneCapabilities2KHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DisplayPlaneCapabilities2KHRBuilder<'a> {
+ inner: DisplayPlaneCapabilities2KHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DisplayPlaneCapabilities2KHRBuilder<'a> {
+ type Target = DisplayPlaneCapabilities2KHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DisplayPlaneCapabilities2KHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DisplayPlaneCapabilities2KHRBuilder<'a> {
+ #[inline]
+ pub fn capabilities(mut self, capabilities: DisplayPlaneCapabilitiesKHR) -> Self {
+ self.inner.capabilities = capabilities;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DisplayPlaneCapabilities2KHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSharedPresentSurfaceCapabilitiesKHR.html>"]
+pub struct SharedPresentSurfaceCapabilitiesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shared_present_supported_usage_flags: ImageUsageFlags,
+}
+impl ::std::default::Default for SharedPresentSurfaceCapabilitiesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shared_present_supported_usage_flags: ImageUsageFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SharedPresentSurfaceCapabilitiesKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::SHARED_PRESENT_SURFACE_CAPABILITIES_KHR;
+}
+impl SharedPresentSurfaceCapabilitiesKHR {
+ pub fn builder<'a>() -> SharedPresentSurfaceCapabilitiesKHRBuilder<'a> {
+ SharedPresentSurfaceCapabilitiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SharedPresentSurfaceCapabilitiesKHRBuilder<'a> {
+ inner: SharedPresentSurfaceCapabilitiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsSurfaceCapabilities2KHR for SharedPresentSurfaceCapabilitiesKHRBuilder<'_> {}
+unsafe impl ExtendsSurfaceCapabilities2KHR for SharedPresentSurfaceCapabilitiesKHR {}
+impl<'a> ::std::ops::Deref for SharedPresentSurfaceCapabilitiesKHRBuilder<'a> {
+ type Target = SharedPresentSurfaceCapabilitiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SharedPresentSurfaceCapabilitiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SharedPresentSurfaceCapabilitiesKHRBuilder<'a> {
+ #[inline]
+ pub fn shared_present_supported_usage_flags(
+ mut self,
+ shared_present_supported_usage_flags: ImageUsageFlags,
+ ) -> Self {
+ self.inner.shared_present_supported_usage_flags = shared_present_supported_usage_flags;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SharedPresentSurfaceCapabilitiesKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevice16BitStorageFeatures.html>"]
+pub struct PhysicalDevice16BitStorageFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub storage_buffer16_bit_access: Bool32,
+ pub uniform_and_storage_buffer16_bit_access: Bool32,
+ pub storage_push_constant16: Bool32,
+ pub storage_input_output16: Bool32,
+}
+impl ::std::default::Default for PhysicalDevice16BitStorageFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ storage_buffer16_bit_access: Bool32::default(),
+ uniform_and_storage_buffer16_bit_access: Bool32::default(),
+ storage_push_constant16: Bool32::default(),
+ storage_input_output16: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDevice16BitStorageFeatures {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES;
+}
+impl PhysicalDevice16BitStorageFeatures {
+ pub fn builder<'a>() -> PhysicalDevice16BitStorageFeaturesBuilder<'a> {
+ PhysicalDevice16BitStorageFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDevice16BitStorageFeaturesBuilder<'a> {
+ inner: PhysicalDevice16BitStorageFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevice16BitStorageFeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevice16BitStorageFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevice16BitStorageFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevice16BitStorageFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDevice16BitStorageFeaturesBuilder<'a> {
+ type Target = PhysicalDevice16BitStorageFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevice16BitStorageFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDevice16BitStorageFeaturesBuilder<'a> {
+ #[inline]
+ pub fn storage_buffer16_bit_access(mut self, storage_buffer16_bit_access: bool) -> Self {
+ self.inner.storage_buffer16_bit_access = storage_buffer16_bit_access.into();
+ self
+ }
+ #[inline]
+ pub fn uniform_and_storage_buffer16_bit_access(
+ mut self,
+ uniform_and_storage_buffer16_bit_access: bool,
+ ) -> Self {
+ self.inner.uniform_and_storage_buffer16_bit_access =
+ uniform_and_storage_buffer16_bit_access.into();
+ self
+ }
+ #[inline]
+ pub fn storage_push_constant16(mut self, storage_push_constant16: bool) -> Self {
+ self.inner.storage_push_constant16 = storage_push_constant16.into();
+ self
+ }
+ #[inline]
+ pub fn storage_input_output16(mut self, storage_input_output16: bool) -> Self {
+ self.inner.storage_input_output16 = storage_input_output16.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDevice16BitStorageFeatures {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceSubgroupProperties.html>"]
+pub struct PhysicalDeviceSubgroupProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub subgroup_size: u32,
+ pub supported_stages: ShaderStageFlags,
+ pub supported_operations: SubgroupFeatureFlags,
+ pub quad_operations_in_all_stages: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceSubgroupProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ subgroup_size: u32::default(),
+ supported_stages: ShaderStageFlags::default(),
+ supported_operations: SubgroupFeatureFlags::default(),
+ quad_operations_in_all_stages: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceSubgroupProperties {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;
+}
+impl PhysicalDeviceSubgroupProperties {
+ pub fn builder<'a>() -> PhysicalDeviceSubgroupPropertiesBuilder<'a> {
+ PhysicalDeviceSubgroupPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceSubgroupPropertiesBuilder<'a> {
+ inner: PhysicalDeviceSubgroupProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSubgroupPropertiesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSubgroupProperties {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceSubgroupPropertiesBuilder<'a> {
+ type Target = PhysicalDeviceSubgroupProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceSubgroupPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceSubgroupPropertiesBuilder<'a> {
+ #[inline]
+ pub fn subgroup_size(mut self, subgroup_size: u32) -> Self {
+ self.inner.subgroup_size = subgroup_size;
+ self
+ }
+ #[inline]
+ pub fn supported_stages(mut self, supported_stages: ShaderStageFlags) -> Self {
+ self.inner.supported_stages = supported_stages;
+ self
+ }
+ #[inline]
+ pub fn supported_operations(mut self, supported_operations: SubgroupFeatureFlags) -> Self {
+ self.inner.supported_operations = supported_operations;
+ self
+ }
+ #[inline]
+ pub fn quad_operations_in_all_stages(mut self, quad_operations_in_all_stages: bool) -> Self {
+ self.inner.quad_operations_in_all_stages = quad_operations_in_all_stages.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceSubgroupProperties {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures.html>"]
+pub struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_subgroup_extended_types: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceShaderSubgroupExtendedTypesFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shader_subgroup_extended_types: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceShaderSubgroupExtendedTypesFeatures {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES;
+}
+impl PhysicalDeviceShaderSubgroupExtendedTypesFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceShaderSubgroupExtendedTypesFeaturesBuilder<'a> {
+ PhysicalDeviceShaderSubgroupExtendedTypesFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderSubgroupExtendedTypesFeaturesBuilder<'a> {
+ inner: PhysicalDeviceShaderSubgroupExtendedTypesFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceShaderSubgroupExtendedTypesFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderSubgroupExtendedTypesFeatures {}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceShaderSubgroupExtendedTypesFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderSubgroupExtendedTypesFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderSubgroupExtendedTypesFeaturesBuilder<'a> {
+ type Target = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderSubgroupExtendedTypesFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceShaderSubgroupExtendedTypesFeaturesBuilder<'a> {
+ #[inline]
+ pub fn shader_subgroup_extended_types(mut self, shader_subgroup_extended_types: bool) -> Self {
+ self.inner.shader_subgroup_extended_types = shader_subgroup_extended_types.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceShaderSubgroupExtendedTypesFeatures {
+ self.inner
+ }
+}
+// Binding for VkBufferMemoryRequirementsInfo2 (doc link below): identifies the
+// buffer whose memory requirements are queried. #[repr(C)] keeps field order and
+// layout ABI-compatible with the C struct. NOTE(review): this file appears
+// machine-generated from the Vulkan registry — prefer regenerating over editing.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferMemoryRequirementsInfo2.html>"]
+pub struct BufferMemoryRequirementsInfo2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub buffer: Buffer,
+}
+impl ::std::default::Default for BufferMemoryRequirementsInfo2 {
+ #[inline]
+ fn default() -> Self {
+ // s_type is pre-set to this struct's tag; p_next starts as an empty chain.
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ buffer: Buffer::default(),
+ }
+ }
+}
+// Supplies the VkStructureType tag consumed by Default above.
+unsafe impl TaggedStructure for BufferMemoryRequirementsInfo2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_MEMORY_REQUIREMENTS_INFO_2;
+}
+impl BufferMemoryRequirementsInfo2 {
+ pub fn builder<'a>() -> BufferMemoryRequirementsInfo2Builder<'a> {
+ BufferMemoryRequirementsInfo2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): the builder has the same layout as the wrapped struct, so
+// (via Deref below) a &Builder can be passed where the raw struct is expected.
+// The 'a lifetime exists for setters that store borrowed pointers; no setter
+// here borrows, so it is only carried by PhantomData.
+#[repr(transparent)]
+pub struct BufferMemoryRequirementsInfo2Builder<'a> {
+ inner: BufferMemoryRequirementsInfo2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BufferMemoryRequirementsInfo2Builder<'a> {
+ type Target = BufferMemoryRequirementsInfo2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BufferMemoryRequirementsInfo2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BufferMemoryRequirementsInfo2Builder<'a> {
+ #[inline]
+ pub fn buffer(mut self, buffer: Buffer) -> Self {
+ self.inner.buffer = buffer;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BufferMemoryRequirementsInfo2 {
+ self.inner
+ }
+}
+// Binding for VkDeviceBufferMemoryRequirements (doc link below): wraps a
+// *const BufferCreateInfo describing the buffer whose requirements are queried
+// (presumably without an actual buffer object — confirm against the spec).
+// Generated binding; layout is #[repr(C)] to match the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceBufferMemoryRequirements.html>"]
+pub struct DeviceBufferMemoryRequirements {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub p_create_info: *const BufferCreateInfo,
+}
+impl ::std::default::Default for DeviceBufferMemoryRequirements {
+ #[inline]
+ fn default() -> Self {
+ // Tagged with the right s_type; both pointers start null.
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ p_create_info: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DeviceBufferMemoryRequirements {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_BUFFER_MEMORY_REQUIREMENTS;
+}
+impl DeviceBufferMemoryRequirements {
+ pub fn builder<'a>() -> DeviceBufferMemoryRequirementsBuilder<'a> {
+ DeviceBufferMemoryRequirementsBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; 'a ties the &BufferCreateInfo stored by create_info()
+// to the builder so the borrow outlives it (until build() discards lifetimes).
+#[repr(transparent)]
+pub struct DeviceBufferMemoryRequirementsBuilder<'a> {
+ inner: DeviceBufferMemoryRequirements,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DeviceBufferMemoryRequirementsBuilder<'a> {
+ type Target = DeviceBufferMemoryRequirements;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceBufferMemoryRequirementsBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceBufferMemoryRequirementsBuilder<'a> {
+ #[inline]
+ pub fn create_info(mut self, create_info: &'a BufferCreateInfo) -> Self {
+ // Reference decays to a raw *const; the 'a bound keeps the borrow alive.
+ self.inner.p_create_info = create_info;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceBufferMemoryRequirements {
+ self.inner
+ }
+}
+// Binding for VkImageMemoryRequirementsInfo2 (doc link below): identifies the
+// image whose memory requirements are queried. This struct is extensible — see
+// the ExtendsImageMemoryRequirementsInfo2 marker trait and push_next below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageMemoryRequirementsInfo2.html>"]
+pub struct ImageMemoryRequirementsInfo2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub image: Image,
+}
+impl ::std::default::Default for ImageMemoryRequirementsInfo2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ image: Image::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImageMemoryRequirementsInfo2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_MEMORY_REQUIREMENTS_INFO_2;
+}
+impl ImageMemoryRequirementsInfo2 {
+ pub fn builder<'a>() -> ImageMemoryRequirementsInfo2Builder<'a> {
+ ImageMemoryRequirementsInfo2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImageMemoryRequirementsInfo2Builder<'a> {
+ inner: ImageMemoryRequirementsInfo2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait: types implementing this may be linked into the p_next chain of
+// ImageMemoryRequirementsInfo2 via push_next below.
+pub unsafe trait ExtendsImageMemoryRequirementsInfo2 {}
+impl<'a> ::std::ops::Deref for ImageMemoryRequirementsInfo2Builder<'a> {
+ type Target = ImageMemoryRequirementsInfo2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageMemoryRequirementsInfo2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImageMemoryRequirementsInfo2Builder<'a> {
+ #[inline]
+ pub fn image(mut self, image: Image) -> Self {
+ self.inner.image = image;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsImageMemoryRequirementsInfo2>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ // Walk to the end of `next`'s own p_next chain, splice the current
+ // chain after it, then install `next` at the front; the trait bound
+ // restricts T to structs valid in this chain.
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageMemoryRequirementsInfo2 {
+ self.inner
+ }
+}
+// Binding for VkImageSparseMemoryRequirementsInfo2 (doc link below): identifies
+// the image whose sparse memory requirements are queried. Not extensible here —
+// no push_next is generated for it.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageSparseMemoryRequirementsInfo2.html>"]
+pub struct ImageSparseMemoryRequirementsInfo2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub image: Image,
+}
+impl ::std::default::Default for ImageSparseMemoryRequirementsInfo2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ image: Image::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImageSparseMemoryRequirementsInfo2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2;
+}
+impl ImageSparseMemoryRequirementsInfo2 {
+ pub fn builder<'a>() -> ImageSparseMemoryRequirementsInfo2Builder<'a> {
+ ImageSparseMemoryRequirementsInfo2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; 'a is unused here (no pointer-storing setter) and only
+// carried by PhantomData.
+#[repr(transparent)]
+pub struct ImageSparseMemoryRequirementsInfo2Builder<'a> {
+ inner: ImageSparseMemoryRequirementsInfo2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImageSparseMemoryRequirementsInfo2Builder<'a> {
+ type Target = ImageSparseMemoryRequirementsInfo2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageSparseMemoryRequirementsInfo2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImageSparseMemoryRequirementsInfo2Builder<'a> {
+ #[inline]
+ pub fn image(mut self, image: Image) -> Self {
+ self.inner.image = image;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageSparseMemoryRequirementsInfo2 {
+ self.inner
+ }
+}
+// Binding for VkDeviceImageMemoryRequirements (doc link below): wraps a
+// *const ImageCreateInfo plus a plane aspect selector for the query
+// (presumably used to query requirements per plane — confirm against the spec).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceImageMemoryRequirements.html>"]
+pub struct DeviceImageMemoryRequirements {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub p_create_info: *const ImageCreateInfo,
+ pub plane_aspect: ImageAspectFlags,
+}
+impl ::std::default::Default for DeviceImageMemoryRequirements {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ p_create_info: ::std::ptr::null(),
+ plane_aspect: ImageAspectFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DeviceImageMemoryRequirements {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_IMAGE_MEMORY_REQUIREMENTS;
+}
+impl DeviceImageMemoryRequirements {
+ pub fn builder<'a>() -> DeviceImageMemoryRequirementsBuilder<'a> {
+ DeviceImageMemoryRequirementsBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; 'a ties the &ImageCreateInfo stored by create_info()
+// to the builder so the borrow outlives it (until build() discards lifetimes).
+#[repr(transparent)]
+pub struct DeviceImageMemoryRequirementsBuilder<'a> {
+ inner: DeviceImageMemoryRequirements,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DeviceImageMemoryRequirementsBuilder<'a> {
+ type Target = DeviceImageMemoryRequirements;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceImageMemoryRequirementsBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceImageMemoryRequirementsBuilder<'a> {
+ #[inline]
+ pub fn create_info(mut self, create_info: &'a ImageCreateInfo) -> Self {
+ self.inner.p_create_info = create_info;
+ self
+ }
+ #[inline]
+ pub fn plane_aspect(mut self, plane_aspect: ImageAspectFlags) -> Self {
+ self.inner.plane_aspect = plane_aspect;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceImageMemoryRequirements {
+ self.inner
+ }
+}
+// Binding for VkMemoryRequirements2 (doc link below). Unlike the *Info2 input
+// structs above in this file, p_next is *mut c_void — the chain is writable,
+// consistent with this struct being filled in by the implementation.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryRequirements2.html>"]
+pub struct MemoryRequirements2 {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub memory_requirements: MemoryRequirements,
+}
+impl ::std::default::Default for MemoryRequirements2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ memory_requirements: MemoryRequirements::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MemoryRequirements2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_REQUIREMENTS_2;
+}
+impl MemoryRequirements2 {
+ pub fn builder<'a>() -> MemoryRequirements2Builder<'a> {
+ MemoryRequirements2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MemoryRequirements2Builder<'a> {
+ inner: MemoryRequirements2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait: types implementing this may be linked into the p_next chain of
+// MemoryRequirements2 via push_next below (e.g. MemoryDedicatedRequirements,
+// which implements it later in this file).
+pub unsafe trait ExtendsMemoryRequirements2 {}
+impl<'a> ::std::ops::Deref for MemoryRequirements2Builder<'a> {
+ type Target = MemoryRequirements2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MemoryRequirements2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MemoryRequirements2Builder<'a> {
+ #[inline]
+ pub fn memory_requirements(mut self, memory_requirements: MemoryRequirements) -> Self {
+ self.inner.memory_requirements = memory_requirements;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsMemoryRequirements2>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ // Splice the existing chain after `next`'s own chain, then put `next`
+ // at the front; the cast is *mut here because p_next is mutable.
+ let next_ptr = <*mut T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MemoryRequirements2 {
+ self.inner
+ }
+}
+// Binding for VkSparseImageMemoryRequirements2 (doc link below); p_next is
+// *mut c_void, consistent with the struct being filled by the implementation.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSparseImageMemoryRequirements2.html>"]
+pub struct SparseImageMemoryRequirements2 {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub memory_requirements: SparseImageMemoryRequirements,
+}
+impl ::std::default::Default for SparseImageMemoryRequirements2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ memory_requirements: SparseImageMemoryRequirements::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SparseImageMemoryRequirements2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::SPARSE_IMAGE_MEMORY_REQUIREMENTS_2;
+}
+impl SparseImageMemoryRequirements2 {
+ pub fn builder<'a>() -> SparseImageMemoryRequirements2Builder<'a> {
+ SparseImageMemoryRequirements2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder; 'a is unused here (no pointer-storing setter).
+#[repr(transparent)]
+pub struct SparseImageMemoryRequirements2Builder<'a> {
+ inner: SparseImageMemoryRequirements2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SparseImageMemoryRequirements2Builder<'a> {
+ type Target = SparseImageMemoryRequirements2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SparseImageMemoryRequirements2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SparseImageMemoryRequirements2Builder<'a> {
+ #[inline]
+ pub fn memory_requirements(
+ mut self,
+ memory_requirements: SparseImageMemoryRequirements,
+ ) -> Self {
+ self.inner.memory_requirements = memory_requirements;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SparseImageMemoryRequirements2 {
+ self.inner
+ }
+}
+// Binding for VkPhysicalDevicePointClippingProperties (doc link below). It (and
+// its builder) implement ExtendsPhysicalDeviceProperties2, so it can be pushed
+// onto a PhysicalDeviceProperties2 p_next chain.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePointClippingProperties.html>"]
+pub struct PhysicalDevicePointClippingProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub point_clipping_behavior: PointClippingBehavior,
+}
+impl ::std::default::Default for PhysicalDevicePointClippingProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ point_clipping_behavior: PointClippingBehavior::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDevicePointClippingProperties {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES;
+}
+impl PhysicalDevicePointClippingProperties {
+ pub fn builder<'a>() -> PhysicalDevicePointClippingPropertiesBuilder<'a> {
+ PhysicalDevicePointClippingPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDevicePointClippingPropertiesBuilder<'a> {
+ inner: PhysicalDevicePointClippingProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Both the builder and the raw struct are accepted by
+// PhysicalDeviceProperties2Builder::push_next.
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePointClippingPropertiesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePointClippingProperties {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePointClippingPropertiesBuilder<'a> {
+ type Target = PhysicalDevicePointClippingProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePointClippingPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDevicePointClippingPropertiesBuilder<'a> {
+ #[inline]
+ pub fn point_clipping_behavior(
+ mut self,
+ point_clipping_behavior: PointClippingBehavior,
+ ) -> Self {
+ self.inner.point_clipping_behavior = point_clipping_behavior;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDevicePointClippingProperties {
+ self.inner
+ }
+}
+// Binding for VkMemoryDedicatedRequirements (doc link below): two Bool32 flags
+// reporting dedicated-allocation preference/requirement. Implements
+// ExtendsMemoryRequirements2, so it is pushed onto a MemoryRequirements2 chain.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryDedicatedRequirements.html>"]
+pub struct MemoryDedicatedRequirements {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub prefers_dedicated_allocation: Bool32,
+ pub requires_dedicated_allocation: Bool32,
+}
+impl ::std::default::Default for MemoryDedicatedRequirements {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ prefers_dedicated_allocation: Bool32::default(),
+ requires_dedicated_allocation: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MemoryDedicatedRequirements {
+ const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_DEDICATED_REQUIREMENTS;
+}
+impl MemoryDedicatedRequirements {
+ pub fn builder<'a>() -> MemoryDedicatedRequirementsBuilder<'a> {
+ MemoryDedicatedRequirementsBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MemoryDedicatedRequirementsBuilder<'a> {
+ inner: MemoryDedicatedRequirements,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsMemoryRequirements2 for MemoryDedicatedRequirementsBuilder<'_> {}
+unsafe impl ExtendsMemoryRequirements2 for MemoryDedicatedRequirements {}
+impl<'a> ::std::ops::Deref for MemoryDedicatedRequirementsBuilder<'a> {
+ type Target = MemoryDedicatedRequirements;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MemoryDedicatedRequirementsBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MemoryDedicatedRequirementsBuilder<'a> {
+ #[inline]
+ pub fn prefers_dedicated_allocation(mut self, prefers_dedicated_allocation: bool) -> Self {
+ // bool -> Bool32 conversion via Into keeps the FFI field a u32.
+ self.inner.prefers_dedicated_allocation = prefers_dedicated_allocation.into();
+ self
+ }
+ #[inline]
+ pub fn requires_dedicated_allocation(mut self, requires_dedicated_allocation: bool) -> Self {
+ self.inner.requires_dedicated_allocation = requires_dedicated_allocation.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MemoryDedicatedRequirements {
+ self.inner
+ }
+}
+// Binding for VkMemoryDedicatedAllocateInfo (doc link below): names the image
+// or buffer a dedicated allocation is for. Implements ExtendsMemoryAllocateInfo,
+// so it is pushed onto a MemoryAllocateInfo p_next chain.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryDedicatedAllocateInfo.html>"]
+pub struct MemoryDedicatedAllocateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub image: Image,
+ pub buffer: Buffer,
+}
+impl ::std::default::Default for MemoryDedicatedAllocateInfo {
+ #[inline]
+ fn default() -> Self {
+ // Both handles default to null handles; the caller sets one of them.
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ image: Image::default(),
+ buffer: Buffer::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MemoryDedicatedAllocateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_DEDICATED_ALLOCATE_INFO;
+}
+impl MemoryDedicatedAllocateInfo {
+ pub fn builder<'a>() -> MemoryDedicatedAllocateInfoBuilder<'a> {
+ MemoryDedicatedAllocateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MemoryDedicatedAllocateInfoBuilder<'a> {
+ inner: MemoryDedicatedAllocateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsMemoryAllocateInfo for MemoryDedicatedAllocateInfoBuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for MemoryDedicatedAllocateInfo {}
+impl<'a> ::std::ops::Deref for MemoryDedicatedAllocateInfoBuilder<'a> {
+ type Target = MemoryDedicatedAllocateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MemoryDedicatedAllocateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MemoryDedicatedAllocateInfoBuilder<'a> {
+ #[inline]
+ pub fn image(mut self, image: Image) -> Self {
+ self.inner.image = image;
+ self
+ }
+ #[inline]
+ pub fn buffer(mut self, buffer: Buffer) -> Self {
+ self.inner.buffer = buffer;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MemoryDedicatedAllocateInfo {
+ self.inner
+ }
+}
+// Binding for VkImageViewUsageCreateInfo (doc link below): overrides the usage
+// flags for an image view. Implements ExtendsImageViewCreateInfo, so it is
+// pushed onto an ImageViewCreateInfo p_next chain.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageViewUsageCreateInfo.html>"]
+pub struct ImageViewUsageCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub usage: ImageUsageFlags,
+}
+impl ::std::default::Default for ImageViewUsageCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ usage: ImageUsageFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImageViewUsageCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_USAGE_CREATE_INFO;
+}
+impl ImageViewUsageCreateInfo {
+ pub fn builder<'a>() -> ImageViewUsageCreateInfoBuilder<'a> {
+ ImageViewUsageCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImageViewUsageCreateInfoBuilder<'a> {
+ inner: ImageViewUsageCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsImageViewCreateInfo for ImageViewUsageCreateInfoBuilder<'_> {}
+unsafe impl ExtendsImageViewCreateInfo for ImageViewUsageCreateInfo {}
+impl<'a> ::std::ops::Deref for ImageViewUsageCreateInfoBuilder<'a> {
+ type Target = ImageViewUsageCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageViewUsageCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImageViewUsageCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn usage(mut self, usage: ImageUsageFlags) -> Self {
+ self.inner.usage = usage;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageViewUsageCreateInfo {
+ self.inner
+ }
+}
+// Binding for VkPipelineTessellationDomainOriginStateCreateInfo (doc link
+// below): selects the tessellation domain origin. Implements
+// ExtendsPipelineTessellationStateCreateInfo, so it is pushed onto a
+// PipelineTessellationStateCreateInfo p_next chain.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineTessellationDomainOriginStateCreateInfo.html>"]
+pub struct PipelineTessellationDomainOriginStateCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub domain_origin: TessellationDomainOrigin,
+}
+impl ::std::default::Default for PipelineTessellationDomainOriginStateCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ domain_origin: TessellationDomainOrigin::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineTessellationDomainOriginStateCreateInfo {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO;
+}
+impl PipelineTessellationDomainOriginStateCreateInfo {
+ pub fn builder<'a>() -> PipelineTessellationDomainOriginStateCreateInfoBuilder<'a> {
+ PipelineTessellationDomainOriginStateCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineTessellationDomainOriginStateCreateInfoBuilder<'a> {
+ inner: PipelineTessellationDomainOriginStateCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPipelineTessellationStateCreateInfo
+ for PipelineTessellationDomainOriginStateCreateInfoBuilder<'_>
+{
+}
+unsafe impl ExtendsPipelineTessellationStateCreateInfo
+ for PipelineTessellationDomainOriginStateCreateInfo
+{
+}
+impl<'a> ::std::ops::Deref for PipelineTessellationDomainOriginStateCreateInfoBuilder<'a> {
+ type Target = PipelineTessellationDomainOriginStateCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineTessellationDomainOriginStateCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineTessellationDomainOriginStateCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn domain_origin(mut self, domain_origin: TessellationDomainOrigin) -> Self {
+ self.inner.domain_origin = domain_origin;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineTessellationDomainOriginStateCreateInfo {
+ self.inner
+ }
+}
+// Binding for VkSamplerYcbcrConversionInfo (doc link below): attaches a
+// SamplerYcbcrConversion handle. Implements both ExtendsSamplerCreateInfo and
+// ExtendsImageViewCreateInfo, so it can extend either create-info chain.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSamplerYcbcrConversionInfo.html>"]
+pub struct SamplerYcbcrConversionInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub conversion: SamplerYcbcrConversion,
+}
+impl ::std::default::Default for SamplerYcbcrConversionInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ conversion: SamplerYcbcrConversion::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SamplerYcbcrConversionInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_YCBCR_CONVERSION_INFO;
+}
+impl SamplerYcbcrConversionInfo {
+ pub fn builder<'a>() -> SamplerYcbcrConversionInfoBuilder<'a> {
+ SamplerYcbcrConversionInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SamplerYcbcrConversionInfoBuilder<'a> {
+ inner: SamplerYcbcrConversionInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsSamplerCreateInfo for SamplerYcbcrConversionInfoBuilder<'_> {}
+unsafe impl ExtendsSamplerCreateInfo for SamplerYcbcrConversionInfo {}
+unsafe impl ExtendsImageViewCreateInfo for SamplerYcbcrConversionInfoBuilder<'_> {}
+unsafe impl ExtendsImageViewCreateInfo for SamplerYcbcrConversionInfo {}
+impl<'a> ::std::ops::Deref for SamplerYcbcrConversionInfoBuilder<'a> {
+ type Target = SamplerYcbcrConversionInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SamplerYcbcrConversionInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SamplerYcbcrConversionInfoBuilder<'a> {
+ #[inline]
+ pub fn conversion(mut self, conversion: SamplerYcbcrConversion) -> Self {
+ self.inner.conversion = conversion;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SamplerYcbcrConversionInfo {
+ self.inner
+ }
+}
+// Binding for VkSamplerYcbcrConversionCreateInfo (doc link below): parameters
+// for creating a sampler Y'CbCr conversion (format, model, range, component
+// swizzle, chroma offsets/filter, and an explicit-reconstruction flag).
+// Extensible via the ExtendsSamplerYcbcrConversionCreateInfo trait + push_next.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSamplerYcbcrConversionCreateInfo.html>"]
+pub struct SamplerYcbcrConversionCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub format: Format,
+ pub ycbcr_model: SamplerYcbcrModelConversion,
+ pub ycbcr_range: SamplerYcbcrRange,
+ pub components: ComponentMapping,
+ pub x_chroma_offset: ChromaLocation,
+ pub y_chroma_offset: ChromaLocation,
+ pub chroma_filter: Filter,
+ pub force_explicit_reconstruction: Bool32,
+}
+impl ::std::default::Default for SamplerYcbcrConversionCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ format: Format::default(),
+ ycbcr_model: SamplerYcbcrModelConversion::default(),
+ ycbcr_range: SamplerYcbcrRange::default(),
+ components: ComponentMapping::default(),
+ x_chroma_offset: ChromaLocation::default(),
+ y_chroma_offset: ChromaLocation::default(),
+ chroma_filter: Filter::default(),
+ force_explicit_reconstruction: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SamplerYcbcrConversionCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
+}
+impl SamplerYcbcrConversionCreateInfo {
+ pub fn builder<'a>() -> SamplerYcbcrConversionCreateInfoBuilder<'a> {
+ SamplerYcbcrConversionCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SamplerYcbcrConversionCreateInfoBuilder<'a> {
+ inner: SamplerYcbcrConversionCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait: types implementing this may be linked into the p_next chain of
+// SamplerYcbcrConversionCreateInfo via push_next below.
+pub unsafe trait ExtendsSamplerYcbcrConversionCreateInfo {}
+impl<'a> ::std::ops::Deref for SamplerYcbcrConversionCreateInfoBuilder<'a> {
+ type Target = SamplerYcbcrConversionCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SamplerYcbcrConversionCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SamplerYcbcrConversionCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn format(mut self, format: Format) -> Self {
+ self.inner.format = format;
+ self
+ }
+ #[inline]
+ pub fn ycbcr_model(mut self, ycbcr_model: SamplerYcbcrModelConversion) -> Self {
+ self.inner.ycbcr_model = ycbcr_model;
+ self
+ }
+ #[inline]
+ pub fn ycbcr_range(mut self, ycbcr_range: SamplerYcbcrRange) -> Self {
+ self.inner.ycbcr_range = ycbcr_range;
+ self
+ }
+ #[inline]
+ pub fn components(mut self, components: ComponentMapping) -> Self {
+ self.inner.components = components;
+ self
+ }
+ #[inline]
+ pub fn x_chroma_offset(mut self, x_chroma_offset: ChromaLocation) -> Self {
+ self.inner.x_chroma_offset = x_chroma_offset;
+ self
+ }
+ #[inline]
+ pub fn y_chroma_offset(mut self, y_chroma_offset: ChromaLocation) -> Self {
+ self.inner.y_chroma_offset = y_chroma_offset;
+ self
+ }
+ #[inline]
+ pub fn chroma_filter(mut self, chroma_filter: Filter) -> Self {
+ self.inner.chroma_filter = chroma_filter;
+ self
+ }
+ #[inline]
+ pub fn force_explicit_reconstruction(mut self, force_explicit_reconstruction: bool) -> Self {
+ // bool -> Bool32 conversion via Into keeps the FFI field a u32.
+ self.inner.force_explicit_reconstruction = force_explicit_reconstruction.into();
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsSamplerYcbcrConversionCreateInfo>(
+ mut self,
+ next: &'a mut T,
+ ) -> Self {
+ unsafe {
+ // Walk to the end of `next`'s own p_next chain, splice the current
+ // chain after it, then install `next` at the front.
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SamplerYcbcrConversionCreateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBindImagePlaneMemoryInfo.html>"]
+pub struct BindImagePlaneMemoryInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub plane_aspect: ImageAspectFlags,
+}
+impl ::std::default::Default for BindImagePlaneMemoryInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ plane_aspect: ImageAspectFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for BindImagePlaneMemoryInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::BIND_IMAGE_PLANE_MEMORY_INFO;
+}
+impl BindImagePlaneMemoryInfo {
+ pub fn builder<'a>() -> BindImagePlaneMemoryInfoBuilder<'a> {
+ BindImagePlaneMemoryInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct BindImagePlaneMemoryInfoBuilder<'a> {
+ inner: BindImagePlaneMemoryInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsBindImageMemoryInfo for BindImagePlaneMemoryInfoBuilder<'_> {}
+unsafe impl ExtendsBindImageMemoryInfo for BindImagePlaneMemoryInfo {}
+impl<'a> ::std::ops::Deref for BindImagePlaneMemoryInfoBuilder<'a> {
+ type Target = BindImagePlaneMemoryInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BindImagePlaneMemoryInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BindImagePlaneMemoryInfoBuilder<'a> {
+ #[inline]
+ pub fn plane_aspect(mut self, plane_aspect: ImageAspectFlags) -> Self {
+ self.inner.plane_aspect = plane_aspect;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BindImagePlaneMemoryInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImagePlaneMemoryRequirementsInfo.html>"]
+pub struct ImagePlaneMemoryRequirementsInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub plane_aspect: ImageAspectFlags,
+}
+impl ::std::default::Default for ImagePlaneMemoryRequirementsInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ plane_aspect: ImageAspectFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImagePlaneMemoryRequirementsInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO;
+}
+impl ImagePlaneMemoryRequirementsInfo {
+ pub fn builder<'a>() -> ImagePlaneMemoryRequirementsInfoBuilder<'a> {
+ ImagePlaneMemoryRequirementsInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImagePlaneMemoryRequirementsInfoBuilder<'a> {
+ inner: ImagePlaneMemoryRequirementsInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsImageMemoryRequirementsInfo2 for ImagePlaneMemoryRequirementsInfoBuilder<'_> {}
+unsafe impl ExtendsImageMemoryRequirementsInfo2 for ImagePlaneMemoryRequirementsInfo {}
+impl<'a> ::std::ops::Deref for ImagePlaneMemoryRequirementsInfoBuilder<'a> {
+ type Target = ImagePlaneMemoryRequirementsInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImagePlaneMemoryRequirementsInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImagePlaneMemoryRequirementsInfoBuilder<'a> {
+ #[inline]
+ pub fn plane_aspect(mut self, plane_aspect: ImageAspectFlags) -> Self {
+ self.inner.plane_aspect = plane_aspect;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImagePlaneMemoryRequirementsInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceSamplerYcbcrConversionFeatures.html>"]
+pub struct PhysicalDeviceSamplerYcbcrConversionFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub sampler_ycbcr_conversion: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceSamplerYcbcrConversionFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ sampler_ycbcr_conversion: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceSamplerYcbcrConversionFeatures {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;
+}
+impl PhysicalDeviceSamplerYcbcrConversionFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder<'a> {
+ PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder<'a> {
+ inner: PhysicalDeviceSamplerYcbcrConversionFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSamplerYcbcrConversionFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSamplerYcbcrConversionFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder<'a> {
+ type Target = PhysicalDeviceSamplerYcbcrConversionFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceSamplerYcbcrConversionFeaturesBuilder<'a> {
+ #[inline]
+ pub fn sampler_ycbcr_conversion(mut self, sampler_ycbcr_conversion: bool) -> Self {
+ self.inner.sampler_ycbcr_conversion = sampler_ycbcr_conversion.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceSamplerYcbcrConversionFeatures {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSamplerYcbcrConversionImageFormatProperties.html>"]
+pub struct SamplerYcbcrConversionImageFormatProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub combined_image_sampler_descriptor_count: u32,
+}
+impl ::std::default::Default for SamplerYcbcrConversionImageFormatProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ combined_image_sampler_descriptor_count: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SamplerYcbcrConversionImageFormatProperties {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES;
+}
+impl SamplerYcbcrConversionImageFormatProperties {
+ pub fn builder<'a>() -> SamplerYcbcrConversionImageFormatPropertiesBuilder<'a> {
+ SamplerYcbcrConversionImageFormatPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SamplerYcbcrConversionImageFormatPropertiesBuilder<'a> {
+ inner: SamplerYcbcrConversionImageFormatProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsImageFormatProperties2
+ for SamplerYcbcrConversionImageFormatPropertiesBuilder<'_>
+{
+}
+unsafe impl ExtendsImageFormatProperties2 for SamplerYcbcrConversionImageFormatProperties {}
+impl<'a> ::std::ops::Deref for SamplerYcbcrConversionImageFormatPropertiesBuilder<'a> {
+ type Target = SamplerYcbcrConversionImageFormatProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SamplerYcbcrConversionImageFormatPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SamplerYcbcrConversionImageFormatPropertiesBuilder<'a> {
+ #[inline]
+ pub fn combined_image_sampler_descriptor_count(
+ mut self,
+ combined_image_sampler_descriptor_count: u32,
+ ) -> Self {
+ self.inner.combined_image_sampler_descriptor_count =
+ combined_image_sampler_descriptor_count;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SamplerYcbcrConversionImageFormatProperties {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkTextureLODGatherFormatPropertiesAMD.html>"]
+pub struct TextureLODGatherFormatPropertiesAMD {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub supports_texture_gather_lod_bias_amd: Bool32,
+}
+impl ::std::default::Default for TextureLODGatherFormatPropertiesAMD {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ supports_texture_gather_lod_bias_amd: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for TextureLODGatherFormatPropertiesAMD {
+ const STRUCTURE_TYPE: StructureType = StructureType::TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD;
+}
+impl TextureLODGatherFormatPropertiesAMD {
+ pub fn builder<'a>() -> TextureLODGatherFormatPropertiesAMDBuilder<'a> {
+ TextureLODGatherFormatPropertiesAMDBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct TextureLODGatherFormatPropertiesAMDBuilder<'a> {
+ inner: TextureLODGatherFormatPropertiesAMD,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsImageFormatProperties2 for TextureLODGatherFormatPropertiesAMDBuilder<'_> {}
+unsafe impl ExtendsImageFormatProperties2 for TextureLODGatherFormatPropertiesAMD {}
+impl<'a> ::std::ops::Deref for TextureLODGatherFormatPropertiesAMDBuilder<'a> {
+ type Target = TextureLODGatherFormatPropertiesAMD;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for TextureLODGatherFormatPropertiesAMDBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> TextureLODGatherFormatPropertiesAMDBuilder<'a> {
+ #[inline]
+ pub fn supports_texture_gather_lod_bias_amd(
+ mut self,
+ supports_texture_gather_lod_bias_amd: bool,
+ ) -> Self {
+ self.inner.supports_texture_gather_lod_bias_amd =
+ supports_texture_gather_lod_bias_amd.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> TextureLODGatherFormatPropertiesAMD {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkConditionalRenderingBeginInfoEXT.html>"]
+pub struct ConditionalRenderingBeginInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub buffer: Buffer,
+ pub offset: DeviceSize,
+ pub flags: ConditionalRenderingFlagsEXT,
+}
+impl ::std::default::Default for ConditionalRenderingBeginInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ buffer: Buffer::default(),
+ offset: DeviceSize::default(),
+ flags: ConditionalRenderingFlagsEXT::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ConditionalRenderingBeginInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::CONDITIONAL_RENDERING_BEGIN_INFO_EXT;
+}
+impl ConditionalRenderingBeginInfoEXT {
+ pub fn builder<'a>() -> ConditionalRenderingBeginInfoEXTBuilder<'a> {
+ ConditionalRenderingBeginInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ConditionalRenderingBeginInfoEXTBuilder<'a> {
+ inner: ConditionalRenderingBeginInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ConditionalRenderingBeginInfoEXTBuilder<'a> {
+ type Target = ConditionalRenderingBeginInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ConditionalRenderingBeginInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ConditionalRenderingBeginInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn buffer(mut self, buffer: Buffer) -> Self {
+ self.inner.buffer = buffer;
+ self
+ }
+ #[inline]
+ pub fn offset(mut self, offset: DeviceSize) -> Self {
+ self.inner.offset = offset;
+ self
+ }
+ #[inline]
+ pub fn flags(mut self, flags: ConditionalRenderingFlagsEXT) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ConditionalRenderingBeginInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkProtectedSubmitInfo.html>"]
+pub struct ProtectedSubmitInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub protected_submit: Bool32,
+}
+impl ::std::default::Default for ProtectedSubmitInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ protected_submit: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ProtectedSubmitInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::PROTECTED_SUBMIT_INFO;
+}
+impl ProtectedSubmitInfo {
+ pub fn builder<'a>() -> ProtectedSubmitInfoBuilder<'a> {
+ ProtectedSubmitInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ProtectedSubmitInfoBuilder<'a> {
+ inner: ProtectedSubmitInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsSubmitInfo for ProtectedSubmitInfoBuilder<'_> {}
+unsafe impl ExtendsSubmitInfo for ProtectedSubmitInfo {}
+impl<'a> ::std::ops::Deref for ProtectedSubmitInfoBuilder<'a> {
+ type Target = ProtectedSubmitInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ProtectedSubmitInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ProtectedSubmitInfoBuilder<'a> {
+ #[inline]
+ pub fn protected_submit(mut self, protected_submit: bool) -> Self {
+ self.inner.protected_submit = protected_submit.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ProtectedSubmitInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceProtectedMemoryFeatures.html>"]
+pub struct PhysicalDeviceProtectedMemoryFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub protected_memory: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceProtectedMemoryFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ protected_memory: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceProtectedMemoryFeatures {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES;
+}
+impl PhysicalDeviceProtectedMemoryFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceProtectedMemoryFeaturesBuilder<'a> {
+ PhysicalDeviceProtectedMemoryFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceProtectedMemoryFeaturesBuilder<'a> {
+ inner: PhysicalDeviceProtectedMemoryFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceProtectedMemoryFeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceProtectedMemoryFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceProtectedMemoryFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceProtectedMemoryFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceProtectedMemoryFeaturesBuilder<'a> {
+ type Target = PhysicalDeviceProtectedMemoryFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceProtectedMemoryFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceProtectedMemoryFeaturesBuilder<'a> {
+ #[inline]
+ pub fn protected_memory(mut self, protected_memory: bool) -> Self {
+ self.inner.protected_memory = protected_memory.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceProtectedMemoryFeatures {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceProtectedMemoryProperties.html>"]
+pub struct PhysicalDeviceProtectedMemoryProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub protected_no_fault: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceProtectedMemoryProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ protected_no_fault: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceProtectedMemoryProperties {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES;
+}
+impl PhysicalDeviceProtectedMemoryProperties {
+ pub fn builder<'a>() -> PhysicalDeviceProtectedMemoryPropertiesBuilder<'a> {
+ PhysicalDeviceProtectedMemoryPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceProtectedMemoryPropertiesBuilder<'a> {
+ inner: PhysicalDeviceProtectedMemoryProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceProtectedMemoryPropertiesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceProtectedMemoryProperties {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceProtectedMemoryPropertiesBuilder<'a> {
+ type Target = PhysicalDeviceProtectedMemoryProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceProtectedMemoryPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceProtectedMemoryPropertiesBuilder<'a> {
+ #[inline]
+ pub fn protected_no_fault(mut self, protected_no_fault: bool) -> Self {
+ self.inner.protected_no_fault = protected_no_fault.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceProtectedMemoryProperties {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceQueueInfo2.html>"]
+pub struct DeviceQueueInfo2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: DeviceQueueCreateFlags,
+ pub queue_family_index: u32,
+ pub queue_index: u32,
+}
+impl ::std::default::Default for DeviceQueueInfo2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: DeviceQueueCreateFlags::default(),
+ queue_family_index: u32::default(),
+ queue_index: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DeviceQueueInfo2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_QUEUE_INFO_2;
+}
+impl DeviceQueueInfo2 {
+ pub fn builder<'a>() -> DeviceQueueInfo2Builder<'a> {
+ DeviceQueueInfo2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DeviceQueueInfo2Builder<'a> {
+ inner: DeviceQueueInfo2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DeviceQueueInfo2Builder<'a> {
+ type Target = DeviceQueueInfo2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceQueueInfo2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceQueueInfo2Builder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: DeviceQueueCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn queue_family_index(mut self, queue_family_index: u32) -> Self {
+ self.inner.queue_family_index = queue_family_index;
+ self
+ }
+ #[inline]
+ pub fn queue_index(mut self, queue_index: u32) -> Self {
+ self.inner.queue_index = queue_index;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceQueueInfo2 {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCoverageToColorStateCreateInfoNV.html>"]
+pub struct PipelineCoverageToColorStateCreateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: PipelineCoverageToColorStateCreateFlagsNV,
+ pub coverage_to_color_enable: Bool32,
+ pub coverage_to_color_location: u32,
+}
+impl ::std::default::Default for PipelineCoverageToColorStateCreateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: PipelineCoverageToColorStateCreateFlagsNV::default(),
+ coverage_to_color_enable: Bool32::default(),
+ coverage_to_color_location: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineCoverageToColorStateCreateInfoNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV;
+}
+impl PipelineCoverageToColorStateCreateInfoNV {
+ pub fn builder<'a>() -> PipelineCoverageToColorStateCreateInfoNVBuilder<'a> {
+ PipelineCoverageToColorStateCreateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineCoverageToColorStateCreateInfoNVBuilder<'a> {
+ inner: PipelineCoverageToColorStateCreateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPipelineMultisampleStateCreateInfo
+ for PipelineCoverageToColorStateCreateInfoNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPipelineMultisampleStateCreateInfo for PipelineCoverageToColorStateCreateInfoNV {}
+impl<'a> ::std::ops::Deref for PipelineCoverageToColorStateCreateInfoNVBuilder<'a> {
+ type Target = PipelineCoverageToColorStateCreateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineCoverageToColorStateCreateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineCoverageToColorStateCreateInfoNVBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: PipelineCoverageToColorStateCreateFlagsNV) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn coverage_to_color_enable(mut self, coverage_to_color_enable: bool) -> Self {
+ self.inner.coverage_to_color_enable = coverage_to_color_enable.into();
+ self
+ }
+ #[inline]
+ pub fn coverage_to_color_location(mut self, coverage_to_color_location: u32) -> Self {
+ self.inner.coverage_to_color_location = coverage_to_color_location;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineCoverageToColorStateCreateInfoNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceSamplerFilterMinmaxProperties.html>"]
+pub struct PhysicalDeviceSamplerFilterMinmaxProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub filter_minmax_single_component_formats: Bool32,
+ pub filter_minmax_image_component_mapping: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceSamplerFilterMinmaxProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ filter_minmax_single_component_formats: Bool32::default(),
+ filter_minmax_image_component_mapping: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceSamplerFilterMinmaxProperties {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES;
+}
+impl PhysicalDeviceSamplerFilterMinmaxProperties {
+ pub fn builder<'a>() -> PhysicalDeviceSamplerFilterMinmaxPropertiesBuilder<'a> {
+ PhysicalDeviceSamplerFilterMinmaxPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceSamplerFilterMinmaxPropertiesBuilder<'a> {
+ inner: PhysicalDeviceSamplerFilterMinmaxProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceSamplerFilterMinmaxPropertiesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSamplerFilterMinmaxProperties {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceSamplerFilterMinmaxPropertiesBuilder<'a> {
+ type Target = PhysicalDeviceSamplerFilterMinmaxProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceSamplerFilterMinmaxPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceSamplerFilterMinmaxPropertiesBuilder<'a> {
+ #[inline]
+ pub fn filter_minmax_single_component_formats(
+ mut self,
+ filter_minmax_single_component_formats: bool,
+ ) -> Self {
+ self.inner.filter_minmax_single_component_formats =
+ filter_minmax_single_component_formats.into();
+ self
+ }
+ #[inline]
+ pub fn filter_minmax_image_component_mapping(
+ mut self,
+ filter_minmax_image_component_mapping: bool,
+ ) -> Self {
+ self.inner.filter_minmax_image_component_mapping =
+ filter_minmax_image_component_mapping.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceSamplerFilterMinmaxProperties {
+ self.inner
+ }
+}
/// <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSampleLocationEXT.html>
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
pub struct SampleLocationEXT {
    pub x: f32,
    pub y: f32,
}
impl SampleLocationEXT {
    /// Starts a builder seeded from `Self::default()`.
    pub fn builder<'a>() -> SampleLocationEXTBuilder<'a> {
        let inner = Self::default();
        SampleLocationEXTBuilder {
            inner,
            marker: ::std::marker::PhantomData,
        }
    }
}
/// Lifetime-carrying builder for [`SampleLocationEXT`].
#[repr(transparent)]
pub struct SampleLocationEXTBuilder<'a> {
    inner: SampleLocationEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for SampleLocationEXTBuilder<'a> {
    type Target = SampleLocationEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for SampleLocationEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> SampleLocationEXTBuilder<'a> {
    /// Sets the horizontal coordinate.
    #[inline]
    pub fn x(mut self, x: f32) -> Self {
        self.inner = SampleLocationEXT { x, ..self.inner };
        self
    }
    /// Sets the vertical coordinate.
    #[inline]
    pub fn y(mut self, y: f32) -> Self {
        self.inner = SampleLocationEXT { y, ..self.inner };
        self
    }
    /// Discards all lifetime information and yields the raw Vulkan struct.
    /// Builders deref to their target struct, so an owned value is only
    /// needed when the struct itself must be stored or returned.
    pub fn build(self) -> SampleLocationEXT {
        let SampleLocationEXTBuilder { inner, .. } = self;
        inner
    }
}
+// FFI mirror of `VkSampleLocationsInfoEXT`: describes a custom grid of
+// per-pixel sample locations. `#[repr(C)]` keeps the field layout identical
+// to the C struct so it can be passed straight to the Vulkan driver.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSampleLocationsInfoEXT.html>"]
+pub struct SampleLocationsInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub sample_locations_per_pixel: SampleCountFlags,
+    pub sample_location_grid_size: Extent2D,
+    // Length of the array pointed to by `p_sample_locations`.
+    pub sample_locations_count: u32,
+    pub p_sample_locations: *const SampleLocationEXT,
+}
+impl ::std::default::Default for SampleLocationsInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        // Zero/null every field except `s_type`, which must always carry the
+        // struct's Vulkan type tag.
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            sample_locations_per_pixel: SampleCountFlags::default(),
+            sample_location_grid_size: Extent2D::default(),
+            sample_locations_count: u32::default(),
+            p_sample_locations: ::std::ptr::null(),
+        }
+    }
+}
+// Associates the struct with its `VkStructureType` discriminant.
+unsafe impl TaggedStructure for SampleLocationsInfoEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::SAMPLE_LOCATIONS_INFO_EXT;
+}
+impl SampleLocationsInfoEXT {
+    pub fn builder<'a>() -> SampleLocationsInfoEXTBuilder<'a> {
+        SampleLocationsInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// `#[repr(transparent)]` guarantees the builder has the same layout as the
+// wrapped struct; `PhantomData<&'a ()>` ties the builder to the lifetime of
+// any borrowed slices stored as raw pointers.
+#[repr(transparent)]
+pub struct SampleLocationsInfoEXTBuilder<'a> {
+    inner: SampleLocationsInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marks the structs that may legally appear in this type's `p_next` chain
+// position (it extends the image memory barrier structs).
+unsafe impl ExtendsImageMemoryBarrier for SampleLocationsInfoEXTBuilder<'_> {}
+unsafe impl ExtendsImageMemoryBarrier for SampleLocationsInfoEXT {}
+unsafe impl ExtendsImageMemoryBarrier2 for SampleLocationsInfoEXTBuilder<'_> {}
+unsafe impl ExtendsImageMemoryBarrier2 for SampleLocationsInfoEXT {}
+impl<'a> ::std::ops::Deref for SampleLocationsInfoEXTBuilder<'a> {
+    type Target = SampleLocationsInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for SampleLocationsInfoEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> SampleLocationsInfoEXTBuilder<'a> {
+    #[inline]
+    pub fn sample_locations_per_pixel(
+        mut self,
+        sample_locations_per_pixel: SampleCountFlags,
+    ) -> Self {
+        self.inner.sample_locations_per_pixel = sample_locations_per_pixel;
+        self
+    }
+    #[inline]
+    pub fn sample_location_grid_size(mut self, sample_location_grid_size: Extent2D) -> Self {
+        self.inner.sample_location_grid_size = sample_location_grid_size;
+        self
+    }
+    // Stores the slice as a raw (len, ptr) pair; the `'a` lifetime on the
+    // builder keeps the borrow alive while the pointer is held.
+    #[inline]
+    pub fn sample_locations(mut self, sample_locations: &'a [SampleLocationEXT]) -> Self {
+        self.inner.sample_locations_count = sample_locations.len() as _;
+        self.inner.p_sample_locations = sample_locations.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> SampleLocationsInfoEXT {
+        self.inner
+    }
+}
+// FFI mirror of `VkAttachmentSampleLocationsEXT`: pairs an attachment index
+// with the sample-locations layout to use for it. No `s_type`/`p_next` here,
+// so `Default` can simply be derived.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAttachmentSampleLocationsEXT.html>"]
+pub struct AttachmentSampleLocationsEXT {
+    pub attachment_index: u32,
+    pub sample_locations_info: SampleLocationsInfoEXT,
+}
+impl AttachmentSampleLocationsEXT {
+    pub fn builder<'a>() -> AttachmentSampleLocationsEXTBuilder<'a> {
+        AttachmentSampleLocationsEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Transparent wrapper: same layout as the inner struct, plus a lifetime
+// marker for borrow tracking in builder chains.
+#[repr(transparent)]
+pub struct AttachmentSampleLocationsEXTBuilder<'a> {
+    inner: AttachmentSampleLocationsEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AttachmentSampleLocationsEXTBuilder<'a> {
+    type Target = AttachmentSampleLocationsEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for AttachmentSampleLocationsEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> AttachmentSampleLocationsEXTBuilder<'a> {
+    #[inline]
+    pub fn attachment_index(mut self, attachment_index: u32) -> Self {
+        self.inner.attachment_index = attachment_index;
+        self
+    }
+    #[inline]
+    pub fn sample_locations_info(mut self, sample_locations_info: SampleLocationsInfoEXT) -> Self {
+        self.inner.sample_locations_info = sample_locations_info;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> AttachmentSampleLocationsEXT {
+        self.inner
+    }
+}
+// FFI mirror of `VkSubpassSampleLocationsEXT`: pairs a subpass index with
+// the sample-locations layout to use after that subpass. Plain data struct
+// (no `s_type`/`p_next`), so `Default` is derived.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubpassSampleLocationsEXT.html>"]
+pub struct SubpassSampleLocationsEXT {
+    pub subpass_index: u32,
+    pub sample_locations_info: SampleLocationsInfoEXT,
+}
+impl SubpassSampleLocationsEXT {
+    pub fn builder<'a>() -> SubpassSampleLocationsEXTBuilder<'a> {
+        SubpassSampleLocationsEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Transparent wrapper: identical layout to the inner struct, with a lifetime
+// marker used by slice-borrowing builder patterns elsewhere in this file.
+#[repr(transparent)]
+pub struct SubpassSampleLocationsEXTBuilder<'a> {
+    inner: SubpassSampleLocationsEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SubpassSampleLocationsEXTBuilder<'a> {
+    type Target = SubpassSampleLocationsEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for SubpassSampleLocationsEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> SubpassSampleLocationsEXTBuilder<'a> {
+    #[inline]
+    pub fn subpass_index(mut self, subpass_index: u32) -> Self {
+        self.inner.subpass_index = subpass_index;
+        self
+    }
+    #[inline]
+    pub fn sample_locations_info(mut self, sample_locations_info: SampleLocationsInfoEXT) -> Self {
+        self.inner.sample_locations_info = sample_locations_info;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> SubpassSampleLocationsEXT {
+        self.inner
+    }
+}
+// FFI mirror of `VkRenderPassSampleLocationsBeginInfoEXT`: carries two
+// (count, pointer) array pairs of per-attachment and per-subpass sample
+// locations for a render-pass begin.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPassSampleLocationsBeginInfoEXT.html>"]
+pub struct RenderPassSampleLocationsBeginInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub attachment_initial_sample_locations_count: u32,
+    pub p_attachment_initial_sample_locations: *const AttachmentSampleLocationsEXT,
+    pub post_subpass_sample_locations_count: u32,
+    pub p_post_subpass_sample_locations: *const SubpassSampleLocationsEXT,
+}
+impl ::std::default::Default for RenderPassSampleLocationsBeginInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        // Empty arrays (count 0 / null pointer) with the correct `s_type` tag.
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            attachment_initial_sample_locations_count: u32::default(),
+            p_attachment_initial_sample_locations: ::std::ptr::null(),
+            post_subpass_sample_locations_count: u32::default(),
+            p_post_subpass_sample_locations: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for RenderPassSampleLocationsBeginInfoEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT;
+}
+impl RenderPassSampleLocationsBeginInfoEXT {
+    pub fn builder<'a>() -> RenderPassSampleLocationsBeginInfoEXTBuilder<'a> {
+        RenderPassSampleLocationsBeginInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct RenderPassSampleLocationsBeginInfoEXTBuilder<'a> {
+    inner: RenderPassSampleLocationsBeginInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid `p_next` extension of `VkRenderPassBeginInfo`.
+unsafe impl ExtendsRenderPassBeginInfo for RenderPassSampleLocationsBeginInfoEXTBuilder<'_> {}
+unsafe impl ExtendsRenderPassBeginInfo for RenderPassSampleLocationsBeginInfoEXT {}
+impl<'a> ::std::ops::Deref for RenderPassSampleLocationsBeginInfoEXTBuilder<'a> {
+    type Target = RenderPassSampleLocationsBeginInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for RenderPassSampleLocationsBeginInfoEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> RenderPassSampleLocationsBeginInfoEXTBuilder<'a> {
+    // Slice setters below record (len, ptr); the builder's `'a` lifetime
+    // keeps the borrowed slices alive while the raw pointers are held.
+    #[inline]
+    pub fn attachment_initial_sample_locations(
+        mut self,
+        attachment_initial_sample_locations: &'a [AttachmentSampleLocationsEXT],
+    ) -> Self {
+        self.inner.attachment_initial_sample_locations_count =
+            attachment_initial_sample_locations.len() as _;
+        self.inner.p_attachment_initial_sample_locations =
+            attachment_initial_sample_locations.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn post_subpass_sample_locations(
+        mut self,
+        post_subpass_sample_locations: &'a [SubpassSampleLocationsEXT],
+    ) -> Self {
+        self.inner.post_subpass_sample_locations_count = post_subpass_sample_locations.len() as _;
+        self.inner.p_post_subpass_sample_locations = post_subpass_sample_locations.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> RenderPassSampleLocationsBeginInfoEXT {
+        self.inner
+    }
+}
+// FFI mirror of `VkPipelineSampleLocationsStateCreateInfoEXT`: enables
+// custom sample locations for a pipeline's multisample state.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineSampleLocationsStateCreateInfoEXT.html>"]
+pub struct PipelineSampleLocationsStateCreateInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    // Vulkan bool: 0 = false, non-zero = true.
+    pub sample_locations_enable: Bool32,
+    pub sample_locations_info: SampleLocationsInfoEXT,
+}
+impl ::std::default::Default for PipelineSampleLocationsStateCreateInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            sample_locations_enable: Bool32::default(),
+            sample_locations_info: SampleLocationsInfoEXT::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PipelineSampleLocationsStateCreateInfoEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT;
+}
+impl PipelineSampleLocationsStateCreateInfoEXT {
+    pub fn builder<'a>() -> PipelineSampleLocationsStateCreateInfoEXTBuilder<'a> {
+        PipelineSampleLocationsStateCreateInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PipelineSampleLocationsStateCreateInfoEXTBuilder<'a> {
+    inner: PipelineSampleLocationsStateCreateInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid `p_next` extension of `VkPipelineMultisampleStateCreateInfo`.
+unsafe impl ExtendsPipelineMultisampleStateCreateInfo
+    for PipelineSampleLocationsStateCreateInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPipelineMultisampleStateCreateInfo
+    for PipelineSampleLocationsStateCreateInfoEXT
+{
+}
+impl<'a> ::std::ops::Deref for PipelineSampleLocationsStateCreateInfoEXTBuilder<'a> {
+    type Target = PipelineSampleLocationsStateCreateInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineSampleLocationsStateCreateInfoEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PipelineSampleLocationsStateCreateInfoEXTBuilder<'a> {
+    // Setter takes a Rust `bool` and converts to the FFI `Bool32` via `Into`.
+    #[inline]
+    pub fn sample_locations_enable(mut self, sample_locations_enable: bool) -> Self {
+        self.inner.sample_locations_enable = sample_locations_enable.into();
+        self
+    }
+    #[inline]
+    pub fn sample_locations_info(mut self, sample_locations_info: SampleLocationsInfoEXT) -> Self {
+        self.inner.sample_locations_info = sample_locations_info;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineSampleLocationsStateCreateInfoEXT {
+        self.inner
+    }
+}
+// FFI mirror of `VkPhysicalDeviceSampleLocationsPropertiesEXT`: a
+// device-properties query struct (note the *mut* `p_next` — it is written
+// by the implementation, not read).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceSampleLocationsPropertiesEXT.html>"]
+pub struct PhysicalDeviceSampleLocationsPropertiesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub sample_location_sample_counts: SampleCountFlags,
+    pub max_sample_location_grid_size: Extent2D,
+    pub sample_location_coordinate_range: [f32; 2],
+    pub sample_location_sub_pixel_bits: u32,
+    pub variable_sample_locations: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceSampleLocationsPropertiesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            sample_location_sample_counts: SampleCountFlags::default(),
+            max_sample_location_grid_size: Extent2D::default(),
+            // SAFETY: all-zero bytes are a valid `[f32; 2]` ([0.0, 0.0]).
+            sample_location_coordinate_range: unsafe { ::std::mem::zeroed() },
+            sample_location_sub_pixel_bits: u32::default(),
+            variable_sample_locations: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceSampleLocationsPropertiesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT;
+}
+impl PhysicalDeviceSampleLocationsPropertiesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'a> {
+        PhysicalDeviceSampleLocationsPropertiesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'a> {
+    inner: PhysicalDeviceSampleLocationsPropertiesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid `p_next` extension of `VkPhysicalDeviceProperties2`.
+unsafe impl ExtendsPhysicalDeviceProperties2
+    for PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSampleLocationsPropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'a> {
+    type Target = PhysicalDeviceSampleLocationsPropertiesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceSampleLocationsPropertiesEXTBuilder<'a> {
+    #[inline]
+    pub fn sample_location_sample_counts(
+        mut self,
+        sample_location_sample_counts: SampleCountFlags,
+    ) -> Self {
+        self.inner.sample_location_sample_counts = sample_location_sample_counts;
+        self
+    }
+    #[inline]
+    pub fn max_sample_location_grid_size(
+        mut self,
+        max_sample_location_grid_size: Extent2D,
+    ) -> Self {
+        self.inner.max_sample_location_grid_size = max_sample_location_grid_size;
+        self
+    }
+    #[inline]
+    pub fn sample_location_coordinate_range(
+        mut self,
+        sample_location_coordinate_range: [f32; 2],
+    ) -> Self {
+        self.inner.sample_location_coordinate_range = sample_location_coordinate_range;
+        self
+    }
+    #[inline]
+    pub fn sample_location_sub_pixel_bits(mut self, sample_location_sub_pixel_bits: u32) -> Self {
+        self.inner.sample_location_sub_pixel_bits = sample_location_sub_pixel_bits;
+        self
+    }
+    #[inline]
+    pub fn variable_sample_locations(mut self, variable_sample_locations: bool) -> Self {
+        self.inner.variable_sample_locations = variable_sample_locations.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceSampleLocationsPropertiesEXT {
+        self.inner
+    }
+}
+// FFI mirror of `VkMultisamplePropertiesEXT`: output struct reporting the
+// maximum sample-location grid size for a sample count (`p_next` is mutable
+// because this struct is filled in by the implementation).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMultisamplePropertiesEXT.html>"]
+pub struct MultisamplePropertiesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub max_sample_location_grid_size: Extent2D,
+}
+impl ::std::default::Default for MultisamplePropertiesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            max_sample_location_grid_size: Extent2D::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for MultisamplePropertiesEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::MULTISAMPLE_PROPERTIES_EXT;
+}
+impl MultisamplePropertiesEXT {
+    pub fn builder<'a>() -> MultisamplePropertiesEXTBuilder<'a> {
+        MultisamplePropertiesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct MultisamplePropertiesEXTBuilder<'a> {
+    inner: MultisamplePropertiesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MultisamplePropertiesEXTBuilder<'a> {
+    type Target = MultisamplePropertiesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for MultisamplePropertiesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> MultisamplePropertiesEXTBuilder<'a> {
+    #[inline]
+    pub fn max_sample_location_grid_size(
+        mut self,
+        max_sample_location_grid_size: Extent2D,
+    ) -> Self {
+        self.inner.max_sample_location_grid_size = max_sample_location_grid_size;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> MultisamplePropertiesEXT {
+        self.inner
+    }
+}
+// FFI mirror of `VkSamplerReductionModeCreateInfo`: selects the reduction
+// mode (e.g. min/max filtering) applied by a sampler.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSamplerReductionModeCreateInfo.html>"]
+pub struct SamplerReductionModeCreateInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub reduction_mode: SamplerReductionMode,
+}
+impl ::std::default::Default for SamplerReductionModeCreateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            reduction_mode: SamplerReductionMode::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for SamplerReductionModeCreateInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_REDUCTION_MODE_CREATE_INFO;
+}
+impl SamplerReductionModeCreateInfo {
+    pub fn builder<'a>() -> SamplerReductionModeCreateInfoBuilder<'a> {
+        SamplerReductionModeCreateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct SamplerReductionModeCreateInfoBuilder<'a> {
+    inner: SamplerReductionModeCreateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid `p_next` extension of `VkSamplerCreateInfo`.
+unsafe impl ExtendsSamplerCreateInfo for SamplerReductionModeCreateInfoBuilder<'_> {}
+unsafe impl ExtendsSamplerCreateInfo for SamplerReductionModeCreateInfo {}
+impl<'a> ::std::ops::Deref for SamplerReductionModeCreateInfoBuilder<'a> {
+    type Target = SamplerReductionModeCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for SamplerReductionModeCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> SamplerReductionModeCreateInfoBuilder<'a> {
+    #[inline]
+    pub fn reduction_mode(mut self, reduction_mode: SamplerReductionMode) -> Self {
+        self.inner.reduction_mode = reduction_mode;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> SamplerReductionModeCreateInfo {
+        self.inner
+    }
+}
+// FFI mirror of `VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT`: a
+// features struct usable both for querying (via PhysicalDeviceFeatures2)
+// and enabling (via DeviceCreateInfo) — see the Extends* impls below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT.html>"]
+pub struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub advanced_blend_coherent_operations: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceBlendOperationAdvancedFeaturesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            advanced_blend_coherent_operations: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceBlendOperationAdvancedFeaturesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT;
+}
+impl PhysicalDeviceBlendOperationAdvancedFeaturesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder<'a> {
+        PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder<'a> {
+    inner: PhysicalDeviceBlendOperationAdvancedFeaturesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// May extend either `VkPhysicalDeviceFeatures2` (query) or
+// `VkDeviceCreateInfo` (enable) via `p_next`.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceBlendOperationAdvancedFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBlendOperationAdvancedFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder<'a> {
+    type Target = PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceBlendOperationAdvancedFeaturesEXTBuilder<'a> {
+    #[inline]
+    pub fn advanced_blend_coherent_operations(
+        mut self,
+        advanced_blend_coherent_operations: bool,
+    ) -> Self {
+        self.inner.advanced_blend_coherent_operations = advanced_blend_coherent_operations.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceBlendOperationAdvancedFeaturesEXT {
+        self.inner
+    }
+}
+// FFI mirror of `VkPhysicalDeviceMultiDrawFeaturesEXT`: feature toggle for
+// VK_EXT_multi_draw; usable in both feature-query and device-create chains.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMultiDrawFeaturesEXT.html>"]
+pub struct PhysicalDeviceMultiDrawFeaturesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub multi_draw: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceMultiDrawFeaturesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            multi_draw: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceMultiDrawFeaturesEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT;
+}
+impl PhysicalDeviceMultiDrawFeaturesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceMultiDrawFeaturesEXTBuilder<'a> {
+        PhysicalDeviceMultiDrawFeaturesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceMultiDrawFeaturesEXTBuilder<'a> {
+    inner: PhysicalDeviceMultiDrawFeaturesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid `p_next` extension of both `VkPhysicalDeviceFeatures2` and
+// `VkDeviceCreateInfo`.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMultiDrawFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMultiDrawFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMultiDrawFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMultiDrawFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceMultiDrawFeaturesEXTBuilder<'a> {
+    type Target = PhysicalDeviceMultiDrawFeaturesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceMultiDrawFeaturesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceMultiDrawFeaturesEXTBuilder<'a> {
+    #[inline]
+    pub fn multi_draw(mut self, multi_draw: bool) -> Self {
+        self.inner.multi_draw = multi_draw.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceMultiDrawFeaturesEXT {
+        self.inner
+    }
+}
+// FFI mirror of `VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT`: a
+// device-properties output struct for VK_EXT_blend_operation_advanced.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT.html>"]
+pub struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub advanced_blend_max_color_attachments: u32,
+    pub advanced_blend_independent_blend: Bool32,
+    pub advanced_blend_non_premultiplied_src_color: Bool32,
+    pub advanced_blend_non_premultiplied_dst_color: Bool32,
+    pub advanced_blend_correlated_overlap: Bool32,
+    pub advanced_blend_all_operations: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceBlendOperationAdvancedPropertiesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            advanced_blend_max_color_attachments: u32::default(),
+            advanced_blend_independent_blend: Bool32::default(),
+            advanced_blend_non_premultiplied_src_color: Bool32::default(),
+            advanced_blend_non_premultiplied_dst_color: Bool32::default(),
+            advanced_blend_correlated_overlap: Bool32::default(),
+            advanced_blend_all_operations: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceBlendOperationAdvancedPropertiesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT;
+}
+impl PhysicalDeviceBlendOperationAdvancedPropertiesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
+        PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
+    inner: PhysicalDeviceBlendOperationAdvancedPropertiesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid `p_next` extension of `VkPhysicalDeviceProperties2`.
+unsafe impl ExtendsPhysicalDeviceProperties2
+    for PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceBlendOperationAdvancedPropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
+    type Target = PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceBlendOperationAdvancedPropertiesEXTBuilder<'a> {
+    // `bool` arguments are converted to the FFI `Bool32` via `Into`.
+    #[inline]
+    pub fn advanced_blend_max_color_attachments(
+        mut self,
+        advanced_blend_max_color_attachments: u32,
+    ) -> Self {
+        self.inner.advanced_blend_max_color_attachments = advanced_blend_max_color_attachments;
+        self
+    }
+    #[inline]
+    pub fn advanced_blend_independent_blend(
+        mut self,
+        advanced_blend_independent_blend: bool,
+    ) -> Self {
+        self.inner.advanced_blend_independent_blend = advanced_blend_independent_blend.into();
+        self
+    }
+    #[inline]
+    pub fn advanced_blend_non_premultiplied_src_color(
+        mut self,
+        advanced_blend_non_premultiplied_src_color: bool,
+    ) -> Self {
+        self.inner.advanced_blend_non_premultiplied_src_color =
+            advanced_blend_non_premultiplied_src_color.into();
+        self
+    }
+    #[inline]
+    pub fn advanced_blend_non_premultiplied_dst_color(
+        mut self,
+        advanced_blend_non_premultiplied_dst_color: bool,
+    ) -> Self {
+        self.inner.advanced_blend_non_premultiplied_dst_color =
+            advanced_blend_non_premultiplied_dst_color.into();
+        self
+    }
+    #[inline]
+    pub fn advanced_blend_correlated_overlap(
+        mut self,
+        advanced_blend_correlated_overlap: bool,
+    ) -> Self {
+        self.inner.advanced_blend_correlated_overlap = advanced_blend_correlated_overlap.into();
+        self
+    }
+    #[inline]
+    pub fn advanced_blend_all_operations(mut self, advanced_blend_all_operations: bool) -> Self {
+        self.inner.advanced_blend_all_operations = advanced_blend_all_operations.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceBlendOperationAdvancedPropertiesEXT {
+        self.inner
+    }
+}
+// FFI mirror of `VkPipelineColorBlendAdvancedStateCreateInfoEXT`: configures
+// advanced blend operations for a pipeline's color-blend state.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineColorBlendAdvancedStateCreateInfoEXT.html>"]
+pub struct PipelineColorBlendAdvancedStateCreateInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub src_premultiplied: Bool32,
+    pub dst_premultiplied: Bool32,
+    pub blend_overlap: BlendOverlapEXT,
+}
+impl ::std::default::Default for PipelineColorBlendAdvancedStateCreateInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            src_premultiplied: Bool32::default(),
+            dst_premultiplied: Bool32::default(),
+            blend_overlap: BlendOverlapEXT::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PipelineColorBlendAdvancedStateCreateInfoEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT;
+}
+impl PipelineColorBlendAdvancedStateCreateInfoEXT {
+    pub fn builder<'a>() -> PipelineColorBlendAdvancedStateCreateInfoEXTBuilder<'a> {
+        PipelineColorBlendAdvancedStateCreateInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PipelineColorBlendAdvancedStateCreateInfoEXTBuilder<'a> {
+    inner: PipelineColorBlendAdvancedStateCreateInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid `p_next` extension of `VkPipelineColorBlendStateCreateInfo`.
+unsafe impl ExtendsPipelineColorBlendStateCreateInfo
+    for PipelineColorBlendAdvancedStateCreateInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPipelineColorBlendStateCreateInfo
+    for PipelineColorBlendAdvancedStateCreateInfoEXT
+{
+}
+impl<'a> ::std::ops::Deref for PipelineColorBlendAdvancedStateCreateInfoEXTBuilder<'a> {
+    type Target = PipelineColorBlendAdvancedStateCreateInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineColorBlendAdvancedStateCreateInfoEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PipelineColorBlendAdvancedStateCreateInfoEXTBuilder<'a> {
+    #[inline]
+    pub fn src_premultiplied(mut self, src_premultiplied: bool) -> Self {
+        self.inner.src_premultiplied = src_premultiplied.into();
+        self
+    }
+    #[inline]
+    pub fn dst_premultiplied(mut self, dst_premultiplied: bool) -> Self {
+        self.inner.dst_premultiplied = dst_premultiplied.into();
+        self
+    }
+    #[inline]
+    pub fn blend_overlap(mut self, blend_overlap: BlendOverlapEXT) -> Self {
+        self.inner.blend_overlap = blend_overlap;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineColorBlendAdvancedStateCreateInfoEXT {
+        self.inner
+    }
+}
+// FFI mirror of `VkPhysicalDeviceInlineUniformBlockFeatures`: feature toggles
+// for inline uniform blocks; usable in both feature-query and device-create
+// `p_next` chains (see Extends* impls). The builder's setter impl continues
+// past this point in the file.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceInlineUniformBlockFeatures.html>"]
+pub struct PhysicalDeviceInlineUniformBlockFeatures {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub inline_uniform_block: Bool32,
+    pub descriptor_binding_inline_uniform_block_update_after_bind: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceInlineUniformBlockFeatures {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            inline_uniform_block: Bool32::default(),
+            descriptor_binding_inline_uniform_block_update_after_bind: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceInlineUniformBlockFeatures {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES;
+}
+impl PhysicalDeviceInlineUniformBlockFeatures {
+    pub fn builder<'a>() -> PhysicalDeviceInlineUniformBlockFeaturesBuilder<'a> {
+        PhysicalDeviceInlineUniformBlockFeaturesBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceInlineUniformBlockFeaturesBuilder<'a> {
+    inner: PhysicalDeviceInlineUniformBlockFeatures,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid `p_next` extension of both `VkPhysicalDeviceFeatures2` and
+// `VkDeviceCreateInfo`.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceInlineUniformBlockFeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceInlineUniformBlockFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceInlineUniformBlockFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceInlineUniformBlockFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceInlineUniformBlockFeaturesBuilder<'a> {
+    type Target = PhysicalDeviceInlineUniformBlockFeatures;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceInlineUniformBlockFeaturesBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceInlineUniformBlockFeaturesBuilder<'a> {
+ #[inline]
+ pub fn inline_uniform_block(mut self, inline_uniform_block: bool) -> Self {
+ self.inner.inline_uniform_block = inline_uniform_block.into();
+ self
+ }
+ #[inline]
+ pub fn descriptor_binding_inline_uniform_block_update_after_bind(
+ mut self,
+ descriptor_binding_inline_uniform_block_update_after_bind: bool,
+ ) -> Self {
+ self.inner
+ .descriptor_binding_inline_uniform_block_update_after_bind =
+ descriptor_binding_inline_uniform_block_update_after_bind.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceInlineUniformBlockFeatures {
+ self.inner
+ }
+}
// Device-limit struct for inline uniform blocks. Only extends
// VkPhysicalDeviceProperties2 (see impls below), i.e. it is read back from the
// driver; the setters further down are generated uniformly but applications
// normally only read these fields.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceInlineUniformBlockProperties.html>"]
pub struct PhysicalDeviceInlineUniformBlockProperties {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub max_inline_uniform_block_size: u32,
    pub max_per_stage_descriptor_inline_uniform_blocks: u32,
    pub max_per_stage_descriptor_update_after_bind_inline_uniform_blocks: u32,
    pub max_descriptor_set_inline_uniform_blocks: u32,
    pub max_descriptor_set_update_after_bind_inline_uniform_blocks: u32,
}
// Default: tagged sType, null chain, zeroed limits.
impl ::std::default::Default for PhysicalDeviceInlineUniformBlockProperties {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            max_inline_uniform_block_size: u32::default(),
            max_per_stage_descriptor_inline_uniform_blocks: u32::default(),
            max_per_stage_descriptor_update_after_bind_inline_uniform_blocks: u32::default(),
            max_descriptor_set_inline_uniform_blocks: u32::default(),
            max_descriptor_set_update_after_bind_inline_uniform_blocks: u32::default(),
        }
    }
}
// SAFETY (generated): registry-assigned sType constant.
unsafe impl TaggedStructure for PhysicalDeviceInlineUniformBlockProperties {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES;
}
impl PhysicalDeviceInlineUniformBlockProperties {
    pub fn builder<'a>() -> PhysicalDeviceInlineUniformBlockPropertiesBuilder<'a> {
        PhysicalDeviceInlineUniformBlockPropertiesBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): &Builder is layout-identical to &struct (used with Deref below).
#[repr(transparent)]
pub struct PhysicalDeviceInlineUniformBlockPropertiesBuilder<'a> {
    inner: PhysicalDeviceInlineUniformBlockProperties,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY (generated): valid pNext extension of VkPhysicalDeviceProperties2.
unsafe impl ExtendsPhysicalDeviceProperties2
    for PhysicalDeviceInlineUniformBlockPropertiesBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceInlineUniformBlockProperties {}
impl<'a> ::std::ops::Deref for PhysicalDeviceInlineUniformBlockPropertiesBuilder<'a> {
    type Target = PhysicalDeviceInlineUniformBlockProperties;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceInlineUniformBlockPropertiesBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceInlineUniformBlockPropertiesBuilder<'a> {
    #[inline]
    pub fn max_inline_uniform_block_size(mut self, max_inline_uniform_block_size: u32) -> Self {
        self.inner.max_inline_uniform_block_size = max_inline_uniform_block_size;
        self
    }
    #[inline]
    pub fn max_per_stage_descriptor_inline_uniform_blocks(
        mut self,
        max_per_stage_descriptor_inline_uniform_blocks: u32,
    ) -> Self {
        self.inner.max_per_stage_descriptor_inline_uniform_blocks =
            max_per_stage_descriptor_inline_uniform_blocks;
        self
    }
    #[inline]
    pub fn max_per_stage_descriptor_update_after_bind_inline_uniform_blocks(
        mut self,
        max_per_stage_descriptor_update_after_bind_inline_uniform_blocks: u32,
    ) -> Self {
        self.inner
            .max_per_stage_descriptor_update_after_bind_inline_uniform_blocks =
            max_per_stage_descriptor_update_after_bind_inline_uniform_blocks;
        self
    }
    #[inline]
    pub fn max_descriptor_set_inline_uniform_blocks(
        mut self,
        max_descriptor_set_inline_uniform_blocks: u32,
    ) -> Self {
        self.inner.max_descriptor_set_inline_uniform_blocks =
            max_descriptor_set_inline_uniform_blocks;
        self
    }
    #[inline]
    pub fn max_descriptor_set_update_after_bind_inline_uniform_blocks(
        mut self,
        max_descriptor_set_update_after_bind_inline_uniform_blocks: u32,
    ) -> Self {
        self.inner
            .max_descriptor_set_update_after_bind_inline_uniform_blocks =
            max_descriptor_set_update_after_bind_inline_uniform_blocks;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceInlineUniformBlockProperties {
        self.inner
    }
}
// Payload struct chained onto VkWriteDescriptorSet to write raw bytes into an
// inline uniform block descriptor. `data_size`/`p_data` are the usual Vulkan
// count+pointer pair; `p_next` is `*const` (input-only struct).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkWriteDescriptorSetInlineUniformBlock.html>"]
pub struct WriteDescriptorSetInlineUniformBlock {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub data_size: u32,
    pub p_data: *const c_void,
}
// Default: tagged sType, null chain, empty payload.
impl ::std::default::Default for WriteDescriptorSetInlineUniformBlock {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            data_size: u32::default(),
            p_data: ::std::ptr::null(),
        }
    }
}
// SAFETY (generated): registry-assigned sType constant.
unsafe impl TaggedStructure for WriteDescriptorSetInlineUniformBlock {
    const STRUCTURE_TYPE: StructureType = StructureType::WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK;
}
impl WriteDescriptorSetInlineUniformBlock {
    pub fn builder<'a>() -> WriteDescriptorSetInlineUniformBlockBuilder<'a> {
        WriteDescriptorSetInlineUniformBlockBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// `'a` ties the builder to the slice borrowed by `data()` below, so the raw
// pointer it stores cannot outlive its backing storage while the builder is used.
#[repr(transparent)]
pub struct WriteDescriptorSetInlineUniformBlockBuilder<'a> {
    inner: WriteDescriptorSetInlineUniformBlock,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY (generated): valid pNext extension of VkWriteDescriptorSet.
unsafe impl ExtendsWriteDescriptorSet for WriteDescriptorSetInlineUniformBlockBuilder<'_> {}
unsafe impl ExtendsWriteDescriptorSet for WriteDescriptorSetInlineUniformBlock {}
impl<'a> ::std::ops::Deref for WriteDescriptorSetInlineUniformBlockBuilder<'a> {
    type Target = WriteDescriptorSetInlineUniformBlock;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for WriteDescriptorSetInlineUniformBlockBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> WriteDescriptorSetInlineUniformBlockBuilder<'a> {
    // Captures both the byte length and the pointer of the borrowed slice in one
    // call, keeping `data_size` and `p_data` consistent by construction.
    #[inline]
    pub fn data(mut self, data: &'a [u8]) -> Self {
        self.inner.data_size = data.len() as _;
        self.inner.p_data = data.as_ptr().cast();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> WriteDescriptorSetInlineUniformBlock {
        self.inner
    }
}
// Chained onto VkDescriptorPoolCreateInfo to reserve capacity for inline uniform
// block bindings in the pool. Input-only struct (`p_next` is `*const`).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorPoolInlineUniformBlockCreateInfo.html>"]
pub struct DescriptorPoolInlineUniformBlockCreateInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub max_inline_uniform_block_bindings: u32,
}
// Default: tagged sType, null chain, zero bindings.
impl ::std::default::Default for DescriptorPoolInlineUniformBlockCreateInfo {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            max_inline_uniform_block_bindings: u32::default(),
        }
    }
}
// SAFETY (generated): registry-assigned sType constant.
unsafe impl TaggedStructure for DescriptorPoolInlineUniformBlockCreateInfo {
    const STRUCTURE_TYPE: StructureType =
        StructureType::DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO;
}
impl DescriptorPoolInlineUniformBlockCreateInfo {
    pub fn builder<'a>() -> DescriptorPoolInlineUniformBlockCreateInfoBuilder<'a> {
        DescriptorPoolInlineUniformBlockCreateInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): &Builder is layout-identical to &struct (used with Deref below).
#[repr(transparent)]
pub struct DescriptorPoolInlineUniformBlockCreateInfoBuilder<'a> {
    inner: DescriptorPoolInlineUniformBlockCreateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY (generated): valid pNext extension of VkDescriptorPoolCreateInfo.
unsafe impl ExtendsDescriptorPoolCreateInfo
    for DescriptorPoolInlineUniformBlockCreateInfoBuilder<'_>
{
}
unsafe impl ExtendsDescriptorPoolCreateInfo for DescriptorPoolInlineUniformBlockCreateInfo {}
impl<'a> ::std::ops::Deref for DescriptorPoolInlineUniformBlockCreateInfoBuilder<'a> {
    type Target = DescriptorPoolInlineUniformBlockCreateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for DescriptorPoolInlineUniformBlockCreateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> DescriptorPoolInlineUniformBlockCreateInfoBuilder<'a> {
    #[inline]
    pub fn max_inline_uniform_block_bindings(
        mut self,
        max_inline_uniform_block_bindings: u32,
    ) -> Self {
        self.inner.max_inline_uniform_block_bindings = max_inline_uniform_block_bindings;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> DescriptorPoolInlineUniformBlockCreateInfo {
        self.inner
    }
}
// NV extension struct chained onto VkPipelineMultisampleStateCreateInfo to
// configure coverage modulation. Holds an optional borrowed table via the usual
// count+pointer pair; input-only (`p_next` is `*const`).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCoverageModulationStateCreateInfoNV.html>"]
pub struct PipelineCoverageModulationStateCreateInfoNV {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: PipelineCoverageModulationStateCreateFlagsNV,
    pub coverage_modulation_mode: CoverageModulationModeNV,
    pub coverage_modulation_table_enable: Bool32,
    pub coverage_modulation_table_count: u32,
    pub p_coverage_modulation_table: *const f32,
}
// Default: tagged sType, null chain, zeroed/default fields, no table.
impl ::std::default::Default for PipelineCoverageModulationStateCreateInfoNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: PipelineCoverageModulationStateCreateFlagsNV::default(),
            coverage_modulation_mode: CoverageModulationModeNV::default(),
            coverage_modulation_table_enable: Bool32::default(),
            coverage_modulation_table_count: u32::default(),
            p_coverage_modulation_table: ::std::ptr::null(),
        }
    }
}
// SAFETY (generated): registry-assigned sType constant.
unsafe impl TaggedStructure for PipelineCoverageModulationStateCreateInfoNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV;
}
impl PipelineCoverageModulationStateCreateInfoNV {
    pub fn builder<'a>() -> PipelineCoverageModulationStateCreateInfoNVBuilder<'a> {
        PipelineCoverageModulationStateCreateInfoNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// `'a` ties the builder to the slice borrowed by `coverage_modulation_table()`.
#[repr(transparent)]
pub struct PipelineCoverageModulationStateCreateInfoNVBuilder<'a> {
    inner: PipelineCoverageModulationStateCreateInfoNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY (generated): valid pNext extension of VkPipelineMultisampleStateCreateInfo.
unsafe impl ExtendsPipelineMultisampleStateCreateInfo
    for PipelineCoverageModulationStateCreateInfoNVBuilder<'_>
{
}
unsafe impl ExtendsPipelineMultisampleStateCreateInfo
    for PipelineCoverageModulationStateCreateInfoNV
{
}
impl<'a> ::std::ops::Deref for PipelineCoverageModulationStateCreateInfoNVBuilder<'a> {
    type Target = PipelineCoverageModulationStateCreateInfoNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PipelineCoverageModulationStateCreateInfoNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PipelineCoverageModulationStateCreateInfoNVBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: PipelineCoverageModulationStateCreateFlagsNV) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn coverage_modulation_mode(
        mut self,
        coverage_modulation_mode: CoverageModulationModeNV,
    ) -> Self {
        self.inner.coverage_modulation_mode = coverage_modulation_mode;
        self
    }
    // bool converted to FFI Bool32 via `.into()`.
    #[inline]
    pub fn coverage_modulation_table_enable(
        mut self,
        coverage_modulation_table_enable: bool,
    ) -> Self {
        self.inner.coverage_modulation_table_enable = coverage_modulation_table_enable.into();
        self
    }
    // Sets count and pointer together from one borrowed slice, keeping them consistent.
    #[inline]
    pub fn coverage_modulation_table(mut self, coverage_modulation_table: &'a [f32]) -> Self {
        self.inner.coverage_modulation_table_count = coverage_modulation_table.len() as _;
        self.inner.p_coverage_modulation_table = coverage_modulation_table.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PipelineCoverageModulationStateCreateInfoNV {
        self.inner
    }
}
// Lists the formats that views of an image may use. Chains onto image/swapchain
// creation and format queries (see the three Extends impls below). Input-only.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageFormatListCreateInfo.html>"]
pub struct ImageFormatListCreateInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub view_format_count: u32,
    pub p_view_formats: *const Format,
}
// Default: tagged sType, null chain, empty format list.
impl ::std::default::Default for ImageFormatListCreateInfo {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            view_format_count: u32::default(),
            p_view_formats: ::std::ptr::null(),
        }
    }
}
// SAFETY (generated): registry-assigned sType constant.
unsafe impl TaggedStructure for ImageFormatListCreateInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_FORMAT_LIST_CREATE_INFO;
}
impl ImageFormatListCreateInfo {
    pub fn builder<'a>() -> ImageFormatListCreateInfoBuilder<'a> {
        ImageFormatListCreateInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// `'a` ties the builder to the slice borrowed by `view_formats()`.
#[repr(transparent)]
pub struct ImageFormatListCreateInfoBuilder<'a> {
    inner: ImageFormatListCreateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY (generated): valid pNext extension of VkImageCreateInfo,
// VkSwapchainCreateInfoKHR and VkPhysicalDeviceImageFormatInfo2.
unsafe impl ExtendsImageCreateInfo for ImageFormatListCreateInfoBuilder<'_> {}
unsafe impl ExtendsImageCreateInfo for ImageFormatListCreateInfo {}
unsafe impl ExtendsSwapchainCreateInfoKHR for ImageFormatListCreateInfoBuilder<'_> {}
unsafe impl ExtendsSwapchainCreateInfoKHR for ImageFormatListCreateInfo {}
unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for ImageFormatListCreateInfoBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for ImageFormatListCreateInfo {}
impl<'a> ::std::ops::Deref for ImageFormatListCreateInfoBuilder<'a> {
    type Target = ImageFormatListCreateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ImageFormatListCreateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> ImageFormatListCreateInfoBuilder<'a> {
    // Sets count and pointer together from one borrowed slice, keeping them consistent.
    #[inline]
    pub fn view_formats(mut self, view_formats: &'a [Format]) -> Self {
        self.inner.view_format_count = view_formats.len() as _;
        self.inner.p_view_formats = view_formats.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ImageFormatListCreateInfo {
        self.inner
    }
}
// Create-info for a VkValidationCacheEXT, optionally seeded with previously
// saved cache bytes. Note: a root create-info, not a pNext extension — hence no
// Extends* impls on its builder.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkValidationCacheCreateInfoEXT.html>"]
pub struct ValidationCacheCreateInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: ValidationCacheCreateFlagsEXT,
    pub initial_data_size: usize,
    pub p_initial_data: *const c_void,
}
// Default: tagged sType, null chain, no initial data.
impl ::std::default::Default for ValidationCacheCreateInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: ValidationCacheCreateFlagsEXT::default(),
            initial_data_size: usize::default(),
            p_initial_data: ::std::ptr::null(),
        }
    }
}
// SAFETY (generated): registry-assigned sType constant.
unsafe impl TaggedStructure for ValidationCacheCreateInfoEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::VALIDATION_CACHE_CREATE_INFO_EXT;
}
impl ValidationCacheCreateInfoEXT {
    pub fn builder<'a>() -> ValidationCacheCreateInfoEXTBuilder<'a> {
        ValidationCacheCreateInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// `'a` ties the builder to the byte slice borrowed by `initial_data()`.
#[repr(transparent)]
pub struct ValidationCacheCreateInfoEXTBuilder<'a> {
    inner: ValidationCacheCreateInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ValidationCacheCreateInfoEXTBuilder<'a> {
    type Target = ValidationCacheCreateInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ValidationCacheCreateInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> ValidationCacheCreateInfoEXTBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: ValidationCacheCreateFlagsEXT) -> Self {
        self.inner.flags = flags;
        self
    }
    // Sets size (in bytes; the slice element is u8) and pointer together from one
    // borrowed slice, keeping them consistent.
    #[inline]
    pub fn initial_data(mut self, initial_data: &'a [u8]) -> Self {
        self.inner.initial_data_size = initial_data.len();
        self.inner.p_initial_data = initial_data.as_ptr().cast();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ValidationCacheCreateInfoEXT {
        self.inner
    }
}
// Associates an existing VkValidationCacheEXT handle with shader-module /
// pipeline-shader-stage creation (see the two Extends impls below). Input-only.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkShaderModuleValidationCacheCreateInfoEXT.html>"]
pub struct ShaderModuleValidationCacheCreateInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub validation_cache: ValidationCacheEXT,
}
// Default: tagged sType, null chain, null cache handle.
impl ::std::default::Default for ShaderModuleValidationCacheCreateInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            validation_cache: ValidationCacheEXT::default(),
        }
    }
}
// SAFETY (generated): registry-assigned sType constant.
unsafe impl TaggedStructure for ShaderModuleValidationCacheCreateInfoEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT;
}
impl ShaderModuleValidationCacheCreateInfoEXT {
    pub fn builder<'a>() -> ShaderModuleValidationCacheCreateInfoEXTBuilder<'a> {
        ShaderModuleValidationCacheCreateInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): &Builder is layout-identical to &struct (used with Deref below).
#[repr(transparent)]
pub struct ShaderModuleValidationCacheCreateInfoEXTBuilder<'a> {
    inner: ShaderModuleValidationCacheCreateInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY (generated): valid pNext extension of VkShaderModuleCreateInfo and
// VkPipelineShaderStageCreateInfo.
unsafe impl ExtendsShaderModuleCreateInfo for ShaderModuleValidationCacheCreateInfoEXTBuilder<'_> {}
unsafe impl ExtendsShaderModuleCreateInfo for ShaderModuleValidationCacheCreateInfoEXT {}
unsafe impl ExtendsPipelineShaderStageCreateInfo
    for ShaderModuleValidationCacheCreateInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsPipelineShaderStageCreateInfo for ShaderModuleValidationCacheCreateInfoEXT {}
impl<'a> ::std::ops::Deref for ShaderModuleValidationCacheCreateInfoEXTBuilder<'a> {
    type Target = ShaderModuleValidationCacheCreateInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ShaderModuleValidationCacheCreateInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> ShaderModuleValidationCacheCreateInfoEXTBuilder<'a> {
    #[inline]
    pub fn validation_cache(mut self, validation_cache: ValidationCacheEXT) -> Self {
        self.inner.validation_cache = validation_cache;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ShaderModuleValidationCacheCreateInfoEXT {
        self.inner
    }
}
// Maintenance3 device limits, read back through VkPhysicalDeviceProperties2's
// chain (`p_next` is `*mut` so the driver can walk/fill the chain).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMaintenance3Properties.html>"]
pub struct PhysicalDeviceMaintenance3Properties {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub max_per_set_descriptors: u32,
    pub max_memory_allocation_size: DeviceSize,
}
// Default: tagged sType, null chain, zeroed limits.
impl ::std::default::Default for PhysicalDeviceMaintenance3Properties {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            max_per_set_descriptors: u32::default(),
            max_memory_allocation_size: DeviceSize::default(),
        }
    }
}
// SAFETY (generated): registry-assigned sType constant.
unsafe impl TaggedStructure for PhysicalDeviceMaintenance3Properties {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES;
}
impl PhysicalDeviceMaintenance3Properties {
    pub fn builder<'a>() -> PhysicalDeviceMaintenance3PropertiesBuilder<'a> {
        PhysicalDeviceMaintenance3PropertiesBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): &Builder is layout-identical to &struct (used with Deref below).
#[repr(transparent)]
pub struct PhysicalDeviceMaintenance3PropertiesBuilder<'a> {
    inner: PhysicalDeviceMaintenance3Properties,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY (generated): valid pNext extension of VkPhysicalDeviceProperties2.
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMaintenance3PropertiesBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMaintenance3Properties {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMaintenance3PropertiesBuilder<'a> {
    type Target = PhysicalDeviceMaintenance3Properties;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMaintenance3PropertiesBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceMaintenance3PropertiesBuilder<'a> {
    #[inline]
    pub fn max_per_set_descriptors(mut self, max_per_set_descriptors: u32) -> Self {
        self.inner.max_per_set_descriptors = max_per_set_descriptors;
        self
    }
    #[inline]
    pub fn max_memory_allocation_size(mut self, max_memory_allocation_size: DeviceSize) -> Self {
        self.inner.max_memory_allocation_size = max_memory_allocation_size;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceMaintenance3Properties {
        self.inner
    }
}
// Maintenance4 feature toggle: queried via VkPhysicalDeviceFeatures2 and enabled
// via VkDeviceCreateInfo (see the Extends impls below).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMaintenance4Features.html>"]
pub struct PhysicalDeviceMaintenance4Features {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub maintenance4: Bool32,
}
// Default: tagged sType, null chain, feature off.
impl ::std::default::Default for PhysicalDeviceMaintenance4Features {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            maintenance4: Bool32::default(),
        }
    }
}
// SAFETY (generated): registry-assigned sType constant.
unsafe impl TaggedStructure for PhysicalDeviceMaintenance4Features {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES;
}
impl PhysicalDeviceMaintenance4Features {
    pub fn builder<'a>() -> PhysicalDeviceMaintenance4FeaturesBuilder<'a> {
        PhysicalDeviceMaintenance4FeaturesBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): &Builder is layout-identical to &struct (used with Deref below).
#[repr(transparent)]
pub struct PhysicalDeviceMaintenance4FeaturesBuilder<'a> {
    inner: PhysicalDeviceMaintenance4Features,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY (generated): valid pNext extension of VkPhysicalDeviceFeatures2 and
// VkDeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMaintenance4FeaturesBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMaintenance4Features {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMaintenance4FeaturesBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMaintenance4Features {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMaintenance4FeaturesBuilder<'a> {
    type Target = PhysicalDeviceMaintenance4Features;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMaintenance4FeaturesBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceMaintenance4FeaturesBuilder<'a> {
    // bool converted to FFI Bool32 via `.into()`.
    #[inline]
    pub fn maintenance4(mut self, maintenance4: bool) -> Self {
        self.inner.maintenance4 = maintenance4.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceMaintenance4Features {
        self.inner
    }
}
// Maintenance4 device limit (max_buffer_size), read back through
// VkPhysicalDeviceProperties2's chain.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMaintenance4Properties.html>"]
pub struct PhysicalDeviceMaintenance4Properties {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub max_buffer_size: DeviceSize,
}
// Default: tagged sType, null chain, zeroed limit.
impl ::std::default::Default for PhysicalDeviceMaintenance4Properties {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            max_buffer_size: DeviceSize::default(),
        }
    }
}
// SAFETY (generated): registry-assigned sType constant.
unsafe impl TaggedStructure for PhysicalDeviceMaintenance4Properties {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES;
}
impl PhysicalDeviceMaintenance4Properties {
    pub fn builder<'a>() -> PhysicalDeviceMaintenance4PropertiesBuilder<'a> {
        PhysicalDeviceMaintenance4PropertiesBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): &Builder is layout-identical to &struct (used with Deref below).
#[repr(transparent)]
pub struct PhysicalDeviceMaintenance4PropertiesBuilder<'a> {
    inner: PhysicalDeviceMaintenance4Properties,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY (generated): valid pNext extension of VkPhysicalDeviceProperties2.
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMaintenance4PropertiesBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMaintenance4Properties {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMaintenance4PropertiesBuilder<'a> {
    type Target = PhysicalDeviceMaintenance4Properties;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMaintenance4PropertiesBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceMaintenance4PropertiesBuilder<'a> {
    #[inline]
    pub fn max_buffer_size(mut self, max_buffer_size: DeviceSize) -> Self {
        self.inner.max_buffer_size = max_buffer_size;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceMaintenance4Properties {
        self.inner
    }
}
// Output struct for vkGetDescriptorSetLayoutSupport-style queries. This is a
// chain ROOT, not an extension: it declares its own Extends* trait below and its
// builder gets a `push_next` for attaching extension structs to it.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorSetLayoutSupport.html>"]
pub struct DescriptorSetLayoutSupport {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub supported: Bool32,
}
// Default: tagged sType, null chain, `supported` false.
impl ::std::default::Default for DescriptorSetLayoutSupport {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            supported: Bool32::default(),
        }
    }
}
// SAFETY (generated): registry-assigned sType constant.
unsafe impl TaggedStructure for DescriptorSetLayoutSupport {
    const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_SET_LAYOUT_SUPPORT;
}
impl DescriptorSetLayoutSupport {
    pub fn builder<'a>() -> DescriptorSetLayoutSupportBuilder<'a> {
        DescriptorSetLayoutSupportBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// `'a` ties the builder to any extension structs pushed via `push_next`.
#[repr(transparent)]
pub struct DescriptorSetLayoutSupportBuilder<'a> {
    inner: DescriptorSetLayoutSupport,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait implemented (elsewhere) by structs the registry allows in this
// struct's pNext chain; `unsafe` because an impl asserts chain-compatibility.
pub unsafe trait ExtendsDescriptorSetLayoutSupport {}
impl<'a> ::std::ops::Deref for DescriptorSetLayoutSupportBuilder<'a> {
    type Target = DescriptorSetLayoutSupport;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for DescriptorSetLayoutSupportBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> DescriptorSetLayoutSupportBuilder<'a> {
    // bool converted to FFI Bool32 via `.into()`.
    #[inline]
    pub fn supported(mut self, supported: bool) -> Self {
        self.inner.supported = supported.into();
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsDescriptorSetLayoutSupport>(mut self, next: &'a mut T) -> Self {
        unsafe {
            // `next` may itself head a chain: walk to its tail via ptr_chain_iter,
            // splice the current chain after it, then make `next` the new head.
            let next_ptr = <*mut T>::cast(next);
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> DescriptorSetLayoutSupport {
        self.inner
    }
}
+// PhysicalDeviceShaderDrawParametersFeatures: FFI mirror of the Vulkan struct linked in
+// #[doc] below. The Extends* impls let it be chained onto PhysicalDeviceFeatures2 (feature
+// query) or DeviceCreateInfo (feature enable) via push_next.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderDrawParametersFeatures.html>"]
+pub struct PhysicalDeviceShaderDrawParametersFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_draw_parameters: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceShaderDrawParametersFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shader_draw_parameters: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceShaderDrawParametersFeatures {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES;
+}
+impl PhysicalDeviceShaderDrawParametersFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceShaderDrawParametersFeaturesBuilder<'a> {
+ PhysicalDeviceShaderDrawParametersFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; Deref below lets &Builder stand in for the inner struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderDrawParametersFeaturesBuilder<'a> {
+ inner: PhysicalDeviceShaderDrawParametersFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceShaderDrawParametersFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderDrawParametersFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderDrawParametersFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderDrawParametersFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderDrawParametersFeaturesBuilder<'a> {
+ type Target = PhysicalDeviceShaderDrawParametersFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderDrawParametersFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceShaderDrawParametersFeaturesBuilder<'a> {
+ #[inline]
+ pub fn shader_draw_parameters(mut self, shader_draw_parameters: bool) -> Self {
+ // bool -> Bool32 conversion; Vulkan booleans are 32-bit.
+ self.inner.shader_draw_parameters = shader_draw_parameters.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceShaderDrawParametersFeatures {
+ self.inner
+ }
+}
+// PhysicalDeviceShaderFloat16Int8Features: FFI mirror of the Vulkan struct linked in
+// #[doc] below, carrying two feature booleans (shader_float16, shader_int8). Chainable
+// onto PhysicalDeviceFeatures2 and DeviceCreateInfo per the Extends* impls.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderFloat16Int8Features.html>"]
+pub struct PhysicalDeviceShaderFloat16Int8Features {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_float16: Bool32,
+ pub shader_int8: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceShaderFloat16Int8Features {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shader_float16: Bool32::default(),
+ shader_int8: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceShaderFloat16Int8Features {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES;
+}
+impl PhysicalDeviceShaderFloat16Int8Features {
+ pub fn builder<'a>() -> PhysicalDeviceShaderFloat16Int8FeaturesBuilder<'a> {
+ PhysicalDeviceShaderFloat16Int8FeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; Deref below lets &Builder stand in for the inner struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderFloat16Int8FeaturesBuilder<'a> {
+ inner: PhysicalDeviceShaderFloat16Int8Features,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderFloat16Int8FeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderFloat16Int8Features {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderFloat16Int8FeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderFloat16Int8Features {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderFloat16Int8FeaturesBuilder<'a> {
+ type Target = PhysicalDeviceShaderFloat16Int8Features;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderFloat16Int8FeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceShaderFloat16Int8FeaturesBuilder<'a> {
+ #[inline]
+ pub fn shader_float16(mut self, shader_float16: bool) -> Self {
+ self.inner.shader_float16 = shader_float16.into();
+ self
+ }
+ #[inline]
+ pub fn shader_int8(mut self, shader_int8: bool) -> Self {
+ self.inner.shader_int8 = shader_int8.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceShaderFloat16Int8Features {
+ self.inner
+ }
+}
+// PhysicalDeviceFloatControlsProperties: FFI mirror of the Vulkan struct linked in #[doc]
+// below — a large block of per-width (float16/32/64) float-control limits. Its only
+// Extends* impls target PhysicalDeviceProperties2, i.e. it chains onto property queries.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFloatControlsProperties.html>"]
+pub struct PhysicalDeviceFloatControlsProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub denorm_behavior_independence: ShaderFloatControlsIndependence,
+ pub rounding_mode_independence: ShaderFloatControlsIndependence,
+ pub shader_signed_zero_inf_nan_preserve_float16: Bool32,
+ pub shader_signed_zero_inf_nan_preserve_float32: Bool32,
+ pub shader_signed_zero_inf_nan_preserve_float64: Bool32,
+ pub shader_denorm_preserve_float16: Bool32,
+ pub shader_denorm_preserve_float32: Bool32,
+ pub shader_denorm_preserve_float64: Bool32,
+ pub shader_denorm_flush_to_zero_float16: Bool32,
+ pub shader_denorm_flush_to_zero_float32: Bool32,
+ pub shader_denorm_flush_to_zero_float64: Bool32,
+ pub shader_rounding_mode_rte_float16: Bool32,
+ pub shader_rounding_mode_rte_float32: Bool32,
+ pub shader_rounding_mode_rte_float64: Bool32,
+ pub shader_rounding_mode_rtz_float16: Bool32,
+ pub shader_rounding_mode_rtz_float32: Bool32,
+ pub shader_rounding_mode_rtz_float64: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceFloatControlsProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ denorm_behavior_independence: ShaderFloatControlsIndependence::default(),
+ rounding_mode_independence: ShaderFloatControlsIndependence::default(),
+ shader_signed_zero_inf_nan_preserve_float16: Bool32::default(),
+ shader_signed_zero_inf_nan_preserve_float32: Bool32::default(),
+ shader_signed_zero_inf_nan_preserve_float64: Bool32::default(),
+ shader_denorm_preserve_float16: Bool32::default(),
+ shader_denorm_preserve_float32: Bool32::default(),
+ shader_denorm_preserve_float64: Bool32::default(),
+ shader_denorm_flush_to_zero_float16: Bool32::default(),
+ shader_denorm_flush_to_zero_float32: Bool32::default(),
+ shader_denorm_flush_to_zero_float64: Bool32::default(),
+ shader_rounding_mode_rte_float16: Bool32::default(),
+ shader_rounding_mode_rte_float32: Bool32::default(),
+ shader_rounding_mode_rte_float64: Bool32::default(),
+ shader_rounding_mode_rtz_float16: Bool32::default(),
+ shader_rounding_mode_rtz_float32: Bool32::default(),
+ shader_rounding_mode_rtz_float64: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceFloatControlsProperties {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES;
+}
+impl PhysicalDeviceFloatControlsProperties {
+ pub fn builder<'a>() -> PhysicalDeviceFloatControlsPropertiesBuilder<'a> {
+ PhysicalDeviceFloatControlsPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; Deref below lets &Builder stand in for the inner struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceFloatControlsPropertiesBuilder<'a> {
+ inner: PhysicalDeviceFloatControlsProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceFloatControlsPropertiesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceFloatControlsProperties {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceFloatControlsPropertiesBuilder<'a> {
+ type Target = PhysicalDeviceFloatControlsProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceFloatControlsPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// One chainable setter per field; bool parameters are widened to Bool32 via .into().
+impl<'a> PhysicalDeviceFloatControlsPropertiesBuilder<'a> {
+ #[inline]
+ pub fn denorm_behavior_independence(
+ mut self,
+ denorm_behavior_independence: ShaderFloatControlsIndependence,
+ ) -> Self {
+ self.inner.denorm_behavior_independence = denorm_behavior_independence;
+ self
+ }
+ #[inline]
+ pub fn rounding_mode_independence(
+ mut self,
+ rounding_mode_independence: ShaderFloatControlsIndependence,
+ ) -> Self {
+ self.inner.rounding_mode_independence = rounding_mode_independence;
+ self
+ }
+ #[inline]
+ pub fn shader_signed_zero_inf_nan_preserve_float16(
+ mut self,
+ shader_signed_zero_inf_nan_preserve_float16: bool,
+ ) -> Self {
+ self.inner.shader_signed_zero_inf_nan_preserve_float16 =
+ shader_signed_zero_inf_nan_preserve_float16.into();
+ self
+ }
+ #[inline]
+ pub fn shader_signed_zero_inf_nan_preserve_float32(
+ mut self,
+ shader_signed_zero_inf_nan_preserve_float32: bool,
+ ) -> Self {
+ self.inner.shader_signed_zero_inf_nan_preserve_float32 =
+ shader_signed_zero_inf_nan_preserve_float32.into();
+ self
+ }
+ #[inline]
+ pub fn shader_signed_zero_inf_nan_preserve_float64(
+ mut self,
+ shader_signed_zero_inf_nan_preserve_float64: bool,
+ ) -> Self {
+ self.inner.shader_signed_zero_inf_nan_preserve_float64 =
+ shader_signed_zero_inf_nan_preserve_float64.into();
+ self
+ }
+ #[inline]
+ pub fn shader_denorm_preserve_float16(mut self, shader_denorm_preserve_float16: bool) -> Self {
+ self.inner.shader_denorm_preserve_float16 = shader_denorm_preserve_float16.into();
+ self
+ }
+ #[inline]
+ pub fn shader_denorm_preserve_float32(mut self, shader_denorm_preserve_float32: bool) -> Self {
+ self.inner.shader_denorm_preserve_float32 = shader_denorm_preserve_float32.into();
+ self
+ }
+ #[inline]
+ pub fn shader_denorm_preserve_float64(mut self, shader_denorm_preserve_float64: bool) -> Self {
+ self.inner.shader_denorm_preserve_float64 = shader_denorm_preserve_float64.into();
+ self
+ }
+ #[inline]
+ pub fn shader_denorm_flush_to_zero_float16(
+ mut self,
+ shader_denorm_flush_to_zero_float16: bool,
+ ) -> Self {
+ self.inner.shader_denorm_flush_to_zero_float16 = shader_denorm_flush_to_zero_float16.into();
+ self
+ }
+ #[inline]
+ pub fn shader_denorm_flush_to_zero_float32(
+ mut self,
+ shader_denorm_flush_to_zero_float32: bool,
+ ) -> Self {
+ self.inner.shader_denorm_flush_to_zero_float32 = shader_denorm_flush_to_zero_float32.into();
+ self
+ }
+ #[inline]
+ pub fn shader_denorm_flush_to_zero_float64(
+ mut self,
+ shader_denorm_flush_to_zero_float64: bool,
+ ) -> Self {
+ self.inner.shader_denorm_flush_to_zero_float64 = shader_denorm_flush_to_zero_float64.into();
+ self
+ }
+ #[inline]
+ pub fn shader_rounding_mode_rte_float16(
+ mut self,
+ shader_rounding_mode_rte_float16: bool,
+ ) -> Self {
+ self.inner.shader_rounding_mode_rte_float16 = shader_rounding_mode_rte_float16.into();
+ self
+ }
+ #[inline]
+ pub fn shader_rounding_mode_rte_float32(
+ mut self,
+ shader_rounding_mode_rte_float32: bool,
+ ) -> Self {
+ self.inner.shader_rounding_mode_rte_float32 = shader_rounding_mode_rte_float32.into();
+ self
+ }
+ #[inline]
+ pub fn shader_rounding_mode_rte_float64(
+ mut self,
+ shader_rounding_mode_rte_float64: bool,
+ ) -> Self {
+ self.inner.shader_rounding_mode_rte_float64 = shader_rounding_mode_rte_float64.into();
+ self
+ }
+ #[inline]
+ pub fn shader_rounding_mode_rtz_float16(
+ mut self,
+ shader_rounding_mode_rtz_float16: bool,
+ ) -> Self {
+ self.inner.shader_rounding_mode_rtz_float16 = shader_rounding_mode_rtz_float16.into();
+ self
+ }
+ #[inline]
+ pub fn shader_rounding_mode_rtz_float32(
+ mut self,
+ shader_rounding_mode_rtz_float32: bool,
+ ) -> Self {
+ self.inner.shader_rounding_mode_rtz_float32 = shader_rounding_mode_rtz_float32.into();
+ self
+ }
+ #[inline]
+ pub fn shader_rounding_mode_rtz_float64(
+ mut self,
+ shader_rounding_mode_rtz_float64: bool,
+ ) -> Self {
+ self.inner.shader_rounding_mode_rtz_float64 = shader_rounding_mode_rtz_float64.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceFloatControlsProperties {
+ self.inner
+ }
+}
+// PhysicalDeviceHostQueryResetFeatures: FFI mirror of the Vulkan struct linked in #[doc]
+// below; one feature boolean. Chainable onto PhysicalDeviceFeatures2 and DeviceCreateInfo
+// per the Extends* impls.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceHostQueryResetFeatures.html>"]
+pub struct PhysicalDeviceHostQueryResetFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub host_query_reset: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceHostQueryResetFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ host_query_reset: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceHostQueryResetFeatures {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES;
+}
+impl PhysicalDeviceHostQueryResetFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceHostQueryResetFeaturesBuilder<'a> {
+ PhysicalDeviceHostQueryResetFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; Deref below lets &Builder stand in for the inner struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceHostQueryResetFeaturesBuilder<'a> {
+ inner: PhysicalDeviceHostQueryResetFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceHostQueryResetFeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceHostQueryResetFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceHostQueryResetFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceHostQueryResetFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceHostQueryResetFeaturesBuilder<'a> {
+ type Target = PhysicalDeviceHostQueryResetFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceHostQueryResetFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceHostQueryResetFeaturesBuilder<'a> {
+ #[inline]
+ pub fn host_query_reset(mut self, host_query_reset: bool) -> Self {
+ self.inner.host_query_reset = host_query_reset.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceHostQueryResetFeatures {
+ self.inner
+ }
+}
+// NativeBufferUsage2ANDROID: FFI mirror of VkNativeBufferUsage2ANDROID. A plain POD pair
+// of u64 usage masks with no s_type/p_next header, so plain #[derive(Default)] suffices
+// and no TaggedStructure impl is generated.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkNativeBufferUsage2ANDROID.html>"]
+pub struct NativeBufferUsage2ANDROID {
+ pub consumer: u64,
+ pub producer: u64,
+}
+impl NativeBufferUsage2ANDROID {
+ pub fn builder<'a>() -> NativeBufferUsage2ANDROIDBuilder<'a> {
+ NativeBufferUsage2ANDROIDBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; Deref below lets &Builder stand in for the inner struct.
+#[repr(transparent)]
+pub struct NativeBufferUsage2ANDROIDBuilder<'a> {
+ inner: NativeBufferUsage2ANDROID,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for NativeBufferUsage2ANDROIDBuilder<'a> {
+ type Target = NativeBufferUsage2ANDROID;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for NativeBufferUsage2ANDROIDBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> NativeBufferUsage2ANDROIDBuilder<'a> {
+ #[inline]
+ pub fn consumer(mut self, consumer: u64) -> Self {
+ self.inner.consumer = consumer;
+ self
+ }
+ #[inline]
+ pub fn producer(mut self, producer: u64) -> Self {
+ self.inner.producer = producer;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> NativeBufferUsage2ANDROID {
+ self.inner
+ }
+}
+// NativeBufferANDROID: FFI mirror of VkNativeBufferANDROID. Input-style chained struct
+// (p_next is *const); wraps a raw Android buffer handle plus stride/format/usage as
+// C ints and the 64-bit usage2 pair above. No Extends* impls are generated for it.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkNativeBufferANDROID.html>"]
+pub struct NativeBufferANDROID {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub handle: *const c_void,
+ pub stride: c_int,
+ pub format: c_int,
+ pub usage: c_int,
+ pub usage2: NativeBufferUsage2ANDROID,
+}
+impl ::std::default::Default for NativeBufferANDROID {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ handle: ::std::ptr::null(),
+ stride: c_int::default(),
+ format: c_int::default(),
+ usage: c_int::default(),
+ usage2: NativeBufferUsage2ANDROID::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for NativeBufferANDROID {
+ const STRUCTURE_TYPE: StructureType = StructureType::NATIVE_BUFFER_ANDROID;
+}
+impl NativeBufferANDROID {
+ pub fn builder<'a>() -> NativeBufferANDROIDBuilder<'a> {
+ NativeBufferANDROIDBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; Deref below lets &Builder stand in for the inner struct.
+#[repr(transparent)]
+pub struct NativeBufferANDROIDBuilder<'a> {
+ inner: NativeBufferANDROID,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for NativeBufferANDROIDBuilder<'a> {
+ type Target = NativeBufferANDROID;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for NativeBufferANDROIDBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> NativeBufferANDROIDBuilder<'a> {
+ #[inline]
+ pub fn handle(mut self, handle: *const c_void) -> Self {
+ self.inner.handle = handle;
+ self
+ }
+ #[inline]
+ pub fn stride(mut self, stride: c_int) -> Self {
+ self.inner.stride = stride;
+ self
+ }
+ #[inline]
+ pub fn format(mut self, format: c_int) -> Self {
+ self.inner.format = format;
+ self
+ }
+ #[inline]
+ pub fn usage(mut self, usage: c_int) -> Self {
+ self.inner.usage = usage;
+ self
+ }
+ #[inline]
+ pub fn usage2(mut self, usage2: NativeBufferUsage2ANDROID) -> Self {
+ self.inner.usage2 = usage2;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> NativeBufferANDROID {
+ self.inner
+ }
+}
+// SwapchainImageCreateInfoANDROID: FFI mirror of the Vulkan struct linked in #[doc]
+// below; carries a single SwapchainImageUsageFlagsANDROID mask. No Extends* impls are
+// generated for it.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSwapchainImageCreateInfoANDROID.html>"]
+pub struct SwapchainImageCreateInfoANDROID {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub usage: SwapchainImageUsageFlagsANDROID,
+}
+impl ::std::default::Default for SwapchainImageCreateInfoANDROID {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ usage: SwapchainImageUsageFlagsANDROID::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SwapchainImageCreateInfoANDROID {
+ const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID;
+}
+impl SwapchainImageCreateInfoANDROID {
+ pub fn builder<'a>() -> SwapchainImageCreateInfoANDROIDBuilder<'a> {
+ SwapchainImageCreateInfoANDROIDBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; Deref below lets &Builder stand in for the inner struct.
+#[repr(transparent)]
+pub struct SwapchainImageCreateInfoANDROIDBuilder<'a> {
+ inner: SwapchainImageCreateInfoANDROID,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SwapchainImageCreateInfoANDROIDBuilder<'a> {
+ type Target = SwapchainImageCreateInfoANDROID;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SwapchainImageCreateInfoANDROIDBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SwapchainImageCreateInfoANDROIDBuilder<'a> {
+ #[inline]
+ pub fn usage(mut self, usage: SwapchainImageUsageFlagsANDROID) -> Self {
+ self.inner.usage = usage;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SwapchainImageCreateInfoANDROID {
+ self.inner
+ }
+}
+// PhysicalDevicePresentationPropertiesANDROID: FFI mirror of the Vulkan struct linked in
+// #[doc] below; one Bool32 (shared_image). Note p_next is *const for this struct, unlike
+// the *mut chains of the core feature/property structs above.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePresentationPropertiesANDROID.html>"]
+pub struct PhysicalDevicePresentationPropertiesANDROID {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub shared_image: Bool32,
+}
+impl ::std::default::Default for PhysicalDevicePresentationPropertiesANDROID {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ shared_image: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDevicePresentationPropertiesANDROID {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID;
+}
+impl PhysicalDevicePresentationPropertiesANDROID {
+ pub fn builder<'a>() -> PhysicalDevicePresentationPropertiesANDROIDBuilder<'a> {
+ PhysicalDevicePresentationPropertiesANDROIDBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; Deref below lets &Builder stand in for the inner struct.
+#[repr(transparent)]
+pub struct PhysicalDevicePresentationPropertiesANDROIDBuilder<'a> {
+ inner: PhysicalDevicePresentationPropertiesANDROID,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PhysicalDevicePresentationPropertiesANDROIDBuilder<'a> {
+ type Target = PhysicalDevicePresentationPropertiesANDROID;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePresentationPropertiesANDROIDBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDevicePresentationPropertiesANDROIDBuilder<'a> {
+ #[inline]
+ pub fn shared_image(mut self, shared_image: bool) -> Self {
+ self.inner.shared_image = shared_image.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDevicePresentationPropertiesANDROID {
+ self.inner
+ }
+}
+// ShaderResourceUsageAMD: FFI mirror of VkShaderResourceUsageAMD. Plain POD output
+// struct (register/LDS/scratch counters) with no s_type/p_next header, so it gets
+// #[derive(Default)] and no TaggedStructure impl.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkShaderResourceUsageAMD.html>"]
+pub struct ShaderResourceUsageAMD {
+ pub num_used_vgprs: u32,
+ pub num_used_sgprs: u32,
+ pub lds_size_per_local_work_group: u32,
+ pub lds_usage_size_in_bytes: usize,
+ pub scratch_mem_usage_in_bytes: usize,
+}
+impl ShaderResourceUsageAMD {
+ pub fn builder<'a>() -> ShaderResourceUsageAMDBuilder<'a> {
+ ShaderResourceUsageAMDBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; Deref below lets &Builder stand in for the inner struct.
+#[repr(transparent)]
+pub struct ShaderResourceUsageAMDBuilder<'a> {
+ inner: ShaderResourceUsageAMD,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ShaderResourceUsageAMDBuilder<'a> {
+ type Target = ShaderResourceUsageAMD;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ShaderResourceUsageAMDBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ShaderResourceUsageAMDBuilder<'a> {
+ #[inline]
+ pub fn num_used_vgprs(mut self, num_used_vgprs: u32) -> Self {
+ self.inner.num_used_vgprs = num_used_vgprs;
+ self
+ }
+ #[inline]
+ pub fn num_used_sgprs(mut self, num_used_sgprs: u32) -> Self {
+ self.inner.num_used_sgprs = num_used_sgprs;
+ self
+ }
+ #[inline]
+ pub fn lds_size_per_local_work_group(mut self, lds_size_per_local_work_group: u32) -> Self {
+ self.inner.lds_size_per_local_work_group = lds_size_per_local_work_group;
+ self
+ }
+ #[inline]
+ pub fn lds_usage_size_in_bytes(mut self, lds_usage_size_in_bytes: usize) -> Self {
+ self.inner.lds_usage_size_in_bytes = lds_usage_size_in_bytes;
+ self
+ }
+ #[inline]
+ pub fn scratch_mem_usage_in_bytes(mut self, scratch_mem_usage_in_bytes: usize) -> Self {
+ self.inner.scratch_mem_usage_in_bytes = scratch_mem_usage_in_bytes;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ShaderResourceUsageAMD {
+ self.inner
+ }
+}
+// ShaderStatisticsInfoAMD: FFI mirror of VkShaderStatisticsInfoAMD (VK_AMD_shader_info).
+// No s_type/p_next header — this struct is not part of a p_next chain; a hand-written
+// Default is still needed because of the [u32; 3] array field.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkShaderStatisticsInfoAMD.html>"]
+pub struct ShaderStatisticsInfoAMD {
+ pub shader_stage_mask: ShaderStageFlags,
+ pub resource_usage: ShaderResourceUsageAMD,
+ pub num_physical_vgprs: u32,
+ pub num_physical_sgprs: u32,
+ pub num_available_vgprs: u32,
+ pub num_available_sgprs: u32,
+ pub compute_work_group_size: [u32; 3],
+}
+impl ::std::default::Default for ShaderStatisticsInfoAMD {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ shader_stage_mask: ShaderStageFlags::default(),
+ resource_usage: ShaderResourceUsageAMD::default(),
+ num_physical_vgprs: u32::default(),
+ num_physical_sgprs: u32::default(),
+ num_available_vgprs: u32::default(),
+ num_available_sgprs: u32::default(),
+ // Safe literal: identical all-zero value to the previous
+ // `unsafe { ::std::mem::zeroed() }` without needing an unsafe block.
+ compute_work_group_size: [0; 3],
+ }
+ }
+}
+impl ShaderStatisticsInfoAMD {
+ pub fn builder<'a>() -> ShaderStatisticsInfoAMDBuilder<'a> {
+ ShaderStatisticsInfoAMDBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; Deref below lets &Builder stand in for the inner struct.
+#[repr(transparent)]
+pub struct ShaderStatisticsInfoAMDBuilder<'a> {
+ inner: ShaderStatisticsInfoAMD,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ShaderStatisticsInfoAMDBuilder<'a> {
+ type Target = ShaderStatisticsInfoAMD;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ShaderStatisticsInfoAMDBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ShaderStatisticsInfoAMDBuilder<'a> {
+ #[inline]
+ pub fn shader_stage_mask(mut self, shader_stage_mask: ShaderStageFlags) -> Self {
+ self.inner.shader_stage_mask = shader_stage_mask;
+ self
+ }
+ #[inline]
+ pub fn resource_usage(mut self, resource_usage: ShaderResourceUsageAMD) -> Self {
+ self.inner.resource_usage = resource_usage;
+ self
+ }
+ #[inline]
+ pub fn num_physical_vgprs(mut self, num_physical_vgprs: u32) -> Self {
+ self.inner.num_physical_vgprs = num_physical_vgprs;
+ self
+ }
+ #[inline]
+ pub fn num_physical_sgprs(mut self, num_physical_sgprs: u32) -> Self {
+ self.inner.num_physical_sgprs = num_physical_sgprs;
+ self
+ }
+ #[inline]
+ pub fn num_available_vgprs(mut self, num_available_vgprs: u32) -> Self {
+ self.inner.num_available_vgprs = num_available_vgprs;
+ self
+ }
+ #[inline]
+ pub fn num_available_sgprs(mut self, num_available_sgprs: u32) -> Self {
+ self.inner.num_available_sgprs = num_available_sgprs;
+ self
+ }
+ #[inline]
+ pub fn compute_work_group_size(mut self, compute_work_group_size: [u32; 3]) -> Self {
+ self.inner.compute_work_group_size = compute_work_group_size;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ShaderStatisticsInfoAMD {
+ self.inner
+ }
+}
+// DeviceQueueGlobalPriorityCreateInfoKHR: FFI mirror of the Vulkan struct linked in
+// #[doc] below. The Extends* impls chain it onto DeviceQueueCreateInfo to request a
+// global queue priority at device creation.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceQueueGlobalPriorityCreateInfoKHR.html>"]
+pub struct DeviceQueueGlobalPriorityCreateInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub global_priority: QueueGlobalPriorityKHR,
+}
+impl ::std::default::Default for DeviceQueueGlobalPriorityCreateInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ global_priority: QueueGlobalPriorityKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DeviceQueueGlobalPriorityCreateInfoKHR {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR;
+}
+impl DeviceQueueGlobalPriorityCreateInfoKHR {
+ pub fn builder<'a>() -> DeviceQueueGlobalPriorityCreateInfoKHRBuilder<'a> {
+ DeviceQueueGlobalPriorityCreateInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; Deref below lets &Builder stand in for the inner struct.
+#[repr(transparent)]
+pub struct DeviceQueueGlobalPriorityCreateInfoKHRBuilder<'a> {
+ inner: DeviceQueueGlobalPriorityCreateInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsDeviceQueueCreateInfo for DeviceQueueGlobalPriorityCreateInfoKHRBuilder<'_> {}
+unsafe impl ExtendsDeviceQueueCreateInfo for DeviceQueueGlobalPriorityCreateInfoKHR {}
+impl<'a> ::std::ops::Deref for DeviceQueueGlobalPriorityCreateInfoKHRBuilder<'a> {
+ type Target = DeviceQueueGlobalPriorityCreateInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceQueueGlobalPriorityCreateInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceQueueGlobalPriorityCreateInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn global_priority(mut self, global_priority: QueueGlobalPriorityKHR) -> Self {
+ self.inner.global_priority = global_priority;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceQueueGlobalPriorityCreateInfoKHR {
+ self.inner
+ }
+}
+// Machine-generated binding for VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR:
+// feature toggle reporting/enabling support for querying global queue priorities.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR.html>"]
+pub struct PhysicalDeviceGlobalPriorityQueryFeaturesKHR {
+    pub s_type: StructureType,
+    // *mut because the implementation writes through the chain on feature queries.
+    pub p_next: *mut c_void,
+    pub global_priority_query: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceGlobalPriorityQueryFeaturesKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            global_priority_query: Bool32::default(),
+        }
+    }
+}
+// SAFETY (trait contract): tag matches the generated layout.
+unsafe impl TaggedStructure for PhysicalDeviceGlobalPriorityQueryFeaturesKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR;
+}
+impl PhysicalDeviceGlobalPriorityQueryFeaturesKHR {
+    pub fn builder<'a>() -> PhysicalDeviceGlobalPriorityQueryFeaturesKHRBuilder<'a> {
+        PhysicalDeviceGlobalPriorityQueryFeaturesKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceGlobalPriorityQueryFeaturesKHRBuilder<'a> {
+    inner: PhysicalDeviceGlobalPriorityQueryFeaturesKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Feature structs may extend both PhysicalDeviceFeatures2 (query path) and
+// DeviceCreateInfo (enable path); both marker impls are emitted.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceGlobalPriorityQueryFeaturesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceGlobalPriorityQueryFeaturesKHR {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceGlobalPriorityQueryFeaturesKHRBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceGlobalPriorityQueryFeaturesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceGlobalPriorityQueryFeaturesKHRBuilder<'a> {
+    type Target = PhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceGlobalPriorityQueryFeaturesKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceGlobalPriorityQueryFeaturesKHRBuilder<'a> {
+    #[inline]
+    // Ergonomic bool -> Bool32 (VK_TRUE/VK_FALSE) conversion via Into.
+    pub fn global_priority_query(mut self, global_priority_query: bool) -> Self {
+        self.inner.global_priority_query = global_priority_query.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceGlobalPriorityQueryFeaturesKHR {
+        self.inner
+    }
+}
+// Machine-generated binding for VkQueueFamilyGlobalPriorityPropertiesKHR:
+// output structure listing the global priorities a queue family supports.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueueFamilyGlobalPriorityPropertiesKHR.html>"]
+pub struct QueueFamilyGlobalPriorityPropertiesKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    // Number of valid leading entries in `priorities`.
+    pub priority_count: u32,
+    pub priorities: [QueueGlobalPriorityKHR; MAX_GLOBAL_PRIORITY_SIZE_KHR],
+}
+impl ::std::default::Default for QueueFamilyGlobalPriorityPropertiesKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            priority_count: u32::default(),
+            // zeroed() for the fixed array; sound assuming QueueGlobalPriorityKHR
+            // is the generator's transparent integer newtype (all-zero bits valid).
+            priorities: unsafe { ::std::mem::zeroed() },
+        }
+    }
+}
+// SAFETY (trait contract): tag matches the generated layout.
+unsafe impl TaggedStructure for QueueFamilyGlobalPriorityPropertiesKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR;
+}
+impl QueueFamilyGlobalPriorityPropertiesKHR {
+    pub fn builder<'a>() -> QueueFamilyGlobalPriorityPropertiesKHRBuilder<'a> {
+        QueueFamilyGlobalPriorityPropertiesKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct QueueFamilyGlobalPriorityPropertiesKHRBuilder<'a> {
+    inner: QueueFamilyGlobalPriorityPropertiesKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: legal pNext extension of VkQueueFamilyProperties2.
+unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyGlobalPriorityPropertiesKHRBuilder<'_> {}
+unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyGlobalPriorityPropertiesKHR {}
+impl<'a> ::std::ops::Deref for QueueFamilyGlobalPriorityPropertiesKHRBuilder<'a> {
+    type Target = QueueFamilyGlobalPriorityPropertiesKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for QueueFamilyGlobalPriorityPropertiesKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> QueueFamilyGlobalPriorityPropertiesKHRBuilder<'a> {
+    #[inline]
+    // NOTE(review): count and array are set independently; the builder does not
+    // enforce priority_count <= MAX_GLOBAL_PRIORITY_SIZE_KHR — caller's duty.
+    pub fn priority_count(mut self, priority_count: u32) -> Self {
+        self.inner.priority_count = priority_count;
+        self
+    }
+    #[inline]
+    pub fn priorities(
+        mut self,
+        priorities: [QueueGlobalPriorityKHR; MAX_GLOBAL_PRIORITY_SIZE_KHR],
+    ) -> Self {
+        self.inner.priorities = priorities;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> QueueFamilyGlobalPriorityPropertiesKHR {
+        self.inner
+    }
+}
+// Machine-generated binding for VkDebugUtilsObjectNameInfoEXT
+// (VK_EXT_debug_utils): attaches a human-readable name to a Vulkan handle.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugUtilsObjectNameInfoEXT.html>"]
+pub struct DebugUtilsObjectNameInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub object_type: ObjectType,
+    // Raw handle value of the object being named (dispatchable or not).
+    pub object_handle: u64,
+    // NUL-terminated UTF-8 string, or null for "no name".
+    pub p_object_name: *const c_char,
+}
+impl ::std::default::Default for DebugUtilsObjectNameInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            object_type: ObjectType::default(),
+            object_handle: u64::default(),
+            p_object_name: ::std::ptr::null(),
+        }
+    }
+}
+// SAFETY (trait contract): tag matches the generated layout.
+unsafe impl TaggedStructure for DebugUtilsObjectNameInfoEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
+}
+impl DebugUtilsObjectNameInfoEXT {
+    pub fn builder<'a>() -> DebugUtilsObjectNameInfoEXTBuilder<'a> {
+        DebugUtilsObjectNameInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct DebugUtilsObjectNameInfoEXTBuilder<'a> {
+    inner: DebugUtilsObjectNameInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may be chained into VkPipelineShaderStageCreateInfo::pNext
+// (per the registry's structextends for this struct).
+unsafe impl ExtendsPipelineShaderStageCreateInfo for DebugUtilsObjectNameInfoEXTBuilder<'_> {}
+unsafe impl ExtendsPipelineShaderStageCreateInfo for DebugUtilsObjectNameInfoEXT {}
+impl<'a> ::std::ops::Deref for DebugUtilsObjectNameInfoEXTBuilder<'a> {
+    type Target = DebugUtilsObjectNameInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DebugUtilsObjectNameInfoEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DebugUtilsObjectNameInfoEXTBuilder<'a> {
+    #[inline]
+    pub fn object_type(mut self, object_type: ObjectType) -> Self {
+        self.inner.object_type = object_type;
+        self
+    }
+    #[inline]
+    pub fn object_handle(mut self, object_handle: u64) -> Self {
+        self.inner.object_handle = object_handle;
+        self
+    }
+    #[inline]
+    // Borrows the CStr for 'a; the builder's lifetime keeps the pointer valid.
+    pub fn object_name(mut self, object_name: &'a ::std::ffi::CStr) -> Self {
+        self.inner.p_object_name = object_name.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DebugUtilsObjectNameInfoEXT {
+        self.inner
+    }
+}
+// Machine-generated binding for VkDebugUtilsObjectTagInfoEXT
+// (VK_EXT_debug_utils): attaches an arbitrary binary tag to a Vulkan handle.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugUtilsObjectTagInfoEXT.html>"]
+pub struct DebugUtilsObjectTagInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub object_type: ObjectType,
+    pub object_handle: u64,
+    // Tool-defined numeric namespace for the tag payload.
+    pub tag_name: u64,
+    pub tag_size: usize,
+    pub p_tag: *const c_void,
+}
+impl ::std::default::Default for DebugUtilsObjectTagInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            object_type: ObjectType::default(),
+            object_handle: u64::default(),
+            tag_name: u64::default(),
+            tag_size: usize::default(),
+            p_tag: ::std::ptr::null(),
+        }
+    }
+}
+// SAFETY (trait contract): tag matches the generated layout.
+unsafe impl TaggedStructure for DebugUtilsObjectTagInfoEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_UTILS_OBJECT_TAG_INFO_EXT;
+}
+impl DebugUtilsObjectTagInfoEXT {
+    pub fn builder<'a>() -> DebugUtilsObjectTagInfoEXTBuilder<'a> {
+        DebugUtilsObjectTagInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct DebugUtilsObjectTagInfoEXTBuilder<'a> {
+    inner: DebugUtilsObjectTagInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DebugUtilsObjectTagInfoEXTBuilder<'a> {
+    type Target = DebugUtilsObjectTagInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DebugUtilsObjectTagInfoEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DebugUtilsObjectTagInfoEXTBuilder<'a> {
+    #[inline]
+    pub fn object_type(mut self, object_type: ObjectType) -> Self {
+        self.inner.object_type = object_type;
+        self
+    }
+    #[inline]
+    pub fn object_handle(mut self, object_handle: u64) -> Self {
+        self.inner.object_handle = object_handle;
+        self
+    }
+    #[inline]
+    pub fn tag_name(mut self, tag_name: u64) -> Self {
+        self.inner.tag_name = tag_name;
+        self
+    }
+    #[inline]
+    // Sets size + pointer together from one slice, so they cannot disagree;
+    // the 'a borrow keeps the backing bytes alive for the builder's lifetime.
+    pub fn tag(mut self, tag: &'a [u8]) -> Self {
+        self.inner.tag_size = tag.len();
+        self.inner.p_tag = tag.as_ptr().cast();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DebugUtilsObjectTagInfoEXT {
+        self.inner
+    }
+}
+// Machine-generated binding for VkDebugUtilsLabelEXT (VK_EXT_debug_utils):
+// a named, optionally colored label for queue/command-buffer debug regions.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugUtilsLabelEXT.html>"]
+pub struct DebugUtilsLabelEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub p_label_name: *const c_char,
+    // RGBA color hint for debug tooling; all-zero means "unspecified".
+    pub color: [f32; 4],
+}
+impl ::std::default::Default for DebugUtilsLabelEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            p_label_name: ::std::ptr::null(),
+            // zeroed() on [f32; 4] yields [0.0; 4]; all-zero bits are valid f32s.
+            color: unsafe { ::std::mem::zeroed() },
+        }
+    }
+}
+// SAFETY (trait contract): tag matches the generated layout.
+unsafe impl TaggedStructure for DebugUtilsLabelEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_UTILS_LABEL_EXT;
+}
+impl DebugUtilsLabelEXT {
+    pub fn builder<'a>() -> DebugUtilsLabelEXTBuilder<'a> {
+        DebugUtilsLabelEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct DebugUtilsLabelEXTBuilder<'a> {
+    inner: DebugUtilsLabelEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DebugUtilsLabelEXTBuilder<'a> {
+    type Target = DebugUtilsLabelEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DebugUtilsLabelEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DebugUtilsLabelEXTBuilder<'a> {
+    #[inline]
+    // Borrows the CStr for 'a; pointer stays valid while the builder lives.
+    pub fn label_name(mut self, label_name: &'a ::std::ffi::CStr) -> Self {
+        self.inner.p_label_name = label_name.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn color(mut self, color: [f32; 4]) -> Self {
+        self.inner.color = color;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DebugUtilsLabelEXT {
+        self.inner
+    }
+}
+// Machine-generated binding for VkDebugUtilsMessengerCreateInfoEXT:
+// configures severity/type filters and the user callback for a debug messenger.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugUtilsMessengerCreateInfoEXT.html>"]
+pub struct DebugUtilsMessengerCreateInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub flags: DebugUtilsMessengerCreateFlagsEXT,
+    pub message_severity: DebugUtilsMessageSeverityFlagsEXT,
+    pub message_type: DebugUtilsMessageTypeFlagsEXT,
+    // Option<extern fn(...)> under the hood; None means no callback installed.
+    pub pfn_user_callback: PFN_vkDebugUtilsMessengerCallbackEXT,
+    pub p_user_data: *mut c_void,
+}
+// Debug is hand-written (not derived) because the fn-pointer field has no
+// useful Debug impl; it is printed as its address instead.
+#[cfg(feature = "debug")]
+impl fmt::Debug for DebugUtilsMessengerCreateInfoEXT {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.debug_struct("DebugUtilsMessengerCreateInfoEXT")
+            .field("s_type", &self.s_type)
+            .field("p_next", &self.p_next)
+            .field("flags", &self.flags)
+            .field("message_severity", &self.message_severity)
+            .field("message_type", &self.message_type)
+            .field(
+                "pfn_user_callback",
+                &(self.pfn_user_callback.map(|x| x as *const ())),
+            )
+            .field("p_user_data", &self.p_user_data)
+            .finish()
+    }
+}
+impl ::std::default::Default for DebugUtilsMessengerCreateInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            flags: DebugUtilsMessengerCreateFlagsEXT::default(),
+            message_severity: DebugUtilsMessageSeverityFlagsEXT::default(),
+            message_type: DebugUtilsMessageTypeFlagsEXT::default(),
+            pfn_user_callback: PFN_vkDebugUtilsMessengerCallbackEXT::default(),
+            p_user_data: ::std::ptr::null_mut(),
+        }
+    }
+}
+// SAFETY (trait contract): tag matches the generated layout.
+unsafe impl TaggedStructure for DebugUtilsMessengerCreateInfoEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
+}
+impl DebugUtilsMessengerCreateInfoEXT {
+    pub fn builder<'a>() -> DebugUtilsMessengerCreateInfoEXTBuilder<'a> {
+        DebugUtilsMessengerCreateInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct DebugUtilsMessengerCreateInfoEXTBuilder<'a> {
+    inner: DebugUtilsMessengerCreateInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may be chained into VkInstanceCreateInfo::pNext to capture
+// messages emitted during instance creation/destruction.
+unsafe impl ExtendsInstanceCreateInfo for DebugUtilsMessengerCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsInstanceCreateInfo for DebugUtilsMessengerCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for DebugUtilsMessengerCreateInfoEXTBuilder<'a> {
+    type Target = DebugUtilsMessengerCreateInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DebugUtilsMessengerCreateInfoEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DebugUtilsMessengerCreateInfoEXTBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: DebugUtilsMessengerCreateFlagsEXT) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn message_severity(mut self, message_severity: DebugUtilsMessageSeverityFlagsEXT) -> Self {
+        self.inner.message_severity = message_severity;
+        self
+    }
+    #[inline]
+    pub fn message_type(mut self, message_type: DebugUtilsMessageTypeFlagsEXT) -> Self {
+        self.inner.message_type = message_type;
+        self
+    }
+    #[inline]
+    pub fn pfn_user_callback(
+        mut self,
+        pfn_user_callback: PFN_vkDebugUtilsMessengerCallbackEXT,
+    ) -> Self {
+        self.inner.pfn_user_callback = pfn_user_callback;
+        self
+    }
+    #[inline]
+    // Raw pointer passed straight through to the callback; caller owns lifetime.
+    pub fn user_data(mut self, user_data: *mut c_void) -> Self {
+        self.inner.p_user_data = user_data;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DebugUtilsMessengerCreateInfoEXT {
+        self.inner
+    }
+}
+// Machine-generated binding for VkDebugUtilsMessengerCallbackDataEXT: the
+// payload delivered to a debug messenger callback (message text, ids, and the
+// active queue/command-buffer labels and named objects).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugUtilsMessengerCallbackDataEXT.html>"]
+pub struct DebugUtilsMessengerCallbackDataEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub flags: DebugUtilsMessengerCallbackDataFlagsEXT,
+    pub p_message_id_name: *const c_char,
+    pub message_id_number: i32,
+    pub p_message: *const c_char,
+    // Each count/pointer pair below describes a C array owned by the caller.
+    pub queue_label_count: u32,
+    pub p_queue_labels: *const DebugUtilsLabelEXT,
+    pub cmd_buf_label_count: u32,
+    pub p_cmd_buf_labels: *const DebugUtilsLabelEXT,
+    pub object_count: u32,
+    pub p_objects: *const DebugUtilsObjectNameInfoEXT,
+}
+impl ::std::default::Default for DebugUtilsMessengerCallbackDataEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            flags: DebugUtilsMessengerCallbackDataFlagsEXT::default(),
+            p_message_id_name: ::std::ptr::null(),
+            message_id_number: i32::default(),
+            p_message: ::std::ptr::null(),
+            queue_label_count: u32::default(),
+            p_queue_labels: ::std::ptr::null(),
+            cmd_buf_label_count: u32::default(),
+            p_cmd_buf_labels: ::std::ptr::null(),
+            object_count: u32::default(),
+            p_objects: ::std::ptr::null(),
+        }
+    }
+}
+// SAFETY (trait contract): tag matches the generated layout.
+unsafe impl TaggedStructure for DebugUtilsMessengerCallbackDataEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT;
+}
+impl DebugUtilsMessengerCallbackDataEXT {
+    pub fn builder<'a>() -> DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
+        DebugUtilsMessengerCallbackDataEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
+    inner: DebugUtilsMessengerCallbackDataEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait for structs allowed in this struct's own pNext chain
+// (used by push_next below). Unsafe: implementors vouch for layout/tag validity.
+pub unsafe trait ExtendsDebugUtilsMessengerCallbackDataEXT {}
+impl<'a> ::std::ops::Deref for DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
+    type Target = DebugUtilsMessengerCallbackDataEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DebugUtilsMessengerCallbackDataEXTBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: DebugUtilsMessengerCallbackDataFlagsEXT) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn message_id_name(mut self, message_id_name: &'a ::std::ffi::CStr) -> Self {
+        self.inner.p_message_id_name = message_id_name.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn message_id_number(mut self, message_id_number: i32) -> Self {
+        self.inner.message_id_number = message_id_number;
+        self
+    }
+    #[inline]
+    pub fn message(mut self, message: &'a ::std::ffi::CStr) -> Self {
+        self.inner.p_message = message.as_ptr();
+        self
+    }
+    #[inline]
+    // Slice setters assign count and pointer together so they cannot disagree.
+    pub fn queue_labels(mut self, queue_labels: &'a [DebugUtilsLabelEXT]) -> Self {
+        self.inner.queue_label_count = queue_labels.len() as _;
+        self.inner.p_queue_labels = queue_labels.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn cmd_buf_labels(mut self, cmd_buf_labels: &'a [DebugUtilsLabelEXT]) -> Self {
+        self.inner.cmd_buf_label_count = cmd_buf_labels.len() as _;
+        self.inner.p_cmd_buf_labels = cmd_buf_labels.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn objects(mut self, objects: &'a [DebugUtilsObjectNameInfoEXT]) -> Self {
+        self.inner.object_count = objects.len() as _;
+        self.inner.p_objects = objects.as_ptr();
+        self
+    }
+    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+    #[doc = r" valid extension structs can be pushed into the chain."]
+    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+    #[doc = r" chain will look like `A -> D -> B -> C`."]
+    pub fn push_next<T: ExtendsDebugUtilsMessengerCallbackDataEXT>(
+        mut self,
+        next: &'a mut T,
+    ) -> Self {
+        // SAFETY: T's bound guarantees it is a Vulkan-tagged struct whose first
+        // two fields are sType/pNext, so it can be walked as a pNext chain.
+        // Splice: the *end* of `next`'s own chain is pointed at our old head,
+        // then `next` becomes the new head — preserving any structs already
+        // chained onto `next` as well as our existing chain.
+        unsafe {
+            let next_ptr = <*const T>::cast(next);
+            let last_next = ptr_chain_iter(next).last().unwrap();
+            (*last_next).p_next = self.inner.p_next as _;
+            self.inner.p_next = next_ptr;
+        }
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DebugUtilsMessengerCallbackDataEXT {
+        self.inner
+    }
+}
+// Machine-generated binding for VkPhysicalDeviceDeviceMemoryReportFeaturesEXT:
+// feature toggle for VK_EXT_device_memory_report.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDeviceMemoryReportFeaturesEXT.html>"]
+pub struct PhysicalDeviceDeviceMemoryReportFeaturesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub device_memory_report: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceDeviceMemoryReportFeaturesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            device_memory_report: Bool32::default(),
+        }
+    }
+}
+// SAFETY (trait contract): tag matches the generated layout.
+unsafe impl TaggedStructure for PhysicalDeviceDeviceMemoryReportFeaturesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT;
+}
+impl PhysicalDeviceDeviceMemoryReportFeaturesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceDeviceMemoryReportFeaturesEXTBuilder<'a> {
+        PhysicalDeviceDeviceMemoryReportFeaturesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceDeviceMemoryReportFeaturesEXTBuilder<'a> {
+    inner: PhysicalDeviceDeviceMemoryReportFeaturesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Feature struct: extends both the query path (PhysicalDeviceFeatures2) and
+// the enable path (DeviceCreateInfo).
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceDeviceMemoryReportFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDeviceMemoryReportFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDeviceMemoryReportFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDeviceMemoryReportFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceDeviceMemoryReportFeaturesEXTBuilder<'a> {
+    type Target = PhysicalDeviceDeviceMemoryReportFeaturesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceDeviceMemoryReportFeaturesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceDeviceMemoryReportFeaturesEXTBuilder<'a> {
+    #[inline]
+    // bool -> Bool32 (VK_TRUE/VK_FALSE) via Into.
+    pub fn device_memory_report(mut self, device_memory_report: bool) -> Self {
+        self.inner.device_memory_report = device_memory_report.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceDeviceMemoryReportFeaturesEXT {
+        self.inner
+    }
+}
+// Machine-generated binding for VkDeviceDeviceMemoryReportCreateInfoEXT:
+// registers a per-device callback for memory allocation/free events
+// (VK_EXT_device_memory_report); chained into VkDeviceCreateInfo::pNext.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceDeviceMemoryReportCreateInfoEXT.html>"]
+pub struct DeviceDeviceMemoryReportCreateInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub flags: DeviceMemoryReportFlagsEXT,
+    // Option<extern fn(...)>; None means no callback.
+    pub pfn_user_callback: PFN_vkDeviceMemoryReportCallbackEXT,
+    pub p_user_data: *mut c_void,
+}
+// Debug is hand-written because the fn-pointer field cannot derive Debug;
+// it is rendered as its raw address.
+#[cfg(feature = "debug")]
+impl fmt::Debug for DeviceDeviceMemoryReportCreateInfoEXT {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.debug_struct("DeviceDeviceMemoryReportCreateInfoEXT")
+            .field("s_type", &self.s_type)
+            .field("p_next", &self.p_next)
+            .field("flags", &self.flags)
+            .field(
+                "pfn_user_callback",
+                &(self.pfn_user_callback.map(|x| x as *const ())),
+            )
+            .field("p_user_data", &self.p_user_data)
+            .finish()
+    }
+}
+impl ::std::default::Default for DeviceDeviceMemoryReportCreateInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            flags: DeviceMemoryReportFlagsEXT::default(),
+            pfn_user_callback: PFN_vkDeviceMemoryReportCallbackEXT::default(),
+            p_user_data: ::std::ptr::null_mut(),
+        }
+    }
+}
+// SAFETY (trait contract): tag matches the generated layout.
+unsafe impl TaggedStructure for DeviceDeviceMemoryReportCreateInfoEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT;
+}
+impl DeviceDeviceMemoryReportCreateInfoEXT {
+    pub fn builder<'a>() -> DeviceDeviceMemoryReportCreateInfoEXTBuilder<'a> {
+        DeviceDeviceMemoryReportCreateInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct DeviceDeviceMemoryReportCreateInfoEXTBuilder<'a> {
+    inner: DeviceDeviceMemoryReportCreateInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsDeviceCreateInfo for DeviceDeviceMemoryReportCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for DeviceDeviceMemoryReportCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for DeviceDeviceMemoryReportCreateInfoEXTBuilder<'a> {
+    type Target = DeviceDeviceMemoryReportCreateInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DeviceDeviceMemoryReportCreateInfoEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DeviceDeviceMemoryReportCreateInfoEXTBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: DeviceMemoryReportFlagsEXT) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn pfn_user_callback(
+        mut self,
+        pfn_user_callback: PFN_vkDeviceMemoryReportCallbackEXT,
+    ) -> Self {
+        self.inner.pfn_user_callback = pfn_user_callback;
+        self
+    }
+    #[inline]
+    // Opaque pointer forwarded to every callback invocation; caller owns it.
+    pub fn user_data(mut self, user_data: *mut c_void) -> Self {
+        self.inner.p_user_data = user_data;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DeviceDeviceMemoryReportCreateInfoEXT {
+        self.inner
+    }
+}
+// Machine-generated binding for VkDeviceMemoryReportCallbackDataEXT: the
+// event payload handed to a device-memory-report callback (allocate/free/
+// import/unimport events with object identity, size, and heap index).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceMemoryReportCallbackDataEXT.html>"]
+pub struct DeviceMemoryReportCallbackDataEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub flags: DeviceMemoryReportFlagsEXT,
+    // `ty` (not `type`) because `type` is a Rust keyword.
+    pub ty: DeviceMemoryReportEventTypeEXT,
+    pub memory_object_id: u64,
+    pub size: DeviceSize,
+    pub object_type: ObjectType,
+    pub object_handle: u64,
+    pub heap_index: u32,
+}
+impl ::std::default::Default for DeviceMemoryReportCallbackDataEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            flags: DeviceMemoryReportFlagsEXT::default(),
+            ty: DeviceMemoryReportEventTypeEXT::default(),
+            memory_object_id: u64::default(),
+            size: DeviceSize::default(),
+            object_type: ObjectType::default(),
+            object_handle: u64::default(),
+            heap_index: u32::default(),
+        }
+    }
+}
+// SAFETY (trait contract): tag matches the generated layout.
+unsafe impl TaggedStructure for DeviceMemoryReportCallbackDataEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT;
+}
+impl DeviceMemoryReportCallbackDataEXT {
+    pub fn builder<'a>() -> DeviceMemoryReportCallbackDataEXTBuilder<'a> {
+        DeviceMemoryReportCallbackDataEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct DeviceMemoryReportCallbackDataEXTBuilder<'a> {
+    inner: DeviceMemoryReportCallbackDataEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DeviceMemoryReportCallbackDataEXTBuilder<'a> {
+    type Target = DeviceMemoryReportCallbackDataEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DeviceMemoryReportCallbackDataEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DeviceMemoryReportCallbackDataEXTBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: DeviceMemoryReportFlagsEXT) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn ty(mut self, ty: DeviceMemoryReportEventTypeEXT) -> Self {
+        self.inner.ty = ty;
+        self
+    }
+    #[inline]
+    pub fn memory_object_id(mut self, memory_object_id: u64) -> Self {
+        self.inner.memory_object_id = memory_object_id;
+        self
+    }
+    #[inline]
+    pub fn size(mut self, size: DeviceSize) -> Self {
+        self.inner.size = size;
+        self
+    }
+    #[inline]
+    pub fn object_type(mut self, object_type: ObjectType) -> Self {
+        self.inner.object_type = object_type;
+        self
+    }
+    #[inline]
+    pub fn object_handle(mut self, object_handle: u64) -> Self {
+        self.inner.object_handle = object_handle;
+        self
+    }
+    #[inline]
+    pub fn heap_index(mut self, heap_index: u32) -> Self {
+        self.inner.heap_index = heap_index;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DeviceMemoryReportCallbackDataEXT {
+        self.inner
+    }
+}
+// Machine-generated binding for VkImportMemoryHostPointerInfoEXT
+// (VK_EXT_external_memory_host): imports an existing host allocation as
+// device memory; chained into VkMemoryAllocateInfo::pNext.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportMemoryHostPointerInfoEXT.html>"]
+pub struct ImportMemoryHostPointerInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub handle_type: ExternalMemoryHandleTypeFlags,
+    pub p_host_pointer: *mut c_void,
+}
+impl ::std::default::Default for ImportMemoryHostPointerInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            handle_type: ExternalMemoryHandleTypeFlags::default(),
+            p_host_pointer: ::std::ptr::null_mut(),
+        }
+    }
+}
+// SAFETY (trait contract): tag matches the generated layout.
+unsafe impl TaggedStructure for ImportMemoryHostPointerInfoEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_MEMORY_HOST_POINTER_INFO_EXT;
+}
+impl ImportMemoryHostPointerInfoEXT {
+    pub fn builder<'a>() -> ImportMemoryHostPointerInfoEXTBuilder<'a> {
+        ImportMemoryHostPointerInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct ImportMemoryHostPointerInfoEXTBuilder<'a> {
+    inner: ImportMemoryHostPointerInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: legal pNext extension of VkMemoryAllocateInfo.
+unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryHostPointerInfoEXTBuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryHostPointerInfoEXT {}
+impl<'a> ::std::ops::Deref for ImportMemoryHostPointerInfoEXTBuilder<'a> {
+    type Target = ImportMemoryHostPointerInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for ImportMemoryHostPointerInfoEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> ImportMemoryHostPointerInfoEXTBuilder<'a> {
+    #[inline]
+    pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self {
+        self.inner.handle_type = handle_type;
+        self
+    }
+    #[inline]
+    // Raw host pointer; validity/alignment requirements are the caller's duty
+    // (see the Vulkan spec's valid-usage for this struct).
+    pub fn host_pointer(mut self, host_pointer: *mut c_void) -> Self {
+        self.inner.p_host_pointer = host_pointer;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> ImportMemoryHostPointerInfoEXT {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryHostPointerPropertiesEXT.html>"]
+pub struct MemoryHostPointerPropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub memory_type_bits: u32,
+}
+impl ::std::default::Default for MemoryHostPointerPropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ memory_type_bits: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MemoryHostPointerPropertiesEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_HOST_POINTER_PROPERTIES_EXT;
+}
+impl MemoryHostPointerPropertiesEXT {
+ pub fn builder<'a>() -> MemoryHostPointerPropertiesEXTBuilder<'a> {
+ MemoryHostPointerPropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MemoryHostPointerPropertiesEXTBuilder<'a> {
+ inner: MemoryHostPointerPropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MemoryHostPointerPropertiesEXTBuilder<'a> {
+ type Target = MemoryHostPointerPropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MemoryHostPointerPropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MemoryHostPointerPropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn memory_type_bits(mut self, memory_type_bits: u32) -> Self {
+ self.inner.memory_type_bits = memory_type_bits;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MemoryHostPointerPropertiesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceExternalMemoryHostPropertiesEXT.html>"]
+pub struct PhysicalDeviceExternalMemoryHostPropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub min_imported_host_pointer_alignment: DeviceSize,
+}
+impl ::std::default::Default for PhysicalDeviceExternalMemoryHostPropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ min_imported_host_pointer_alignment: DeviceSize::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceExternalMemoryHostPropertiesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT;
+}
+impl PhysicalDeviceExternalMemoryHostPropertiesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceExternalMemoryHostPropertiesEXTBuilder<'a> {
+ PhysicalDeviceExternalMemoryHostPropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceExternalMemoryHostPropertiesEXTBuilder<'a> {
+ inner: PhysicalDeviceExternalMemoryHostPropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceExternalMemoryHostPropertiesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceExternalMemoryHostPropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceExternalMemoryHostPropertiesEXTBuilder<'a> {
+ type Target = PhysicalDeviceExternalMemoryHostPropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceExternalMemoryHostPropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceExternalMemoryHostPropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn min_imported_host_pointer_alignment(
+ mut self,
+ min_imported_host_pointer_alignment: DeviceSize,
+ ) -> Self {
+ self.inner.min_imported_host_pointer_alignment = min_imported_host_pointer_alignment;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceExternalMemoryHostPropertiesEXT {
+ self.inner
+ }
+}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceConservativeRasterizationPropertiesEXT.html>"]
// Chainable into a `PhysicalDeviceProperties2` `p_next` chain — see the
// `ExtendsPhysicalDeviceProperties2` impls below. Boolean members use the FFI
// `Bool32` type rather than Rust `bool`.
pub struct PhysicalDeviceConservativeRasterizationPropertiesEXT {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub primitive_overestimation_size: f32,
    pub max_extra_primitive_overestimation_size: f32,
    pub extra_primitive_overestimation_size_granularity: f32,
    pub primitive_underestimation: Bool32,
    pub conservative_point_and_line_rasterization: Bool32,
    pub degenerate_triangles_rasterized: Bool32,
    pub degenerate_lines_rasterized: Bool32,
    pub fully_covered_fragment_shader_input_variable: Bool32,
    pub degenerate_lines_rasterized_dummy_never_emitted: Bool32,
    pub conservative_rasterization_post_depth_coverage: Bool32,
}
// Default: every member zeroed, `p_next` null, and `s_type` pre-set to this
// structure's tag so the value can be placed in a `p_next` chain as-is.
impl ::std::default::Default for PhysicalDeviceConservativeRasterizationPropertiesEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            primitive_overestimation_size: f32::default(),
            max_extra_primitive_overestimation_size: f32::default(),
            extra_primitive_overestimation_size_granularity: f32::default(),
            primitive_underestimation: Bool32::default(),
            conservative_point_and_line_rasterization: Bool32::default(),
            degenerate_triangles_rasterized: Bool32::default(),
            degenerate_lines_rasterized: Bool32::default(),
            fully_covered_fragment_shader_input_variable: Bool32::default(),
            conservative_rasterization_post_depth_coverage: Bool32::default(),
        }
    }
}
// Associates the struct with its `VkStructureType` tag.
unsafe impl TaggedStructure for PhysicalDeviceConservativeRasterizationPropertiesEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT;
}
impl PhysicalDeviceConservativeRasterizationPropertiesEXT {
    /// Creates a builder over a default-initialized value.
    pub fn builder<'a>() -> PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
        PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper: identical layout to the inner struct, plus a `PhantomData`
// lifetime marker (the generator's uniform builder shape).
#[repr(transparent)]
pub struct PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
    inner: PhysicalDeviceConservativeRasterizationPropertiesEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
    for PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2
    for PhysicalDeviceConservativeRasterizationPropertiesEXT
{
}
impl<'a> ::std::ops::Deref for PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
    type Target = PhysicalDeviceConservativeRasterizationPropertiesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Chainable field setters; `bool` parameters are converted to `Bool32` via `.into()`.
impl<'a> PhysicalDeviceConservativeRasterizationPropertiesEXTBuilder<'a> {
    #[inline]
    pub fn primitive_overestimation_size(mut self, primitive_overestimation_size: f32) -> Self {
        self.inner.primitive_overestimation_size = primitive_overestimation_size;
        self
    }
    #[inline]
    pub fn max_extra_primitive_overestimation_size(
        mut self,
        max_extra_primitive_overestimation_size: f32,
    ) -> Self {
        self.inner.max_extra_primitive_overestimation_size =
            max_extra_primitive_overestimation_size;
        self
    }
    #[inline]
    pub fn extra_primitive_overestimation_size_granularity(
        mut self,
        extra_primitive_overestimation_size_granularity: f32,
    ) -> Self {
        self.inner.extra_primitive_overestimation_size_granularity =
            extra_primitive_overestimation_size_granularity;
        self
    }
    #[inline]
    pub fn primitive_underestimation(mut self, primitive_underestimation: bool) -> Self {
        self.inner.primitive_underestimation = primitive_underestimation.into();
        self
    }
    #[inline]
    pub fn conservative_point_and_line_rasterization(
        mut self,
        conservative_point_and_line_rasterization: bool,
    ) -> Self {
        self.inner.conservative_point_and_line_rasterization =
            conservative_point_and_line_rasterization.into();
        self
    }
    #[inline]
    pub fn degenerate_triangles_rasterized(
        mut self,
        degenerate_triangles_rasterized: bool,
    ) -> Self {
        self.inner.degenerate_triangles_rasterized = degenerate_triangles_rasterized.into();
        self
    }
    #[inline]
    pub fn degenerate_lines_rasterized(mut self, degenerate_lines_rasterized: bool) -> Self {
        self.inner.degenerate_lines_rasterized = degenerate_lines_rasterized.into();
        self
    }
    #[inline]
    pub fn fully_covered_fragment_shader_input_variable(
        mut self,
        fully_covered_fragment_shader_input_variable: bool,
    ) -> Self {
        self.inner.fully_covered_fragment_shader_input_variable =
            fully_covered_fragment_shader_input_variable.into();
        self
    }
    #[inline]
    pub fn conservative_rasterization_post_depth_coverage(
        mut self,
        conservative_rasterization_post_depth_coverage: bool,
    ) -> Self {
        self.inner.conservative_rasterization_post_depth_coverage =
            conservative_rasterization_post_depth_coverage.into();
        self
    }
    /// Calling build will **discard** all the lifetime information. Only call this if
    /// necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,
    /// so references to builders can be passed directly to Vulkan functions.
    pub fn build(self) -> PhysicalDeviceConservativeRasterizationPropertiesEXT {
        self.inner
    }
}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCalibratedTimestampInfoEXT.html>"]
+pub struct CalibratedTimestampInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub time_domain: TimeDomainEXT,
+}
+impl ::std::default::Default for CalibratedTimestampInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ time_domain: TimeDomainEXT::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for CalibratedTimestampInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::CALIBRATED_TIMESTAMP_INFO_EXT;
+}
+impl CalibratedTimestampInfoEXT {
+ pub fn builder<'a>() -> CalibratedTimestampInfoEXTBuilder<'a> {
+ CalibratedTimestampInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct CalibratedTimestampInfoEXTBuilder<'a> {
+ inner: CalibratedTimestampInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CalibratedTimestampInfoEXTBuilder<'a> {
+ type Target = CalibratedTimestampInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for CalibratedTimestampInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> CalibratedTimestampInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn time_domain(mut self, time_domain: TimeDomainEXT) -> Self {
+ self.inner.time_domain = time_domain;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> CalibratedTimestampInfoEXT {
+ self.inner
+ }
+}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderCorePropertiesAMD.html>"]
// Chainable into a `PhysicalDeviceProperties2` `p_next` chain — see the
// `ExtendsPhysicalDeviceProperties2` impls below.
pub struct PhysicalDeviceShaderCorePropertiesAMD {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub shader_engine_count: u32,
    pub shader_arrays_per_engine_count: u32,
    pub compute_units_per_shader_array: u32,
    pub simd_per_compute_unit: u32,
    pub wavefronts_per_simd: u32,
    pub wavefront_size: u32,
    pub sgprs_per_simd: u32,
    pub min_sgpr_allocation: u32,
    pub max_sgpr_allocation: u32,
    pub sgpr_allocation_granularity: u32,
    pub vgprs_per_simd: u32,
    pub min_vgpr_allocation: u32,
    pub max_vgpr_allocation: u32,
    pub vgpr_allocation_granularity: u32,
}
// Default: every count zeroed, `p_next` null, and `s_type` pre-set to this
// structure's tag so the value can be placed in a `p_next` chain as-is.
impl ::std::default::Default for PhysicalDeviceShaderCorePropertiesAMD {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            shader_engine_count: u32::default(),
            shader_arrays_per_engine_count: u32::default(),
            compute_units_per_shader_array: u32::default(),
            simd_per_compute_unit: u32::default(),
            wavefronts_per_simd: u32::default(),
            wavefront_size: u32::default(),
            sgprs_per_simd: u32::default(),
            min_sgpr_allocation: u32::default(),
            max_sgpr_allocation: u32::default(),
            sgpr_allocation_granularity: u32::default(),
            vgprs_per_simd: u32::default(),
            min_vgpr_allocation: u32::default(),
            max_vgpr_allocation: u32::default(),
            vgpr_allocation_granularity: u32::default(),
        }
    }
}
// Associates the struct with its `VkStructureType` tag.
unsafe impl TaggedStructure for PhysicalDeviceShaderCorePropertiesAMD {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD;
}
impl PhysicalDeviceShaderCorePropertiesAMD {
    /// Creates a builder over a default-initialized value.
    pub fn builder<'a>() -> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
        PhysicalDeviceShaderCorePropertiesAMDBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper: identical layout to the inner struct, plus a `PhantomData`
// lifetime marker (the generator's uniform builder shape).
#[repr(transparent)]
pub struct PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
    inner: PhysicalDeviceShaderCorePropertiesAMD,
    marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderCorePropertiesAMDBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderCorePropertiesAMD {}
impl<'a> ::std::ops::Deref for PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
    type Target = PhysicalDeviceShaderCorePropertiesAMD;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Chainable field setters; one per struct member.
impl<'a> PhysicalDeviceShaderCorePropertiesAMDBuilder<'a> {
    #[inline]
    pub fn shader_engine_count(mut self, shader_engine_count: u32) -> Self {
        self.inner.shader_engine_count = shader_engine_count;
        self
    }
    #[inline]
    pub fn shader_arrays_per_engine_count(mut self, shader_arrays_per_engine_count: u32) -> Self {
        self.inner.shader_arrays_per_engine_count = shader_arrays_per_engine_count;
        self
    }
    #[inline]
    pub fn compute_units_per_shader_array(mut self, compute_units_per_shader_array: u32) -> Self {
        self.inner.compute_units_per_shader_array = compute_units_per_shader_array;
        self
    }
    #[inline]
    pub fn simd_per_compute_unit(mut self, simd_per_compute_unit: u32) -> Self {
        self.inner.simd_per_compute_unit = simd_per_compute_unit;
        self
    }
    #[inline]
    pub fn wavefronts_per_simd(mut self, wavefronts_per_simd: u32) -> Self {
        self.inner.wavefronts_per_simd = wavefronts_per_simd;
        self
    }
    #[inline]
    pub fn wavefront_size(mut self, wavefront_size: u32) -> Self {
        self.inner.wavefront_size = wavefront_size;
        self
    }
    #[inline]
    pub fn sgprs_per_simd(mut self, sgprs_per_simd: u32) -> Self {
        self.inner.sgprs_per_simd = sgprs_per_simd;
        self
    }
    #[inline]
    pub fn min_sgpr_allocation(mut self, min_sgpr_allocation: u32) -> Self {
        self.inner.min_sgpr_allocation = min_sgpr_allocation;
        self
    }
    #[inline]
    pub fn max_sgpr_allocation(mut self, max_sgpr_allocation: u32) -> Self {
        self.inner.max_sgpr_allocation = max_sgpr_allocation;
        self
    }
    #[inline]
    pub fn sgpr_allocation_granularity(mut self, sgpr_allocation_granularity: u32) -> Self {
        self.inner.sgpr_allocation_granularity = sgpr_allocation_granularity;
        self
    }
    #[inline]
    pub fn vgprs_per_simd(mut self, vgprs_per_simd: u32) -> Self {
        self.inner.vgprs_per_simd = vgprs_per_simd;
        self
    }
    #[inline]
    pub fn min_vgpr_allocation(mut self, min_vgpr_allocation: u32) -> Self {
        self.inner.min_vgpr_allocation = min_vgpr_allocation;
        self
    }
    #[inline]
    pub fn max_vgpr_allocation(mut self, max_vgpr_allocation: u32) -> Self {
        self.inner.max_vgpr_allocation = max_vgpr_allocation;
        self
    }
    #[inline]
    pub fn vgpr_allocation_granularity(mut self, vgpr_allocation_granularity: u32) -> Self {
        self.inner.vgpr_allocation_granularity = vgpr_allocation_granularity;
        self
    }
    /// Calling build will **discard** all the lifetime information. Only call this if
    /// necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,
    /// so references to builders can be passed directly to Vulkan functions.
    pub fn build(self) -> PhysicalDeviceShaderCorePropertiesAMD {
        self.inner
    }
}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderCoreProperties2AMD.html>"]
+pub struct PhysicalDeviceShaderCoreProperties2AMD {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_core_features: ShaderCorePropertiesFlagsAMD,
+ pub active_compute_unit_count: u32,
+}
+impl ::std::default::Default for PhysicalDeviceShaderCoreProperties2AMD {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shader_core_features: ShaderCorePropertiesFlagsAMD::default(),
+ active_compute_unit_count: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceShaderCoreProperties2AMD {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD;
+}
+impl PhysicalDeviceShaderCoreProperties2AMD {
+ pub fn builder<'a>() -> PhysicalDeviceShaderCoreProperties2AMDBuilder<'a> {
+ PhysicalDeviceShaderCoreProperties2AMDBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderCoreProperties2AMDBuilder<'a> {
+ inner: PhysicalDeviceShaderCoreProperties2AMD,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderCoreProperties2AMDBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderCoreProperties2AMD {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderCoreProperties2AMDBuilder<'a> {
+ type Target = PhysicalDeviceShaderCoreProperties2AMD;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderCoreProperties2AMDBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceShaderCoreProperties2AMDBuilder<'a> {
+ #[inline]
+ pub fn shader_core_features(
+ mut self,
+ shader_core_features: ShaderCorePropertiesFlagsAMD,
+ ) -> Self {
+ self.inner.shader_core_features = shader_core_features;
+ self
+ }
+ #[inline]
+ pub fn active_compute_unit_count(mut self, active_compute_unit_count: u32) -> Self {
+ self.inner.active_compute_unit_count = active_compute_unit_count;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceShaderCoreProperties2AMD {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRasterizationConservativeStateCreateInfoEXT.html>"]
+pub struct PipelineRasterizationConservativeStateCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: PipelineRasterizationConservativeStateCreateFlagsEXT,
+ pub conservative_rasterization_mode: ConservativeRasterizationModeEXT,
+ pub extra_primitive_overestimation_size: f32,
+}
+impl ::std::default::Default for PipelineRasterizationConservativeStateCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: PipelineRasterizationConservativeStateCreateFlagsEXT::default(),
+ conservative_rasterization_mode: ConservativeRasterizationModeEXT::default(),
+ extra_primitive_overestimation_size: f32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineRasterizationConservativeStateCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT;
+}
+impl PipelineRasterizationConservativeStateCreateInfoEXT {
+ pub fn builder<'a>() -> PipelineRasterizationConservativeStateCreateInfoEXTBuilder<'a> {
+ PipelineRasterizationConservativeStateCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineRasterizationConservativeStateCreateInfoEXTBuilder<'a> {
+ inner: PipelineRasterizationConservativeStateCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPipelineRasterizationStateCreateInfo
+ for PipelineRasterizationConservativeStateCreateInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPipelineRasterizationStateCreateInfo
+ for PipelineRasterizationConservativeStateCreateInfoEXT
+{
+}
+impl<'a> ::std::ops::Deref for PipelineRasterizationConservativeStateCreateInfoEXTBuilder<'a> {
+ type Target = PipelineRasterizationConservativeStateCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineRasterizationConservativeStateCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineRasterizationConservativeStateCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: PipelineRasterizationConservativeStateCreateFlagsEXT) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn conservative_rasterization_mode(
+ mut self,
+ conservative_rasterization_mode: ConservativeRasterizationModeEXT,
+ ) -> Self {
+ self.inner.conservative_rasterization_mode = conservative_rasterization_mode;
+ self
+ }
+ #[inline]
+ pub fn extra_primitive_overestimation_size(
+ mut self,
+ extra_primitive_overestimation_size: f32,
+ ) -> Self {
+ self.inner.extra_primitive_overestimation_size = extra_primitive_overestimation_size;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineRasterizationConservativeStateCreateInfoEXT {
+ self.inner
+ }
+}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDescriptorIndexingFeatures.html>"]
// Feature struct: chainable both into `PhysicalDeviceFeatures2` and into
// `DeviceCreateInfo` `p_next` chains (see the marker-trait impls below).
// Feature toggles use the FFI `Bool32` type rather than Rust `bool`.
pub struct PhysicalDeviceDescriptorIndexingFeatures {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub shader_input_attachment_array_dynamic_indexing: Bool32,
    pub shader_uniform_texel_buffer_array_dynamic_indexing: Bool32,
    pub shader_storage_texel_buffer_array_dynamic_indexing: Bool32,
    pub shader_uniform_buffer_array_non_uniform_indexing: Bool32,
    pub shader_sampled_image_array_non_uniform_indexing: Bool32,
    pub shader_storage_buffer_array_non_uniform_indexing: Bool32,
    pub shader_storage_image_array_non_uniform_indexing: Bool32,
    pub shader_input_attachment_array_non_uniform_indexing: Bool32,
    pub shader_uniform_texel_buffer_array_non_uniform_indexing: Bool32,
    pub shader_storage_texel_buffer_array_non_uniform_indexing: Bool32,
    pub descriptor_binding_uniform_buffer_update_after_bind: Bool32,
    pub descriptor_binding_sampled_image_update_after_bind: Bool32,
    pub descriptor_binding_storage_image_update_after_bind: Bool32,
    pub descriptor_binding_storage_buffer_update_after_bind: Bool32,
    pub descriptor_binding_uniform_texel_buffer_update_after_bind: Bool32,
    pub descriptor_binding_storage_texel_buffer_update_after_bind: Bool32,
    pub descriptor_binding_update_unused_while_pending: Bool32,
    pub descriptor_binding_partially_bound: Bool32,
    pub descriptor_binding_variable_descriptor_count: Bool32,
    pub runtime_descriptor_array: Bool32,
}
// Default: every feature bit zeroed (disabled), `p_next` null, and `s_type`
// pre-set to this structure's tag so the value can be chained as-is.
impl ::std::default::Default for PhysicalDeviceDescriptorIndexingFeatures {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            shader_input_attachment_array_dynamic_indexing: Bool32::default(),
            shader_uniform_texel_buffer_array_dynamic_indexing: Bool32::default(),
            shader_storage_texel_buffer_array_dynamic_indexing: Bool32::default(),
            shader_uniform_buffer_array_non_uniform_indexing: Bool32::default(),
            shader_sampled_image_array_non_uniform_indexing: Bool32::default(),
            shader_storage_buffer_array_non_uniform_indexing: Bool32::default(),
            shader_storage_image_array_non_uniform_indexing: Bool32::default(),
            shader_input_attachment_array_non_uniform_indexing: Bool32::default(),
            shader_uniform_texel_buffer_array_non_uniform_indexing: Bool32::default(),
            shader_storage_texel_buffer_array_non_uniform_indexing: Bool32::default(),
            descriptor_binding_uniform_buffer_update_after_bind: Bool32::default(),
            descriptor_binding_sampled_image_update_after_bind: Bool32::default(),
            descriptor_binding_storage_image_update_after_bind: Bool32::default(),
            descriptor_binding_storage_buffer_update_after_bind: Bool32::default(),
            descriptor_binding_uniform_texel_buffer_update_after_bind: Bool32::default(),
            descriptor_binding_storage_texel_buffer_update_after_bind: Bool32::default(),
            descriptor_binding_update_unused_while_pending: Bool32::default(),
            descriptor_binding_partially_bound: Bool32::default(),
            descriptor_binding_variable_descriptor_count: Bool32::default(),
            runtime_descriptor_array: Bool32::default(),
        }
    }
}
// Associates the struct with its `VkStructureType` tag.
unsafe impl TaggedStructure for PhysicalDeviceDescriptorIndexingFeatures {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES;
}
impl PhysicalDeviceDescriptorIndexingFeatures {
    /// Creates a builder over a default-initialized value.
    pub fn builder<'a>() -> PhysicalDeviceDescriptorIndexingFeaturesBuilder<'a> {
        PhysicalDeviceDescriptorIndexingFeaturesBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper: identical layout to the inner struct, plus a `PhantomData`
// lifetime marker (the generator's uniform builder shape).
#[repr(transparent)]
pub struct PhysicalDeviceDescriptorIndexingFeaturesBuilder<'a> {
    inner: PhysicalDeviceDescriptorIndexingFeatures,
    marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDescriptorIndexingFeaturesBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDescriptorIndexingFeatures {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDescriptorIndexingFeaturesBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDescriptorIndexingFeatures {}
impl<'a> ::std::ops::Deref for PhysicalDeviceDescriptorIndexingFeaturesBuilder<'a> {
    type Target = PhysicalDeviceDescriptorIndexingFeatures;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceDescriptorIndexingFeaturesBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
+impl<'a> PhysicalDeviceDescriptorIndexingFeaturesBuilder<'a> {
+ #[inline]
+ pub fn shader_input_attachment_array_dynamic_indexing(
+ mut self,
+ shader_input_attachment_array_dynamic_indexing: bool,
+ ) -> Self {
+ self.inner.shader_input_attachment_array_dynamic_indexing =
+ shader_input_attachment_array_dynamic_indexing.into();
+ self
+ }
+ #[inline]
+ pub fn shader_uniform_texel_buffer_array_dynamic_indexing(
+ mut self,
+ shader_uniform_texel_buffer_array_dynamic_indexing: bool,
+ ) -> Self {
+ self.inner
+ .shader_uniform_texel_buffer_array_dynamic_indexing =
+ shader_uniform_texel_buffer_array_dynamic_indexing.into();
+ self
+ }
+ #[inline]
+ pub fn shader_storage_texel_buffer_array_dynamic_indexing(
+ mut self,
+ shader_storage_texel_buffer_array_dynamic_indexing: bool,
+ ) -> Self {
+ self.inner
+ .shader_storage_texel_buffer_array_dynamic_indexing =
+ shader_storage_texel_buffer_array_dynamic_indexing.into();
+ self
+ }
+ #[inline]
+ pub fn shader_uniform_buffer_array_non_uniform_indexing(
+ mut self,
+ shader_uniform_buffer_array_non_uniform_indexing: bool,
+ ) -> Self {
+ self.inner.shader_uniform_buffer_array_non_uniform_indexing =
+ shader_uniform_buffer_array_non_uniform_indexing.into();
+ self
+ }
+ #[inline]
+ pub fn shader_sampled_image_array_non_uniform_indexing(
+ mut self,
+ shader_sampled_image_array_non_uniform_indexing: bool,
+ ) -> Self {
+ self.inner.shader_sampled_image_array_non_uniform_indexing =
+ shader_sampled_image_array_non_uniform_indexing.into();
+ self
+ }
+ #[inline]
+ pub fn shader_storage_buffer_array_non_uniform_indexing(
+ mut self,
+ shader_storage_buffer_array_non_uniform_indexing: bool,
+ ) -> Self {
+ self.inner.shader_storage_buffer_array_non_uniform_indexing =
+ shader_storage_buffer_array_non_uniform_indexing.into();
+ self
+ }
+ #[inline]
+ pub fn shader_storage_image_array_non_uniform_indexing(
+ mut self,
+ shader_storage_image_array_non_uniform_indexing: bool,
+ ) -> Self {
+ self.inner.shader_storage_image_array_non_uniform_indexing =
+ shader_storage_image_array_non_uniform_indexing.into();
+ self
+ }
+ #[inline]
+ pub fn shader_input_attachment_array_non_uniform_indexing(
+ mut self,
+ shader_input_attachment_array_non_uniform_indexing: bool,
+ ) -> Self {
+ self.inner
+ .shader_input_attachment_array_non_uniform_indexing =
+ shader_input_attachment_array_non_uniform_indexing.into();
+ self
+ }
+ #[inline]
+ pub fn shader_uniform_texel_buffer_array_non_uniform_indexing(
+ mut self,
+ shader_uniform_texel_buffer_array_non_uniform_indexing: bool,
+ ) -> Self {
+ self.inner
+ .shader_uniform_texel_buffer_array_non_uniform_indexing =
+ shader_uniform_texel_buffer_array_non_uniform_indexing.into();
+ self
+ }
+ #[inline]
+ pub fn shader_storage_texel_buffer_array_non_uniform_indexing(
+ mut self,
+ shader_storage_texel_buffer_array_non_uniform_indexing: bool,
+ ) -> Self {
+ self.inner
+ .shader_storage_texel_buffer_array_non_uniform_indexing =
+ shader_storage_texel_buffer_array_non_uniform_indexing.into();
+ self
+ }
+ #[inline]
+ pub fn descriptor_binding_uniform_buffer_update_after_bind(
+ mut self,
+ descriptor_binding_uniform_buffer_update_after_bind: bool,
+ ) -> Self {
+ self.inner
+ .descriptor_binding_uniform_buffer_update_after_bind =
+ descriptor_binding_uniform_buffer_update_after_bind.into();
+ self
+ }
+ #[inline]
+ pub fn descriptor_binding_sampled_image_update_after_bind(
+ mut self,
+ descriptor_binding_sampled_image_update_after_bind: bool,
+ ) -> Self {
+ self.inner
+ .descriptor_binding_sampled_image_update_after_bind =
+ descriptor_binding_sampled_image_update_after_bind.into();
+ self
+ }
+ #[inline]
+ pub fn descriptor_binding_storage_image_update_after_bind(
+ mut self,
+ descriptor_binding_storage_image_update_after_bind: bool,
+ ) -> Self {
+ self.inner
+ .descriptor_binding_storage_image_update_after_bind =
+ descriptor_binding_storage_image_update_after_bind.into();
+ self
+ }
+ #[inline]
+ pub fn descriptor_binding_storage_buffer_update_after_bind(
+ mut self,
+ descriptor_binding_storage_buffer_update_after_bind: bool,
+ ) -> Self {
+ self.inner
+ .descriptor_binding_storage_buffer_update_after_bind =
+ descriptor_binding_storage_buffer_update_after_bind.into();
+ self
+ }
+ #[inline]
+ pub fn descriptor_binding_uniform_texel_buffer_update_after_bind(
+ mut self,
+ descriptor_binding_uniform_texel_buffer_update_after_bind: bool,
+ ) -> Self {
+ self.inner
+ .descriptor_binding_uniform_texel_buffer_update_after_bind =
+ descriptor_binding_uniform_texel_buffer_update_after_bind.into();
+ self
+ }
+ #[inline]
+ pub fn descriptor_binding_storage_texel_buffer_update_after_bind(
+ mut self,
+ descriptor_binding_storage_texel_buffer_update_after_bind: bool,
+ ) -> Self {
+ self.inner
+ .descriptor_binding_storage_texel_buffer_update_after_bind =
+ descriptor_binding_storage_texel_buffer_update_after_bind.into();
+ self
+ }
+ #[inline]
+ pub fn descriptor_binding_update_unused_while_pending(
+ mut self,
+ descriptor_binding_update_unused_while_pending: bool,
+ ) -> Self {
+ self.inner.descriptor_binding_update_unused_while_pending =
+ descriptor_binding_update_unused_while_pending.into();
+ self
+ }
+ #[inline]
+ pub fn descriptor_binding_partially_bound(
+ mut self,
+ descriptor_binding_partially_bound: bool,
+ ) -> Self {
+ self.inner.descriptor_binding_partially_bound = descriptor_binding_partially_bound.into();
+ self
+ }
+ #[inline]
+ pub fn descriptor_binding_variable_descriptor_count(
+ mut self,
+ descriptor_binding_variable_descriptor_count: bool,
+ ) -> Self {
+ self.inner.descriptor_binding_variable_descriptor_count =
+ descriptor_binding_variable_descriptor_count.into();
+ self
+ }
+ #[inline]
+ pub fn runtime_descriptor_array(mut self, runtime_descriptor_array: bool) -> Self {
+ self.inner.runtime_descriptor_array = runtime_descriptor_array.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceDescriptorIndexingFeatures {
+ self.inner
+ }
+}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDescriptorIndexingProperties.html>"]
// repr(C): field order mirrors the C VkPhysicalDeviceDescriptorIndexingProperties
// layout and is ABI — do not reorder fields.
// NOTE(review): `p_next` is `*mut`, consistent with a structure the implementation
// writes into when queried — confirm against the Vulkan spec link above.
pub struct PhysicalDeviceDescriptorIndexingProperties {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub max_update_after_bind_descriptors_in_all_pools: u32,
    pub shader_uniform_buffer_array_non_uniform_indexing_native: Bool32,
    pub shader_sampled_image_array_non_uniform_indexing_native: Bool32,
    pub shader_storage_buffer_array_non_uniform_indexing_native: Bool32,
    pub shader_storage_image_array_non_uniform_indexing_native: Bool32,
    pub shader_input_attachment_array_non_uniform_indexing_native: Bool32,
    pub robust_buffer_access_update_after_bind: Bool32,
    pub quad_divergent_implicit_lod: Bool32,
    pub max_per_stage_descriptor_update_after_bind_samplers: u32,
    pub max_per_stage_descriptor_update_after_bind_uniform_buffers: u32,
    pub max_per_stage_descriptor_update_after_bind_storage_buffers: u32,
    pub max_per_stage_descriptor_update_after_bind_sampled_images: u32,
    pub max_per_stage_descriptor_update_after_bind_storage_images: u32,
    pub max_per_stage_descriptor_update_after_bind_input_attachments: u32,
    pub max_per_stage_update_after_bind_resources: u32,
    pub max_descriptor_set_update_after_bind_samplers: u32,
    pub max_descriptor_set_update_after_bind_uniform_buffers: u32,
    pub max_descriptor_set_update_after_bind_uniform_buffers_dynamic: u32,
    pub max_descriptor_set_update_after_bind_storage_buffers: u32,
    pub max_descriptor_set_update_after_bind_storage_buffers_dynamic: u32,
    pub max_descriptor_set_update_after_bind_sampled_images: u32,
    pub max_descriptor_set_update_after_bind_storage_images: u32,
    pub max_descriptor_set_update_after_bind_input_attachments: u32,
}
impl ::std::default::Default for PhysicalDeviceDescriptorIndexingProperties {
    #[inline]
    fn default() -> Self {
        // `s_type` is pre-filled from the TaggedStructure impl below; every other
        // field starts zeroed/null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            max_update_after_bind_descriptors_in_all_pools: u32::default(),
            shader_uniform_buffer_array_non_uniform_indexing_native: Bool32::default(),
            shader_sampled_image_array_non_uniform_indexing_native: Bool32::default(),
            shader_storage_buffer_array_non_uniform_indexing_native: Bool32::default(),
            shader_storage_image_array_non_uniform_indexing_native: Bool32::default(),
            shader_input_attachment_array_non_uniform_indexing_native: Bool32::default(),
            robust_buffer_access_update_after_bind: Bool32::default(),
            quad_divergent_implicit_lod: Bool32::default(),
            max_per_stage_descriptor_update_after_bind_samplers: u32::default(),
            max_per_stage_descriptor_update_after_bind_uniform_buffers: u32::default(),
            max_per_stage_descriptor_update_after_bind_storage_buffers: u32::default(),
            max_per_stage_descriptor_update_after_bind_sampled_images: u32::default(),
            max_per_stage_descriptor_update_after_bind_storage_images: u32::default(),
            max_per_stage_descriptor_update_after_bind_input_attachments: u32::default(),
            max_per_stage_update_after_bind_resources: u32::default(),
            max_descriptor_set_update_after_bind_samplers: u32::default(),
            max_descriptor_set_update_after_bind_uniform_buffers: u32::default(),
            max_descriptor_set_update_after_bind_uniform_buffers_dynamic: u32::default(),
            max_descriptor_set_update_after_bind_storage_buffers: u32::default(),
            max_descriptor_set_update_after_bind_storage_buffers_dynamic: u32::default(),
            max_descriptor_set_update_after_bind_sampled_images: u32::default(),
            max_descriptor_set_update_after_bind_storage_images: u32::default(),
            max_descriptor_set_update_after_bind_input_attachments: u32::default(),
        }
    }
}
// Ties the struct to its VkStructureType discriminant, used by Default above.
unsafe impl TaggedStructure for PhysicalDeviceDescriptorIndexingProperties {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES;
}
impl PhysicalDeviceDescriptorIndexingProperties {
    // Entry point to the builder API; starts from Self::default().
    pub fn builder<'a>() -> PhysicalDeviceDescriptorIndexingPropertiesBuilder<'a> {
        PhysicalDeviceDescriptorIndexingPropertiesBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
// repr(transparent): identical layout to the wrapped struct, so a &Builder can be
// handed to Vulkan wherever a &PhysicalDeviceDescriptorIndexingProperties is expected.
// The PhantomData lifetime tracks borrows pushed into the p_next chain.
pub struct PhysicalDeviceDescriptorIndexingPropertiesBuilder<'a> {
    inner: PhysicalDeviceDescriptorIndexingProperties,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: both the builder and the raw struct are valid members of a
// PhysicalDeviceProperties2 p_next chain (see the push_next bound on that builder).
unsafe impl ExtendsPhysicalDeviceProperties2
    for PhysicalDeviceDescriptorIndexingPropertiesBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDescriptorIndexingProperties {}
impl<'a> ::std::ops::Deref for PhysicalDeviceDescriptorIndexingPropertiesBuilder<'a> {
    type Target = PhysicalDeviceDescriptorIndexingProperties;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceDescriptorIndexingPropertiesBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Generated by-value setters: bools are widened to Bool32 via `.into()`, u32 limits
// are stored verbatim; each returns `self` for chaining.
impl<'a> PhysicalDeviceDescriptorIndexingPropertiesBuilder<'a> {
    #[inline]
    pub fn max_update_after_bind_descriptors_in_all_pools(
        mut self,
        max_update_after_bind_descriptors_in_all_pools: u32,
    ) -> Self {
        self.inner.max_update_after_bind_descriptors_in_all_pools =
            max_update_after_bind_descriptors_in_all_pools;
        self
    }
    #[inline]
    pub fn shader_uniform_buffer_array_non_uniform_indexing_native(
        mut self,
        shader_uniform_buffer_array_non_uniform_indexing_native: bool,
    ) -> Self {
        self.inner
            .shader_uniform_buffer_array_non_uniform_indexing_native =
            shader_uniform_buffer_array_non_uniform_indexing_native.into();
        self
    }
    #[inline]
    pub fn shader_sampled_image_array_non_uniform_indexing_native(
        mut self,
        shader_sampled_image_array_non_uniform_indexing_native: bool,
    ) -> Self {
        self.inner
            .shader_sampled_image_array_non_uniform_indexing_native =
            shader_sampled_image_array_non_uniform_indexing_native.into();
        self
    }
    #[inline]
    pub fn shader_storage_buffer_array_non_uniform_indexing_native(
        mut self,
        shader_storage_buffer_array_non_uniform_indexing_native: bool,
    ) -> Self {
        self.inner
            .shader_storage_buffer_array_non_uniform_indexing_native =
            shader_storage_buffer_array_non_uniform_indexing_native.into();
        self
    }
    #[inline]
    pub fn shader_storage_image_array_non_uniform_indexing_native(
        mut self,
        shader_storage_image_array_non_uniform_indexing_native: bool,
    ) -> Self {
        self.inner
            .shader_storage_image_array_non_uniform_indexing_native =
            shader_storage_image_array_non_uniform_indexing_native.into();
        self
    }
    #[inline]
    pub fn shader_input_attachment_array_non_uniform_indexing_native(
        mut self,
        shader_input_attachment_array_non_uniform_indexing_native: bool,
    ) -> Self {
        self.inner
            .shader_input_attachment_array_non_uniform_indexing_native =
            shader_input_attachment_array_non_uniform_indexing_native.into();
        self
    }
    #[inline]
    pub fn robust_buffer_access_update_after_bind(
        mut self,
        robust_buffer_access_update_after_bind: bool,
    ) -> Self {
        self.inner.robust_buffer_access_update_after_bind =
            robust_buffer_access_update_after_bind.into();
        self
    }
    #[inline]
    pub fn quad_divergent_implicit_lod(mut self, quad_divergent_implicit_lod: bool) -> Self {
        self.inner.quad_divergent_implicit_lod = quad_divergent_implicit_lod.into();
        self
    }
    #[inline]
    pub fn max_per_stage_descriptor_update_after_bind_samplers(
        mut self,
        max_per_stage_descriptor_update_after_bind_samplers: u32,
    ) -> Self {
        self.inner
            .max_per_stage_descriptor_update_after_bind_samplers =
            max_per_stage_descriptor_update_after_bind_samplers;
        self
    }
    #[inline]
    pub fn max_per_stage_descriptor_update_after_bind_uniform_buffers(
        mut self,
        max_per_stage_descriptor_update_after_bind_uniform_buffers: u32,
    ) -> Self {
        self.inner
            .max_per_stage_descriptor_update_after_bind_uniform_buffers =
            max_per_stage_descriptor_update_after_bind_uniform_buffers;
        self
    }
    #[inline]
    pub fn max_per_stage_descriptor_update_after_bind_storage_buffers(
        mut self,
        max_per_stage_descriptor_update_after_bind_storage_buffers: u32,
    ) -> Self {
        self.inner
            .max_per_stage_descriptor_update_after_bind_storage_buffers =
            max_per_stage_descriptor_update_after_bind_storage_buffers;
        self
    }
    #[inline]
    pub fn max_per_stage_descriptor_update_after_bind_sampled_images(
        mut self,
        max_per_stage_descriptor_update_after_bind_sampled_images: u32,
    ) -> Self {
        self.inner
            .max_per_stage_descriptor_update_after_bind_sampled_images =
            max_per_stage_descriptor_update_after_bind_sampled_images;
        self
    }
    #[inline]
    pub fn max_per_stage_descriptor_update_after_bind_storage_images(
        mut self,
        max_per_stage_descriptor_update_after_bind_storage_images: u32,
    ) -> Self {
        self.inner
            .max_per_stage_descriptor_update_after_bind_storage_images =
            max_per_stage_descriptor_update_after_bind_storage_images;
        self
    }
    #[inline]
    pub fn max_per_stage_descriptor_update_after_bind_input_attachments(
        mut self,
        max_per_stage_descriptor_update_after_bind_input_attachments: u32,
    ) -> Self {
        self.inner
            .max_per_stage_descriptor_update_after_bind_input_attachments =
            max_per_stage_descriptor_update_after_bind_input_attachments;
        self
    }
    #[inline]
    pub fn max_per_stage_update_after_bind_resources(
        mut self,
        max_per_stage_update_after_bind_resources: u32,
    ) -> Self {
        self.inner.max_per_stage_update_after_bind_resources =
            max_per_stage_update_after_bind_resources;
        self
    }
    #[inline]
    pub fn max_descriptor_set_update_after_bind_samplers(
        mut self,
        max_descriptor_set_update_after_bind_samplers: u32,
    ) -> Self {
        self.inner.max_descriptor_set_update_after_bind_samplers =
            max_descriptor_set_update_after_bind_samplers;
        self
    }
    #[inline]
    pub fn max_descriptor_set_update_after_bind_uniform_buffers(
        mut self,
        max_descriptor_set_update_after_bind_uniform_buffers: u32,
    ) -> Self {
        self.inner
            .max_descriptor_set_update_after_bind_uniform_buffers =
            max_descriptor_set_update_after_bind_uniform_buffers;
        self
    }
    #[inline]
    pub fn max_descriptor_set_update_after_bind_uniform_buffers_dynamic(
        mut self,
        max_descriptor_set_update_after_bind_uniform_buffers_dynamic: u32,
    ) -> Self {
        self.inner
            .max_descriptor_set_update_after_bind_uniform_buffers_dynamic =
            max_descriptor_set_update_after_bind_uniform_buffers_dynamic;
        self
    }
    #[inline]
    pub fn max_descriptor_set_update_after_bind_storage_buffers(
        mut self,
        max_descriptor_set_update_after_bind_storage_buffers: u32,
    ) -> Self {
        self.inner
            .max_descriptor_set_update_after_bind_storage_buffers =
            max_descriptor_set_update_after_bind_storage_buffers;
        self
    }
    #[inline]
    pub fn max_descriptor_set_update_after_bind_storage_buffers_dynamic(
        mut self,
        max_descriptor_set_update_after_bind_storage_buffers_dynamic: u32,
    ) -> Self {
        self.inner
            .max_descriptor_set_update_after_bind_storage_buffers_dynamic =
            max_descriptor_set_update_after_bind_storage_buffers_dynamic;
        self
    }
    #[inline]
    pub fn max_descriptor_set_update_after_bind_sampled_images(
        mut self,
        max_descriptor_set_update_after_bind_sampled_images: u32,
    ) -> Self {
        self.inner
            .max_descriptor_set_update_after_bind_sampled_images =
            max_descriptor_set_update_after_bind_sampled_images;
        self
    }
    #[inline]
    pub fn max_descriptor_set_update_after_bind_storage_images(
        mut self,
        max_descriptor_set_update_after_bind_storage_images: u32,
    ) -> Self {
        self.inner
            .max_descriptor_set_update_after_bind_storage_images =
            max_descriptor_set_update_after_bind_storage_images;
        self
    }
    #[inline]
    pub fn max_descriptor_set_update_after_bind_input_attachments(
        mut self,
        max_descriptor_set_update_after_bind_input_attachments: u32,
    ) -> Self {
        self.inner
            .max_descriptor_set_update_after_bind_input_attachments =
            max_descriptor_set_update_after_bind_input_attachments;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceDescriptorIndexingProperties {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorSetLayoutBindingFlagsCreateInfo.html>"]
// repr(C): layout matches the C struct; field order is ABI — do not reorder.
pub struct DescriptorSetLayoutBindingFlagsCreateInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    // `binding_count` and `p_binding_flags` describe one borrowed slice; the
    // builder's `binding_flags` setter keeps them in sync.
    pub binding_count: u32,
    pub p_binding_flags: *const DescriptorBindingFlags,
}
impl ::std::default::Default for DescriptorSetLayoutBindingFlagsCreateInfo {
    #[inline]
    fn default() -> Self {
        // s_type is pre-filled from TaggedStructure; pointers start null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            binding_count: u32::default(),
            p_binding_flags: ::std::ptr::null(),
        }
    }
}
unsafe impl TaggedStructure for DescriptorSetLayoutBindingFlagsCreateInfo {
    const STRUCTURE_TYPE: StructureType =
        StructureType::DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO;
}
impl DescriptorSetLayoutBindingFlagsCreateInfo {
    // Entry point to the builder API; starts from Self::default().
    pub fn builder<'a>() -> DescriptorSetLayoutBindingFlagsCreateInfoBuilder<'a> {
        DescriptorSetLayoutBindingFlagsCreateInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
// Same layout as the wrapped struct; PhantomData tracks the `binding_flags` borrow.
pub struct DescriptorSetLayoutBindingFlagsCreateInfoBuilder<'a> {
    inner: DescriptorSetLayoutBindingFlagsCreateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: valid p_next extension of DescriptorSetLayoutCreateInfo.
unsafe impl ExtendsDescriptorSetLayoutCreateInfo
    for DescriptorSetLayoutBindingFlagsCreateInfoBuilder<'_>
{
}
unsafe impl ExtendsDescriptorSetLayoutCreateInfo for DescriptorSetLayoutBindingFlagsCreateInfo {}
impl<'a> ::std::ops::Deref for DescriptorSetLayoutBindingFlagsCreateInfoBuilder<'a> {
    type Target = DescriptorSetLayoutBindingFlagsCreateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for DescriptorSetLayoutBindingFlagsCreateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> DescriptorSetLayoutBindingFlagsCreateInfoBuilder<'a> {
    #[inline]
    // Stores the slice's length and pointer; the borrow is tracked by 'a until build().
    pub fn binding_flags(mut self, binding_flags: &'a [DescriptorBindingFlags]) -> Self {
        self.inner.binding_count = binding_flags.len() as _;
        self.inner.p_binding_flags = binding_flags.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> DescriptorSetLayoutBindingFlagsCreateInfo {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorSetVariableDescriptorCountAllocateInfo.html>"]
// repr(C): layout matches the C struct; field order is ABI — do not reorder.
pub struct DescriptorSetVariableDescriptorCountAllocateInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    // Count/pointer pair describing one borrowed `&[u32]`; kept in sync by the
    // builder's `descriptor_counts` setter.
    pub descriptor_set_count: u32,
    pub p_descriptor_counts: *const u32,
}
impl ::std::default::Default for DescriptorSetVariableDescriptorCountAllocateInfo {
    #[inline]
    fn default() -> Self {
        // s_type is pre-filled from TaggedStructure; pointers start null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            descriptor_set_count: u32::default(),
            p_descriptor_counts: ::std::ptr::null(),
        }
    }
}
unsafe impl TaggedStructure for DescriptorSetVariableDescriptorCountAllocateInfo {
    const STRUCTURE_TYPE: StructureType =
        StructureType::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO;
}
impl DescriptorSetVariableDescriptorCountAllocateInfo {
    // Entry point to the builder API; starts from Self::default().
    pub fn builder<'a>() -> DescriptorSetVariableDescriptorCountAllocateInfoBuilder<'a> {
        DescriptorSetVariableDescriptorCountAllocateInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
// Same layout as the wrapped struct; PhantomData tracks the slice borrow.
pub struct DescriptorSetVariableDescriptorCountAllocateInfoBuilder<'a> {
    inner: DescriptorSetVariableDescriptorCountAllocateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: valid p_next extension of DescriptorSetAllocateInfo.
unsafe impl ExtendsDescriptorSetAllocateInfo
    for DescriptorSetVariableDescriptorCountAllocateInfoBuilder<'_>
{
}
unsafe impl ExtendsDescriptorSetAllocateInfo for DescriptorSetVariableDescriptorCountAllocateInfo {}
impl<'a> ::std::ops::Deref for DescriptorSetVariableDescriptorCountAllocateInfoBuilder<'a> {
    type Target = DescriptorSetVariableDescriptorCountAllocateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for DescriptorSetVariableDescriptorCountAllocateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> DescriptorSetVariableDescriptorCountAllocateInfoBuilder<'a> {
    #[inline]
    // Stores the slice's length and pointer; the borrow is tracked by 'a until build().
    pub fn descriptor_counts(mut self, descriptor_counts: &'a [u32]) -> Self {
        self.inner.descriptor_set_count = descriptor_counts.len() as _;
        self.inner.p_descriptor_counts = descriptor_counts.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> DescriptorSetVariableDescriptorCountAllocateInfo {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorSetVariableDescriptorCountLayoutSupport.html>"]
// repr(C): layout matches the C struct; field order is ABI — do not reorder.
// NOTE(review): `p_next` is `*mut`, consistent with an output (queried) structure —
// confirm against the Vulkan spec link above.
pub struct DescriptorSetVariableDescriptorCountLayoutSupport {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub max_variable_descriptor_count: u32,
}
impl ::std::default::Default for DescriptorSetVariableDescriptorCountLayoutSupport {
    #[inline]
    fn default() -> Self {
        // s_type is pre-filled from TaggedStructure; p_next starts null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            max_variable_descriptor_count: u32::default(),
        }
    }
}
unsafe impl TaggedStructure for DescriptorSetVariableDescriptorCountLayoutSupport {
    const STRUCTURE_TYPE: StructureType =
        StructureType::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT;
}
impl DescriptorSetVariableDescriptorCountLayoutSupport {
    // Entry point to the builder API; starts from Self::default().
    pub fn builder<'a>() -> DescriptorSetVariableDescriptorCountLayoutSupportBuilder<'a> {
        DescriptorSetVariableDescriptorCountLayoutSupportBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
// Same layout as the wrapped struct; PhantomData carries the builder lifetime.
pub struct DescriptorSetVariableDescriptorCountLayoutSupportBuilder<'a> {
    inner: DescriptorSetVariableDescriptorCountLayoutSupport,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: valid p_next extension of DescriptorSetLayoutSupport.
unsafe impl ExtendsDescriptorSetLayoutSupport
    for DescriptorSetVariableDescriptorCountLayoutSupportBuilder<'_>
{
}
unsafe impl ExtendsDescriptorSetLayoutSupport
    for DescriptorSetVariableDescriptorCountLayoutSupport
{
}
impl<'a> ::std::ops::Deref for DescriptorSetVariableDescriptorCountLayoutSupportBuilder<'a> {
    type Target = DescriptorSetVariableDescriptorCountLayoutSupport;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for DescriptorSetVariableDescriptorCountLayoutSupportBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> DescriptorSetVariableDescriptorCountLayoutSupportBuilder<'a> {
    #[inline]
    pub fn max_variable_descriptor_count(mut self, max_variable_descriptor_count: u32) -> Self {
        self.inner.max_variable_descriptor_count = max_variable_descriptor_count;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> DescriptorSetVariableDescriptorCountLayoutSupport {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAttachmentDescription2.html>"]
// repr(C): layout matches VkAttachmentDescription2; field order is ABI — do not reorder.
pub struct AttachmentDescription2 {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: AttachmentDescriptionFlags,
    pub format: Format,
    pub samples: SampleCountFlags,
    pub load_op: AttachmentLoadOp,
    pub store_op: AttachmentStoreOp,
    pub stencil_load_op: AttachmentLoadOp,
    pub stencil_store_op: AttachmentStoreOp,
    pub initial_layout: ImageLayout,
    pub final_layout: ImageLayout,
}
impl ::std::default::Default for AttachmentDescription2 {
    #[inline]
    fn default() -> Self {
        // s_type is pre-filled from TaggedStructure; every other field takes its
        // type's Default.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: AttachmentDescriptionFlags::default(),
            format: Format::default(),
            samples: SampleCountFlags::default(),
            load_op: AttachmentLoadOp::default(),
            store_op: AttachmentStoreOp::default(),
            stencil_load_op: AttachmentLoadOp::default(),
            stencil_store_op: AttachmentStoreOp::default(),
            initial_layout: ImageLayout::default(),
            final_layout: ImageLayout::default(),
        }
    }
}
unsafe impl TaggedStructure for AttachmentDescription2 {
    const STRUCTURE_TYPE: StructureType = StructureType::ATTACHMENT_DESCRIPTION_2;
}
impl AttachmentDescription2 {
    // Entry point to the builder API; starts from Self::default().
    pub fn builder<'a>() -> AttachmentDescription2Builder<'a> {
        AttachmentDescription2Builder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
// Same layout as the wrapped struct; PhantomData tracks p_next borrows.
pub struct AttachmentDescription2Builder<'a> {
    inner: AttachmentDescription2,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait: types implementing this may be pushed into this struct's p_next
// chain via `push_next` below.
pub unsafe trait ExtendsAttachmentDescription2 {}
impl<'a> ::std::ops::Deref for AttachmentDescription2Builder<'a> {
    type Target = AttachmentDescription2;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for AttachmentDescription2Builder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> AttachmentDescription2Builder<'a> {
    #[inline]
    pub fn flags(mut self, flags: AttachmentDescriptionFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn format(mut self, format: Format) -> Self {
        self.inner.format = format;
        self
    }
    #[inline]
    pub fn samples(mut self, samples: SampleCountFlags) -> Self {
        self.inner.samples = samples;
        self
    }
    #[inline]
    pub fn load_op(mut self, load_op: AttachmentLoadOp) -> Self {
        self.inner.load_op = load_op;
        self
    }
    #[inline]
    pub fn store_op(mut self, store_op: AttachmentStoreOp) -> Self {
        self.inner.store_op = store_op;
        self
    }
    #[inline]
    pub fn stencil_load_op(mut self, stencil_load_op: AttachmentLoadOp) -> Self {
        self.inner.stencil_load_op = stencil_load_op;
        self
    }
    #[inline]
    pub fn stencil_store_op(mut self, stencil_store_op: AttachmentStoreOp) -> Self {
        self.inner.stencil_store_op = stencil_store_op;
        self
    }
    #[inline]
    pub fn initial_layout(mut self, initial_layout: ImageLayout) -> Self {
        self.inner.initial_layout = initial_layout;
        self
    }
    #[inline]
    pub fn final_layout(mut self, final_layout: ImageLayout) -> Self {
        self.inner.final_layout = final_layout;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsAttachmentDescription2>(mut self, next: &'a mut T) -> Self {
        // SAFETY: `T: ExtendsAttachmentDescription2` guarantees `next` is a Vulkan
        // struct with a leading s_type/p_next header. We walk `next`'s own chain to
        // its tail, attach the current chain there, then point our p_next at `next`
        // — splicing `next` (and anything chained behind it) between the root and
        // the previous first element. The 'a borrow keeps `next` alive and
        // exclusively held while the builder exists.
        unsafe {
            let next_ptr = <*const T>::cast(next);
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> AttachmentDescription2 {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAttachmentReference2.html>"]
// repr(C): layout matches VkAttachmentReference2; field order is ABI — do not reorder.
pub struct AttachmentReference2 {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub attachment: u32,
    pub layout: ImageLayout,
    pub aspect_mask: ImageAspectFlags,
}
impl ::std::default::Default for AttachmentReference2 {
    #[inline]
    fn default() -> Self {
        // s_type is pre-filled from TaggedStructure; everything else zero/null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            attachment: u32::default(),
            layout: ImageLayout::default(),
            aspect_mask: ImageAspectFlags::default(),
        }
    }
}
unsafe impl TaggedStructure for AttachmentReference2 {
    const STRUCTURE_TYPE: StructureType = StructureType::ATTACHMENT_REFERENCE_2;
}
impl AttachmentReference2 {
    // Entry point to the builder API; starts from Self::default().
    pub fn builder<'a>() -> AttachmentReference2Builder<'a> {
        AttachmentReference2Builder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
// Same layout as the wrapped struct; PhantomData tracks p_next borrows.
pub struct AttachmentReference2Builder<'a> {
    inner: AttachmentReference2,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait: types implementing this may be pushed into this struct's p_next
// chain via `push_next` below.
pub unsafe trait ExtendsAttachmentReference2 {}
impl<'a> ::std::ops::Deref for AttachmentReference2Builder<'a> {
    type Target = AttachmentReference2;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for AttachmentReference2Builder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> AttachmentReference2Builder<'a> {
    #[inline]
    pub fn attachment(mut self, attachment: u32) -> Self {
        self.inner.attachment = attachment;
        self
    }
    #[inline]
    pub fn layout(mut self, layout: ImageLayout) -> Self {
        self.inner.layout = layout;
        self
    }
    #[inline]
    pub fn aspect_mask(mut self, aspect_mask: ImageAspectFlags) -> Self {
        self.inner.aspect_mask = aspect_mask;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsAttachmentReference2>(mut self, next: &'a mut T) -> Self {
        // SAFETY: `T: ExtendsAttachmentReference2` guarantees `next` has a Vulkan
        // s_type/p_next header. We splice `next`'s chain between the root and the
        // existing first element; the 'a borrow keeps `next` alive and exclusively
        // held while the builder exists.
        unsafe {
            let next_ptr = <*const T>::cast(next);
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> AttachmentReference2 {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubpassDescription2.html>"]
// repr(C): layout matches VkSubpassDescription2; field order is ABI — do not reorder.
// NOTE(review): there is deliberately no resolve-attachment count field —
// `p_resolve_attachments`, when non-null, has `color_attachment_count` entries
// (see the spec link above).
pub struct SubpassDescription2 {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: SubpassDescriptionFlags,
    pub pipeline_bind_point: PipelineBindPoint,
    pub view_mask: u32,
    pub input_attachment_count: u32,
    pub p_input_attachments: *const AttachmentReference2,
    pub color_attachment_count: u32,
    pub p_color_attachments: *const AttachmentReference2,
    pub p_resolve_attachments: *const AttachmentReference2,
    pub p_depth_stencil_attachment: *const AttachmentReference2,
    pub preserve_attachment_count: u32,
    pub p_preserve_attachments: *const u32,
}
impl ::std::default::Default for SubpassDescription2 {
    #[inline]
    fn default() -> Self {
        // s_type is pre-filled from TaggedStructure; counts zero, pointers null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: SubpassDescriptionFlags::default(),
            pipeline_bind_point: PipelineBindPoint::default(),
            view_mask: u32::default(),
            input_attachment_count: u32::default(),
            p_input_attachments: ::std::ptr::null(),
            color_attachment_count: u32::default(),
            p_color_attachments: ::std::ptr::null(),
            p_resolve_attachments: ::std::ptr::null(),
            p_depth_stencil_attachment: ::std::ptr::null(),
            preserve_attachment_count: u32::default(),
            p_preserve_attachments: ::std::ptr::null(),
        }
    }
}
unsafe impl TaggedStructure for SubpassDescription2 {
    const STRUCTURE_TYPE: StructureType = StructureType::SUBPASS_DESCRIPTION_2;
}
impl SubpassDescription2 {
    // Entry point to the builder API; starts from Self::default().
    pub fn builder<'a>() -> SubpassDescription2Builder<'a> {
        SubpassDescription2Builder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
// Same layout as the wrapped struct; PhantomData tracks the attachment-slice borrows.
pub struct SubpassDescription2Builder<'a> {
    inner: SubpassDescription2,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait: types implementing this may be pushed into this struct's p_next chain.
pub unsafe trait ExtendsSubpassDescription2 {}
impl<'a> ::std::ops::Deref for SubpassDescription2Builder<'a> {
    type Target = SubpassDescription2;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for SubpassDescription2Builder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
+impl<'a> SubpassDescription2Builder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: SubpassDescriptionFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn pipeline_bind_point(mut self, pipeline_bind_point: PipelineBindPoint) -> Self {
+ self.inner.pipeline_bind_point = pipeline_bind_point;
+ self
+ }
+ #[inline]
+ pub fn view_mask(mut self, view_mask: u32) -> Self {
+ self.inner.view_mask = view_mask;
+ self
+ }
+ #[inline]
+ pub fn input_attachments(mut self, input_attachments: &'a [AttachmentReference2]) -> Self {
+ self.inner.input_attachment_count = input_attachments.len() as _;
+ self.inner.p_input_attachments = input_attachments.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn color_attachments(mut self, color_attachments: &'a [AttachmentReference2]) -> Self {
+ self.inner.color_attachment_count = color_attachments.len() as _;
+ self.inner.p_color_attachments = color_attachments.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn resolve_attachments(mut self, resolve_attachments: &'a [AttachmentReference2]) -> Self {
+ self.inner.color_attachment_count = resolve_attachments.len() as _;
+ self.inner.p_resolve_attachments = resolve_attachments.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn depth_stencil_attachment(
+ mut self,
+ depth_stencil_attachment: &'a AttachmentReference2,
+ ) -> Self {
+ self.inner.p_depth_stencil_attachment = depth_stencil_attachment;
+ self
+ }
+ #[inline]
+ pub fn preserve_attachments(mut self, preserve_attachments: &'a [u32]) -> Self {
+ self.inner.preserve_attachment_count = preserve_attachments.len() as _;
+ self.inner.p_preserve_attachments = preserve_attachments.as_ptr();
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsSubpassDescription2>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SubpassDescription2 {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubpassDependency2.html>"]
+pub struct SubpassDependency2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src_subpass: u32,
+ pub dst_subpass: u32,
+ pub src_stage_mask: PipelineStageFlags,
+ pub dst_stage_mask: PipelineStageFlags,
+ pub src_access_mask: AccessFlags,
+ pub dst_access_mask: AccessFlags,
+ pub dependency_flags: DependencyFlags,
+ pub view_offset: i32,
+}
+impl ::std::default::Default for SubpassDependency2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src_subpass: u32::default(),
+ dst_subpass: u32::default(),
+ src_stage_mask: PipelineStageFlags::default(),
+ dst_stage_mask: PipelineStageFlags::default(),
+ src_access_mask: AccessFlags::default(),
+ dst_access_mask: AccessFlags::default(),
+ dependency_flags: DependencyFlags::default(),
+ view_offset: i32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SubpassDependency2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::SUBPASS_DEPENDENCY_2;
+}
+impl SubpassDependency2 {
+ pub fn builder<'a>() -> SubpassDependency2Builder<'a> {
+ SubpassDependency2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SubpassDependency2Builder<'a> {
+ inner: SubpassDependency2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsSubpassDependency2 {}
+impl<'a> ::std::ops::Deref for SubpassDependency2Builder<'a> {
+ type Target = SubpassDependency2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SubpassDependency2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SubpassDependency2Builder<'a> {
+ #[inline]
+ pub fn src_subpass(mut self, src_subpass: u32) -> Self {
+ self.inner.src_subpass = src_subpass;
+ self
+ }
+ #[inline]
+ pub fn dst_subpass(mut self, dst_subpass: u32) -> Self {
+ self.inner.dst_subpass = dst_subpass;
+ self
+ }
+ #[inline]
+ pub fn src_stage_mask(mut self, src_stage_mask: PipelineStageFlags) -> Self {
+ self.inner.src_stage_mask = src_stage_mask;
+ self
+ }
+ #[inline]
+ pub fn dst_stage_mask(mut self, dst_stage_mask: PipelineStageFlags) -> Self {
+ self.inner.dst_stage_mask = dst_stage_mask;
+ self
+ }
+ #[inline]
+ pub fn src_access_mask(mut self, src_access_mask: AccessFlags) -> Self {
+ self.inner.src_access_mask = src_access_mask;
+ self
+ }
+ #[inline]
+ pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags) -> Self {
+ self.inner.dst_access_mask = dst_access_mask;
+ self
+ }
+ #[inline]
+ pub fn dependency_flags(mut self, dependency_flags: DependencyFlags) -> Self {
+ self.inner.dependency_flags = dependency_flags;
+ self
+ }
+ #[inline]
+ pub fn view_offset(mut self, view_offset: i32) -> Self {
+ self.inner.view_offset = view_offset;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsSubpassDependency2>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SubpassDependency2 {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPassCreateInfo2.html>"]
+pub struct RenderPassCreateInfo2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: RenderPassCreateFlags,
+ pub attachment_count: u32,
+ pub p_attachments: *const AttachmentDescription2,
+ pub subpass_count: u32,
+ pub p_subpasses: *const SubpassDescription2,
+ pub dependency_count: u32,
+ pub p_dependencies: *const SubpassDependency2,
+ pub correlated_view_mask_count: u32,
+ pub p_correlated_view_masks: *const u32,
+}
+impl ::std::default::Default for RenderPassCreateInfo2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: RenderPassCreateFlags::default(),
+ attachment_count: u32::default(),
+ p_attachments: ::std::ptr::null(),
+ subpass_count: u32::default(),
+ p_subpasses: ::std::ptr::null(),
+ dependency_count: u32::default(),
+ p_dependencies: ::std::ptr::null(),
+ correlated_view_mask_count: u32::default(),
+ p_correlated_view_masks: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for RenderPassCreateInfo2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_CREATE_INFO_2;
+}
+impl RenderPassCreateInfo2 {
+ pub fn builder<'a>() -> RenderPassCreateInfo2Builder<'a> {
+ RenderPassCreateInfo2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct RenderPassCreateInfo2Builder<'a> {
+ inner: RenderPassCreateInfo2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsRenderPassCreateInfo2 {}
+impl<'a> ::std::ops::Deref for RenderPassCreateInfo2Builder<'a> {
+ type Target = RenderPassCreateInfo2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for RenderPassCreateInfo2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> RenderPassCreateInfo2Builder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: RenderPassCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn attachments(mut self, attachments: &'a [AttachmentDescription2]) -> Self {
+ self.inner.attachment_count = attachments.len() as _;
+ self.inner.p_attachments = attachments.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn subpasses(mut self, subpasses: &'a [SubpassDescription2]) -> Self {
+ self.inner.subpass_count = subpasses.len() as _;
+ self.inner.p_subpasses = subpasses.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn dependencies(mut self, dependencies: &'a [SubpassDependency2]) -> Self {
+ self.inner.dependency_count = dependencies.len() as _;
+ self.inner.p_dependencies = dependencies.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn correlated_view_masks(mut self, correlated_view_masks: &'a [u32]) -> Self {
+ self.inner.correlated_view_mask_count = correlated_view_masks.len() as _;
+ self.inner.p_correlated_view_masks = correlated_view_masks.as_ptr();
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsRenderPassCreateInfo2>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> RenderPassCreateInfo2 {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubpassBeginInfo.html>"]
+pub struct SubpassBeginInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub contents: SubpassContents,
+}
+impl ::std::default::Default for SubpassBeginInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ contents: SubpassContents::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SubpassBeginInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::SUBPASS_BEGIN_INFO;
+}
+impl SubpassBeginInfo {
+ pub fn builder<'a>() -> SubpassBeginInfoBuilder<'a> {
+ SubpassBeginInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SubpassBeginInfoBuilder<'a> {
+ inner: SubpassBeginInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SubpassBeginInfoBuilder<'a> {
+ type Target = SubpassBeginInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SubpassBeginInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SubpassBeginInfoBuilder<'a> {
+ #[inline]
+ pub fn contents(mut self, contents: SubpassContents) -> Self {
+ self.inner.contents = contents;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SubpassBeginInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubpassEndInfo.html>"]
+pub struct SubpassEndInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+}
+impl ::std::default::Default for SubpassEndInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SubpassEndInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::SUBPASS_END_INFO;
+}
+impl SubpassEndInfo {
+ pub fn builder<'a>() -> SubpassEndInfoBuilder<'a> {
+ SubpassEndInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SubpassEndInfoBuilder<'a> {
+ inner: SubpassEndInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsSubpassEndInfo {}
+impl<'a> ::std::ops::Deref for SubpassEndInfoBuilder<'a> {
+ type Target = SubpassEndInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SubpassEndInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SubpassEndInfoBuilder<'a> {
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsSubpassEndInfo>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SubpassEndInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceTimelineSemaphoreFeatures.html>"]
+pub struct PhysicalDeviceTimelineSemaphoreFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub timeline_semaphore: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceTimelineSemaphoreFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ timeline_semaphore: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceTimelineSemaphoreFeatures {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES;
+}
+impl PhysicalDeviceTimelineSemaphoreFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceTimelineSemaphoreFeaturesBuilder<'a> {
+ PhysicalDeviceTimelineSemaphoreFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceTimelineSemaphoreFeaturesBuilder<'a> {
+ inner: PhysicalDeviceTimelineSemaphoreFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceTimelineSemaphoreFeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceTimelineSemaphoreFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTimelineSemaphoreFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTimelineSemaphoreFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceTimelineSemaphoreFeaturesBuilder<'a> {
+ type Target = PhysicalDeviceTimelineSemaphoreFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceTimelineSemaphoreFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceTimelineSemaphoreFeaturesBuilder<'a> {
+ #[inline]
+ pub fn timeline_semaphore(mut self, timeline_semaphore: bool) -> Self {
+ self.inner.timeline_semaphore = timeline_semaphore.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceTimelineSemaphoreFeatures {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceTimelineSemaphoreProperties.html>"]
+pub struct PhysicalDeviceTimelineSemaphoreProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub max_timeline_semaphore_value_difference: u64,
+}
+impl ::std::default::Default for PhysicalDeviceTimelineSemaphoreProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ max_timeline_semaphore_value_difference: u64::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceTimelineSemaphoreProperties {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES;
+}
+impl PhysicalDeviceTimelineSemaphoreProperties {
+ pub fn builder<'a>() -> PhysicalDeviceTimelineSemaphorePropertiesBuilder<'a> {
+ PhysicalDeviceTimelineSemaphorePropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceTimelineSemaphorePropertiesBuilder<'a> {
+ inner: PhysicalDeviceTimelineSemaphoreProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceTimelineSemaphorePropertiesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceTimelineSemaphoreProperties {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceTimelineSemaphorePropertiesBuilder<'a> {
+ type Target = PhysicalDeviceTimelineSemaphoreProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceTimelineSemaphorePropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceTimelineSemaphorePropertiesBuilder<'a> {
+ #[inline]
+ pub fn max_timeline_semaphore_value_difference(
+ mut self,
+ max_timeline_semaphore_value_difference: u64,
+ ) -> Self {
+ self.inner.max_timeline_semaphore_value_difference =
+ max_timeline_semaphore_value_difference;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceTimelineSemaphoreProperties {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSemaphoreTypeCreateInfo.html>"]
+pub struct SemaphoreTypeCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub semaphore_type: SemaphoreType,
+ pub initial_value: u64,
+}
+impl ::std::default::Default for SemaphoreTypeCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ semaphore_type: SemaphoreType::default(),
+ initial_value: u64::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SemaphoreTypeCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_TYPE_CREATE_INFO;
+}
+impl SemaphoreTypeCreateInfo {
+ pub fn builder<'a>() -> SemaphoreTypeCreateInfoBuilder<'a> {
+ SemaphoreTypeCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SemaphoreTypeCreateInfoBuilder<'a> {
+ inner: SemaphoreTypeCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsSemaphoreCreateInfo for SemaphoreTypeCreateInfoBuilder<'_> {}
+unsafe impl ExtendsSemaphoreCreateInfo for SemaphoreTypeCreateInfo {}
+unsafe impl ExtendsPhysicalDeviceExternalSemaphoreInfo for SemaphoreTypeCreateInfoBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceExternalSemaphoreInfo for SemaphoreTypeCreateInfo {}
+impl<'a> ::std::ops::Deref for SemaphoreTypeCreateInfoBuilder<'a> {
+ type Target = SemaphoreTypeCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SemaphoreTypeCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SemaphoreTypeCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn semaphore_type(mut self, semaphore_type: SemaphoreType) -> Self {
+ self.inner.semaphore_type = semaphore_type;
+ self
+ }
+ #[inline]
+ pub fn initial_value(mut self, initial_value: u64) -> Self {
+ self.inner.initial_value = initial_value;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SemaphoreTypeCreateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkTimelineSemaphoreSubmitInfo.html>"]
+pub struct TimelineSemaphoreSubmitInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub wait_semaphore_value_count: u32,
+ pub p_wait_semaphore_values: *const u64,
+ pub signal_semaphore_value_count: u32,
+ pub p_signal_semaphore_values: *const u64,
+}
+impl ::std::default::Default for TimelineSemaphoreSubmitInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ wait_semaphore_value_count: u32::default(),
+ p_wait_semaphore_values: ::std::ptr::null(),
+ signal_semaphore_value_count: u32::default(),
+ p_signal_semaphore_values: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for TimelineSemaphoreSubmitInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::TIMELINE_SEMAPHORE_SUBMIT_INFO;
+}
+impl TimelineSemaphoreSubmitInfo {
+ pub fn builder<'a>() -> TimelineSemaphoreSubmitInfoBuilder<'a> {
+ TimelineSemaphoreSubmitInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct TimelineSemaphoreSubmitInfoBuilder<'a> {
+ inner: TimelineSemaphoreSubmitInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsSubmitInfo for TimelineSemaphoreSubmitInfoBuilder<'_> {}
+unsafe impl ExtendsSubmitInfo for TimelineSemaphoreSubmitInfo {}
+unsafe impl ExtendsBindSparseInfo for TimelineSemaphoreSubmitInfoBuilder<'_> {}
+unsafe impl ExtendsBindSparseInfo for TimelineSemaphoreSubmitInfo {}
+impl<'a> ::std::ops::Deref for TimelineSemaphoreSubmitInfoBuilder<'a> {
+ type Target = TimelineSemaphoreSubmitInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for TimelineSemaphoreSubmitInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> TimelineSemaphoreSubmitInfoBuilder<'a> {
+ #[inline]
+ pub fn wait_semaphore_values(mut self, wait_semaphore_values: &'a [u64]) -> Self {
+ self.inner.wait_semaphore_value_count = wait_semaphore_values.len() as _;
+ self.inner.p_wait_semaphore_values = wait_semaphore_values.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn signal_semaphore_values(mut self, signal_semaphore_values: &'a [u64]) -> Self {
+ self.inner.signal_semaphore_value_count = signal_semaphore_values.len() as _;
+ self.inner.p_signal_semaphore_values = signal_semaphore_values.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> TimelineSemaphoreSubmitInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSemaphoreWaitInfo.html>"]
+pub struct SemaphoreWaitInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: SemaphoreWaitFlags,
+ pub semaphore_count: u32,
+ pub p_semaphores: *const Semaphore,
+ pub p_values: *const u64,
+}
+impl ::std::default::Default for SemaphoreWaitInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: SemaphoreWaitFlags::default(),
+ semaphore_count: u32::default(),
+ p_semaphores: ::std::ptr::null(),
+ p_values: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SemaphoreWaitInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_WAIT_INFO;
+}
+impl SemaphoreWaitInfo {
+ pub fn builder<'a>() -> SemaphoreWaitInfoBuilder<'a> {
+ SemaphoreWaitInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SemaphoreWaitInfoBuilder<'a> {
+ inner: SemaphoreWaitInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SemaphoreWaitInfoBuilder<'a> {
+ type Target = SemaphoreWaitInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SemaphoreWaitInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SemaphoreWaitInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: SemaphoreWaitFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn semaphores(mut self, semaphores: &'a [Semaphore]) -> Self {
+ self.inner.semaphore_count = semaphores.len() as _;
+ self.inner.p_semaphores = semaphores.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn values(mut self, values: &'a [u64]) -> Self {
+ self.inner.semaphore_count = values.len() as _;
+ self.inner.p_values = values.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SemaphoreWaitInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSemaphoreSignalInfo.html>"]
+pub struct SemaphoreSignalInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub semaphore: Semaphore,
+ pub value: u64,
+}
+impl ::std::default::Default for SemaphoreSignalInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ semaphore: Semaphore::default(),
+ value: u64::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SemaphoreSignalInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_SIGNAL_INFO;
+}
+impl SemaphoreSignalInfo {
+ pub fn builder<'a>() -> SemaphoreSignalInfoBuilder<'a> {
+ SemaphoreSignalInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SemaphoreSignalInfoBuilder<'a> {
+ inner: SemaphoreSignalInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SemaphoreSignalInfoBuilder<'a> {
+ type Target = SemaphoreSignalInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SemaphoreSignalInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SemaphoreSignalInfoBuilder<'a> {
+ #[inline]
+ pub fn semaphore(mut self, semaphore: Semaphore) -> Self {
+ self.inner.semaphore = semaphore;
+ self
+ }
+ #[inline]
+ pub fn value(mut self, value: u64) -> Self {
+ self.inner.value = value;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SemaphoreSignalInfo {
+ self.inner
+ }
+}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVertexInputBindingDivisorDescriptionEXT.html>"]
pub struct VertexInputBindingDivisorDescriptionEXT {
    pub binding: u32,
    pub divisor: u32,
}
impl VertexInputBindingDivisorDescriptionEXT {
    /// Starts a builder over a zero-initialized description.
    pub fn builder<'a>() -> VertexInputBindingDivisorDescriptionEXTBuilder<'a> {
        VertexInputBindingDivisorDescriptionEXTBuilder {
            inner: Default::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
/// Builder with the same layout as the wrapped struct (`repr(transparent)`).
/// `'a` is only carried in `PhantomData`: no setter of this builder borrows data.
#[repr(transparent)]
pub struct VertexInputBindingDivisorDescriptionEXTBuilder<'a> {
    inner: VertexInputBindingDivisorDescriptionEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for VertexInputBindingDivisorDescriptionEXTBuilder<'a> {
    type Target = VertexInputBindingDivisorDescriptionEXT;
    fn deref(&self) -> &VertexInputBindingDivisorDescriptionEXT {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VertexInputBindingDivisorDescriptionEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut VertexInputBindingDivisorDescriptionEXT {
        &mut self.inner
    }
}
impl<'a> VertexInputBindingDivisorDescriptionEXTBuilder<'a> {
    /// Sets the `binding` field.
    #[inline]
    pub fn binding(mut self, binding: u32) -> Self {
        self.inner = VertexInputBindingDivisorDescriptionEXT { binding, ..self.inner };
        self
    }
    /// Sets the `divisor` field.
    #[inline]
    pub fn divisor(mut self, divisor: u32) -> Self {
        self.inner = VertexInputBindingDivisorDescriptionEXT { divisor, ..self.inner };
        self
    }
    /// Unwraps the builder. Usually unnecessary: builders `Deref` to their
    /// corresponding Vulkan struct, so references to builders can be passed
    /// directly to Vulkan functions.
    pub fn build(self) -> VertexInputBindingDivisorDescriptionEXT {
        self.inner
    }
}
// FFI mirror of VkPipelineVertexInputDivisorStateCreateInfoEXT; #[repr(C)]
// keeps the field order and layout of the C struct.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineVertexInputDivisorStateCreateInfoEXT.html>"]
pub struct PipelineVertexInputDivisorStateCreateInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub vertex_binding_divisor_count: u32,
    pub p_vertex_binding_divisors: *const VertexInputBindingDivisorDescriptionEXT,
}
// Default pre-fills s_type with this struct's tag and leaves all pointers null.
impl ::std::default::Default for PipelineVertexInputDivisorStateCreateInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            vertex_binding_divisor_count: u32::default(),
            p_vertex_binding_divisors: ::std::ptr::null(),
        }
    }
}
// Associates the struct with its VkStructureType discriminant.
unsafe impl TaggedStructure for PipelineVertexInputDivisorStateCreateInfoEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
}
impl PipelineVertexInputDivisorStateCreateInfoEXT {
    // Entry point for the builder below, starting from Self::default().
    pub fn builder<'a>() -> PipelineVertexInputDivisorStateCreateInfoEXTBuilder<'a> {
        PipelineVertexInputDivisorStateCreateInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): same layout as the wrapped struct; 'a ties slices
// borrowed by setters to the builder's lifetime.
#[repr(transparent)]
pub struct PipelineVertexInputDivisorStateCreateInfoEXTBuilder<'a> {
    inner: PipelineVertexInputDivisorStateCreateInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct (and its builder) may be chained into the p_next
// chain of a VkPipelineVertexInputStateCreateInfo.
unsafe impl ExtendsPipelineVertexInputStateCreateInfo
    for PipelineVertexInputDivisorStateCreateInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsPipelineVertexInputStateCreateInfo
    for PipelineVertexInputDivisorStateCreateInfoEXT
{
}
impl<'a> ::std::ops::Deref for PipelineVertexInputDivisorStateCreateInfoEXTBuilder<'a> {
    type Target = PipelineVertexInputDivisorStateCreateInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PipelineVertexInputDivisorStateCreateInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PipelineVertexInputDivisorStateCreateInfoEXTBuilder<'a> {
    // Stores both the element count and the data pointer from a single slice;
    // the 'a borrow keeps the slice alive for as long as the builder.
    #[inline]
    pub fn vertex_binding_divisors(
        mut self,
        vertex_binding_divisors: &'a [VertexInputBindingDivisorDescriptionEXT],
    ) -> Self {
        self.inner.vertex_binding_divisor_count = vertex_binding_divisors.len() as _;
        self.inner.p_vertex_binding_divisors = vertex_binding_divisors.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PipelineVertexInputDivisorStateCreateInfoEXT {
        self.inner
    }
}
// FFI mirror of VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT.
// p_next is *mut here; the struct extends PhysicalDeviceProperties2 (see
// marker impls below), i.e. it participates in a properties query chain.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT.html>"]
pub struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub max_vertex_attrib_divisor: u32,
}
// Default pre-fills s_type with this struct's tag and nulls the chain pointer.
impl ::std::default::Default for PhysicalDeviceVertexAttributeDivisorPropertiesEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            max_vertex_attrib_divisor: u32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceVertexAttributeDivisorPropertiesEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT;
}
impl PhysicalDeviceVertexAttributeDivisorPropertiesEXT {
    pub fn builder<'a>() -> PhysicalDeviceVertexAttributeDivisorPropertiesEXTBuilder<'a> {
        PhysicalDeviceVertexAttributeDivisorPropertiesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent) builder; 'a only lives in PhantomData (no borrowing setters).
#[repr(transparent)]
pub struct PhysicalDeviceVertexAttributeDivisorPropertiesEXTBuilder<'a> {
    inner: PhysicalDeviceVertexAttributeDivisorPropertiesEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: valid member of a PhysicalDeviceProperties2 p_next chain.
unsafe impl ExtendsPhysicalDeviceProperties2
    for PhysicalDeviceVertexAttributeDivisorPropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceVertexAttributeDivisorPropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceVertexAttributeDivisorPropertiesEXTBuilder<'a> {
    type Target = PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceVertexAttributeDivisorPropertiesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceVertexAttributeDivisorPropertiesEXTBuilder<'a> {
    #[inline]
    pub fn max_vertex_attrib_divisor(mut self, max_vertex_attrib_divisor: u32) -> Self {
        self.inner.max_vertex_attrib_divisor = max_vertex_attrib_divisor;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceVertexAttributeDivisorPropertiesEXT {
        self.inner
    }
}
// FFI mirror of VkPhysicalDevicePCIBusInfoPropertiesEXT (PCI address of the
// physical device, split into domain/bus/device/function fields).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePCIBusInfoPropertiesEXT.html>"]
pub struct PhysicalDevicePCIBusInfoPropertiesEXT {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub pci_domain: u32,
    pub pci_bus: u32,
    pub pci_device: u32,
    pub pci_function: u32,
}
// Default pre-fills s_type with this struct's tag and nulls the chain pointer.
impl ::std::default::Default for PhysicalDevicePCIBusInfoPropertiesEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            pci_domain: u32::default(),
            pci_bus: u32::default(),
            pci_device: u32::default(),
            pci_function: u32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDevicePCIBusInfoPropertiesEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT;
}
impl PhysicalDevicePCIBusInfoPropertiesEXT {
    pub fn builder<'a>() -> PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'a> {
        PhysicalDevicePCIBusInfoPropertiesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent) builder; 'a only lives in PhantomData (no borrowing setters).
#[repr(transparent)]
pub struct PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'a> {
    inner: PhysicalDevicePCIBusInfoPropertiesEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: valid member of a PhysicalDeviceProperties2 p_next chain.
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePCIBusInfoPropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'a> {
    type Target = PhysicalDevicePCIBusInfoPropertiesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDevicePCIBusInfoPropertiesEXTBuilder<'a> {
    #[inline]
    pub fn pci_domain(mut self, pci_domain: u32) -> Self {
        self.inner.pci_domain = pci_domain;
        self
    }
    #[inline]
    pub fn pci_bus(mut self, pci_bus: u32) -> Self {
        self.inner.pci_bus = pci_bus;
        self
    }
    #[inline]
    pub fn pci_device(mut self, pci_device: u32) -> Self {
        self.inner.pci_device = pci_device;
        self
    }
    #[inline]
    pub fn pci_function(mut self, pci_function: u32) -> Self {
        self.inner.pci_function = pci_function;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDevicePCIBusInfoPropertiesEXT {
        self.inner
    }
}
// FFI mirror of VkImportAndroidHardwareBufferInfoANDROID; carries a raw
// *mut AHardwareBuffer handle (ownership semantics defined by the Vulkan spec,
// not by this binding).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportAndroidHardwareBufferInfoANDROID.html>"]
pub struct ImportAndroidHardwareBufferInfoANDROID {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub buffer: *mut AHardwareBuffer,
}
// Default pre-fills s_type with this struct's tag and nulls both pointers.
impl ::std::default::Default for ImportAndroidHardwareBufferInfoANDROID {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            buffer: ::std::ptr::null_mut(),
        }
    }
}
unsafe impl TaggedStructure for ImportAndroidHardwareBufferInfoANDROID {
    const STRUCTURE_TYPE: StructureType =
        StructureType::IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
}
impl ImportAndroidHardwareBufferInfoANDROID {
    pub fn builder<'a>() -> ImportAndroidHardwareBufferInfoANDROIDBuilder<'a> {
        ImportAndroidHardwareBufferInfoANDROIDBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent) builder; 'a only lives in PhantomData (the buffer setter
// takes a raw pointer, not a borrow).
#[repr(transparent)]
pub struct ImportAndroidHardwareBufferInfoANDROIDBuilder<'a> {
    inner: ImportAndroidHardwareBufferInfoANDROID,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: valid member of a MemoryAllocateInfo p_next chain.
unsafe impl ExtendsMemoryAllocateInfo for ImportAndroidHardwareBufferInfoANDROIDBuilder<'_> {}
unsafe impl ExtendsMemoryAllocateInfo for ImportAndroidHardwareBufferInfoANDROID {}
impl<'a> ::std::ops::Deref for ImportAndroidHardwareBufferInfoANDROIDBuilder<'a> {
    type Target = ImportAndroidHardwareBufferInfoANDROID;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ImportAndroidHardwareBufferInfoANDROIDBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> ImportAndroidHardwareBufferInfoANDROIDBuilder<'a> {
    #[inline]
    pub fn buffer(mut self, buffer: *mut AHardwareBuffer) -> Self {
        self.inner.buffer = buffer;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ImportAndroidHardwareBufferInfoANDROID {
        self.inner
    }
}
// FFI mirror of VkAndroidHardwareBufferUsageANDROID; the usage value is a raw
// u64 bitmask as defined by the Android AHardwareBuffer API.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAndroidHardwareBufferUsageANDROID.html>"]
pub struct AndroidHardwareBufferUsageANDROID {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub android_hardware_buffer_usage: u64,
}
// Default pre-fills s_type with this struct's tag and nulls the chain pointer.
impl ::std::default::Default for AndroidHardwareBufferUsageANDROID {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            android_hardware_buffer_usage: u64::default(),
        }
    }
}
unsafe impl TaggedStructure for AndroidHardwareBufferUsageANDROID {
    const STRUCTURE_TYPE: StructureType = StructureType::ANDROID_HARDWARE_BUFFER_USAGE_ANDROID;
}
impl AndroidHardwareBufferUsageANDROID {
    pub fn builder<'a>() -> AndroidHardwareBufferUsageANDROIDBuilder<'a> {
        AndroidHardwareBufferUsageANDROIDBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent) builder; 'a only lives in PhantomData (no borrowing setters).
#[repr(transparent)]
pub struct AndroidHardwareBufferUsageANDROIDBuilder<'a> {
    inner: AndroidHardwareBufferUsageANDROID,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: valid member of an ImageFormatProperties2 p_next chain.
unsafe impl ExtendsImageFormatProperties2 for AndroidHardwareBufferUsageANDROIDBuilder<'_> {}
unsafe impl ExtendsImageFormatProperties2 for AndroidHardwareBufferUsageANDROID {}
impl<'a> ::std::ops::Deref for AndroidHardwareBufferUsageANDROIDBuilder<'a> {
    type Target = AndroidHardwareBufferUsageANDROID;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for AndroidHardwareBufferUsageANDROIDBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> AndroidHardwareBufferUsageANDROIDBuilder<'a> {
    #[inline]
    pub fn android_hardware_buffer_usage(mut self, android_hardware_buffer_usage: u64) -> Self {
        self.inner.android_hardware_buffer_usage = android_hardware_buffer_usage;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> AndroidHardwareBufferUsageANDROID {
        self.inner
    }
}
// FFI mirror of VkAndroidHardwareBufferPropertiesANDROID. Unlike most structs
// in this file it is itself a chain ROOT: it declares its own Extends* marker
// trait and a push_next method on its builder (see below).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAndroidHardwareBufferPropertiesANDROID.html>"]
pub struct AndroidHardwareBufferPropertiesANDROID {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub allocation_size: DeviceSize,
    pub memory_type_bits: u32,
}
// Default pre-fills s_type with this struct's tag and nulls the chain pointer.
impl ::std::default::Default for AndroidHardwareBufferPropertiesANDROID {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            allocation_size: DeviceSize::default(),
            memory_type_bits: u32::default(),
        }
    }
}
unsafe impl TaggedStructure for AndroidHardwareBufferPropertiesANDROID {
    const STRUCTURE_TYPE: StructureType = StructureType::ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID;
}
impl AndroidHardwareBufferPropertiesANDROID {
    pub fn builder<'a>() -> AndroidHardwareBufferPropertiesANDROIDBuilder<'a> {
        AndroidHardwareBufferPropertiesANDROIDBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent) builder; 'a ties push_next'ed extension structs to the
// builder's lifetime.
#[repr(transparent)]
pub struct AndroidHardwareBufferPropertiesANDROIDBuilder<'a> {
    inner: AndroidHardwareBufferPropertiesANDROID,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait for structs that are allowed on this struct's p_next chain
// (accepted by push_next below).
pub unsafe trait ExtendsAndroidHardwareBufferPropertiesANDROID {}
impl<'a> ::std::ops::Deref for AndroidHardwareBufferPropertiesANDROIDBuilder<'a> {
    type Target = AndroidHardwareBufferPropertiesANDROID;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for AndroidHardwareBufferPropertiesANDROIDBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> AndroidHardwareBufferPropertiesANDROIDBuilder<'a> {
    #[inline]
    pub fn allocation_size(mut self, allocation_size: DeviceSize) -> Self {
        self.inner.allocation_size = allocation_size;
        self
    }
    #[inline]
    pub fn memory_type_bits(mut self, memory_type_bits: u32) -> Self {
        self.inner.memory_type_bits = memory_type_bits;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsAndroidHardwareBufferPropertiesANDROID>(
        mut self,
        next: &'a mut T,
    ) -> Self {
        unsafe {
            // Splice `next` (and any chain already hanging off it) in front of
            // the existing chain: walk to the last element of `next`'s own
            // chain, point its p_next at the current head, then install `next`
            // as the new head.
            let next_ptr = <*mut T>::cast(next);
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> AndroidHardwareBufferPropertiesANDROID {
        self.inner
    }
}
// FFI mirror of VkMemoryGetAndroidHardwareBufferInfoANDROID; wraps a
// DeviceMemory handle for the export query.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryGetAndroidHardwareBufferInfoANDROID.html>"]
pub struct MemoryGetAndroidHardwareBufferInfoANDROID {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub memory: DeviceMemory,
}
// Default pre-fills s_type with this struct's tag and nulls the chain pointer.
impl ::std::default::Default for MemoryGetAndroidHardwareBufferInfoANDROID {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            memory: DeviceMemory::default(),
        }
    }
}
unsafe impl TaggedStructure for MemoryGetAndroidHardwareBufferInfoANDROID {
    const STRUCTURE_TYPE: StructureType =
        StructureType::MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID;
}
impl MemoryGetAndroidHardwareBufferInfoANDROID {
    pub fn builder<'a>() -> MemoryGetAndroidHardwareBufferInfoANDROIDBuilder<'a> {
        MemoryGetAndroidHardwareBufferInfoANDROIDBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent) builder; no Extends* impls — this struct does not
// extend any p_next chain.
#[repr(transparent)]
pub struct MemoryGetAndroidHardwareBufferInfoANDROIDBuilder<'a> {
    inner: MemoryGetAndroidHardwareBufferInfoANDROID,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for MemoryGetAndroidHardwareBufferInfoANDROIDBuilder<'a> {
    type Target = MemoryGetAndroidHardwareBufferInfoANDROID;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for MemoryGetAndroidHardwareBufferInfoANDROIDBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> MemoryGetAndroidHardwareBufferInfoANDROIDBuilder<'a> {
    #[inline]
    pub fn memory(mut self, memory: DeviceMemory) -> Self {
        self.inner.memory = memory;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> MemoryGetAndroidHardwareBufferInfoANDROID {
        self.inner
    }
}
// FFI mirror of VkAndroidHardwareBufferFormatPropertiesANDROID; extends the
// AndroidHardwareBufferPropertiesANDROID chain (see marker impls below).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAndroidHardwareBufferFormatPropertiesANDROID.html>"]
pub struct AndroidHardwareBufferFormatPropertiesANDROID {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub format: Format,
    pub external_format: u64,
    pub format_features: FormatFeatureFlags,
    pub sampler_ycbcr_conversion_components: ComponentMapping,
    pub suggested_ycbcr_model: SamplerYcbcrModelConversion,
    pub suggested_ycbcr_range: SamplerYcbcrRange,
    pub suggested_x_chroma_offset: ChromaLocation,
    pub suggested_y_chroma_offset: ChromaLocation,
}
// Default pre-fills s_type with this struct's tag; every other field takes
// its own type's default.
impl ::std::default::Default for AndroidHardwareBufferFormatPropertiesANDROID {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            format: Format::default(),
            external_format: u64::default(),
            format_features: FormatFeatureFlags::default(),
            sampler_ycbcr_conversion_components: ComponentMapping::default(),
            suggested_ycbcr_model: SamplerYcbcrModelConversion::default(),
            suggested_ycbcr_range: SamplerYcbcrRange::default(),
            suggested_x_chroma_offset: ChromaLocation::default(),
            suggested_y_chroma_offset: ChromaLocation::default(),
        }
    }
}
unsafe impl TaggedStructure for AndroidHardwareBufferFormatPropertiesANDROID {
    const STRUCTURE_TYPE: StructureType =
        StructureType::ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID;
}
impl AndroidHardwareBufferFormatPropertiesANDROID {
    pub fn builder<'a>() -> AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
        AndroidHardwareBufferFormatPropertiesANDROIDBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent) builder; 'a only lives in PhantomData (no borrowing setters).
#[repr(transparent)]
pub struct AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
    inner: AndroidHardwareBufferFormatPropertiesANDROID,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: valid member of an AndroidHardwareBufferPropertiesANDROID
// p_next chain (accepted by that builder's push_next).
unsafe impl ExtendsAndroidHardwareBufferPropertiesANDROID
    for AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'_>
{
}
unsafe impl ExtendsAndroidHardwareBufferPropertiesANDROID
    for AndroidHardwareBufferFormatPropertiesANDROID
{
}
impl<'a> ::std::ops::Deref for AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
    type Target = AndroidHardwareBufferFormatPropertiesANDROID;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> AndroidHardwareBufferFormatPropertiesANDROIDBuilder<'a> {
    #[inline]
    pub fn format(mut self, format: Format) -> Self {
        self.inner.format = format;
        self
    }
    #[inline]
    pub fn external_format(mut self, external_format: u64) -> Self {
        self.inner.external_format = external_format;
        self
    }
    #[inline]
    pub fn format_features(mut self, format_features: FormatFeatureFlags) -> Self {
        self.inner.format_features = format_features;
        self
    }
    #[inline]
    pub fn sampler_ycbcr_conversion_components(
        mut self,
        sampler_ycbcr_conversion_components: ComponentMapping,
    ) -> Self {
        self.inner.sampler_ycbcr_conversion_components = sampler_ycbcr_conversion_components;
        self
    }
    #[inline]
    pub fn suggested_ycbcr_model(
        mut self,
        suggested_ycbcr_model: SamplerYcbcrModelConversion,
    ) -> Self {
        self.inner.suggested_ycbcr_model = suggested_ycbcr_model;
        self
    }
    #[inline]
    pub fn suggested_ycbcr_range(mut self, suggested_ycbcr_range: SamplerYcbcrRange) -> Self {
        self.inner.suggested_ycbcr_range = suggested_ycbcr_range;
        self
    }
    #[inline]
    pub fn suggested_x_chroma_offset(mut self, suggested_x_chroma_offset: ChromaLocation) -> Self {
        self.inner.suggested_x_chroma_offset = suggested_x_chroma_offset;
        self
    }
    #[inline]
    pub fn suggested_y_chroma_offset(mut self, suggested_y_chroma_offset: ChromaLocation) -> Self {
        self.inner.suggested_y_chroma_offset = suggested_y_chroma_offset;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> AndroidHardwareBufferFormatPropertiesANDROID {
        self.inner
    }
}
// FFI mirror of VkCommandBufferInheritanceConditionalRenderingInfoEXT; the
// enable flag is a Vulkan Bool32, not a Rust bool.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandBufferInheritanceConditionalRenderingInfoEXT.html>"]
pub struct CommandBufferInheritanceConditionalRenderingInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub conditional_rendering_enable: Bool32,
}
// Default pre-fills s_type with this struct's tag and nulls the chain pointer.
impl ::std::default::Default for CommandBufferInheritanceConditionalRenderingInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            conditional_rendering_enable: Bool32::default(),
        }
    }
}
unsafe impl TaggedStructure for CommandBufferInheritanceConditionalRenderingInfoEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT;
}
impl CommandBufferInheritanceConditionalRenderingInfoEXT {
    pub fn builder<'a>() -> CommandBufferInheritanceConditionalRenderingInfoEXTBuilder<'a> {
        CommandBufferInheritanceConditionalRenderingInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent) builder; 'a only lives in PhantomData (no borrowing setters).
#[repr(transparent)]
pub struct CommandBufferInheritanceConditionalRenderingInfoEXTBuilder<'a> {
    inner: CommandBufferInheritanceConditionalRenderingInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: valid member of a CommandBufferInheritanceInfo p_next chain.
unsafe impl ExtendsCommandBufferInheritanceInfo
    for CommandBufferInheritanceConditionalRenderingInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsCommandBufferInheritanceInfo
    for CommandBufferInheritanceConditionalRenderingInfoEXT
{
}
impl<'a> ::std::ops::Deref for CommandBufferInheritanceConditionalRenderingInfoEXTBuilder<'a> {
    type Target = CommandBufferInheritanceConditionalRenderingInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for CommandBufferInheritanceConditionalRenderingInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> CommandBufferInheritanceConditionalRenderingInfoEXTBuilder<'a> {
    // Accepts a Rust bool and converts it to Bool32 via Into.
    #[inline]
    pub fn conditional_rendering_enable(mut self, conditional_rendering_enable: bool) -> Self {
        self.inner.conditional_rendering_enable = conditional_rendering_enable.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> CommandBufferInheritanceConditionalRenderingInfoEXT {
        self.inner
    }
}
// FFI mirror of VkExternalFormatANDROID; wraps an opaque u64 external format
// identifier.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExternalFormatANDROID.html>"]
pub struct ExternalFormatANDROID {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub external_format: u64,
}
// Default pre-fills s_type with this struct's tag and nulls the chain pointer.
impl ::std::default::Default for ExternalFormatANDROID {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            external_format: u64::default(),
        }
    }
}
unsafe impl TaggedStructure for ExternalFormatANDROID {
    const STRUCTURE_TYPE: StructureType = StructureType::EXTERNAL_FORMAT_ANDROID;
}
impl ExternalFormatANDROID {
    pub fn builder<'a>() -> ExternalFormatANDROIDBuilder<'a> {
        ExternalFormatANDROIDBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent) builder; 'a only lives in PhantomData (no borrowing setters).
#[repr(transparent)]
pub struct ExternalFormatANDROIDBuilder<'a> {
    inner: ExternalFormatANDROID,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct may extend TWO different chains — ImageCreateInfo
// and SamplerYcbcrConversionCreateInfo.
unsafe impl ExtendsImageCreateInfo for ExternalFormatANDROIDBuilder<'_> {}
unsafe impl ExtendsImageCreateInfo for ExternalFormatANDROID {}
unsafe impl ExtendsSamplerYcbcrConversionCreateInfo for ExternalFormatANDROIDBuilder<'_> {}
unsafe impl ExtendsSamplerYcbcrConversionCreateInfo for ExternalFormatANDROID {}
impl<'a> ::std::ops::Deref for ExternalFormatANDROIDBuilder<'a> {
    type Target = ExternalFormatANDROID;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ExternalFormatANDROIDBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> ExternalFormatANDROIDBuilder<'a> {
    #[inline]
    pub fn external_format(mut self, external_format: u64) -> Self {
        self.inner.external_format = external_format;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ExternalFormatANDROID {
        self.inner
    }
}
// FFI mirror of VkPhysicalDevice8BitStorageFeatures; three Bool32 feature
// flags. Extends both PhysicalDeviceFeatures2 (query) and DeviceCreateInfo
// (enable) chains — see marker impls below.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevice8BitStorageFeatures.html>"]
pub struct PhysicalDevice8BitStorageFeatures {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub storage_buffer8_bit_access: Bool32,
    pub uniform_and_storage_buffer8_bit_access: Bool32,
    pub storage_push_constant8: Bool32,
}
// Default pre-fills s_type with this struct's tag and nulls the chain pointer.
impl ::std::default::Default for PhysicalDevice8BitStorageFeatures {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            storage_buffer8_bit_access: Bool32::default(),
            uniform_and_storage_buffer8_bit_access: Bool32::default(),
            storage_push_constant8: Bool32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDevice8BitStorageFeatures {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES;
}
impl PhysicalDevice8BitStorageFeatures {
    pub fn builder<'a>() -> PhysicalDevice8BitStorageFeaturesBuilder<'a> {
        PhysicalDevice8BitStorageFeaturesBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent) builder; 'a only lives in PhantomData (no borrowing setters).
#[repr(transparent)]
pub struct PhysicalDevice8BitStorageFeaturesBuilder<'a> {
    inner: PhysicalDevice8BitStorageFeatures,
    marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevice8BitStorageFeaturesBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevice8BitStorageFeatures {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDevice8BitStorageFeaturesBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDevice8BitStorageFeatures {}
impl<'a> ::std::ops::Deref for PhysicalDevice8BitStorageFeaturesBuilder<'a> {
    type Target = PhysicalDevice8BitStorageFeatures;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDevice8BitStorageFeaturesBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDevice8BitStorageFeaturesBuilder<'a> {
    // Each setter accepts a Rust bool and converts it to Bool32 via Into.
    #[inline]
    pub fn storage_buffer8_bit_access(mut self, storage_buffer8_bit_access: bool) -> Self {
        self.inner.storage_buffer8_bit_access = storage_buffer8_bit_access.into();
        self
    }
    #[inline]
    pub fn uniform_and_storage_buffer8_bit_access(
        mut self,
        uniform_and_storage_buffer8_bit_access: bool,
    ) -> Self {
        self.inner.uniform_and_storage_buffer8_bit_access =
            uniform_and_storage_buffer8_bit_access.into();
        self
    }
    #[inline]
    pub fn storage_push_constant8(mut self, storage_push_constant8: bool) -> Self {
        self.inner.storage_push_constant8 = storage_push_constant8.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDevice8BitStorageFeatures {
        self.inner
    }
}
// FFI mirror of VkPhysicalDeviceConditionalRenderingFeaturesEXT; two Bool32
// feature flags. Extends both PhysicalDeviceFeatures2 (query) and
// DeviceCreateInfo (enable) chains — see marker impls below.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceConditionalRenderingFeaturesEXT.html>"]
pub struct PhysicalDeviceConditionalRenderingFeaturesEXT {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub conditional_rendering: Bool32,
    pub inherited_conditional_rendering: Bool32,
}
// Default pre-fills s_type with this struct's tag and nulls the chain pointer.
impl ::std::default::Default for PhysicalDeviceConditionalRenderingFeaturesEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            conditional_rendering: Bool32::default(),
            inherited_conditional_rendering: Bool32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceConditionalRenderingFeaturesEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT;
}
impl PhysicalDeviceConditionalRenderingFeaturesEXT {
    pub fn builder<'a>() -> PhysicalDeviceConditionalRenderingFeaturesEXTBuilder<'a> {
        PhysicalDeviceConditionalRenderingFeaturesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent) builder; 'a only lives in PhantomData (no borrowing setters).
#[repr(transparent)]
pub struct PhysicalDeviceConditionalRenderingFeaturesEXTBuilder<'a> {
    inner: PhysicalDeviceConditionalRenderingFeaturesEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceFeatures2
    for PhysicalDeviceConditionalRenderingFeaturesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceConditionalRenderingFeaturesEXT {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceConditionalRenderingFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceConditionalRenderingFeaturesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceConditionalRenderingFeaturesEXTBuilder<'a> {
    type Target = PhysicalDeviceConditionalRenderingFeaturesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceConditionalRenderingFeaturesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceConditionalRenderingFeaturesEXTBuilder<'a> {
    // Each setter accepts a Rust bool and converts it to Bool32 via Into.
    #[inline]
    pub fn conditional_rendering(mut self, conditional_rendering: bool) -> Self {
        self.inner.conditional_rendering = conditional_rendering.into();
        self
    }
    #[inline]
    pub fn inherited_conditional_rendering(
        mut self,
        inherited_conditional_rendering: bool,
    ) -> Self {
        self.inner.inherited_conditional_rendering = inherited_conditional_rendering.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceConditionalRenderingFeaturesEXT {
        self.inner
    }
}
+// Binding for VkPhysicalDeviceVulkanMemoryModelFeatures. #[repr(C)]: keep field order
+// in sync with the C definition.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceVulkanMemoryModelFeatures.html>"]
+pub struct PhysicalDeviceVulkanMemoryModelFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub vulkan_memory_model: Bool32,
+ pub vulkan_memory_model_device_scope: Bool32,
+ pub vulkan_memory_model_availability_visibility_chains: Bool32,
+}
+// Default pre-fills s_type with the matching StructureType tag and nulls p_next.
+impl ::std::default::Default for PhysicalDeviceVulkanMemoryModelFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ vulkan_memory_model: Bool32::default(),
+ vulkan_memory_model_device_scope: Bool32::default(),
+ vulkan_memory_model_availability_visibility_chains: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceVulkanMemoryModelFeatures {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES;
+}
+impl PhysicalDeviceVulkanMemoryModelFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceVulkanMemoryModelFeaturesBuilder<'a> {
+ PhysicalDeviceVulkanMemoryModelFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; PhantomData<&'a ()> carries the builder lifetime.
+#[repr(transparent)]
+pub struct PhysicalDeviceVulkanMemoryModelFeaturesBuilder<'a> {
+ inner: PhysicalDeviceVulkanMemoryModelFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVulkanMemoryModelFeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVulkanMemoryModelFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVulkanMemoryModelFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVulkanMemoryModelFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceVulkanMemoryModelFeaturesBuilder<'a> {
+ type Target = PhysicalDeviceVulkanMemoryModelFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceVulkanMemoryModelFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable setters; bool arguments are converted to Bool32 via .into().
+impl<'a> PhysicalDeviceVulkanMemoryModelFeaturesBuilder<'a> {
+ #[inline]
+ pub fn vulkan_memory_model(mut self, vulkan_memory_model: bool) -> Self {
+ self.inner.vulkan_memory_model = vulkan_memory_model.into();
+ self
+ }
+ #[inline]
+ pub fn vulkan_memory_model_device_scope(
+ mut self,
+ vulkan_memory_model_device_scope: bool,
+ ) -> Self {
+ self.inner.vulkan_memory_model_device_scope = vulkan_memory_model_device_scope.into();
+ self
+ }
+ #[inline]
+ pub fn vulkan_memory_model_availability_visibility_chains(
+ mut self,
+ vulkan_memory_model_availability_visibility_chains: bool,
+ ) -> Self {
+ self.inner
+ .vulkan_memory_model_availability_visibility_chains =
+ vulkan_memory_model_availability_visibility_chains.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceVulkanMemoryModelFeatures {
+ self.inner
+ }
+}
+// Binding for VkPhysicalDeviceShaderAtomicInt64Features. #[repr(C)]: keep field order
+// in sync with the C definition.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderAtomicInt64Features.html>"]
+pub struct PhysicalDeviceShaderAtomicInt64Features {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_buffer_int64_atomics: Bool32,
+ pub shader_shared_int64_atomics: Bool32,
+}
+// Default pre-fills s_type with the matching StructureType tag and nulls p_next.
+impl ::std::default::Default for PhysicalDeviceShaderAtomicInt64Features {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shader_buffer_int64_atomics: Bool32::default(),
+ shader_shared_int64_atomics: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceShaderAtomicInt64Features {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES;
+}
+impl PhysicalDeviceShaderAtomicInt64Features {
+ pub fn builder<'a>() -> PhysicalDeviceShaderAtomicInt64FeaturesBuilder<'a> {
+ PhysicalDeviceShaderAtomicInt64FeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; PhantomData<&'a ()> carries the builder lifetime.
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderAtomicInt64FeaturesBuilder<'a> {
+ inner: PhysicalDeviceShaderAtomicInt64Features,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderAtomicInt64FeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderAtomicInt64Features {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderAtomicInt64FeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderAtomicInt64Features {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderAtomicInt64FeaturesBuilder<'a> {
+ type Target = PhysicalDeviceShaderAtomicInt64Features;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderAtomicInt64FeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable setters; bool arguments are converted to Bool32 via .into().
+impl<'a> PhysicalDeviceShaderAtomicInt64FeaturesBuilder<'a> {
+ #[inline]
+ pub fn shader_buffer_int64_atomics(mut self, shader_buffer_int64_atomics: bool) -> Self {
+ self.inner.shader_buffer_int64_atomics = shader_buffer_int64_atomics.into();
+ self
+ }
+ #[inline]
+ pub fn shader_shared_int64_atomics(mut self, shader_shared_int64_atomics: bool) -> Self {
+ self.inner.shader_shared_int64_atomics = shader_shared_int64_atomics.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceShaderAtomicInt64Features {
+ self.inner
+ }
+}
+// Binding for VkPhysicalDeviceShaderAtomicFloatFeaturesEXT. #[repr(C)]: keep field order
+// in sync with the C definition.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderAtomicFloatFeaturesEXT.html>"]
+pub struct PhysicalDeviceShaderAtomicFloatFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_buffer_float32_atomics: Bool32,
+ pub shader_buffer_float32_atomic_add: Bool32,
+ pub shader_buffer_float64_atomics: Bool32,
+ pub shader_buffer_float64_atomic_add: Bool32,
+ pub shader_shared_float32_atomics: Bool32,
+ pub shader_shared_float32_atomic_add: Bool32,
+ pub shader_shared_float64_atomics: Bool32,
+ pub shader_shared_float64_atomic_add: Bool32,
+ pub shader_image_float32_atomics: Bool32,
+ pub shader_image_float32_atomic_add: Bool32,
+ pub sparse_image_float32_atomics: Bool32,
+ pub sparse_image_float32_atomic_add: Bool32,
+}
+// Default pre-fills s_type with the matching StructureType tag and nulls p_next.
+impl ::std::default::Default for PhysicalDeviceShaderAtomicFloatFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shader_buffer_float32_atomics: Bool32::default(),
+ shader_buffer_float32_atomic_add: Bool32::default(),
+ shader_buffer_float64_atomics: Bool32::default(),
+ shader_buffer_float64_atomic_add: Bool32::default(),
+ shader_shared_float32_atomics: Bool32::default(),
+ shader_shared_float32_atomic_add: Bool32::default(),
+ shader_shared_float64_atomics: Bool32::default(),
+ shader_shared_float64_atomic_add: Bool32::default(),
+ shader_image_float32_atomics: Bool32::default(),
+ shader_image_float32_atomic_add: Bool32::default(),
+ sparse_image_float32_atomics: Bool32::default(),
+ sparse_image_float32_atomic_add: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceShaderAtomicFloatFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT;
+}
+impl PhysicalDeviceShaderAtomicFloatFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceShaderAtomicFloatFeaturesEXTBuilder<'a> {
+ PhysicalDeviceShaderAtomicFloatFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; PhantomData<&'a ()> carries the builder lifetime.
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderAtomicFloatFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceShaderAtomicFloatFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceShaderAtomicFloatFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderAtomicFloatFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderAtomicFloatFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderAtomicFloatFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderAtomicFloatFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceShaderAtomicFloatFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderAtomicFloatFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable setters, one per feature flag; bool arguments become Bool32 via .into().
+impl<'a> PhysicalDeviceShaderAtomicFloatFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn shader_buffer_float32_atomics(mut self, shader_buffer_float32_atomics: bool) -> Self {
+ self.inner.shader_buffer_float32_atomics = shader_buffer_float32_atomics.into();
+ self
+ }
+ #[inline]
+ pub fn shader_buffer_float32_atomic_add(
+ mut self,
+ shader_buffer_float32_atomic_add: bool,
+ ) -> Self {
+ self.inner.shader_buffer_float32_atomic_add = shader_buffer_float32_atomic_add.into();
+ self
+ }
+ #[inline]
+ pub fn shader_buffer_float64_atomics(mut self, shader_buffer_float64_atomics: bool) -> Self {
+ self.inner.shader_buffer_float64_atomics = shader_buffer_float64_atomics.into();
+ self
+ }
+ #[inline]
+ pub fn shader_buffer_float64_atomic_add(
+ mut self,
+ shader_buffer_float64_atomic_add: bool,
+ ) -> Self {
+ self.inner.shader_buffer_float64_atomic_add = shader_buffer_float64_atomic_add.into();
+ self
+ }
+ #[inline]
+ pub fn shader_shared_float32_atomics(mut self, shader_shared_float32_atomics: bool) -> Self {
+ self.inner.shader_shared_float32_atomics = shader_shared_float32_atomics.into();
+ self
+ }
+ #[inline]
+ pub fn shader_shared_float32_atomic_add(
+ mut self,
+ shader_shared_float32_atomic_add: bool,
+ ) -> Self {
+ self.inner.shader_shared_float32_atomic_add = shader_shared_float32_atomic_add.into();
+ self
+ }
+ #[inline]
+ pub fn shader_shared_float64_atomics(mut self, shader_shared_float64_atomics: bool) -> Self {
+ self.inner.shader_shared_float64_atomics = shader_shared_float64_atomics.into();
+ self
+ }
+ #[inline]
+ pub fn shader_shared_float64_atomic_add(
+ mut self,
+ shader_shared_float64_atomic_add: bool,
+ ) -> Self {
+ self.inner.shader_shared_float64_atomic_add = shader_shared_float64_atomic_add.into();
+ self
+ }
+ #[inline]
+ pub fn shader_image_float32_atomics(mut self, shader_image_float32_atomics: bool) -> Self {
+ self.inner.shader_image_float32_atomics = shader_image_float32_atomics.into();
+ self
+ }
+ #[inline]
+ pub fn shader_image_float32_atomic_add(
+ mut self,
+ shader_image_float32_atomic_add: bool,
+ ) -> Self {
+ self.inner.shader_image_float32_atomic_add = shader_image_float32_atomic_add.into();
+ self
+ }
+ #[inline]
+ pub fn sparse_image_float32_atomics(mut self, sparse_image_float32_atomics: bool) -> Self {
+ self.inner.sparse_image_float32_atomics = sparse_image_float32_atomics.into();
+ self
+ }
+ #[inline]
+ pub fn sparse_image_float32_atomic_add(
+ mut self,
+ sparse_image_float32_atomic_add: bool,
+ ) -> Self {
+ self.inner.sparse_image_float32_atomic_add = sparse_image_float32_atomic_add.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceShaderAtomicFloatFeaturesEXT {
+ self.inner
+ }
+}
+// Binding for VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT. #[repr(C)]: keep field
+// order in sync with the C definition.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT.html>"]
+pub struct PhysicalDeviceShaderAtomicFloat2FeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_buffer_float16_atomics: Bool32,
+ pub shader_buffer_float16_atomic_add: Bool32,
+ pub shader_buffer_float16_atomic_min_max: Bool32,
+ pub shader_buffer_float32_atomic_min_max: Bool32,
+ pub shader_buffer_float64_atomic_min_max: Bool32,
+ pub shader_shared_float16_atomics: Bool32,
+ pub shader_shared_float16_atomic_add: Bool32,
+ pub shader_shared_float16_atomic_min_max: Bool32,
+ pub shader_shared_float32_atomic_min_max: Bool32,
+ pub shader_shared_float64_atomic_min_max: Bool32,
+ pub shader_image_float32_atomic_min_max: Bool32,
+ pub sparse_image_float32_atomic_min_max: Bool32,
+}
+// Default pre-fills s_type with the matching StructureType tag and nulls p_next.
+impl ::std::default::Default for PhysicalDeviceShaderAtomicFloat2FeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shader_buffer_float16_atomics: Bool32::default(),
+ shader_buffer_float16_atomic_add: Bool32::default(),
+ shader_buffer_float16_atomic_min_max: Bool32::default(),
+ shader_buffer_float32_atomic_min_max: Bool32::default(),
+ shader_buffer_float64_atomic_min_max: Bool32::default(),
+ shader_shared_float16_atomics: Bool32::default(),
+ shader_shared_float16_atomic_add: Bool32::default(),
+ shader_shared_float16_atomic_min_max: Bool32::default(),
+ shader_shared_float32_atomic_min_max: Bool32::default(),
+ shader_shared_float64_atomic_min_max: Bool32::default(),
+ shader_image_float32_atomic_min_max: Bool32::default(),
+ sparse_image_float32_atomic_min_max: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceShaderAtomicFloat2FeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT;
+}
+impl PhysicalDeviceShaderAtomicFloat2FeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceShaderAtomicFloat2FeaturesEXTBuilder<'a> {
+ PhysicalDeviceShaderAtomicFloat2FeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; PhantomData<&'a ()> carries the builder lifetime.
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderAtomicFloat2FeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceShaderAtomicFloat2FeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceShaderAtomicFloat2FeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderAtomicFloat2FeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderAtomicFloat2FeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderAtomicFloat2FeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderAtomicFloat2FeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderAtomicFloat2FeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable setters, one per feature flag; bool arguments become Bool32 via .into().
+impl<'a> PhysicalDeviceShaderAtomicFloat2FeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn shader_buffer_float16_atomics(mut self, shader_buffer_float16_atomics: bool) -> Self {
+ self.inner.shader_buffer_float16_atomics = shader_buffer_float16_atomics.into();
+ self
+ }
+ #[inline]
+ pub fn shader_buffer_float16_atomic_add(
+ mut self,
+ shader_buffer_float16_atomic_add: bool,
+ ) -> Self {
+ self.inner.shader_buffer_float16_atomic_add = shader_buffer_float16_atomic_add.into();
+ self
+ }
+ #[inline]
+ pub fn shader_buffer_float16_atomic_min_max(
+ mut self,
+ shader_buffer_float16_atomic_min_max: bool,
+ ) -> Self {
+ self.inner.shader_buffer_float16_atomic_min_max =
+ shader_buffer_float16_atomic_min_max.into();
+ self
+ }
+ #[inline]
+ pub fn shader_buffer_float32_atomic_min_max(
+ mut self,
+ shader_buffer_float32_atomic_min_max: bool,
+ ) -> Self {
+ self.inner.shader_buffer_float32_atomic_min_max =
+ shader_buffer_float32_atomic_min_max.into();
+ self
+ }
+ #[inline]
+ pub fn shader_buffer_float64_atomic_min_max(
+ mut self,
+ shader_buffer_float64_atomic_min_max: bool,
+ ) -> Self {
+ self.inner.shader_buffer_float64_atomic_min_max =
+ shader_buffer_float64_atomic_min_max.into();
+ self
+ }
+ #[inline]
+ pub fn shader_shared_float16_atomics(mut self, shader_shared_float16_atomics: bool) -> Self {
+ self.inner.shader_shared_float16_atomics = shader_shared_float16_atomics.into();
+ self
+ }
+ #[inline]
+ pub fn shader_shared_float16_atomic_add(
+ mut self,
+ shader_shared_float16_atomic_add: bool,
+ ) -> Self {
+ self.inner.shader_shared_float16_atomic_add = shader_shared_float16_atomic_add.into();
+ self
+ }
+ #[inline]
+ pub fn shader_shared_float16_atomic_min_max(
+ mut self,
+ shader_shared_float16_atomic_min_max: bool,
+ ) -> Self {
+ self.inner.shader_shared_float16_atomic_min_max =
+ shader_shared_float16_atomic_min_max.into();
+ self
+ }
+ #[inline]
+ pub fn shader_shared_float32_atomic_min_max(
+ mut self,
+ shader_shared_float32_atomic_min_max: bool,
+ ) -> Self {
+ self.inner.shader_shared_float32_atomic_min_max =
+ shader_shared_float32_atomic_min_max.into();
+ self
+ }
+ #[inline]
+ pub fn shader_shared_float64_atomic_min_max(
+ mut self,
+ shader_shared_float64_atomic_min_max: bool,
+ ) -> Self {
+ self.inner.shader_shared_float64_atomic_min_max =
+ shader_shared_float64_atomic_min_max.into();
+ self
+ }
+ #[inline]
+ pub fn shader_image_float32_atomic_min_max(
+ mut self,
+ shader_image_float32_atomic_min_max: bool,
+ ) -> Self {
+ self.inner.shader_image_float32_atomic_min_max = shader_image_float32_atomic_min_max.into();
+ self
+ }
+ #[inline]
+ pub fn sparse_image_float32_atomic_min_max(
+ mut self,
+ sparse_image_float32_atomic_min_max: bool,
+ ) -> Self {
+ self.inner.sparse_image_float32_atomic_min_max = sparse_image_float32_atomic_min_max.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceShaderAtomicFloat2FeaturesEXT {
+ self.inner
+ }
+}
+// Binding for VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT. #[repr(C)]: keep field
+// order in sync with the C definition.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT.html>"]
+pub struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub vertex_attribute_instance_rate_divisor: Bool32,
+ pub vertex_attribute_instance_rate_zero_divisor: Bool32,
+}
+// Default pre-fills s_type with the matching StructureType tag and nulls p_next.
+impl ::std::default::Default for PhysicalDeviceVertexAttributeDivisorFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ vertex_attribute_instance_rate_divisor: Bool32::default(),
+ vertex_attribute_instance_rate_zero_divisor: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceVertexAttributeDivisorFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT;
+}
+impl PhysicalDeviceVertexAttributeDivisorFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder<'a> {
+ PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; PhantomData<&'a ()> carries the builder lifetime.
+#[repr(transparent)]
+pub struct PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceVertexAttributeDivisorFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVertexAttributeDivisorFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVertexAttributeDivisorFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable setters; bool arguments become Bool32 via .into().
+impl<'a> PhysicalDeviceVertexAttributeDivisorFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn vertex_attribute_instance_rate_divisor(
+ mut self,
+ vertex_attribute_instance_rate_divisor: bool,
+ ) -> Self {
+ self.inner.vertex_attribute_instance_rate_divisor =
+ vertex_attribute_instance_rate_divisor.into();
+ self
+ }
+ #[inline]
+ pub fn vertex_attribute_instance_rate_zero_divisor(
+ mut self,
+ vertex_attribute_instance_rate_zero_divisor: bool,
+ ) -> Self {
+ self.inner.vertex_attribute_instance_rate_zero_divisor =
+ vertex_attribute_instance_rate_zero_divisor.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceVertexAttributeDivisorFeaturesEXT {
+ self.inner
+ }
+}
+// Binding for VkQueueFamilyCheckpointPropertiesNV. #[repr(C)]: keep field order in sync
+// with the C definition.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueueFamilyCheckpointPropertiesNV.html>"]
+pub struct QueueFamilyCheckpointPropertiesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub checkpoint_execution_stage_mask: PipelineStageFlags,
+}
+// Default pre-fills s_type with the matching StructureType tag and nulls p_next.
+impl ::std::default::Default for QueueFamilyCheckpointPropertiesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ checkpoint_execution_stage_mask: PipelineStageFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for QueueFamilyCheckpointPropertiesNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV;
+}
+impl QueueFamilyCheckpointPropertiesNV {
+ pub fn builder<'a>() -> QueueFamilyCheckpointPropertiesNVBuilder<'a> {
+ QueueFamilyCheckpointPropertiesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; PhantomData<&'a ()> carries the builder lifetime.
+#[repr(transparent)]
+pub struct QueueFamilyCheckpointPropertiesNVBuilder<'a> {
+ inner: QueueFamilyCheckpointPropertiesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of QueueFamilyProperties2 (query output chain).
+unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyCheckpointPropertiesNVBuilder<'_> {}
+unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyCheckpointPropertiesNV {}
+impl<'a> ::std::ops::Deref for QueueFamilyCheckpointPropertiesNVBuilder<'a> {
+ type Target = QueueFamilyCheckpointPropertiesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for QueueFamilyCheckpointPropertiesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> QueueFamilyCheckpointPropertiesNVBuilder<'a> {
+ #[inline]
+ pub fn checkpoint_execution_stage_mask(
+ mut self,
+ checkpoint_execution_stage_mask: PipelineStageFlags,
+ ) -> Self {
+ self.inner.checkpoint_execution_stage_mask = checkpoint_execution_stage_mask;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> QueueFamilyCheckpointPropertiesNV {
+ self.inner
+ }
+}
+// Binding for VkCheckpointDataNV. #[repr(C)]: keep field order in sync with the C
+// definition. Note: no Extends* impls — this struct is not a p_next extension here.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCheckpointDataNV.html>"]
+pub struct CheckpointDataNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub stage: PipelineStageFlags,
+ pub p_checkpoint_marker: *mut c_void,
+}
+// Default pre-fills s_type with the matching StructureType tag and nulls both pointers.
+impl ::std::default::Default for CheckpointDataNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ stage: PipelineStageFlags::default(),
+ p_checkpoint_marker: ::std::ptr::null_mut(),
+ }
+ }
+}
+unsafe impl TaggedStructure for CheckpointDataNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::CHECKPOINT_DATA_NV;
+}
+impl CheckpointDataNV {
+ pub fn builder<'a>() -> CheckpointDataNVBuilder<'a> {
+ CheckpointDataNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; PhantomData<&'a ()> carries the builder lifetime.
+#[repr(transparent)]
+pub struct CheckpointDataNVBuilder<'a> {
+ inner: CheckpointDataNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CheckpointDataNVBuilder<'a> {
+ type Target = CheckpointDataNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for CheckpointDataNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> CheckpointDataNVBuilder<'a> {
+ #[inline]
+ pub fn stage(mut self, stage: PipelineStageFlags) -> Self {
+ self.inner.stage = stage;
+ self
+ }
+ // Stores the raw pointer as-is; the builder lifetime does not track this *mut c_void.
+ #[inline]
+ pub fn checkpoint_marker(mut self, checkpoint_marker: *mut c_void) -> Self {
+ self.inner.p_checkpoint_marker = checkpoint_marker;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> CheckpointDataNV {
+ self.inner
+ }
+}
+// Binding for VkPhysicalDeviceDepthStencilResolveProperties. #[repr(C)]: keep field
+// order in sync with the C definition.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDepthStencilResolveProperties.html>"]
+pub struct PhysicalDeviceDepthStencilResolveProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub supported_depth_resolve_modes: ResolveModeFlags,
+ pub supported_stencil_resolve_modes: ResolveModeFlags,
+ pub independent_resolve_none: Bool32,
+ pub independent_resolve: Bool32,
+}
+// Default pre-fills s_type with the matching StructureType tag and nulls p_next.
+impl ::std::default::Default for PhysicalDeviceDepthStencilResolveProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ supported_depth_resolve_modes: ResolveModeFlags::default(),
+ supported_stencil_resolve_modes: ResolveModeFlags::default(),
+ independent_resolve_none: Bool32::default(),
+ independent_resolve: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceDepthStencilResolveProperties {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES;
+}
+impl PhysicalDeviceDepthStencilResolveProperties {
+ pub fn builder<'a>() -> PhysicalDeviceDepthStencilResolvePropertiesBuilder<'a> {
+ PhysicalDeviceDepthStencilResolvePropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; PhantomData<&'a ()> carries the builder lifetime.
+#[repr(transparent)]
+pub struct PhysicalDeviceDepthStencilResolvePropertiesBuilder<'a> {
+ inner: PhysicalDeviceDepthStencilResolveProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of PhysicalDeviceProperties2 (query output chain).
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceDepthStencilResolvePropertiesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDepthStencilResolveProperties {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceDepthStencilResolvePropertiesBuilder<'a> {
+ type Target = PhysicalDeviceDepthStencilResolveProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceDepthStencilResolvePropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Chainable setters; Bool32 fields take bool and convert via .into().
+impl<'a> PhysicalDeviceDepthStencilResolvePropertiesBuilder<'a> {
+ #[inline]
+ pub fn supported_depth_resolve_modes(
+ mut self,
+ supported_depth_resolve_modes: ResolveModeFlags,
+ ) -> Self {
+ self.inner.supported_depth_resolve_modes = supported_depth_resolve_modes;
+ self
+ }
+ #[inline]
+ pub fn supported_stencil_resolve_modes(
+ mut self,
+ supported_stencil_resolve_modes: ResolveModeFlags,
+ ) -> Self {
+ self.inner.supported_stencil_resolve_modes = supported_stencil_resolve_modes;
+ self
+ }
+ #[inline]
+ pub fn independent_resolve_none(mut self, independent_resolve_none: bool) -> Self {
+ self.inner.independent_resolve_none = independent_resolve_none.into();
+ self
+ }
+ #[inline]
+ pub fn independent_resolve(mut self, independent_resolve: bool) -> Self {
+ self.inner.independent_resolve = independent_resolve.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceDepthStencilResolveProperties {
+ self.inner
+ }
+}
+// Binding for VkSubpassDescriptionDepthStencilResolve. #[repr(C)]: keep field order in
+// sync with the C definition. p_next is *const here (input-only struct).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubpassDescriptionDepthStencilResolve.html>"]
+pub struct SubpassDescriptionDepthStencilResolve {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub depth_resolve_mode: ResolveModeFlags,
+ pub stencil_resolve_mode: ResolveModeFlags,
+ pub p_depth_stencil_resolve_attachment: *const AttachmentReference2,
+}
+// Default pre-fills s_type with the matching StructureType tag and nulls both pointers.
+impl ::std::default::Default for SubpassDescriptionDepthStencilResolve {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ depth_resolve_mode: ResolveModeFlags::default(),
+ stencil_resolve_mode: ResolveModeFlags::default(),
+ p_depth_stencil_resolve_attachment: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SubpassDescriptionDepthStencilResolve {
+ const STRUCTURE_TYPE: StructureType = StructureType::SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE;
+}
+impl SubpassDescriptionDepthStencilResolve {
+ pub fn builder<'a>() -> SubpassDescriptionDepthStencilResolveBuilder<'a> {
+ SubpassDescriptionDepthStencilResolveBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; PhantomData<&'a ()> ties borrowed pointers stored
+// by the setters below to the builder's lifetime.
+#[repr(transparent)]
+pub struct SubpassDescriptionDepthStencilResolveBuilder<'a> {
+ inner: SubpassDescriptionDepthStencilResolve,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of SubpassDescription2.
+unsafe impl ExtendsSubpassDescription2 for SubpassDescriptionDepthStencilResolveBuilder<'_> {}
+unsafe impl ExtendsSubpassDescription2 for SubpassDescriptionDepthStencilResolve {}
+impl<'a> ::std::ops::Deref for SubpassDescriptionDepthStencilResolveBuilder<'a> {
+ type Target = SubpassDescriptionDepthStencilResolve;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SubpassDescriptionDepthStencilResolveBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SubpassDescriptionDepthStencilResolveBuilder<'a> {
+ #[inline]
+ pub fn depth_resolve_mode(mut self, depth_resolve_mode: ResolveModeFlags) -> Self {
+ self.inner.depth_resolve_mode = depth_resolve_mode;
+ self
+ }
+ #[inline]
+ pub fn stencil_resolve_mode(mut self, stencil_resolve_mode: ResolveModeFlags) -> Self {
+ self.inner.stencil_resolve_mode = stencil_resolve_mode;
+ self
+ }
+ // Stores &'a AttachmentReference2 as a raw pointer; the 'a bound keeps the borrow alive
+ // for as long as the builder (discarded if build() is called — see doc below).
+ #[inline]
+ pub fn depth_stencil_resolve_attachment(
+ mut self,
+ depth_stencil_resolve_attachment: &'a AttachmentReference2,
+ ) -> Self {
+ self.inner.p_depth_stencil_resolve_attachment = depth_stencil_resolve_attachment;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SubpassDescriptionDepthStencilResolve {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageViewASTCDecodeModeEXT.html>"]
+pub struct ImageViewASTCDecodeModeEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub decode_mode: Format,
+}
+impl ::std::default::Default for ImageViewASTCDecodeModeEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ decode_mode: Format::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImageViewASTCDecodeModeEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_ASTC_DECODE_MODE_EXT;
+}
+impl ImageViewASTCDecodeModeEXT {
+ pub fn builder<'a>() -> ImageViewASTCDecodeModeEXTBuilder<'a> {
+ ImageViewASTCDecodeModeEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImageViewASTCDecodeModeEXTBuilder<'a> {
+ inner: ImageViewASTCDecodeModeEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsImageViewCreateInfo for ImageViewASTCDecodeModeEXTBuilder<'_> {}
+unsafe impl ExtendsImageViewCreateInfo for ImageViewASTCDecodeModeEXT {}
+impl<'a> ::std::ops::Deref for ImageViewASTCDecodeModeEXTBuilder<'a> {
+ type Target = ImageViewASTCDecodeModeEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageViewASTCDecodeModeEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImageViewASTCDecodeModeEXTBuilder<'a> {
+ #[inline]
+ pub fn decode_mode(mut self, decode_mode: Format) -> Self {
+ self.inner.decode_mode = decode_mode;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageViewASTCDecodeModeEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceASTCDecodeFeaturesEXT.html>"]
+pub struct PhysicalDeviceASTCDecodeFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub decode_mode_shared_exponent: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceASTCDecodeFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ decode_mode_shared_exponent: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceASTCDecodeFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT;
+}
+impl PhysicalDeviceASTCDecodeFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceASTCDecodeFeaturesEXTBuilder<'a> {
+ PhysicalDeviceASTCDecodeFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceASTCDecodeFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceASTCDecodeFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceASTCDecodeFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceASTCDecodeFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceASTCDecodeFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceASTCDecodeFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceASTCDecodeFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceASTCDecodeFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceASTCDecodeFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceASTCDecodeFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn decode_mode_shared_exponent(mut self, decode_mode_shared_exponent: bool) -> Self {
+ self.inner.decode_mode_shared_exponent = decode_mode_shared_exponent.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceASTCDecodeFeaturesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceTransformFeedbackFeaturesEXT.html>"]
+pub struct PhysicalDeviceTransformFeedbackFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub transform_feedback: Bool32,
+ pub geometry_streams: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceTransformFeedbackFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ transform_feedback: Bool32::default(),
+ geometry_streams: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceTransformFeedbackFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT;
+}
+impl PhysicalDeviceTransformFeedbackFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceTransformFeedbackFeaturesEXTBuilder<'a> {
+ PhysicalDeviceTransformFeedbackFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceTransformFeedbackFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceTransformFeedbackFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceTransformFeedbackFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceTransformFeedbackFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTransformFeedbackFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTransformFeedbackFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceTransformFeedbackFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceTransformFeedbackFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceTransformFeedbackFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceTransformFeedbackFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn transform_feedback(mut self, transform_feedback: bool) -> Self {
+ self.inner.transform_feedback = transform_feedback.into();
+ self
+ }
+ #[inline]
+ pub fn geometry_streams(mut self, geometry_streams: bool) -> Self {
+ self.inner.geometry_streams = geometry_streams.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceTransformFeedbackFeaturesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceTransformFeedbackPropertiesEXT.html>"]
+pub struct PhysicalDeviceTransformFeedbackPropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub max_transform_feedback_streams: u32,
+ pub max_transform_feedback_buffers: u32,
+ pub max_transform_feedback_buffer_size: DeviceSize,
+ pub max_transform_feedback_stream_data_size: u32,
+ pub max_transform_feedback_buffer_data_size: u32,
+ pub max_transform_feedback_buffer_data_stride: u32,
+ pub transform_feedback_queries: Bool32,
+ pub transform_feedback_streams_lines_triangles: Bool32,
+ pub transform_feedback_rasterization_stream_select: Bool32,
+ pub transform_feedback_draw: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceTransformFeedbackPropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ max_transform_feedback_streams: u32::default(),
+ max_transform_feedback_buffers: u32::default(),
+ max_transform_feedback_buffer_size: DeviceSize::default(),
+ max_transform_feedback_stream_data_size: u32::default(),
+ max_transform_feedback_buffer_data_size: u32::default(),
+ max_transform_feedback_buffer_data_stride: u32::default(),
+ transform_feedback_queries: Bool32::default(),
+ transform_feedback_streams_lines_triangles: Bool32::default(),
+ transform_feedback_rasterization_stream_select: Bool32::default(),
+ transform_feedback_draw: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceTransformFeedbackPropertiesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT;
+}
+impl PhysicalDeviceTransformFeedbackPropertiesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
+ PhysicalDeviceTransformFeedbackPropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
+ inner: PhysicalDeviceTransformFeedbackPropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceTransformFeedbackPropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
+ type Target = PhysicalDeviceTransformFeedbackPropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceTransformFeedbackPropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn max_transform_feedback_streams(mut self, max_transform_feedback_streams: u32) -> Self {
+ self.inner.max_transform_feedback_streams = max_transform_feedback_streams;
+ self
+ }
+ #[inline]
+ pub fn max_transform_feedback_buffers(mut self, max_transform_feedback_buffers: u32) -> Self {
+ self.inner.max_transform_feedback_buffers = max_transform_feedback_buffers;
+ self
+ }
+ #[inline]
+ pub fn max_transform_feedback_buffer_size(
+ mut self,
+ max_transform_feedback_buffer_size: DeviceSize,
+ ) -> Self {
+ self.inner.max_transform_feedback_buffer_size = max_transform_feedback_buffer_size;
+ self
+ }
+ #[inline]
+ pub fn max_transform_feedback_stream_data_size(
+ mut self,
+ max_transform_feedback_stream_data_size: u32,
+ ) -> Self {
+ self.inner.max_transform_feedback_stream_data_size =
+ max_transform_feedback_stream_data_size;
+ self
+ }
+ #[inline]
+ pub fn max_transform_feedback_buffer_data_size(
+ mut self,
+ max_transform_feedback_buffer_data_size: u32,
+ ) -> Self {
+ self.inner.max_transform_feedback_buffer_data_size =
+ max_transform_feedback_buffer_data_size;
+ self
+ }
+ #[inline]
+ pub fn max_transform_feedback_buffer_data_stride(
+ mut self,
+ max_transform_feedback_buffer_data_stride: u32,
+ ) -> Self {
+ self.inner.max_transform_feedback_buffer_data_stride =
+ max_transform_feedback_buffer_data_stride;
+ self
+ }
+ #[inline]
+ pub fn transform_feedback_queries(mut self, transform_feedback_queries: bool) -> Self {
+ self.inner.transform_feedback_queries = transform_feedback_queries.into();
+ self
+ }
+ #[inline]
+ pub fn transform_feedback_streams_lines_triangles(
+ mut self,
+ transform_feedback_streams_lines_triangles: bool,
+ ) -> Self {
+ self.inner.transform_feedback_streams_lines_triangles =
+ transform_feedback_streams_lines_triangles.into();
+ self
+ }
+ #[inline]
+ pub fn transform_feedback_rasterization_stream_select(
+ mut self,
+ transform_feedback_rasterization_stream_select: bool,
+ ) -> Self {
+ self.inner.transform_feedback_rasterization_stream_select =
+ transform_feedback_rasterization_stream_select.into();
+ self
+ }
+ #[inline]
+ pub fn transform_feedback_draw(mut self, transform_feedback_draw: bool) -> Self {
+ self.inner.transform_feedback_draw = transform_feedback_draw.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceTransformFeedbackPropertiesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRasterizationStateStreamCreateInfoEXT.html>"]
+pub struct PipelineRasterizationStateStreamCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: PipelineRasterizationStateStreamCreateFlagsEXT,
+ pub rasterization_stream: u32,
+}
+impl ::std::default::Default for PipelineRasterizationStateStreamCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: PipelineRasterizationStateStreamCreateFlagsEXT::default(),
+ rasterization_stream: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineRasterizationStateStreamCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT;
+}
+impl PipelineRasterizationStateStreamCreateInfoEXT {
+ pub fn builder<'a>() -> PipelineRasterizationStateStreamCreateInfoEXTBuilder<'a> {
+ PipelineRasterizationStateStreamCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineRasterizationStateStreamCreateInfoEXTBuilder<'a> {
+ inner: PipelineRasterizationStateStreamCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPipelineRasterizationStateCreateInfo
+ for PipelineRasterizationStateStreamCreateInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPipelineRasterizationStateCreateInfo
+ for PipelineRasterizationStateStreamCreateInfoEXT
+{
+}
+impl<'a> ::std::ops::Deref for PipelineRasterizationStateStreamCreateInfoEXTBuilder<'a> {
+ type Target = PipelineRasterizationStateStreamCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineRasterizationStateStreamCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineRasterizationStateStreamCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: PipelineRasterizationStateStreamCreateFlagsEXT) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn rasterization_stream(mut self, rasterization_stream: u32) -> Self {
+ self.inner.rasterization_stream = rasterization_stream;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineRasterizationStateStreamCreateInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV.html>"]
+pub struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub representative_fragment_test: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceRepresentativeFragmentTestFeaturesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ representative_fragment_test: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceRepresentativeFragmentTestFeaturesNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV;
+}
+impl PhysicalDeviceRepresentativeFragmentTestFeaturesNV {
+ pub fn builder<'a>() -> PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder<'a> {
+ PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder<'a> {
+ inner: PhysicalDeviceRepresentativeFragmentTestFeaturesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRepresentativeFragmentTestFeaturesNV {}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRepresentativeFragmentTestFeaturesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder<'a> {
+ type Target = PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceRepresentativeFragmentTestFeaturesNVBuilder<'a> {
+ #[inline]
+ pub fn representative_fragment_test(mut self, representative_fragment_test: bool) -> Self {
+ self.inner.representative_fragment_test = representative_fragment_test.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceRepresentativeFragmentTestFeaturesNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRepresentativeFragmentTestStateCreateInfoNV.html>"]
+pub struct PipelineRepresentativeFragmentTestStateCreateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub representative_fragment_test_enable: Bool32,
+}
+impl ::std::default::Default for PipelineRepresentativeFragmentTestStateCreateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ representative_fragment_test_enable: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineRepresentativeFragmentTestStateCreateInfoNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV;
+}
+impl PipelineRepresentativeFragmentTestStateCreateInfoNV {
+ pub fn builder<'a>() -> PipelineRepresentativeFragmentTestStateCreateInfoNVBuilder<'a> {
+ PipelineRepresentativeFragmentTestStateCreateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineRepresentativeFragmentTestStateCreateInfoNVBuilder<'a> {
+ inner: PipelineRepresentativeFragmentTestStateCreateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsGraphicsPipelineCreateInfo
+ for PipelineRepresentativeFragmentTestStateCreateInfoNVBuilder<'_>
+{
+}
+unsafe impl ExtendsGraphicsPipelineCreateInfo
+ for PipelineRepresentativeFragmentTestStateCreateInfoNV
+{
+}
+impl<'a> ::std::ops::Deref for PipelineRepresentativeFragmentTestStateCreateInfoNVBuilder<'a> {
+ type Target = PipelineRepresentativeFragmentTestStateCreateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineRepresentativeFragmentTestStateCreateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineRepresentativeFragmentTestStateCreateInfoNVBuilder<'a> {
+ #[inline]
+ pub fn representative_fragment_test_enable(
+ mut self,
+ representative_fragment_test_enable: bool,
+ ) -> Self {
+ self.inner.representative_fragment_test_enable = representative_fragment_test_enable.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineRepresentativeFragmentTestStateCreateInfoNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceExclusiveScissorFeaturesNV.html>"]
+pub struct PhysicalDeviceExclusiveScissorFeaturesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub exclusive_scissor: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceExclusiveScissorFeaturesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ exclusive_scissor: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceExclusiveScissorFeaturesNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV;
+}
+impl PhysicalDeviceExclusiveScissorFeaturesNV {
+ pub fn builder<'a>() -> PhysicalDeviceExclusiveScissorFeaturesNVBuilder<'a> {
+ PhysicalDeviceExclusiveScissorFeaturesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceExclusiveScissorFeaturesNVBuilder<'a> {
+ inner: PhysicalDeviceExclusiveScissorFeaturesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceExclusiveScissorFeaturesNVBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceExclusiveScissorFeaturesNV {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExclusiveScissorFeaturesNVBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExclusiveScissorFeaturesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceExclusiveScissorFeaturesNVBuilder<'a> {
+ type Target = PhysicalDeviceExclusiveScissorFeaturesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceExclusiveScissorFeaturesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceExclusiveScissorFeaturesNVBuilder<'a> {
+ #[inline]
+ pub fn exclusive_scissor(mut self, exclusive_scissor: bool) -> Self {
+ self.inner.exclusive_scissor = exclusive_scissor.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceExclusiveScissorFeaturesNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineViewportExclusiveScissorStateCreateInfoNV.html>"]
+pub struct PipelineViewportExclusiveScissorStateCreateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub exclusive_scissor_count: u32,
+ pub p_exclusive_scissors: *const Rect2D,
+}
+impl ::std::default::Default for PipelineViewportExclusiveScissorStateCreateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ exclusive_scissor_count: u32::default(),
+ p_exclusive_scissors: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineViewportExclusiveScissorStateCreateInfoNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV;
+}
+impl PipelineViewportExclusiveScissorStateCreateInfoNV {
+ pub fn builder<'a>() -> PipelineViewportExclusiveScissorStateCreateInfoNVBuilder<'a> {
+ PipelineViewportExclusiveScissorStateCreateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineViewportExclusiveScissorStateCreateInfoNVBuilder<'a> {
+ inner: PipelineViewportExclusiveScissorStateCreateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPipelineViewportStateCreateInfo
+ for PipelineViewportExclusiveScissorStateCreateInfoNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPipelineViewportStateCreateInfo
+ for PipelineViewportExclusiveScissorStateCreateInfoNV
+{
+}
+impl<'a> ::std::ops::Deref for PipelineViewportExclusiveScissorStateCreateInfoNVBuilder<'a> {
+ type Target = PipelineViewportExclusiveScissorStateCreateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineViewportExclusiveScissorStateCreateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineViewportExclusiveScissorStateCreateInfoNVBuilder<'a> {
+ #[inline]
+ pub fn exclusive_scissors(mut self, exclusive_scissors: &'a [Rect2D]) -> Self {
+ self.inner.exclusive_scissor_count = exclusive_scissors.len() as _;
+ self.inner.p_exclusive_scissors = exclusive_scissors.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineViewportExclusiveScissorStateCreateInfoNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceCornerSampledImageFeaturesNV.html>"]
+pub struct PhysicalDeviceCornerSampledImageFeaturesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub corner_sampled_image: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceCornerSampledImageFeaturesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ corner_sampled_image: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceCornerSampledImageFeaturesNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV;
+}
+impl PhysicalDeviceCornerSampledImageFeaturesNV {
+ pub fn builder<'a>() -> PhysicalDeviceCornerSampledImageFeaturesNVBuilder<'a> {
+ PhysicalDeviceCornerSampledImageFeaturesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceCornerSampledImageFeaturesNVBuilder<'a> {
+ inner: PhysicalDeviceCornerSampledImageFeaturesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceCornerSampledImageFeaturesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCornerSampledImageFeaturesNV {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCornerSampledImageFeaturesNVBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCornerSampledImageFeaturesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceCornerSampledImageFeaturesNVBuilder<'a> {
+ type Target = PhysicalDeviceCornerSampledImageFeaturesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceCornerSampledImageFeaturesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceCornerSampledImageFeaturesNVBuilder<'a> {
+ #[inline]
+ pub fn corner_sampled_image(mut self, corner_sampled_image: bool) -> Self {
+ self.inner.corner_sampled_image = corner_sampled_image.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceCornerSampledImageFeaturesNV {
+ self.inner
+ }
+}
// Feature-query struct for VK_NV_compute_shader_derivatives, plus its builder.
// NOTE(review): looks machine-generated from the Vulkan registry (vk.xml);
// prefer regenerating over hand-editing.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceComputeShaderDerivativesFeaturesNV.html>"]
pub struct PhysicalDeviceComputeShaderDerivativesFeaturesNV {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub compute_derivative_group_quads: Bool32,
    pub compute_derivative_group_linear: Bool32,
}
// Default pre-fills `s_type` with this struct's Vulkan tag and a null
// `p_next`, so a default value is already correctly tagged for the API.
impl ::std::default::Default for PhysicalDeviceComputeShaderDerivativesFeaturesNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            compute_derivative_group_quads: Bool32::default(),
            compute_derivative_group_linear: Bool32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceComputeShaderDerivativesFeaturesNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV;
}
impl PhysicalDeviceComputeShaderDerivativesFeaturesNV {
    pub fn builder<'a>() -> PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder<'a> {
        PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// repr(transparent): the builder has the same layout as the wrapped struct,
// which is what makes the Deref-based pass-through sound.
#[repr(transparent)]
pub struct PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder<'a> {
    inner: PhysicalDeviceComputeShaderDerivativesFeaturesNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY markers: this struct may be chained into the `p_next` lists of
// PhysicalDeviceFeatures2 and DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceComputeShaderDerivativesFeaturesNV {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceComputeShaderDerivativesFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder<'a> {
    type Target = PhysicalDeviceComputeShaderDerivativesFeaturesNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceComputeShaderDerivativesFeaturesNVBuilder<'a> {
    // Chainable setters; each converts `bool` into the FFI `Bool32`.
    #[inline]
    pub fn compute_derivative_group_quads(mut self, compute_derivative_group_quads: bool) -> Self {
        self.inner.compute_derivative_group_quads = compute_derivative_group_quads.into();
        self
    }
    #[inline]
    pub fn compute_derivative_group_linear(
        mut self,
        compute_derivative_group_linear: bool,
    ) -> Self {
        self.inner.compute_derivative_group_linear = compute_derivative_group_linear.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceComputeShaderDerivativesFeaturesNV {
        self.inner
    }
}
// Feature-query struct for VK_NV_shader_image_footprint, plus its builder.
// NOTE(review): looks machine-generated from the Vulkan registry (vk.xml).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderImageFootprintFeaturesNV.html>"]
pub struct PhysicalDeviceShaderImageFootprintFeaturesNV {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub image_footprint: Bool32,
}
// Default pre-fills the Vulkan `s_type` tag and a null `p_next`.
impl ::std::default::Default for PhysicalDeviceShaderImageFootprintFeaturesNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            image_footprint: Bool32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceShaderImageFootprintFeaturesNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV;
}
impl PhysicalDeviceShaderImageFootprintFeaturesNV {
    pub fn builder<'a>() -> PhysicalDeviceShaderImageFootprintFeaturesNVBuilder<'a> {
        PhysicalDeviceShaderImageFootprintFeaturesNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-transparent wrapper so builder references can stand in for the
// raw struct via Deref.
#[repr(transparent)]
pub struct PhysicalDeviceShaderImageFootprintFeaturesNVBuilder<'a> {
    inner: PhysicalDeviceShaderImageFootprintFeaturesNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY markers: chainable into PhysicalDeviceFeatures2 / DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderImageFootprintFeaturesNVBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderImageFootprintFeaturesNV {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderImageFootprintFeaturesNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderImageFootprintFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceShaderImageFootprintFeaturesNVBuilder<'a> {
    type Target = PhysicalDeviceShaderImageFootprintFeaturesNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderImageFootprintFeaturesNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceShaderImageFootprintFeaturesNVBuilder<'a> {
    // Chainable setter; converts `bool` into the FFI `Bool32`.
    #[inline]
    pub fn image_footprint(mut self, image_footprint: bool) -> Self {
        self.inner.image_footprint = image_footprint.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceShaderImageFootprintFeaturesNV {
        self.inner
    }
}
// Feature-query struct for VK_NV_dedicated_allocation_image_aliasing,
// plus its builder. NOTE(review): looks machine-generated from vk.xml.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV.html>"]
pub struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub dedicated_allocation_image_aliasing: Bool32,
}
// Default pre-fills the Vulkan `s_type` tag and a null `p_next`.
impl ::std::default::Default for PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            dedicated_allocation_image_aliasing: Bool32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV;
}
impl PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV {
    pub fn builder<'a>() -> PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNVBuilder<'a> {
        PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-transparent builder wrapper.
#[repr(transparent)]
pub struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNVBuilder<'a> {
    inner: PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY markers: chainable into PhysicalDeviceFeatures2 / DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNVBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNVBuilder<'a> {
    type Target = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut
    for PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNVBuilder<'a>
{
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNVBuilder<'a> {
    // Chainable setter; converts `bool` into the FFI `Bool32`.
    #[inline]
    pub fn dedicated_allocation_image_aliasing(
        mut self,
        dedicated_allocation_image_aliasing: bool,
    ) -> Self {
        self.inner.dedicated_allocation_image_aliasing = dedicated_allocation_image_aliasing.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV {
        self.inner
    }
}
// Feature-query struct for VK_NV_copy_memory_indirect, plus its builder.
// NOTE(review): looks machine-generated from the Vulkan registry (vk.xml).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceCopyMemoryIndirectFeaturesNV.html>"]
pub struct PhysicalDeviceCopyMemoryIndirectFeaturesNV {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub indirect_copy: Bool32,
}
// Default pre-fills the Vulkan `s_type` tag and a null `p_next`.
impl ::std::default::Default for PhysicalDeviceCopyMemoryIndirectFeaturesNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            indirect_copy: Bool32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceCopyMemoryIndirectFeaturesNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV;
}
impl PhysicalDeviceCopyMemoryIndirectFeaturesNV {
    pub fn builder<'a>() -> PhysicalDeviceCopyMemoryIndirectFeaturesNVBuilder<'a> {
        PhysicalDeviceCopyMemoryIndirectFeaturesNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-transparent builder wrapper.
#[repr(transparent)]
pub struct PhysicalDeviceCopyMemoryIndirectFeaturesNVBuilder<'a> {
    inner: PhysicalDeviceCopyMemoryIndirectFeaturesNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY markers: chainable into PhysicalDeviceFeatures2 / DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCopyMemoryIndirectFeaturesNVBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCopyMemoryIndirectFeaturesNV {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCopyMemoryIndirectFeaturesNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCopyMemoryIndirectFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceCopyMemoryIndirectFeaturesNVBuilder<'a> {
    type Target = PhysicalDeviceCopyMemoryIndirectFeaturesNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceCopyMemoryIndirectFeaturesNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceCopyMemoryIndirectFeaturesNVBuilder<'a> {
    // Chainable setter; converts `bool` into the FFI `Bool32`.
    #[inline]
    pub fn indirect_copy(mut self, indirect_copy: bool) -> Self {
        self.inner.indirect_copy = indirect_copy.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceCopyMemoryIndirectFeaturesNV {
        self.inner
    }
}
// Properties struct for VK_NV_copy_memory_indirect, plus its builder.
// Unlike the *Features* structs above, this one chains only into
// PhysicalDeviceProperties2 (no ExtendsDeviceCreateInfo impl).
// NOTE(review): looks machine-generated from the Vulkan registry (vk.xml).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceCopyMemoryIndirectPropertiesNV.html>"]
pub struct PhysicalDeviceCopyMemoryIndirectPropertiesNV {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub supported_queues: QueueFlags,
}
// Default pre-fills the Vulkan `s_type` tag and a null `p_next`.
impl ::std::default::Default for PhysicalDeviceCopyMemoryIndirectPropertiesNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            supported_queues: QueueFlags::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceCopyMemoryIndirectPropertiesNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV;
}
impl PhysicalDeviceCopyMemoryIndirectPropertiesNV {
    pub fn builder<'a>() -> PhysicalDeviceCopyMemoryIndirectPropertiesNVBuilder<'a> {
        PhysicalDeviceCopyMemoryIndirectPropertiesNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-transparent builder wrapper.
#[repr(transparent)]
pub struct PhysicalDeviceCopyMemoryIndirectPropertiesNVBuilder<'a> {
    inner: PhysicalDeviceCopyMemoryIndirectPropertiesNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY markers: chainable into PhysicalDeviceProperties2 only.
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceCopyMemoryIndirectPropertiesNVBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceCopyMemoryIndirectPropertiesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceCopyMemoryIndirectPropertiesNVBuilder<'a> {
    type Target = PhysicalDeviceCopyMemoryIndirectPropertiesNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceCopyMemoryIndirectPropertiesNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceCopyMemoryIndirectPropertiesNVBuilder<'a> {
    // Chainable setter; stores the flags value unchanged.
    #[inline]
    pub fn supported_queues(mut self, supported_queues: QueueFlags) -> Self {
        self.inner.supported_queues = supported_queues;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceCopyMemoryIndirectPropertiesNV {
        self.inner
    }
}
// Feature-query struct for VK_NV_memory_decompression, plus its builder.
// NOTE(review): looks machine-generated from the Vulkan registry (vk.xml).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMemoryDecompressionFeaturesNV.html>"]
pub struct PhysicalDeviceMemoryDecompressionFeaturesNV {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub memory_decompression: Bool32,
}
// Default pre-fills the Vulkan `s_type` tag and a null `p_next`.
impl ::std::default::Default for PhysicalDeviceMemoryDecompressionFeaturesNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            memory_decompression: Bool32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceMemoryDecompressionFeaturesNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV;
}
impl PhysicalDeviceMemoryDecompressionFeaturesNV {
    pub fn builder<'a>() -> PhysicalDeviceMemoryDecompressionFeaturesNVBuilder<'a> {
        PhysicalDeviceMemoryDecompressionFeaturesNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-transparent builder wrapper.
#[repr(transparent)]
pub struct PhysicalDeviceMemoryDecompressionFeaturesNVBuilder<'a> {
    inner: PhysicalDeviceMemoryDecompressionFeaturesNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY markers: chainable into PhysicalDeviceFeatures2 / DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMemoryDecompressionFeaturesNVBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMemoryDecompressionFeaturesNV {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMemoryDecompressionFeaturesNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMemoryDecompressionFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMemoryDecompressionFeaturesNVBuilder<'a> {
    type Target = PhysicalDeviceMemoryDecompressionFeaturesNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMemoryDecompressionFeaturesNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceMemoryDecompressionFeaturesNVBuilder<'a> {
    // Chainable setter; converts `bool` into the FFI `Bool32`.
    #[inline]
    pub fn memory_decompression(mut self, memory_decompression: bool) -> Self {
        self.inner.memory_decompression = memory_decompression.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceMemoryDecompressionFeaturesNV {
        self.inner
    }
}
// Properties struct for VK_NV_memory_decompression, plus its builder.
// Chains only into PhysicalDeviceProperties2 (no DeviceCreateInfo impl).
// NOTE(review): looks machine-generated from the Vulkan registry (vk.xml).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMemoryDecompressionPropertiesNV.html>"]
pub struct PhysicalDeviceMemoryDecompressionPropertiesNV {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub decompression_methods: MemoryDecompressionMethodFlagsNV,
    pub max_decompression_indirect_count: u64,
}
// Default pre-fills the Vulkan `s_type` tag and a null `p_next`.
impl ::std::default::Default for PhysicalDeviceMemoryDecompressionPropertiesNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            decompression_methods: MemoryDecompressionMethodFlagsNV::default(),
            max_decompression_indirect_count: u64::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceMemoryDecompressionPropertiesNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV;
}
impl PhysicalDeviceMemoryDecompressionPropertiesNV {
    pub fn builder<'a>() -> PhysicalDeviceMemoryDecompressionPropertiesNVBuilder<'a> {
        PhysicalDeviceMemoryDecompressionPropertiesNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-transparent builder wrapper.
#[repr(transparent)]
pub struct PhysicalDeviceMemoryDecompressionPropertiesNVBuilder<'a> {
    inner: PhysicalDeviceMemoryDecompressionPropertiesNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY markers: chainable into PhysicalDeviceProperties2 only.
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMemoryDecompressionPropertiesNVBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMemoryDecompressionPropertiesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMemoryDecompressionPropertiesNVBuilder<'a> {
    type Target = PhysicalDeviceMemoryDecompressionPropertiesNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMemoryDecompressionPropertiesNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceMemoryDecompressionPropertiesNVBuilder<'a> {
    // Chainable setters; values are stored unchanged.
    #[inline]
    pub fn decompression_methods(
        mut self,
        decompression_methods: MemoryDecompressionMethodFlagsNV,
    ) -> Self {
        self.inner.decompression_methods = decompression_methods;
        self
    }
    #[inline]
    pub fn max_decompression_indirect_count(
        mut self,
        max_decompression_indirect_count: u64,
    ) -> Self {
        self.inner.max_decompression_indirect_count = max_decompression_indirect_count;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceMemoryDecompressionPropertiesNV {
        self.inner
    }
}
// Count + raw-pointer pair describing a shading-rate palette
// (VK_NV_shading_rate_image). Not a `p_next`-chained struct: it has no
// `s_type`/`p_next` header and no TaggedStructure impl.
// NOTE(review): looks machine-generated from the Vulkan registry (vk.xml).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkShadingRatePaletteNV.html>"]
pub struct ShadingRatePaletteNV {
    pub shading_rate_palette_entry_count: u32,
    pub p_shading_rate_palette_entries: *const ShadingRatePaletteEntryNV,
}
impl ::std::default::Default for ShadingRatePaletteNV {
    #[inline]
    fn default() -> Self {
        Self {
            shading_rate_palette_entry_count: u32::default(),
            p_shading_rate_palette_entries: ::std::ptr::null(),
        }
    }
}
impl ShadingRatePaletteNV {
    pub fn builder<'a>() -> ShadingRatePaletteNVBuilder<'a> {
        ShadingRatePaletteNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-transparent builder; `'a` + PhantomData tie the builder to any
// slice borrowed through the setter below.
#[repr(transparent)]
pub struct ShadingRatePaletteNVBuilder<'a> {
    inner: ShadingRatePaletteNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for ShadingRatePaletteNVBuilder<'a> {
    type Target = ShadingRatePaletteNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ShadingRatePaletteNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> ShadingRatePaletteNVBuilder<'a> {
    // Takes a borrowed slice and stores both its length and its raw
    // pointer; the `'a` lifetime keeps the borrow alive in the builder.
    #[inline]
    pub fn shading_rate_palette_entries(
        mut self,
        shading_rate_palette_entries: &'a [ShadingRatePaletteEntryNV],
    ) -> Self {
        self.inner.shading_rate_palette_entry_count = shading_rate_palette_entries.len() as _;
        self.inner.p_shading_rate_palette_entries = shading_rate_palette_entries.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ShadingRatePaletteNV {
        self.inner
    }
}
// Pipeline-creation chain struct for VK_NV_shading_rate_image; extends
// PipelineViewportStateCreateInfo. `p_next` is *const here (create-info
// input), unlike the *mut of the feature/property query structs.
// NOTE(review): looks machine-generated from the Vulkan registry (vk.xml).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineViewportShadingRateImageStateCreateInfoNV.html>"]
pub struct PipelineViewportShadingRateImageStateCreateInfoNV {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub shading_rate_image_enable: Bool32,
    pub viewport_count: u32,
    pub p_shading_rate_palettes: *const ShadingRatePaletteNV,
}
// Default pre-fills the Vulkan `s_type` tag and null pointers.
impl ::std::default::Default for PipelineViewportShadingRateImageStateCreateInfoNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            shading_rate_image_enable: Bool32::default(),
            viewport_count: u32::default(),
            p_shading_rate_palettes: ::std::ptr::null(),
        }
    }
}
unsafe impl TaggedStructure for PipelineViewportShadingRateImageStateCreateInfoNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV;
}
impl PipelineViewportShadingRateImageStateCreateInfoNV {
    pub fn builder<'a>() -> PipelineViewportShadingRateImageStateCreateInfoNVBuilder<'a> {
        PipelineViewportShadingRateImageStateCreateInfoNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-transparent builder wrapper.
#[repr(transparent)]
pub struct PipelineViewportShadingRateImageStateCreateInfoNVBuilder<'a> {
    inner: PipelineViewportShadingRateImageStateCreateInfoNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY markers: chainable into PipelineViewportStateCreateInfo.
unsafe impl ExtendsPipelineViewportStateCreateInfo for PipelineViewportShadingRateImageStateCreateInfoNVBuilder<'_> {}
unsafe impl ExtendsPipelineViewportStateCreateInfo for PipelineViewportShadingRateImageStateCreateInfoNV {}
impl<'a> ::std::ops::Deref for PipelineViewportShadingRateImageStateCreateInfoNVBuilder<'a> {
    type Target = PipelineViewportShadingRateImageStateCreateInfoNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PipelineViewportShadingRateImageStateCreateInfoNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PipelineViewportShadingRateImageStateCreateInfoNVBuilder<'a> {
    #[inline]
    pub fn shading_rate_image_enable(mut self, shading_rate_image_enable: bool) -> Self {
        self.inner.shading_rate_image_enable = shading_rate_image_enable.into();
        self
    }
    // Note: this setter also overwrites `viewport_count` with the slice
    // length — one palette per viewport.
    #[inline]
    pub fn shading_rate_palettes(
        mut self,
        shading_rate_palettes: &'a [ShadingRatePaletteNV],
    ) -> Self {
        self.inner.viewport_count = shading_rate_palettes.len() as _;
        self.inner.p_shading_rate_palettes = shading_rate_palettes.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PipelineViewportShadingRateImageStateCreateInfoNV {
        self.inner
    }
}
// Feature-query struct for VK_NV_shading_rate_image, plus its builder.
// NOTE(review): looks machine-generated from the Vulkan registry (vk.xml).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShadingRateImageFeaturesNV.html>"]
pub struct PhysicalDeviceShadingRateImageFeaturesNV {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub shading_rate_image: Bool32,
    pub shading_rate_coarse_sample_order: Bool32,
}
// Default pre-fills the Vulkan `s_type` tag and a null `p_next`.
impl ::std::default::Default for PhysicalDeviceShadingRateImageFeaturesNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            shading_rate_image: Bool32::default(),
            shading_rate_coarse_sample_order: Bool32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceShadingRateImageFeaturesNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV;
}
impl PhysicalDeviceShadingRateImageFeaturesNV {
    pub fn builder<'a>() -> PhysicalDeviceShadingRateImageFeaturesNVBuilder<'a> {
        PhysicalDeviceShadingRateImageFeaturesNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-transparent builder wrapper.
#[repr(transparent)]
pub struct PhysicalDeviceShadingRateImageFeaturesNVBuilder<'a> {
    inner: PhysicalDeviceShadingRateImageFeaturesNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY markers: chainable into PhysicalDeviceFeatures2 / DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShadingRateImageFeaturesNVBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShadingRateImageFeaturesNV {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShadingRateImageFeaturesNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShadingRateImageFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceShadingRateImageFeaturesNVBuilder<'a> {
    type Target = PhysicalDeviceShadingRateImageFeaturesNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceShadingRateImageFeaturesNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceShadingRateImageFeaturesNVBuilder<'a> {
    // Chainable setters; each converts `bool` into the FFI `Bool32`.
    #[inline]
    pub fn shading_rate_image(mut self, shading_rate_image: bool) -> Self {
        self.inner.shading_rate_image = shading_rate_image.into();
        self
    }
    #[inline]
    pub fn shading_rate_coarse_sample_order(
        mut self,
        shading_rate_coarse_sample_order: bool,
    ) -> Self {
        self.inner.shading_rate_coarse_sample_order = shading_rate_coarse_sample_order.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceShadingRateImageFeaturesNV {
        self.inner
    }
}
// Properties struct for VK_NV_shading_rate_image, plus its builder.
// Chains only into PhysicalDeviceProperties2 (no DeviceCreateInfo impl).
// NOTE(review): looks machine-generated from the Vulkan registry (vk.xml).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShadingRateImagePropertiesNV.html>"]
pub struct PhysicalDeviceShadingRateImagePropertiesNV {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub shading_rate_texel_size: Extent2D,
    pub shading_rate_palette_size: u32,
    pub shading_rate_max_coarse_samples: u32,
}
// Default pre-fills the Vulkan `s_type` tag and a null `p_next`.
impl ::std::default::Default for PhysicalDeviceShadingRateImagePropertiesNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            shading_rate_texel_size: Extent2D::default(),
            shading_rate_palette_size: u32::default(),
            shading_rate_max_coarse_samples: u32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceShadingRateImagePropertiesNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV;
}
impl PhysicalDeviceShadingRateImagePropertiesNV {
    pub fn builder<'a>() -> PhysicalDeviceShadingRateImagePropertiesNVBuilder<'a> {
        PhysicalDeviceShadingRateImagePropertiesNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-transparent builder wrapper.
#[repr(transparent)]
pub struct PhysicalDeviceShadingRateImagePropertiesNVBuilder<'a> {
    inner: PhysicalDeviceShadingRateImagePropertiesNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY markers: chainable into PhysicalDeviceProperties2 only.
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShadingRateImagePropertiesNVBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShadingRateImagePropertiesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceShadingRateImagePropertiesNVBuilder<'a> {
    type Target = PhysicalDeviceShadingRateImagePropertiesNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceShadingRateImagePropertiesNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceShadingRateImagePropertiesNVBuilder<'a> {
    // Chainable setters; values are stored unchanged.
    #[inline]
    pub fn shading_rate_texel_size(mut self, shading_rate_texel_size: Extent2D) -> Self {
        self.inner.shading_rate_texel_size = shading_rate_texel_size;
        self
    }
    #[inline]
    pub fn shading_rate_palette_size(mut self, shading_rate_palette_size: u32) -> Self {
        self.inner.shading_rate_palette_size = shading_rate_palette_size;
        self
    }
    #[inline]
    pub fn shading_rate_max_coarse_samples(mut self, shading_rate_max_coarse_samples: u32) -> Self {
        self.inner.shading_rate_max_coarse_samples = shading_rate_max_coarse_samples;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceShadingRateImagePropertiesNV {
        self.inner
    }
}
// Feature-query struct for VK_HUAWEI_invocation_mask, plus its builder.
// NOTE(review): looks machine-generated from the Vulkan registry (vk.xml).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceInvocationMaskFeaturesHUAWEI.html>"]
pub struct PhysicalDeviceInvocationMaskFeaturesHUAWEI {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub invocation_mask: Bool32,
}
// Default pre-fills the Vulkan `s_type` tag and a null `p_next`.
impl ::std::default::Default for PhysicalDeviceInvocationMaskFeaturesHUAWEI {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            invocation_mask: Bool32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceInvocationMaskFeaturesHUAWEI {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI;
}
impl PhysicalDeviceInvocationMaskFeaturesHUAWEI {
    pub fn builder<'a>() -> PhysicalDeviceInvocationMaskFeaturesHUAWEIBuilder<'a> {
        PhysicalDeviceInvocationMaskFeaturesHUAWEIBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-transparent builder wrapper.
#[repr(transparent)]
pub struct PhysicalDeviceInvocationMaskFeaturesHUAWEIBuilder<'a> {
    inner: PhysicalDeviceInvocationMaskFeaturesHUAWEI,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY markers: chainable into PhysicalDeviceFeatures2 / DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceInvocationMaskFeaturesHUAWEIBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceInvocationMaskFeaturesHUAWEI {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceInvocationMaskFeaturesHUAWEIBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceInvocationMaskFeaturesHUAWEI {}
impl<'a> ::std::ops::Deref for PhysicalDeviceInvocationMaskFeaturesHUAWEIBuilder<'a> {
    type Target = PhysicalDeviceInvocationMaskFeaturesHUAWEI;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceInvocationMaskFeaturesHUAWEIBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceInvocationMaskFeaturesHUAWEIBuilder<'a> {
    // Chainable setter; converts `bool` into the FFI `Bool32`.
    #[inline]
    pub fn invocation_mask(mut self, invocation_mask: bool) -> Self {
        self.inner.invocation_mask = invocation_mask.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceInvocationMaskFeaturesHUAWEI {
        self.inner
    }
}
// Plain-old-data sample coordinate used by VK_NV_shading_rate_image's
// custom coarse sample ordering: a pixel position plus a sample index.
// No s_type/p_next header — this struct is never chained via p_next.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCoarseSampleLocationNV.html>"]
pub struct CoarseSampleLocationNV {
    pub pixel_x: u32,
    pub pixel_y: u32,
    pub sample: u32,
}
impl CoarseSampleLocationNV {
    // Entry point to the fluent builder; starts from an all-zero value.
    pub fn builder<'a>() -> CoarseSampleLocationNVBuilder<'a> {
        CoarseSampleLocationNVBuilder {
            inner: CoarseSampleLocationNV::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-transparent wrapper around the raw struct; the lifetime is kept
// for uniformity with the other generated builders.
#[repr(transparent)]
pub struct CoarseSampleLocationNVBuilder<'a> {
    inner: CoarseSampleLocationNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> CoarseSampleLocationNVBuilder<'a> {
    // Each setter consumes the builder, replaces one field via functional
    // record update, and returns the builder for chaining.
    #[inline]
    pub fn pixel_x(self, pixel_x: u32) -> Self {
        Self {
            inner: CoarseSampleLocationNV { pixel_x, ..self.inner },
            ..self
        }
    }
    #[inline]
    pub fn pixel_y(self, pixel_y: u32) -> Self {
        Self {
            inner: CoarseSampleLocationNV { pixel_y, ..self.inner },
            ..self
        }
    }
    #[inline]
    pub fn sample(self, sample: u32) -> Self {
        Self {
            inner: CoarseSampleLocationNV { sample, ..self.inner },
            ..self
        }
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> CoarseSampleLocationNV {
        self.inner
    }
}
// Deref/DerefMut let a builder reference stand in for the raw struct.
impl<'a> ::std::ops::Deref for CoarseSampleLocationNVBuilder<'a> {
    type Target = CoarseSampleLocationNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for CoarseSampleLocationNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCoarseSampleOrderCustomNV.html>"]
pub struct CoarseSampleOrderCustomNV {
    pub shading_rate: ShadingRatePaletteEntryNV,
    pub sample_count: u32,
    // Length/pointer pair describing an externally owned array of locations.
    pub sample_location_count: u32,
    pub p_sample_locations: *const CoarseSampleLocationNV,
}
// Manual Default: `Default` cannot be derived because the raw pointer field
// must be explicitly initialized to null.
impl ::std::default::Default for CoarseSampleOrderCustomNV {
    #[inline]
    fn default() -> Self {
        Self {
            shading_rate: ShadingRatePaletteEntryNV::default(),
            sample_count: u32::default(),
            sample_location_count: u32::default(),
            p_sample_locations: ::std::ptr::null(),
        }
    }
}
impl CoarseSampleOrderCustomNV {
    /// Returns a builder wrapping `Self::default()`; `'a` bounds the lifetime
    /// of any slice stored through the builder.
    pub fn builder<'a>() -> CoarseSampleOrderCustomNVBuilder<'a> {
        CoarseSampleOrderCustomNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct CoarseSampleOrderCustomNVBuilder<'a> {
    inner: CoarseSampleOrderCustomNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for CoarseSampleOrderCustomNVBuilder<'a> {
    type Target = CoarseSampleOrderCustomNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for CoarseSampleOrderCustomNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> CoarseSampleOrderCustomNVBuilder<'a> {
    #[inline]
    pub fn shading_rate(mut self, shading_rate: ShadingRatePaletteEntryNV) -> Self {
        self.inner.shading_rate = shading_rate;
        self
    }
    #[inline]
    pub fn sample_count(mut self, sample_count: u32) -> Self {
        self.inner.sample_count = sample_count;
        self
    }
    /// Sets `sample_location_count` and `p_sample_locations` together from one
    /// slice; the `'a` borrow keeps the slice alive as long as the builder.
    #[inline]
    pub fn sample_locations(mut self, sample_locations: &'a [CoarseSampleLocationNV]) -> Self {
        self.inner.sample_location_count = sample_locations.len() as _;
        self.inner.p_sample_locations = sample_locations.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> CoarseSampleOrderCustomNV {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineViewportCoarseSampleOrderStateCreateInfoNV.html>"]
pub struct PipelineViewportCoarseSampleOrderStateCreateInfoNV {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub sample_order_type: CoarseSampleOrderTypeNV,
    // Length/pointer pair describing an externally owned array of orders.
    pub custom_sample_order_count: u32,
    pub p_custom_sample_orders: *const CoarseSampleOrderCustomNV,
}
// Manual Default: pre-tags `s_type` with the struct's Vulkan structure type
// and nulls the raw pointers.
impl ::std::default::Default for PipelineViewportCoarseSampleOrderStateCreateInfoNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            sample_order_type: CoarseSampleOrderTypeNV::default(),
            custom_sample_order_count: u32::default(),
            p_custom_sample_orders: ::std::ptr::null(),
        }
    }
}
// Associates this struct with its `VkStructureType` tag.
unsafe impl TaggedStructure for PipelineViewportCoarseSampleOrderStateCreateInfoNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV;
}
impl PipelineViewportCoarseSampleOrderStateCreateInfoNV {
    /// Returns a builder wrapping `Self::default()`.
    pub fn builder<'a>() -> PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder<'a> {
        PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder<'a> {
    inner: PipelineViewportCoarseSampleOrderStateCreateInfoNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls allowing this struct (and its builder) to extend
// `PipelineViewportStateCreateInfo` via the `p_next` chain.
unsafe impl ExtendsPipelineViewportStateCreateInfo
    for PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder<'_>
{
}
unsafe impl ExtendsPipelineViewportStateCreateInfo
    for PipelineViewportCoarseSampleOrderStateCreateInfoNV
{
}
impl<'a> ::std::ops::Deref for PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder<'a> {
    type Target = PipelineViewportCoarseSampleOrderStateCreateInfoNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PipelineViewportCoarseSampleOrderStateCreateInfoNVBuilder<'a> {
    #[inline]
    pub fn sample_order_type(mut self, sample_order_type: CoarseSampleOrderTypeNV) -> Self {
        self.inner.sample_order_type = sample_order_type;
        self
    }
    /// Sets `custom_sample_order_count` and `p_custom_sample_orders` together
    /// from one slice borrowed for `'a`.
    #[inline]
    pub fn custom_sample_orders(
        mut self,
        custom_sample_orders: &'a [CoarseSampleOrderCustomNV],
    ) -> Self {
        self.inner.custom_sample_order_count = custom_sample_orders.len() as _;
        self.inner.p_custom_sample_orders = custom_sample_orders.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PipelineViewportCoarseSampleOrderStateCreateInfoNV {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMeshShaderFeaturesNV.html>"]
pub struct PhysicalDeviceMeshShaderFeaturesNV {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    // Feature flags use Vulkan's 32-bit boolean, not Rust `bool`.
    pub task_shader: Bool32,
    pub mesh_shader: Bool32,
}
// Manual Default: pre-tags `s_type` and nulls the `p_next` chain pointer.
impl ::std::default::Default for PhysicalDeviceMeshShaderFeaturesNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            task_shader: Bool32::default(),
            mesh_shader: Bool32::default(),
        }
    }
}
// Associates this struct with its `VkStructureType` tag.
unsafe impl TaggedStructure for PhysicalDeviceMeshShaderFeaturesNV {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV;
}
impl PhysicalDeviceMeshShaderFeaturesNV {
    /// Returns a builder wrapping `Self::default()`.
    pub fn builder<'a>() -> PhysicalDeviceMeshShaderFeaturesNVBuilder<'a> {
        PhysicalDeviceMeshShaderFeaturesNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDeviceMeshShaderFeaturesNVBuilder<'a> {
    inner: PhysicalDeviceMeshShaderFeaturesNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: valid `p_next` extension of PhysicalDeviceFeatures2 / DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMeshShaderFeaturesNVBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMeshShaderFeaturesNV {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMeshShaderFeaturesNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMeshShaderFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMeshShaderFeaturesNVBuilder<'a> {
    type Target = PhysicalDeviceMeshShaderFeaturesNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMeshShaderFeaturesNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceMeshShaderFeaturesNVBuilder<'a> {
    // Setters convert Rust `bool` into Vulkan `Bool32` via `.into()`.
    #[inline]
    pub fn task_shader(mut self, task_shader: bool) -> Self {
        self.inner.task_shader = task_shader.into();
        self
    }
    #[inline]
    pub fn mesh_shader(mut self, mesh_shader: bool) -> Self {
        self.inner.mesh_shader = mesh_shader.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceMeshShaderFeaturesNV {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMeshShaderPropertiesNV.html>"]
pub struct PhysicalDeviceMeshShaderPropertiesNV {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub max_draw_mesh_tasks_count: u32,
    pub max_task_work_group_invocations: u32,
    pub max_task_work_group_size: [u32; 3],
    pub max_task_total_memory_size: u32,
    pub max_task_output_count: u32,
    pub max_mesh_work_group_invocations: u32,
    pub max_mesh_work_group_size: [u32; 3],
    pub max_mesh_total_memory_size: u32,
    pub max_mesh_output_vertices: u32,
    pub max_mesh_output_primitives: u32,
    pub max_mesh_multiview_view_count: u32,
    pub mesh_output_per_vertex_granularity: u32,
    pub mesh_output_per_primitive_granularity: u32,
}
// Manual Default: pre-tags `s_type`, nulls `p_next`, zeroes all limits.
impl ::std::default::Default for PhysicalDeviceMeshShaderPropertiesNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            max_draw_mesh_tasks_count: u32::default(),
            max_task_work_group_invocations: u32::default(),
            // SAFETY: zeroed is sound for a plain `[u32; 3]` (all-zero bit
            // pattern is a valid value).
            max_task_work_group_size: unsafe { ::std::mem::zeroed() },
            max_task_total_memory_size: u32::default(),
            max_task_output_count: u32::default(),
            max_mesh_work_group_invocations: u32::default(),
            max_mesh_work_group_size: unsafe { ::std::mem::zeroed() },
            max_mesh_total_memory_size: u32::default(),
            max_mesh_output_vertices: u32::default(),
            max_mesh_output_primitives: u32::default(),
            max_mesh_multiview_view_count: u32::default(),
            mesh_output_per_vertex_granularity: u32::default(),
            mesh_output_per_primitive_granularity: u32::default(),
        }
    }
}
// Associates this struct with its `VkStructureType` tag.
unsafe impl TaggedStructure for PhysicalDeviceMeshShaderPropertiesNV {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV;
}
impl PhysicalDeviceMeshShaderPropertiesNV {
    /// Returns a builder wrapping `Self::default()`.
    pub fn builder<'a>() -> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
        PhysicalDeviceMeshShaderPropertiesNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
    inner: PhysicalDeviceMeshShaderPropertiesNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: valid `p_next` extension of PhysicalDeviceProperties2.
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMeshShaderPropertiesNVBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMeshShaderPropertiesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
    type Target = PhysicalDeviceMeshShaderPropertiesNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Plain field setters; each method name mirrors the struct field it writes.
impl<'a> PhysicalDeviceMeshShaderPropertiesNVBuilder<'a> {
    #[inline]
    pub fn max_draw_mesh_tasks_count(mut self, max_draw_mesh_tasks_count: u32) -> Self {
        self.inner.max_draw_mesh_tasks_count = max_draw_mesh_tasks_count;
        self
    }
    #[inline]
    pub fn max_task_work_group_invocations(mut self, max_task_work_group_invocations: u32) -> Self {
        self.inner.max_task_work_group_invocations = max_task_work_group_invocations;
        self
    }
    #[inline]
    pub fn max_task_work_group_size(mut self, max_task_work_group_size: [u32; 3]) -> Self {
        self.inner.max_task_work_group_size = max_task_work_group_size;
        self
    }
    #[inline]
    pub fn max_task_total_memory_size(mut self, max_task_total_memory_size: u32) -> Self {
        self.inner.max_task_total_memory_size = max_task_total_memory_size;
        self
    }
    #[inline]
    pub fn max_task_output_count(mut self, max_task_output_count: u32) -> Self {
        self.inner.max_task_output_count = max_task_output_count;
        self
    }
    #[inline]
    pub fn max_mesh_work_group_invocations(mut self, max_mesh_work_group_invocations: u32) -> Self {
        self.inner.max_mesh_work_group_invocations = max_mesh_work_group_invocations;
        self
    }
    #[inline]
    pub fn max_mesh_work_group_size(mut self, max_mesh_work_group_size: [u32; 3]) -> Self {
        self.inner.max_mesh_work_group_size = max_mesh_work_group_size;
        self
    }
    #[inline]
    pub fn max_mesh_total_memory_size(mut self, max_mesh_total_memory_size: u32) -> Self {
        self.inner.max_mesh_total_memory_size = max_mesh_total_memory_size;
        self
    }
    #[inline]
    pub fn max_mesh_output_vertices(mut self, max_mesh_output_vertices: u32) -> Self {
        self.inner.max_mesh_output_vertices = max_mesh_output_vertices;
        self
    }
    #[inline]
    pub fn max_mesh_output_primitives(mut self, max_mesh_output_primitives: u32) -> Self {
        self.inner.max_mesh_output_primitives = max_mesh_output_primitives;
        self
    }
    #[inline]
    pub fn max_mesh_multiview_view_count(mut self, max_mesh_multiview_view_count: u32) -> Self {
        self.inner.max_mesh_multiview_view_count = max_mesh_multiview_view_count;
        self
    }
    #[inline]
    pub fn mesh_output_per_vertex_granularity(
        mut self,
        mesh_output_per_vertex_granularity: u32,
    ) -> Self {
        self.inner.mesh_output_per_vertex_granularity = mesh_output_per_vertex_granularity;
        self
    }
    #[inline]
    pub fn mesh_output_per_primitive_granularity(
        mut self,
        mesh_output_per_primitive_granularity: u32,
    ) -> Self {
        self.inner.mesh_output_per_primitive_granularity = mesh_output_per_primitive_granularity;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceMeshShaderPropertiesNV {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDrawMeshTasksIndirectCommandNV.html>"]
pub struct DrawMeshTasksIndirectCommandNV {
    pub task_count: u32,
    pub first_task: u32,
}
impl DrawMeshTasksIndirectCommandNV {
    /// Creates a builder wrapping a zero-initialized command struct.
    pub fn builder<'a>() -> DrawMeshTasksIndirectCommandNVBuilder<'a> {
        DrawMeshTasksIndirectCommandNVBuilder {
            inner: DrawMeshTasksIndirectCommandNV::default(),
            marker: std::marker::PhantomData,
        }
    }
}
/// Fluent builder for [`DrawMeshTasksIndirectCommandNV`]; layout-transparent
/// so a reference to it can stand in for the raw struct.
#[repr(transparent)]
pub struct DrawMeshTasksIndirectCommandNVBuilder<'a> {
    inner: DrawMeshTasksIndirectCommandNV,
    marker: std::marker::PhantomData<&'a ()>,
}
impl std::ops::Deref for DrawMeshTasksIndirectCommandNVBuilder<'_> {
    type Target = DrawMeshTasksIndirectCommandNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl std::ops::DerefMut for DrawMeshTasksIndirectCommandNVBuilder<'_> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> DrawMeshTasksIndirectCommandNVBuilder<'a> {
    /// Sets the number of tasks to draw.
    #[inline]
    pub fn task_count(mut self, task_count: u32) -> Self {
        self.inner.task_count = task_count;
        self
    }
    /// Sets the index of the first task.
    #[inline]
    pub fn first_task(mut self, first_task: u32) -> Self {
        self.inner.first_task = first_task;
        self
    }
    /// Unwraps the builder, discarding the lifetime marker. Prefer passing a
    /// reference to the builder itself (via `Deref`) where possible.
    pub fn build(self) -> DrawMeshTasksIndirectCommandNV {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMeshShaderFeaturesEXT.html>"]
pub struct PhysicalDeviceMeshShaderFeaturesEXT {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    // Feature flags use Vulkan's 32-bit boolean, not Rust `bool`.
    pub task_shader: Bool32,
    pub mesh_shader: Bool32,
    pub multiview_mesh_shader: Bool32,
    pub primitive_fragment_shading_rate_mesh_shader: Bool32,
    pub mesh_shader_queries: Bool32,
}
// Manual Default: pre-tags `s_type` and nulls the `p_next` chain pointer.
impl ::std::default::Default for PhysicalDeviceMeshShaderFeaturesEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            task_shader: Bool32::default(),
            mesh_shader: Bool32::default(),
            multiview_mesh_shader: Bool32::default(),
            primitive_fragment_shading_rate_mesh_shader: Bool32::default(),
            mesh_shader_queries: Bool32::default(),
        }
    }
}
// Associates this struct with its `VkStructureType` tag.
unsafe impl TaggedStructure for PhysicalDeviceMeshShaderFeaturesEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT;
}
impl PhysicalDeviceMeshShaderFeaturesEXT {
    /// Returns a builder wrapping `Self::default()`.
    pub fn builder<'a>() -> PhysicalDeviceMeshShaderFeaturesEXTBuilder<'a> {
        PhysicalDeviceMeshShaderFeaturesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDeviceMeshShaderFeaturesEXTBuilder<'a> {
    inner: PhysicalDeviceMeshShaderFeaturesEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: valid `p_next` extension of PhysicalDeviceFeatures2 / DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMeshShaderFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMeshShaderFeaturesEXT {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMeshShaderFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMeshShaderFeaturesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMeshShaderFeaturesEXTBuilder<'a> {
    type Target = PhysicalDeviceMeshShaderFeaturesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMeshShaderFeaturesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceMeshShaderFeaturesEXTBuilder<'a> {
    // Setters convert Rust `bool` into Vulkan `Bool32` via `.into()`.
    #[inline]
    pub fn task_shader(mut self, task_shader: bool) -> Self {
        self.inner.task_shader = task_shader.into();
        self
    }
    #[inline]
    pub fn mesh_shader(mut self, mesh_shader: bool) -> Self {
        self.inner.mesh_shader = mesh_shader.into();
        self
    }
    #[inline]
    pub fn multiview_mesh_shader(mut self, multiview_mesh_shader: bool) -> Self {
        self.inner.multiview_mesh_shader = multiview_mesh_shader.into();
        self
    }
    #[inline]
    pub fn primitive_fragment_shading_rate_mesh_shader(
        mut self,
        primitive_fragment_shading_rate_mesh_shader: bool,
    ) -> Self {
        self.inner.primitive_fragment_shading_rate_mesh_shader =
            primitive_fragment_shading_rate_mesh_shader.into();
        self
    }
    #[inline]
    pub fn mesh_shader_queries(mut self, mesh_shader_queries: bool) -> Self {
        self.inner.mesh_shader_queries = mesh_shader_queries.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceMeshShaderFeaturesEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMeshShaderPropertiesEXT.html>"]
pub struct PhysicalDeviceMeshShaderPropertiesEXT {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub max_task_work_group_total_count: u32,
    pub max_task_work_group_count: [u32; 3],
    pub max_task_work_group_invocations: u32,
    pub max_task_work_group_size: [u32; 3],
    pub max_task_payload_size: u32,
    pub max_task_shared_memory_size: u32,
    pub max_task_payload_and_shared_memory_size: u32,
    pub max_mesh_work_group_total_count: u32,
    pub max_mesh_work_group_count: [u32; 3],
    pub max_mesh_work_group_invocations: u32,
    pub max_mesh_work_group_size: [u32; 3],
    pub max_mesh_shared_memory_size: u32,
    pub max_mesh_payload_and_shared_memory_size: u32,
    pub max_mesh_output_memory_size: u32,
    pub max_mesh_payload_and_output_memory_size: u32,
    pub max_mesh_output_components: u32,
    pub max_mesh_output_vertices: u32,
    pub max_mesh_output_primitives: u32,
    pub max_mesh_output_layers: u32,
    pub max_mesh_multiview_view_count: u32,
    pub mesh_output_per_vertex_granularity: u32,
    pub mesh_output_per_primitive_granularity: u32,
    pub max_preferred_task_work_group_invocations: u32,
    pub max_preferred_mesh_work_group_invocations: u32,
    // Preference flags use Vulkan's 32-bit boolean, not Rust `bool`.
    pub prefers_local_invocation_vertex_output: Bool32,
    pub prefers_local_invocation_primitive_output: Bool32,
    pub prefers_compact_vertex_output: Bool32,
    pub prefers_compact_primitive_output: Bool32,
}
// Manual Default: pre-tags `s_type`, nulls `p_next`, zeroes all limits.
impl ::std::default::Default for PhysicalDeviceMeshShaderPropertiesEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            max_task_work_group_total_count: u32::default(),
            // SAFETY: zeroed is sound for a plain `[u32; 3]` (all-zero bit
            // pattern is a valid value).
            max_task_work_group_count: unsafe { ::std::mem::zeroed() },
            max_task_work_group_invocations: u32::default(),
            max_task_work_group_size: unsafe { ::std::mem::zeroed() },
            max_task_payload_size: u32::default(),
            max_task_shared_memory_size: u32::default(),
            max_task_payload_and_shared_memory_size: u32::default(),
            max_mesh_work_group_total_count: u32::default(),
            max_mesh_work_group_count: unsafe { ::std::mem::zeroed() },
            max_mesh_work_group_invocations: u32::default(),
            max_mesh_work_group_size: unsafe { ::std::mem::zeroed() },
            max_mesh_shared_memory_size: u32::default(),
            max_mesh_payload_and_shared_memory_size: u32::default(),
            max_mesh_output_memory_size: u32::default(),
            max_mesh_payload_and_output_memory_size: u32::default(),
            max_mesh_output_components: u32::default(),
            max_mesh_output_vertices: u32::default(),
            max_mesh_output_primitives: u32::default(),
            max_mesh_output_layers: u32::default(),
            max_mesh_multiview_view_count: u32::default(),
            mesh_output_per_vertex_granularity: u32::default(),
            mesh_output_per_primitive_granularity: u32::default(),
            max_preferred_task_work_group_invocations: u32::default(),
            max_preferred_mesh_work_group_invocations: u32::default(),
            prefers_local_invocation_vertex_output: Bool32::default(),
            prefers_local_invocation_primitive_output: Bool32::default(),
            prefers_compact_vertex_output: Bool32::default(),
            prefers_compact_primitive_output: Bool32::default(),
        }
    }
}
// Associates this struct with its `VkStructureType` tag.
unsafe impl TaggedStructure for PhysicalDeviceMeshShaderPropertiesEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT;
}
impl PhysicalDeviceMeshShaderPropertiesEXT {
    /// Returns a builder wrapping `Self::default()`.
    pub fn builder<'a>() -> PhysicalDeviceMeshShaderPropertiesEXTBuilder<'a> {
        PhysicalDeviceMeshShaderPropertiesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDeviceMeshShaderPropertiesEXTBuilder<'a> {
    inner: PhysicalDeviceMeshShaderPropertiesEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: valid `p_next` extension of PhysicalDeviceProperties2.
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMeshShaderPropertiesEXTBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceMeshShaderPropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceMeshShaderPropertiesEXTBuilder<'a> {
    type Target = PhysicalDeviceMeshShaderPropertiesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceMeshShaderPropertiesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Plain field setters; each method name mirrors the struct field it writes.
// `bool` parameters are converted to `Bool32` via `.into()`.
impl<'a> PhysicalDeviceMeshShaderPropertiesEXTBuilder<'a> {
    #[inline]
    pub fn max_task_work_group_total_count(mut self, max_task_work_group_total_count: u32) -> Self {
        self.inner.max_task_work_group_total_count = max_task_work_group_total_count;
        self
    }
    #[inline]
    pub fn max_task_work_group_count(mut self, max_task_work_group_count: [u32; 3]) -> Self {
        self.inner.max_task_work_group_count = max_task_work_group_count;
        self
    }
    #[inline]
    pub fn max_task_work_group_invocations(mut self, max_task_work_group_invocations: u32) -> Self {
        self.inner.max_task_work_group_invocations = max_task_work_group_invocations;
        self
    }
    #[inline]
    pub fn max_task_work_group_size(mut self, max_task_work_group_size: [u32; 3]) -> Self {
        self.inner.max_task_work_group_size = max_task_work_group_size;
        self
    }
    #[inline]
    pub fn max_task_payload_size(mut self, max_task_payload_size: u32) -> Self {
        self.inner.max_task_payload_size = max_task_payload_size;
        self
    }
    #[inline]
    pub fn max_task_shared_memory_size(mut self, max_task_shared_memory_size: u32) -> Self {
        self.inner.max_task_shared_memory_size = max_task_shared_memory_size;
        self
    }
    #[inline]
    pub fn max_task_payload_and_shared_memory_size(
        mut self,
        max_task_payload_and_shared_memory_size: u32,
    ) -> Self {
        self.inner.max_task_payload_and_shared_memory_size =
            max_task_payload_and_shared_memory_size;
        self
    }
    #[inline]
    pub fn max_mesh_work_group_total_count(mut self, max_mesh_work_group_total_count: u32) -> Self {
        self.inner.max_mesh_work_group_total_count = max_mesh_work_group_total_count;
        self
    }
    #[inline]
    pub fn max_mesh_work_group_count(mut self, max_mesh_work_group_count: [u32; 3]) -> Self {
        self.inner.max_mesh_work_group_count = max_mesh_work_group_count;
        self
    }
    #[inline]
    pub fn max_mesh_work_group_invocations(mut self, max_mesh_work_group_invocations: u32) -> Self {
        self.inner.max_mesh_work_group_invocations = max_mesh_work_group_invocations;
        self
    }
    #[inline]
    pub fn max_mesh_work_group_size(mut self, max_mesh_work_group_size: [u32; 3]) -> Self {
        self.inner.max_mesh_work_group_size = max_mesh_work_group_size;
        self
    }
    #[inline]
    pub fn max_mesh_shared_memory_size(mut self, max_mesh_shared_memory_size: u32) -> Self {
        self.inner.max_mesh_shared_memory_size = max_mesh_shared_memory_size;
        self
    }
    #[inline]
    pub fn max_mesh_payload_and_shared_memory_size(
        mut self,
        max_mesh_payload_and_shared_memory_size: u32,
    ) -> Self {
        self.inner.max_mesh_payload_and_shared_memory_size =
            max_mesh_payload_and_shared_memory_size;
        self
    }
    #[inline]
    pub fn max_mesh_output_memory_size(mut self, max_mesh_output_memory_size: u32) -> Self {
        self.inner.max_mesh_output_memory_size = max_mesh_output_memory_size;
        self
    }
    #[inline]
    pub fn max_mesh_payload_and_output_memory_size(
        mut self,
        max_mesh_payload_and_output_memory_size: u32,
    ) -> Self {
        self.inner.max_mesh_payload_and_output_memory_size =
            max_mesh_payload_and_output_memory_size;
        self
    }
    #[inline]
    pub fn max_mesh_output_components(mut self, max_mesh_output_components: u32) -> Self {
        self.inner.max_mesh_output_components = max_mesh_output_components;
        self
    }
    #[inline]
    pub fn max_mesh_output_vertices(mut self, max_mesh_output_vertices: u32) -> Self {
        self.inner.max_mesh_output_vertices = max_mesh_output_vertices;
        self
    }
    #[inline]
    pub fn max_mesh_output_primitives(mut self, max_mesh_output_primitives: u32) -> Self {
        self.inner.max_mesh_output_primitives = max_mesh_output_primitives;
        self
    }
    #[inline]
    pub fn max_mesh_output_layers(mut self, max_mesh_output_layers: u32) -> Self {
        self.inner.max_mesh_output_layers = max_mesh_output_layers;
        self
    }
    #[inline]
    pub fn max_mesh_multiview_view_count(mut self, max_mesh_multiview_view_count: u32) -> Self {
        self.inner.max_mesh_multiview_view_count = max_mesh_multiview_view_count;
        self
    }
    #[inline]
    pub fn mesh_output_per_vertex_granularity(
        mut self,
        mesh_output_per_vertex_granularity: u32,
    ) -> Self {
        self.inner.mesh_output_per_vertex_granularity = mesh_output_per_vertex_granularity;
        self
    }
    #[inline]
    pub fn mesh_output_per_primitive_granularity(
        mut self,
        mesh_output_per_primitive_granularity: u32,
    ) -> Self {
        self.inner.mesh_output_per_primitive_granularity = mesh_output_per_primitive_granularity;
        self
    }
    #[inline]
    pub fn max_preferred_task_work_group_invocations(
        mut self,
        max_preferred_task_work_group_invocations: u32,
    ) -> Self {
        self.inner.max_preferred_task_work_group_invocations =
            max_preferred_task_work_group_invocations;
        self
    }
    #[inline]
    pub fn max_preferred_mesh_work_group_invocations(
        mut self,
        max_preferred_mesh_work_group_invocations: u32,
    ) -> Self {
        self.inner.max_preferred_mesh_work_group_invocations =
            max_preferred_mesh_work_group_invocations;
        self
    }
    #[inline]
    pub fn prefers_local_invocation_vertex_output(
        mut self,
        prefers_local_invocation_vertex_output: bool,
    ) -> Self {
        self.inner.prefers_local_invocation_vertex_output =
            prefers_local_invocation_vertex_output.into();
        self
    }
    #[inline]
    pub fn prefers_local_invocation_primitive_output(
        mut self,
        prefers_local_invocation_primitive_output: bool,
    ) -> Self {
        self.inner.prefers_local_invocation_primitive_output =
            prefers_local_invocation_primitive_output.into();
        self
    }
    #[inline]
    pub fn prefers_compact_vertex_output(mut self, prefers_compact_vertex_output: bool) -> Self {
        self.inner.prefers_compact_vertex_output = prefers_compact_vertex_output.into();
        self
    }
    #[inline]
    pub fn prefers_compact_primitive_output(
        mut self,
        prefers_compact_primitive_output: bool,
    ) -> Self {
        self.inner.prefers_compact_primitive_output = prefers_compact_primitive_output.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceMeshShaderPropertiesEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDrawMeshTasksIndirectCommandEXT.html>"]
pub struct DrawMeshTasksIndirectCommandEXT {
    pub group_count_x: u32,
    pub group_count_y: u32,
    pub group_count_z: u32,
}
impl DrawMeshTasksIndirectCommandEXT {
    /// Creates a builder wrapping a zero-initialized command struct.
    pub fn builder<'a>() -> DrawMeshTasksIndirectCommandEXTBuilder<'a> {
        DrawMeshTasksIndirectCommandEXTBuilder {
            inner: DrawMeshTasksIndirectCommandEXT::default(),
            marker: std::marker::PhantomData,
        }
    }
}
/// Fluent builder for [`DrawMeshTasksIndirectCommandEXT`]; layout-transparent
/// so a reference to it can stand in for the raw struct.
#[repr(transparent)]
pub struct DrawMeshTasksIndirectCommandEXTBuilder<'a> {
    inner: DrawMeshTasksIndirectCommandEXT,
    marker: std::marker::PhantomData<&'a ()>,
}
impl std::ops::Deref for DrawMeshTasksIndirectCommandEXTBuilder<'_> {
    type Target = DrawMeshTasksIndirectCommandEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl std::ops::DerefMut for DrawMeshTasksIndirectCommandEXTBuilder<'_> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> DrawMeshTasksIndirectCommandEXTBuilder<'a> {
    /// Sets the workgroup count along x.
    #[inline]
    pub fn group_count_x(mut self, group_count_x: u32) -> Self {
        self.inner.group_count_x = group_count_x;
        self
    }
    /// Sets the workgroup count along y.
    #[inline]
    pub fn group_count_y(mut self, group_count_y: u32) -> Self {
        self.inner.group_count_y = group_count_y;
        self
    }
    /// Sets the workgroup count along z.
    #[inline]
    pub fn group_count_z(mut self, group_count_z: u32) -> Self {
        self.inner.group_count_z = group_count_z;
        self
    }
    /// Unwraps the builder, discarding the lifetime marker. Prefer passing a
    /// reference to the builder itself (via `Deref`) where possible.
    pub fn build(self) -> DrawMeshTasksIndirectCommandEXT {
        self.inner
    }
}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRayTracingShaderGroupCreateInfoNV.html>"]
+pub struct RayTracingShaderGroupCreateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub ty: RayTracingShaderGroupTypeKHR,
+ pub general_shader: u32,
+ pub closest_hit_shader: u32,
+ pub any_hit_shader: u32,
+ pub intersection_shader: u32,
+}
+impl ::std::default::Default for RayTracingShaderGroupCreateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ ty: RayTracingShaderGroupTypeKHR::default(),
+ general_shader: u32::default(),
+ closest_hit_shader: u32::default(),
+ any_hit_shader: u32::default(),
+ intersection_shader: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for RayTracingShaderGroupCreateInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV;
+}
+impl RayTracingShaderGroupCreateInfoNV {
+ pub fn builder<'a>() -> RayTracingShaderGroupCreateInfoNVBuilder<'a> {
+ RayTracingShaderGroupCreateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct RayTracingShaderGroupCreateInfoNVBuilder<'a> {
+ inner: RayTracingShaderGroupCreateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for RayTracingShaderGroupCreateInfoNVBuilder<'a> {
+ type Target = RayTracingShaderGroupCreateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for RayTracingShaderGroupCreateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> RayTracingShaderGroupCreateInfoNVBuilder<'a> {
+ #[inline]
+ pub fn ty(mut self, ty: RayTracingShaderGroupTypeKHR) -> Self {
+ self.inner.ty = ty;
+ self
+ }
+ #[inline]
+ pub fn general_shader(mut self, general_shader: u32) -> Self {
+ self.inner.general_shader = general_shader;
+ self
+ }
+ #[inline]
+ pub fn closest_hit_shader(mut self, closest_hit_shader: u32) -> Self {
+ self.inner.closest_hit_shader = closest_hit_shader;
+ self
+ }
+ #[inline]
+ pub fn any_hit_shader(mut self, any_hit_shader: u32) -> Self {
+ self.inner.any_hit_shader = any_hit_shader;
+ self
+ }
+ #[inline]
+ pub fn intersection_shader(mut self, intersection_shader: u32) -> Self {
+ self.inner.intersection_shader = intersection_shader;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> RayTracingShaderGroupCreateInfoNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRayTracingShaderGroupCreateInfoKHR.html>"]
+pub struct RayTracingShaderGroupCreateInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub ty: RayTracingShaderGroupTypeKHR,
+ pub general_shader: u32,
+ pub closest_hit_shader: u32,
+ pub any_hit_shader: u32,
+ pub intersection_shader: u32,
+ pub p_shader_group_capture_replay_handle: *const c_void,
+}
+impl ::std::default::Default for RayTracingShaderGroupCreateInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ ty: RayTracingShaderGroupTypeKHR::default(),
+ general_shader: u32::default(),
+ closest_hit_shader: u32::default(),
+ any_hit_shader: u32::default(),
+ intersection_shader: u32::default(),
+ p_shader_group_capture_replay_handle: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for RayTracingShaderGroupCreateInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR;
+}
+impl RayTracingShaderGroupCreateInfoKHR {
+ pub fn builder<'a>() -> RayTracingShaderGroupCreateInfoKHRBuilder<'a> {
+ RayTracingShaderGroupCreateInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct RayTracingShaderGroupCreateInfoKHRBuilder<'a> {
+ inner: RayTracingShaderGroupCreateInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for RayTracingShaderGroupCreateInfoKHRBuilder<'a> {
+ type Target = RayTracingShaderGroupCreateInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for RayTracingShaderGroupCreateInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> RayTracingShaderGroupCreateInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn ty(mut self, ty: RayTracingShaderGroupTypeKHR) -> Self {
+ self.inner.ty = ty;
+ self
+ }
+ #[inline]
+ pub fn general_shader(mut self, general_shader: u32) -> Self {
+ self.inner.general_shader = general_shader;
+ self
+ }
+ #[inline]
+ pub fn closest_hit_shader(mut self, closest_hit_shader: u32) -> Self {
+ self.inner.closest_hit_shader = closest_hit_shader;
+ self
+ }
+ #[inline]
+ pub fn any_hit_shader(mut self, any_hit_shader: u32) -> Self {
+ self.inner.any_hit_shader = any_hit_shader;
+ self
+ }
+ #[inline]
+ pub fn intersection_shader(mut self, intersection_shader: u32) -> Self {
+ self.inner.intersection_shader = intersection_shader;
+ self
+ }
+ #[inline]
+ pub fn shader_group_capture_replay_handle(
+ mut self,
+ shader_group_capture_replay_handle: *const c_void,
+ ) -> Self {
+ self.inner.p_shader_group_capture_replay_handle = shader_group_capture_replay_handle;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> RayTracingShaderGroupCreateInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRayTracingPipelineCreateInfoNV.html>"]
+pub struct RayTracingPipelineCreateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: PipelineCreateFlags,
+ pub stage_count: u32,
+ pub p_stages: *const PipelineShaderStageCreateInfo,
+ pub group_count: u32,
+ pub p_groups: *const RayTracingShaderGroupCreateInfoNV,
+ pub max_recursion_depth: u32,
+ pub layout: PipelineLayout,
+ pub base_pipeline_handle: Pipeline,
+ pub base_pipeline_index: i32,
+}
+impl ::std::default::Default for RayTracingPipelineCreateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: PipelineCreateFlags::default(),
+ stage_count: u32::default(),
+ p_stages: ::std::ptr::null(),
+ group_count: u32::default(),
+ p_groups: ::std::ptr::null(),
+ max_recursion_depth: u32::default(),
+ layout: PipelineLayout::default(),
+ base_pipeline_handle: Pipeline::default(),
+ base_pipeline_index: i32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for RayTracingPipelineCreateInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::RAY_TRACING_PIPELINE_CREATE_INFO_NV;
+}
+impl RayTracingPipelineCreateInfoNV {
+ pub fn builder<'a>() -> RayTracingPipelineCreateInfoNVBuilder<'a> {
+ RayTracingPipelineCreateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct RayTracingPipelineCreateInfoNVBuilder<'a> {
+ inner: RayTracingPipelineCreateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsRayTracingPipelineCreateInfoNV {}
+impl<'a> ::std::ops::Deref for RayTracingPipelineCreateInfoNVBuilder<'a> {
+ type Target = RayTracingPipelineCreateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for RayTracingPipelineCreateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> RayTracingPipelineCreateInfoNVBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: PipelineCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn stages(mut self, stages: &'a [PipelineShaderStageCreateInfo]) -> Self {
+ self.inner.stage_count = stages.len() as _;
+ self.inner.p_stages = stages.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn groups(mut self, groups: &'a [RayTracingShaderGroupCreateInfoNV]) -> Self {
+ self.inner.group_count = groups.len() as _;
+ self.inner.p_groups = groups.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn max_recursion_depth(mut self, max_recursion_depth: u32) -> Self {
+ self.inner.max_recursion_depth = max_recursion_depth;
+ self
+ }
+ #[inline]
+ pub fn layout(mut self, layout: PipelineLayout) -> Self {
+ self.inner.layout = layout;
+ self
+ }
+ #[inline]
+ pub fn base_pipeline_handle(mut self, base_pipeline_handle: Pipeline) -> Self {
+ self.inner.base_pipeline_handle = base_pipeline_handle;
+ self
+ }
+ #[inline]
+ pub fn base_pipeline_index(mut self, base_pipeline_index: i32) -> Self {
+ self.inner.base_pipeline_index = base_pipeline_index;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsRayTracingPipelineCreateInfoNV>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> RayTracingPipelineCreateInfoNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRayTracingPipelineCreateInfoKHR.html>"]
+pub struct RayTracingPipelineCreateInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: PipelineCreateFlags,
+ pub stage_count: u32,
+ pub p_stages: *const PipelineShaderStageCreateInfo,
+ pub group_count: u32,
+ pub p_groups: *const RayTracingShaderGroupCreateInfoKHR,
+ pub max_pipeline_ray_recursion_depth: u32,
+ pub p_library_info: *const PipelineLibraryCreateInfoKHR,
+ pub p_library_interface: *const RayTracingPipelineInterfaceCreateInfoKHR,
+ pub p_dynamic_state: *const PipelineDynamicStateCreateInfo,
+ pub layout: PipelineLayout,
+ pub base_pipeline_handle: Pipeline,
+ pub base_pipeline_index: i32,
+}
+impl ::std::default::Default for RayTracingPipelineCreateInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: PipelineCreateFlags::default(),
+ stage_count: u32::default(),
+ p_stages: ::std::ptr::null(),
+ group_count: u32::default(),
+ p_groups: ::std::ptr::null(),
+ max_pipeline_ray_recursion_depth: u32::default(),
+ p_library_info: ::std::ptr::null(),
+ p_library_interface: ::std::ptr::null(),
+ p_dynamic_state: ::std::ptr::null(),
+ layout: PipelineLayout::default(),
+ base_pipeline_handle: Pipeline::default(),
+ base_pipeline_index: i32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for RayTracingPipelineCreateInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::RAY_TRACING_PIPELINE_CREATE_INFO_KHR;
+}
+impl RayTracingPipelineCreateInfoKHR {
+ pub fn builder<'a>() -> RayTracingPipelineCreateInfoKHRBuilder<'a> {
+ RayTracingPipelineCreateInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct RayTracingPipelineCreateInfoKHRBuilder<'a> {
+ inner: RayTracingPipelineCreateInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsRayTracingPipelineCreateInfoKHR {}
+impl<'a> ::std::ops::Deref for RayTracingPipelineCreateInfoKHRBuilder<'a> {
+ type Target = RayTracingPipelineCreateInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for RayTracingPipelineCreateInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> RayTracingPipelineCreateInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: PipelineCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn stages(mut self, stages: &'a [PipelineShaderStageCreateInfo]) -> Self {
+ self.inner.stage_count = stages.len() as _;
+ self.inner.p_stages = stages.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn groups(mut self, groups: &'a [RayTracingShaderGroupCreateInfoKHR]) -> Self {
+ self.inner.group_count = groups.len() as _;
+ self.inner.p_groups = groups.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn max_pipeline_ray_recursion_depth(
+ mut self,
+ max_pipeline_ray_recursion_depth: u32,
+ ) -> Self {
+ self.inner.max_pipeline_ray_recursion_depth = max_pipeline_ray_recursion_depth;
+ self
+ }
+ #[inline]
+ pub fn library_info(mut self, library_info: &'a PipelineLibraryCreateInfoKHR) -> Self {
+ self.inner.p_library_info = library_info;
+ self
+ }
+ #[inline]
+ pub fn library_interface(
+ mut self,
+ library_interface: &'a RayTracingPipelineInterfaceCreateInfoKHR,
+ ) -> Self {
+ self.inner.p_library_interface = library_interface;
+ self
+ }
+ #[inline]
+ pub fn dynamic_state(mut self, dynamic_state: &'a PipelineDynamicStateCreateInfo) -> Self {
+ self.inner.p_dynamic_state = dynamic_state;
+ self
+ }
+ #[inline]
+ pub fn layout(mut self, layout: PipelineLayout) -> Self {
+ self.inner.layout = layout;
+ self
+ }
+ #[inline]
+ pub fn base_pipeline_handle(mut self, base_pipeline_handle: Pipeline) -> Self {
+ self.inner.base_pipeline_handle = base_pipeline_handle;
+ self
+ }
+ #[inline]
+ pub fn base_pipeline_index(mut self, base_pipeline_index: i32) -> Self {
+ self.inner.base_pipeline_index = base_pipeline_index;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsRayTracingPipelineCreateInfoKHR>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> RayTracingPipelineCreateInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkGeometryTrianglesNV.html>"]
+pub struct GeometryTrianglesNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub vertex_data: Buffer,
+ pub vertex_offset: DeviceSize,
+ pub vertex_count: u32,
+ pub vertex_stride: DeviceSize,
+ pub vertex_format: Format,
+ pub index_data: Buffer,
+ pub index_offset: DeviceSize,
+ pub index_count: u32,
+ pub index_type: IndexType,
+ pub transform_data: Buffer,
+ pub transform_offset: DeviceSize,
+}
+impl ::std::default::Default for GeometryTrianglesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ vertex_data: Buffer::default(),
+ vertex_offset: DeviceSize::default(),
+ vertex_count: u32::default(),
+ vertex_stride: DeviceSize::default(),
+ vertex_format: Format::default(),
+ index_data: Buffer::default(),
+ index_offset: DeviceSize::default(),
+ index_count: u32::default(),
+ index_type: IndexType::default(),
+ transform_data: Buffer::default(),
+ transform_offset: DeviceSize::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for GeometryTrianglesNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::GEOMETRY_TRIANGLES_NV;
+}
+impl GeometryTrianglesNV {
+ pub fn builder<'a>() -> GeometryTrianglesNVBuilder<'a> {
+ GeometryTrianglesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct GeometryTrianglesNVBuilder<'a> {
+ inner: GeometryTrianglesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for GeometryTrianglesNVBuilder<'a> {
+ type Target = GeometryTrianglesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for GeometryTrianglesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> GeometryTrianglesNVBuilder<'a> {
+ #[inline]
+ pub fn vertex_data(mut self, vertex_data: Buffer) -> Self {
+ self.inner.vertex_data = vertex_data;
+ self
+ }
+ #[inline]
+ pub fn vertex_offset(mut self, vertex_offset: DeviceSize) -> Self {
+ self.inner.vertex_offset = vertex_offset;
+ self
+ }
+ #[inline]
+ pub fn vertex_count(mut self, vertex_count: u32) -> Self {
+ self.inner.vertex_count = vertex_count;
+ self
+ }
+ #[inline]
+ pub fn vertex_stride(mut self, vertex_stride: DeviceSize) -> Self {
+ self.inner.vertex_stride = vertex_stride;
+ self
+ }
+ #[inline]
+ pub fn vertex_format(mut self, vertex_format: Format) -> Self {
+ self.inner.vertex_format = vertex_format;
+ self
+ }
+ #[inline]
+ pub fn index_data(mut self, index_data: Buffer) -> Self {
+ self.inner.index_data = index_data;
+ self
+ }
+ #[inline]
+ pub fn index_offset(mut self, index_offset: DeviceSize) -> Self {
+ self.inner.index_offset = index_offset;
+ self
+ }
+ #[inline]
+ pub fn index_count(mut self, index_count: u32) -> Self {
+ self.inner.index_count = index_count;
+ self
+ }
+ #[inline]
+ pub fn index_type(mut self, index_type: IndexType) -> Self {
+ self.inner.index_type = index_type;
+ self
+ }
+ #[inline]
+ pub fn transform_data(mut self, transform_data: Buffer) -> Self {
+ self.inner.transform_data = transform_data;
+ self
+ }
+ #[inline]
+ pub fn transform_offset(mut self, transform_offset: DeviceSize) -> Self {
+ self.inner.transform_offset = transform_offset;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> GeometryTrianglesNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkGeometryAABBNV.html>"]
+pub struct GeometryAABBNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub aabb_data: Buffer,
+ pub num_aab_bs: u32,
+ pub stride: u32,
+ pub offset: DeviceSize,
+}
+impl ::std::default::Default for GeometryAABBNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ aabb_data: Buffer::default(),
+ num_aab_bs: u32::default(),
+ stride: u32::default(),
+ offset: DeviceSize::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for GeometryAABBNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::GEOMETRY_AABB_NV;
+}
+impl GeometryAABBNV {
+ pub fn builder<'a>() -> GeometryAABBNVBuilder<'a> {
+ GeometryAABBNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct GeometryAABBNVBuilder<'a> {
+ inner: GeometryAABBNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for GeometryAABBNVBuilder<'a> {
+ type Target = GeometryAABBNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for GeometryAABBNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> GeometryAABBNVBuilder<'a> {
+ #[inline]
+ pub fn aabb_data(mut self, aabb_data: Buffer) -> Self {
+ self.inner.aabb_data = aabb_data;
+ self
+ }
+ #[inline]
+ pub fn num_aab_bs(mut self, num_aab_bs: u32) -> Self {
+ self.inner.num_aab_bs = num_aab_bs;
+ self
+ }
+ #[inline]
+ pub fn stride(mut self, stride: u32) -> Self {
+ self.inner.stride = stride;
+ self
+ }
+ #[inline]
+ pub fn offset(mut self, offset: DeviceSize) -> Self {
+ self.inner.offset = offset;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> GeometryAABBNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkGeometryDataNV.html>"]
+pub struct GeometryDataNV {
+ pub triangles: GeometryTrianglesNV,
+ pub aabbs: GeometryAABBNV,
+}
+impl GeometryDataNV {
+ pub fn builder<'a>() -> GeometryDataNVBuilder<'a> {
+ GeometryDataNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct GeometryDataNVBuilder<'a> {
+ inner: GeometryDataNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for GeometryDataNVBuilder<'a> {
+ type Target = GeometryDataNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for GeometryDataNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> GeometryDataNVBuilder<'a> {
+ #[inline]
+ pub fn triangles(mut self, triangles: GeometryTrianglesNV) -> Self {
+ self.inner.triangles = triangles;
+ self
+ }
+ #[inline]
+ pub fn aabbs(mut self, aabbs: GeometryAABBNV) -> Self {
+ self.inner.aabbs = aabbs;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> GeometryDataNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkGeometryNV.html>"]
+pub struct GeometryNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub geometry_type: GeometryTypeKHR,
+ pub geometry: GeometryDataNV,
+ pub flags: GeometryFlagsKHR,
+}
+impl ::std::default::Default for GeometryNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ geometry_type: GeometryTypeKHR::default(),
+ geometry: GeometryDataNV::default(),
+ flags: GeometryFlagsKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for GeometryNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::GEOMETRY_NV;
+}
+impl GeometryNV {
+ pub fn builder<'a>() -> GeometryNVBuilder<'a> {
+ GeometryNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct GeometryNVBuilder<'a> {
+ inner: GeometryNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for GeometryNVBuilder<'a> {
+ type Target = GeometryNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for GeometryNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> GeometryNVBuilder<'a> {
+ #[inline]
+ pub fn geometry_type(mut self, geometry_type: GeometryTypeKHR) -> Self {
+ self.inner.geometry_type = geometry_type;
+ self
+ }
+ #[inline]
+ pub fn geometry(mut self, geometry: GeometryDataNV) -> Self {
+ self.inner.geometry = geometry;
+ self
+ }
+ #[inline]
+ pub fn flags(mut self, flags: GeometryFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> GeometryNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureInfoNV.html>"]
+pub struct AccelerationStructureInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub ty: AccelerationStructureTypeNV,
+ pub flags: BuildAccelerationStructureFlagsNV,
+ pub instance_count: u32,
+ pub geometry_count: u32,
+ pub p_geometries: *const GeometryNV,
+}
+impl ::std::default::Default for AccelerationStructureInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ ty: AccelerationStructureTypeNV::default(),
+ flags: BuildAccelerationStructureFlagsNV::default(),
+ instance_count: u32::default(),
+ geometry_count: u32::default(),
+ p_geometries: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for AccelerationStructureInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_INFO_NV;
+}
+impl AccelerationStructureInfoNV {
+ pub fn builder<'a>() -> AccelerationStructureInfoNVBuilder<'a> {
+ AccelerationStructureInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct AccelerationStructureInfoNVBuilder<'a> {
+ inner: AccelerationStructureInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AccelerationStructureInfoNVBuilder<'a> {
+ type Target = AccelerationStructureInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> AccelerationStructureInfoNVBuilder<'a> {
+ #[inline]
+ pub fn ty(mut self, ty: AccelerationStructureTypeNV) -> Self {
+ self.inner.ty = ty;
+ self
+ }
+ #[inline]
+ pub fn flags(mut self, flags: BuildAccelerationStructureFlagsNV) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn instance_count(mut self, instance_count: u32) -> Self {
+ self.inner.instance_count = instance_count;
+ self
+ }
+ #[inline]
+ pub fn geometries(mut self, geometries: &'a [GeometryNV]) -> Self {
+ self.inner.geometry_count = geometries.len() as _;
+ self.inner.p_geometries = geometries.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> AccelerationStructureInfoNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureCreateInfoNV.html>"]
+pub struct AccelerationStructureCreateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub compacted_size: DeviceSize,
+ pub info: AccelerationStructureInfoNV,
+}
+impl ::std::default::Default for AccelerationStructureCreateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ compacted_size: DeviceSize::default(),
+ info: AccelerationStructureInfoNV::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for AccelerationStructureCreateInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_CREATE_INFO_NV;
+}
+impl AccelerationStructureCreateInfoNV {
+ pub fn builder<'a>() -> AccelerationStructureCreateInfoNVBuilder<'a> {
+ AccelerationStructureCreateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct AccelerationStructureCreateInfoNVBuilder<'a> {
+ inner: AccelerationStructureCreateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsAccelerationStructureCreateInfoNV {}
+impl<'a> ::std::ops::Deref for AccelerationStructureCreateInfoNVBuilder<'a> {
+ type Target = AccelerationStructureCreateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureCreateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> AccelerationStructureCreateInfoNVBuilder<'a> {
+ #[inline]
+ pub fn compacted_size(mut self, compacted_size: DeviceSize) -> Self {
+ self.inner.compacted_size = compacted_size;
+ self
+ }
+ #[inline]
+ pub fn info(mut self, info: AccelerationStructureInfoNV) -> Self {
+ self.inner.info = info;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsAccelerationStructureCreateInfoNV>(
+ mut self,
+ next: &'a mut T,
+ ) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> AccelerationStructureCreateInfoNV {
+ self.inner
+ }
+}
+// C-layout mirror of VkBindAccelerationStructureMemoryInfoNV (doc link below).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBindAccelerationStructureMemoryInfoNV.html>"]
+pub struct BindAccelerationStructureMemoryInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub acceleration_structure: AccelerationStructureNV,
+ pub memory: DeviceMemory,
+ pub memory_offset: DeviceSize,
+ pub device_index_count: u32,
+ pub p_device_indices: *const u32,
+}
+// Default pre-fills s_type with this struct's Vulkan tag; pointers are null
+// and all other fields zero/default.
+impl ::std::default::Default for BindAccelerationStructureMemoryInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ acceleration_structure: AccelerationStructureNV::default(),
+ memory: DeviceMemory::default(),
+ memory_offset: DeviceSize::default(),
+ device_index_count: u32::default(),
+ p_device_indices: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for BindAccelerationStructureMemoryInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV;
+}
+impl BindAccelerationStructureMemoryInfoNV {
+ pub fn builder<'a>() -> BindAccelerationStructureMemoryInfoNVBuilder<'a> {
+ BindAccelerationStructureMemoryInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper; the 'a lifetime (via PhantomData) ties borrowed slice
+// arguments (see `device_indices`) to the builder.
+#[repr(transparent)]
+pub struct BindAccelerationStructureMemoryInfoNVBuilder<'a> {
+ inner: BindAccelerationStructureMemoryInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BindAccelerationStructureMemoryInfoNVBuilder<'a> {
+ type Target = BindAccelerationStructureMemoryInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BindAccelerationStructureMemoryInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BindAccelerationStructureMemoryInfoNVBuilder<'a> {
+ #[inline]
+ pub fn acceleration_structure(
+ mut self,
+ acceleration_structure: AccelerationStructureNV,
+ ) -> Self {
+ self.inner.acceleration_structure = acceleration_structure;
+ self
+ }
+ #[inline]
+ pub fn memory(mut self, memory: DeviceMemory) -> Self {
+ self.inner.memory = memory;
+ self
+ }
+ #[inline]
+ pub fn memory_offset(mut self, memory_offset: DeviceSize) -> Self {
+ self.inner.memory_offset = memory_offset;
+ self
+ }
+ // Sets both the count and the data pointer from one borrowed slice.
+ #[inline]
+ pub fn device_indices(mut self, device_indices: &'a [u32]) -> Self {
+ self.inner.device_index_count = device_indices.len() as _;
+ self.inner.p_device_indices = device_indices.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BindAccelerationStructureMemoryInfoNV {
+ self.inner
+ }
+}
+// C-layout mirror of VkWriteDescriptorSetAccelerationStructureKHR (doc link below).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkWriteDescriptorSetAccelerationStructureKHR.html>"]
+pub struct WriteDescriptorSetAccelerationStructureKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub acceleration_structure_count: u32,
+ pub p_acceleration_structures: *const AccelerationStructureKHR,
+}
+// Default pre-fills s_type; pointers null, count zero.
+impl ::std::default::Default for WriteDescriptorSetAccelerationStructureKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ acceleration_structure_count: u32::default(),
+ p_acceleration_structures: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for WriteDescriptorSetAccelerationStructureKHR {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR;
+}
+impl WriteDescriptorSetAccelerationStructureKHR {
+ pub fn builder<'a>() -> WriteDescriptorSetAccelerationStructureKHRBuilder<'a> {
+ WriteDescriptorSetAccelerationStructureKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct WriteDescriptorSetAccelerationStructureKHRBuilder<'a> {
+ inner: WriteDescriptorSetAccelerationStructureKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Both the builder and the raw struct may be pushed onto a WriteDescriptorSet
+// p_next chain.
+unsafe impl ExtendsWriteDescriptorSet for WriteDescriptorSetAccelerationStructureKHRBuilder<'_> {}
+unsafe impl ExtendsWriteDescriptorSet for WriteDescriptorSetAccelerationStructureKHR {}
+impl<'a> ::std::ops::Deref for WriteDescriptorSetAccelerationStructureKHRBuilder<'a> {
+ type Target = WriteDescriptorSetAccelerationStructureKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for WriteDescriptorSetAccelerationStructureKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> WriteDescriptorSetAccelerationStructureKHRBuilder<'a> {
+ // Sets both the count and the data pointer from one borrowed slice.
+ #[inline]
+ pub fn acceleration_structures(
+ mut self,
+ acceleration_structures: &'a [AccelerationStructureKHR],
+ ) -> Self {
+ self.inner.acceleration_structure_count = acceleration_structures.len() as _;
+ self.inner.p_acceleration_structures = acceleration_structures.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> WriteDescriptorSetAccelerationStructureKHR {
+ self.inner
+ }
+}
+// C-layout mirror of VkWriteDescriptorSetAccelerationStructureNV (doc link
+// below); NV-extension counterpart of the KHR struct of the same shape.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkWriteDescriptorSetAccelerationStructureNV.html>"]
+pub struct WriteDescriptorSetAccelerationStructureNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub acceleration_structure_count: u32,
+ pub p_acceleration_structures: *const AccelerationStructureNV,
+}
+// Default pre-fills s_type; pointers null, count zero.
+impl ::std::default::Default for WriteDescriptorSetAccelerationStructureNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ acceleration_structure_count: u32::default(),
+ p_acceleration_structures: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for WriteDescriptorSetAccelerationStructureNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV;
+}
+impl WriteDescriptorSetAccelerationStructureNV {
+ pub fn builder<'a>() -> WriteDescriptorSetAccelerationStructureNVBuilder<'a> {
+ WriteDescriptorSetAccelerationStructureNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct WriteDescriptorSetAccelerationStructureNVBuilder<'a> {
+ inner: WriteDescriptorSetAccelerationStructureNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Both the builder and the raw struct may extend a WriteDescriptorSet chain.
+unsafe impl ExtendsWriteDescriptorSet for WriteDescriptorSetAccelerationStructureNVBuilder<'_> {}
+unsafe impl ExtendsWriteDescriptorSet for WriteDescriptorSetAccelerationStructureNV {}
+impl<'a> ::std::ops::Deref for WriteDescriptorSetAccelerationStructureNVBuilder<'a> {
+ type Target = WriteDescriptorSetAccelerationStructureNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for WriteDescriptorSetAccelerationStructureNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> WriteDescriptorSetAccelerationStructureNVBuilder<'a> {
+ // Sets both the count and the data pointer from one borrowed slice.
+ #[inline]
+ pub fn acceleration_structures(
+ mut self,
+ acceleration_structures: &'a [AccelerationStructureNV],
+ ) -> Self {
+ self.inner.acceleration_structure_count = acceleration_structures.len() as _;
+ self.inner.p_acceleration_structures = acceleration_structures.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> WriteDescriptorSetAccelerationStructureNV {
+ self.inner
+ }
+}
+// C-layout mirror of VkAccelerationStructureMemoryRequirementsInfoNV (doc link below).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureMemoryRequirementsInfoNV.html>"]
+pub struct AccelerationStructureMemoryRequirementsInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub ty: AccelerationStructureMemoryRequirementsTypeNV,
+ pub acceleration_structure: AccelerationStructureNV,
+}
+// Default pre-fills s_type; remaining fields are null/default.
+impl ::std::default::Default for AccelerationStructureMemoryRequirementsInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ ty: AccelerationStructureMemoryRequirementsTypeNV::default(),
+ acceleration_structure: AccelerationStructureNV::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for AccelerationStructureMemoryRequirementsInfoNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV;
+}
+impl AccelerationStructureMemoryRequirementsInfoNV {
+ pub fn builder<'a>() -> AccelerationStructureMemoryRequirementsInfoNVBuilder<'a> {
+ AccelerationStructureMemoryRequirementsInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct AccelerationStructureMemoryRequirementsInfoNVBuilder<'a> {
+ inner: AccelerationStructureMemoryRequirementsInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AccelerationStructureMemoryRequirementsInfoNVBuilder<'a> {
+ type Target = AccelerationStructureMemoryRequirementsInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureMemoryRequirementsInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Fluent by-value setters for each field.
+impl<'a> AccelerationStructureMemoryRequirementsInfoNVBuilder<'a> {
+ #[inline]
+ pub fn ty(mut self, ty: AccelerationStructureMemoryRequirementsTypeNV) -> Self {
+ self.inner.ty = ty;
+ self
+ }
+ #[inline]
+ pub fn acceleration_structure(
+ mut self,
+ acceleration_structure: AccelerationStructureNV,
+ ) -> Self {
+ self.inner.acceleration_structure = acceleration_structure;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> AccelerationStructureMemoryRequirementsInfoNV {
+ self.inner
+ }
+}
+// C-layout mirror of VkPhysicalDeviceAccelerationStructureFeaturesKHR (doc
+// link below). Feature flags are Bool32 (u32) per the Vulkan ABI; the builder
+// setters take Rust `bool` and convert with `.into()`.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceAccelerationStructureFeaturesKHR.html>"]
+pub struct PhysicalDeviceAccelerationStructureFeaturesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub acceleration_structure: Bool32,
+ pub acceleration_structure_capture_replay: Bool32,
+ pub acceleration_structure_indirect_build: Bool32,
+ pub acceleration_structure_host_commands: Bool32,
+ pub descriptor_binding_acceleration_structure_update_after_bind: Bool32,
+}
+// Default pre-fills s_type; p_next null (mutable here: this struct is also
+// filled by queries), all flags zero.
+impl ::std::default::Default for PhysicalDeviceAccelerationStructureFeaturesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ acceleration_structure: Bool32::default(),
+ acceleration_structure_capture_replay: Bool32::default(),
+ acceleration_structure_indirect_build: Bool32::default(),
+ acceleration_structure_host_commands: Bool32::default(),
+ descriptor_binding_acceleration_structure_update_after_bind: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceAccelerationStructureFeaturesKHR {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR;
+}
+impl PhysicalDeviceAccelerationStructureFeaturesKHR {
+ pub fn builder<'a>() -> PhysicalDeviceAccelerationStructureFeaturesKHRBuilder<'a> {
+ PhysicalDeviceAccelerationStructureFeaturesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceAccelerationStructureFeaturesKHRBuilder<'a> {
+ inner: PhysicalDeviceAccelerationStructureFeaturesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// May extend both PhysicalDeviceFeatures2 (queries) and DeviceCreateInfo
+// (enabling features at device creation).
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceAccelerationStructureFeaturesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceAccelerationStructureFeaturesKHR {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceAccelerationStructureFeaturesKHRBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceAccelerationStructureFeaturesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceAccelerationStructureFeaturesKHRBuilder<'a> {
+ type Target = PhysicalDeviceAccelerationStructureFeaturesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceAccelerationStructureFeaturesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceAccelerationStructureFeaturesKHRBuilder<'a> {
+ #[inline]
+ pub fn acceleration_structure(mut self, acceleration_structure: bool) -> Self {
+ self.inner.acceleration_structure = acceleration_structure.into();
+ self
+ }
+ #[inline]
+ pub fn acceleration_structure_capture_replay(
+ mut self,
+ acceleration_structure_capture_replay: bool,
+ ) -> Self {
+ self.inner.acceleration_structure_capture_replay =
+ acceleration_structure_capture_replay.into();
+ self
+ }
+ #[inline]
+ pub fn acceleration_structure_indirect_build(
+ mut self,
+ acceleration_structure_indirect_build: bool,
+ ) -> Self {
+ self.inner.acceleration_structure_indirect_build =
+ acceleration_structure_indirect_build.into();
+ self
+ }
+ #[inline]
+ pub fn acceleration_structure_host_commands(
+ mut self,
+ acceleration_structure_host_commands: bool,
+ ) -> Self {
+ self.inner.acceleration_structure_host_commands =
+ acceleration_structure_host_commands.into();
+ self
+ }
+ #[inline]
+ pub fn descriptor_binding_acceleration_structure_update_after_bind(
+ mut self,
+ descriptor_binding_acceleration_structure_update_after_bind: bool,
+ ) -> Self {
+ self.inner
+ .descriptor_binding_acceleration_structure_update_after_bind =
+ descriptor_binding_acceleration_structure_update_after_bind.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceAccelerationStructureFeaturesKHR {
+ self.inner
+ }
+}
+// C-layout mirror of VkPhysicalDeviceRayTracingPipelineFeaturesKHR (doc link
+// below). Bool32 flags; builder setters accept `bool` and `.into()` them.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceRayTracingPipelineFeaturesKHR.html>"]
+pub struct PhysicalDeviceRayTracingPipelineFeaturesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub ray_tracing_pipeline: Bool32,
+ pub ray_tracing_pipeline_shader_group_handle_capture_replay: Bool32,
+ pub ray_tracing_pipeline_shader_group_handle_capture_replay_mixed: Bool32,
+ pub ray_tracing_pipeline_trace_rays_indirect: Bool32,
+ pub ray_traversal_primitive_culling: Bool32,
+}
+// Default pre-fills s_type; p_next null, all flags zero.
+impl ::std::default::Default for PhysicalDeviceRayTracingPipelineFeaturesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ ray_tracing_pipeline: Bool32::default(),
+ ray_tracing_pipeline_shader_group_handle_capture_replay: Bool32::default(),
+ ray_tracing_pipeline_shader_group_handle_capture_replay_mixed: Bool32::default(),
+ ray_tracing_pipeline_trace_rays_indirect: Bool32::default(),
+ ray_traversal_primitive_culling: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceRayTracingPipelineFeaturesKHR {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR;
+}
+impl PhysicalDeviceRayTracingPipelineFeaturesKHR {
+ pub fn builder<'a>() -> PhysicalDeviceRayTracingPipelineFeaturesKHRBuilder<'a> {
+ PhysicalDeviceRayTracingPipelineFeaturesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceRayTracingPipelineFeaturesKHRBuilder<'a> {
+ inner: PhysicalDeviceRayTracingPipelineFeaturesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// May extend both PhysicalDeviceFeatures2 and DeviceCreateInfo chains.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceRayTracingPipelineFeaturesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRayTracingPipelineFeaturesKHR {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayTracingPipelineFeaturesKHRBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayTracingPipelineFeaturesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceRayTracingPipelineFeaturesKHRBuilder<'a> {
+ type Target = PhysicalDeviceRayTracingPipelineFeaturesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceRayTracingPipelineFeaturesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceRayTracingPipelineFeaturesKHRBuilder<'a> {
+ #[inline]
+ pub fn ray_tracing_pipeline(mut self, ray_tracing_pipeline: bool) -> Self {
+ self.inner.ray_tracing_pipeline = ray_tracing_pipeline.into();
+ self
+ }
+ #[inline]
+ pub fn ray_tracing_pipeline_shader_group_handle_capture_replay(
+ mut self,
+ ray_tracing_pipeline_shader_group_handle_capture_replay: bool,
+ ) -> Self {
+ self.inner
+ .ray_tracing_pipeline_shader_group_handle_capture_replay =
+ ray_tracing_pipeline_shader_group_handle_capture_replay.into();
+ self
+ }
+ #[inline]
+ pub fn ray_tracing_pipeline_shader_group_handle_capture_replay_mixed(
+ mut self,
+ ray_tracing_pipeline_shader_group_handle_capture_replay_mixed: bool,
+ ) -> Self {
+ self.inner
+ .ray_tracing_pipeline_shader_group_handle_capture_replay_mixed =
+ ray_tracing_pipeline_shader_group_handle_capture_replay_mixed.into();
+ self
+ }
+ #[inline]
+ pub fn ray_tracing_pipeline_trace_rays_indirect(
+ mut self,
+ ray_tracing_pipeline_trace_rays_indirect: bool,
+ ) -> Self {
+ self.inner.ray_tracing_pipeline_trace_rays_indirect =
+ ray_tracing_pipeline_trace_rays_indirect.into();
+ self
+ }
+ #[inline]
+ pub fn ray_traversal_primitive_culling(
+ mut self,
+ ray_traversal_primitive_culling: bool,
+ ) -> Self {
+ self.inner.ray_traversal_primitive_culling = ray_traversal_primitive_culling.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceRayTracingPipelineFeaturesKHR {
+ self.inner
+ }
+}
+// C-layout mirror of VkPhysicalDeviceRayQueryFeaturesKHR (doc link below):
+// a single Bool32 feature flag plus the standard s_type/p_next header.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceRayQueryFeaturesKHR.html>"]
+pub struct PhysicalDeviceRayQueryFeaturesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub ray_query: Bool32,
+}
+// Default pre-fills s_type; p_next null, flag zero.
+impl ::std::default::Default for PhysicalDeviceRayQueryFeaturesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ ray_query: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceRayQueryFeaturesKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR;
+}
+impl PhysicalDeviceRayQueryFeaturesKHR {
+ pub fn builder<'a>() -> PhysicalDeviceRayQueryFeaturesKHRBuilder<'a> {
+ PhysicalDeviceRayQueryFeaturesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceRayQueryFeaturesKHRBuilder<'a> {
+ inner: PhysicalDeviceRayQueryFeaturesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// May extend both PhysicalDeviceFeatures2 and DeviceCreateInfo chains.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRayQueryFeaturesKHRBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRayQueryFeaturesKHR {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayQueryFeaturesKHRBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayQueryFeaturesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceRayQueryFeaturesKHRBuilder<'a> {
+ type Target = PhysicalDeviceRayQueryFeaturesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceRayQueryFeaturesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceRayQueryFeaturesKHRBuilder<'a> {
+ #[inline]
+ pub fn ray_query(mut self, ray_query: bool) -> Self {
+ self.inner.ray_query = ray_query.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceRayQueryFeaturesKHR {
+ self.inner
+ }
+}
+// C-layout mirror of VkPhysicalDeviceAccelerationStructurePropertiesKHR (doc
+// link below): device limits reported by property queries.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceAccelerationStructurePropertiesKHR.html>"]
+pub struct PhysicalDeviceAccelerationStructurePropertiesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub max_geometry_count: u64,
+ pub max_instance_count: u64,
+ pub max_primitive_count: u64,
+ pub max_per_stage_descriptor_acceleration_structures: u32,
+ pub max_per_stage_descriptor_update_after_bind_acceleration_structures: u32,
+ pub max_descriptor_set_acceleration_structures: u32,
+ pub max_descriptor_set_update_after_bind_acceleration_structures: u32,
+ pub min_acceleration_structure_scratch_offset_alignment: u32,
+}
+// Default pre-fills s_type; p_next null, all limits zero.
+impl ::std::default::Default for PhysicalDeviceAccelerationStructurePropertiesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ max_geometry_count: u64::default(),
+ max_instance_count: u64::default(),
+ max_primitive_count: u64::default(),
+ max_per_stage_descriptor_acceleration_structures: u32::default(),
+ max_per_stage_descriptor_update_after_bind_acceleration_structures: u32::default(),
+ max_descriptor_set_acceleration_structures: u32::default(),
+ max_descriptor_set_update_after_bind_acceleration_structures: u32::default(),
+ min_acceleration_structure_scratch_offset_alignment: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceAccelerationStructurePropertiesKHR {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR;
+}
+impl PhysicalDeviceAccelerationStructurePropertiesKHR {
+ pub fn builder<'a>() -> PhysicalDeviceAccelerationStructurePropertiesKHRBuilder<'a> {
+ PhysicalDeviceAccelerationStructurePropertiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceAccelerationStructurePropertiesKHRBuilder<'a> {
+ inner: PhysicalDeviceAccelerationStructurePropertiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// May extend a PhysicalDeviceProperties2 query chain (output struct only —
+// no DeviceCreateInfo impl, unlike the feature structs above).
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceAccelerationStructurePropertiesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceAccelerationStructurePropertiesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceAccelerationStructurePropertiesKHRBuilder<'a> {
+ type Target = PhysicalDeviceAccelerationStructurePropertiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceAccelerationStructurePropertiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceAccelerationStructurePropertiesKHRBuilder<'a> {
+ #[inline]
+ pub fn max_geometry_count(mut self, max_geometry_count: u64) -> Self {
+ self.inner.max_geometry_count = max_geometry_count;
+ self
+ }
+ #[inline]
+ pub fn max_instance_count(mut self, max_instance_count: u64) -> Self {
+ self.inner.max_instance_count = max_instance_count;
+ self
+ }
+ #[inline]
+ pub fn max_primitive_count(mut self, max_primitive_count: u64) -> Self {
+ self.inner.max_primitive_count = max_primitive_count;
+ self
+ }
+ #[inline]
+ pub fn max_per_stage_descriptor_acceleration_structures(
+ mut self,
+ max_per_stage_descriptor_acceleration_structures: u32,
+ ) -> Self {
+ self.inner.max_per_stage_descriptor_acceleration_structures =
+ max_per_stage_descriptor_acceleration_structures;
+ self
+ }
+ #[inline]
+ pub fn max_per_stage_descriptor_update_after_bind_acceleration_structures(
+ mut self,
+ max_per_stage_descriptor_update_after_bind_acceleration_structures: u32,
+ ) -> Self {
+ self.inner
+ .max_per_stage_descriptor_update_after_bind_acceleration_structures =
+ max_per_stage_descriptor_update_after_bind_acceleration_structures;
+ self
+ }
+ #[inline]
+ pub fn max_descriptor_set_acceleration_structures(
+ mut self,
+ max_descriptor_set_acceleration_structures: u32,
+ ) -> Self {
+ self.inner.max_descriptor_set_acceleration_structures =
+ max_descriptor_set_acceleration_structures;
+ self
+ }
+ #[inline]
+ pub fn max_descriptor_set_update_after_bind_acceleration_structures(
+ mut self,
+ max_descriptor_set_update_after_bind_acceleration_structures: u32,
+ ) -> Self {
+ self.inner
+ .max_descriptor_set_update_after_bind_acceleration_structures =
+ max_descriptor_set_update_after_bind_acceleration_structures;
+ self
+ }
+ #[inline]
+ pub fn min_acceleration_structure_scratch_offset_alignment(
+ mut self,
+ min_acceleration_structure_scratch_offset_alignment: u32,
+ ) -> Self {
+ self.inner
+ .min_acceleration_structure_scratch_offset_alignment =
+ min_acceleration_structure_scratch_offset_alignment;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceAccelerationStructurePropertiesKHR {
+ self.inner
+ }
+}
+// C-layout mirror of VkPhysicalDeviceRayTracingPipelinePropertiesKHR (doc
+// link below): ray-tracing pipeline limits reported by property queries.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceRayTracingPipelinePropertiesKHR.html>"]
+pub struct PhysicalDeviceRayTracingPipelinePropertiesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_group_handle_size: u32,
+ pub max_ray_recursion_depth: u32,
+ pub max_shader_group_stride: u32,
+ pub shader_group_base_alignment: u32,
+ pub shader_group_handle_capture_replay_size: u32,
+ pub max_ray_dispatch_invocation_count: u32,
+ pub shader_group_handle_alignment: u32,
+ pub max_ray_hit_attribute_size: u32,
+}
+// Default pre-fills s_type; p_next null, all limits zero.
+impl ::std::default::Default for PhysicalDeviceRayTracingPipelinePropertiesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shader_group_handle_size: u32::default(),
+ max_ray_recursion_depth: u32::default(),
+ max_shader_group_stride: u32::default(),
+ shader_group_base_alignment: u32::default(),
+ shader_group_handle_capture_replay_size: u32::default(),
+ max_ray_dispatch_invocation_count: u32::default(),
+ shader_group_handle_alignment: u32::default(),
+ max_ray_hit_attribute_size: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceRayTracingPipelinePropertiesKHR {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR;
+}
+impl PhysicalDeviceRayTracingPipelinePropertiesKHR {
+ pub fn builder<'a>() -> PhysicalDeviceRayTracingPipelinePropertiesKHRBuilder<'a> {
+ PhysicalDeviceRayTracingPipelinePropertiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceRayTracingPipelinePropertiesKHRBuilder<'a> {
+ inner: PhysicalDeviceRayTracingPipelinePropertiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// May extend a PhysicalDeviceProperties2 query chain.
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceRayTracingPipelinePropertiesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceRayTracingPipelinePropertiesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceRayTracingPipelinePropertiesKHRBuilder<'a> {
+ type Target = PhysicalDeviceRayTracingPipelinePropertiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceRayTracingPipelinePropertiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceRayTracingPipelinePropertiesKHRBuilder<'a> {
+ #[inline]
+ pub fn shader_group_handle_size(mut self, shader_group_handle_size: u32) -> Self {
+ self.inner.shader_group_handle_size = shader_group_handle_size;
+ self
+ }
+ #[inline]
+ pub fn max_ray_recursion_depth(mut self, max_ray_recursion_depth: u32) -> Self {
+ self.inner.max_ray_recursion_depth = max_ray_recursion_depth;
+ self
+ }
+ #[inline]
+ pub fn max_shader_group_stride(mut self, max_shader_group_stride: u32) -> Self {
+ self.inner.max_shader_group_stride = max_shader_group_stride;
+ self
+ }
+ #[inline]
+ pub fn shader_group_base_alignment(mut self, shader_group_base_alignment: u32) -> Self {
+ self.inner.shader_group_base_alignment = shader_group_base_alignment;
+ self
+ }
+ #[inline]
+ pub fn shader_group_handle_capture_replay_size(
+ mut self,
+ shader_group_handle_capture_replay_size: u32,
+ ) -> Self {
+ self.inner.shader_group_handle_capture_replay_size =
+ shader_group_handle_capture_replay_size;
+ self
+ }
+ #[inline]
+ pub fn max_ray_dispatch_invocation_count(
+ mut self,
+ max_ray_dispatch_invocation_count: u32,
+ ) -> Self {
+ self.inner.max_ray_dispatch_invocation_count = max_ray_dispatch_invocation_count;
+ self
+ }
+ #[inline]
+ pub fn shader_group_handle_alignment(mut self, shader_group_handle_alignment: u32) -> Self {
+ self.inner.shader_group_handle_alignment = shader_group_handle_alignment;
+ self
+ }
+ #[inline]
+ pub fn max_ray_hit_attribute_size(mut self, max_ray_hit_attribute_size: u32) -> Self {
+ self.inner.max_ray_hit_attribute_size = max_ray_hit_attribute_size;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceRayTracingPipelinePropertiesKHR {
+ self.inner
+ }
+}
+// C-layout mirror of VkPhysicalDeviceRayTracingPropertiesNV (doc link below),
+// the NV-extension predecessor of the KHR pipeline-properties struct above.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceRayTracingPropertiesNV.html>"]
+pub struct PhysicalDeviceRayTracingPropertiesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_group_handle_size: u32,
+ pub max_recursion_depth: u32,
+ pub max_shader_group_stride: u32,
+ pub shader_group_base_alignment: u32,
+ pub max_geometry_count: u64,
+ pub max_instance_count: u64,
+ pub max_triangle_count: u64,
+ pub max_descriptor_set_acceleration_structures: u32,
+}
+// Default pre-fills s_type; p_next null, all limits zero.
+impl ::std::default::Default for PhysicalDeviceRayTracingPropertiesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shader_group_handle_size: u32::default(),
+ max_recursion_depth: u32::default(),
+ max_shader_group_stride: u32::default(),
+ shader_group_base_alignment: u32::default(),
+ max_geometry_count: u64::default(),
+ max_instance_count: u64::default(),
+ max_triangle_count: u64::default(),
+ max_descriptor_set_acceleration_structures: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceRayTracingPropertiesNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV;
+}
+impl PhysicalDeviceRayTracingPropertiesNV {
+ pub fn builder<'a>() -> PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
+ PhysicalDeviceRayTracingPropertiesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
+ inner: PhysicalDeviceRayTracingPropertiesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// May extend a PhysicalDeviceProperties2 query chain.
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceRayTracingPropertiesNVBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceRayTracingPropertiesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
+ type Target = PhysicalDeviceRayTracingPropertiesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceRayTracingPropertiesNVBuilder<'a> {
+ #[inline]
+ pub fn shader_group_handle_size(mut self, shader_group_handle_size: u32) -> Self {
+ self.inner.shader_group_handle_size = shader_group_handle_size;
+ self
+ }
+ #[inline]
+ pub fn max_recursion_depth(mut self, max_recursion_depth: u32) -> Self {
+ self.inner.max_recursion_depth = max_recursion_depth;
+ self
+ }
+ #[inline]
+ pub fn max_shader_group_stride(mut self, max_shader_group_stride: u32) -> Self {
+ self.inner.max_shader_group_stride = max_shader_group_stride;
+ self
+ }
+ #[inline]
+ pub fn shader_group_base_alignment(mut self, shader_group_base_alignment: u32) -> Self {
+ self.inner.shader_group_base_alignment = shader_group_base_alignment;
+ self
+ }
+ #[inline]
+ pub fn max_geometry_count(mut self, max_geometry_count: u64) -> Self {
+ self.inner.max_geometry_count = max_geometry_count;
+ self
+ }
+ #[inline]
+ pub fn max_instance_count(mut self, max_instance_count: u64) -> Self {
+ self.inner.max_instance_count = max_instance_count;
+ self
+ }
+ #[inline]
+ pub fn max_triangle_count(mut self, max_triangle_count: u64) -> Self {
+ self.inner.max_triangle_count = max_triangle_count;
+ self
+ }
+ #[inline]
+ pub fn max_descriptor_set_acceleration_structures(
+ mut self,
+ max_descriptor_set_acceleration_structures: u32,
+ ) -> Self {
+ self.inner.max_descriptor_set_acceleration_structures =
+ max_descriptor_set_acceleration_structures;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceRayTracingPropertiesNV {
+ self.inner
+ }
+}
+// C-layout mirror of VkStridedDeviceAddressRegionKHR: a device-address region described
+// by start address, stride, and total size. No s_type/p_next header, so Default derives.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkStridedDeviceAddressRegionKHR.html>"]
+pub struct StridedDeviceAddressRegionKHR {
+    pub device_address: DeviceAddress,
+    pub stride: DeviceSize,
+    pub size: DeviceSize,
+}
+impl StridedDeviceAddressRegionKHR {
+    // Start a builder over a default-initialized struct.
+    pub fn builder<'a>() -> StridedDeviceAddressRegionKHRBuilder<'a> {
+        StridedDeviceAddressRegionKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper: same layout as the inner struct; PhantomData carries the
+// builder lifetime (unused here — the struct stores no borrowed pointers).
+#[repr(transparent)]
+pub struct StridedDeviceAddressRegionKHRBuilder<'a> {
+    inner: StridedDeviceAddressRegionKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Deref/DerefMut let a builder be used anywhere the raw struct is expected.
+impl<'a> ::std::ops::Deref for StridedDeviceAddressRegionKHRBuilder<'a> {
+    type Target = StridedDeviceAddressRegionKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for StridedDeviceAddressRegionKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters: each writes one field of `inner` and returns the builder.
+impl<'a> StridedDeviceAddressRegionKHRBuilder<'a> {
+    #[inline]
+    pub fn device_address(mut self, device_address: DeviceAddress) -> Self {
+        self.inner.device_address = device_address;
+        self
+    }
+    #[inline]
+    pub fn stride(mut self, stride: DeviceSize) -> Self {
+        self.inner.stride = stride;
+        self
+    }
+    #[inline]
+    pub fn size(mut self, size: DeviceSize) -> Self {
+        self.inner.size = size;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> StridedDeviceAddressRegionKHR {
+        self.inner
+    }
+}
+// C-layout mirror of VkTraceRaysIndirectCommandKHR: a width/height/depth dispatch
+// dimension triple. No s_type/p_next header, so Default derives.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkTraceRaysIndirectCommandKHR.html>"]
+pub struct TraceRaysIndirectCommandKHR {
+    pub width: u32,
+    pub height: u32,
+    pub depth: u32,
+}
+impl TraceRaysIndirectCommandKHR {
+    // Start a builder over a default-initialized struct.
+    pub fn builder<'a>() -> TraceRaysIndirectCommandKHRBuilder<'a> {
+        TraceRaysIndirectCommandKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper: same layout as the inner struct; PhantomData carries the
+// builder lifetime (unused here — the struct stores no borrowed pointers).
+#[repr(transparent)]
+pub struct TraceRaysIndirectCommandKHRBuilder<'a> {
+    inner: TraceRaysIndirectCommandKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Deref/DerefMut let a builder be used anywhere the raw struct is expected.
+impl<'a> ::std::ops::Deref for TraceRaysIndirectCommandKHRBuilder<'a> {
+    type Target = TraceRaysIndirectCommandKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for TraceRaysIndirectCommandKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters: each writes one field of `inner` and returns the builder.
+impl<'a> TraceRaysIndirectCommandKHRBuilder<'a> {
+    #[inline]
+    pub fn width(mut self, width: u32) -> Self {
+        self.inner.width = width;
+        self
+    }
+    #[inline]
+    pub fn height(mut self, height: u32) -> Self {
+        self.inner.height = height;
+        self
+    }
+    #[inline]
+    pub fn depth(mut self, depth: u32) -> Self {
+        self.inner.depth = depth;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> TraceRaysIndirectCommandKHR {
+        self.inner
+    }
+}
+// C-layout mirror of VkTraceRaysIndirectCommand2KHR: per shader-binding-table
+// (raygen/miss/hit/callable) address, size, and stride fields, plus the
+// width/height/depth dispatch dimensions. No s_type/p_next header, so Default derives.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkTraceRaysIndirectCommand2KHR.html>"]
+pub struct TraceRaysIndirectCommand2KHR {
+    pub raygen_shader_record_address: DeviceAddress,
+    pub raygen_shader_record_size: DeviceSize,
+    pub miss_shader_binding_table_address: DeviceAddress,
+    pub miss_shader_binding_table_size: DeviceSize,
+    pub miss_shader_binding_table_stride: DeviceSize,
+    pub hit_shader_binding_table_address: DeviceAddress,
+    pub hit_shader_binding_table_size: DeviceSize,
+    pub hit_shader_binding_table_stride: DeviceSize,
+    pub callable_shader_binding_table_address: DeviceAddress,
+    pub callable_shader_binding_table_size: DeviceSize,
+    pub callable_shader_binding_table_stride: DeviceSize,
+    pub width: u32,
+    pub height: u32,
+    pub depth: u32,
+}
+impl TraceRaysIndirectCommand2KHR {
+    // Start a builder over a default-initialized struct.
+    pub fn builder<'a>() -> TraceRaysIndirectCommand2KHRBuilder<'a> {
+        TraceRaysIndirectCommand2KHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper: same layout as the inner struct; PhantomData carries the
+// builder lifetime (unused here — the struct stores no borrowed pointers).
+#[repr(transparent)]
+pub struct TraceRaysIndirectCommand2KHRBuilder<'a> {
+    inner: TraceRaysIndirectCommand2KHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Deref/DerefMut let a builder be used anywhere the raw struct is expected.
+impl<'a> ::std::ops::Deref for TraceRaysIndirectCommand2KHRBuilder<'a> {
+    type Target = TraceRaysIndirectCommand2KHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for TraceRaysIndirectCommand2KHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters: each writes one field of `inner` and returns the builder.
+impl<'a> TraceRaysIndirectCommand2KHRBuilder<'a> {
+    #[inline]
+    pub fn raygen_shader_record_address(
+        mut self,
+        raygen_shader_record_address: DeviceAddress,
+    ) -> Self {
+        self.inner.raygen_shader_record_address = raygen_shader_record_address;
+        self
+    }
+    #[inline]
+    pub fn raygen_shader_record_size(mut self, raygen_shader_record_size: DeviceSize) -> Self {
+        self.inner.raygen_shader_record_size = raygen_shader_record_size;
+        self
+    }
+    #[inline]
+    pub fn miss_shader_binding_table_address(
+        mut self,
+        miss_shader_binding_table_address: DeviceAddress,
+    ) -> Self {
+        self.inner.miss_shader_binding_table_address = miss_shader_binding_table_address;
+        self
+    }
+    #[inline]
+    pub fn miss_shader_binding_table_size(
+        mut self,
+        miss_shader_binding_table_size: DeviceSize,
+    ) -> Self {
+        self.inner.miss_shader_binding_table_size = miss_shader_binding_table_size;
+        self
+    }
+    #[inline]
+    pub fn miss_shader_binding_table_stride(
+        mut self,
+        miss_shader_binding_table_stride: DeviceSize,
+    ) -> Self {
+        self.inner.miss_shader_binding_table_stride = miss_shader_binding_table_stride;
+        self
+    }
+    #[inline]
+    pub fn hit_shader_binding_table_address(
+        mut self,
+        hit_shader_binding_table_address: DeviceAddress,
+    ) -> Self {
+        self.inner.hit_shader_binding_table_address = hit_shader_binding_table_address;
+        self
+    }
+    #[inline]
+    pub fn hit_shader_binding_table_size(
+        mut self,
+        hit_shader_binding_table_size: DeviceSize,
+    ) -> Self {
+        self.inner.hit_shader_binding_table_size = hit_shader_binding_table_size;
+        self
+    }
+    #[inline]
+    pub fn hit_shader_binding_table_stride(
+        mut self,
+        hit_shader_binding_table_stride: DeviceSize,
+    ) -> Self {
+        self.inner.hit_shader_binding_table_stride = hit_shader_binding_table_stride;
+        self
+    }
+    #[inline]
+    pub fn callable_shader_binding_table_address(
+        mut self,
+        callable_shader_binding_table_address: DeviceAddress,
+    ) -> Self {
+        self.inner.callable_shader_binding_table_address = callable_shader_binding_table_address;
+        self
+    }
+    #[inline]
+    pub fn callable_shader_binding_table_size(
+        mut self,
+        callable_shader_binding_table_size: DeviceSize,
+    ) -> Self {
+        self.inner.callable_shader_binding_table_size = callable_shader_binding_table_size;
+        self
+    }
+    #[inline]
+    pub fn callable_shader_binding_table_stride(
+        mut self,
+        callable_shader_binding_table_stride: DeviceSize,
+    ) -> Self {
+        self.inner.callable_shader_binding_table_stride = callable_shader_binding_table_stride;
+        self
+    }
+    #[inline]
+    pub fn width(mut self, width: u32) -> Self {
+        self.inner.width = width;
+        self
+    }
+    #[inline]
+    pub fn height(mut self, height: u32) -> Self {
+        self.inner.height = height;
+        self
+    }
+    #[inline]
+    pub fn depth(mut self, depth: u32) -> Self {
+        self.inner.depth = depth;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> TraceRaysIndirectCommand2KHR {
+        self.inner
+    }
+}
+// C-layout mirror of VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR.
+// s_type/p_next form the structure-chain header; the two feature flags are Bool32
+// (the FFI-safe boolean, set from Rust bool via .into() in the builder).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR.html>"]
+pub struct PhysicalDeviceRayTracingMaintenance1FeaturesKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub ray_tracing_maintenance1: Bool32,
+    pub ray_tracing_pipeline_trace_rays_indirect2: Bool32,
+}
+// Chain-ready default: s_type pre-tagged, p_next null.
+impl ::std::default::Default for PhysicalDeviceRayTracingMaintenance1FeaturesKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            ray_tracing_maintenance1: Bool32::default(),
+            ray_tracing_pipeline_trace_rays_indirect2: Bool32::default(),
+        }
+    }
+}
+// Associates the struct with the VkStructureType tag that Default writes into s_type.
+unsafe impl TaggedStructure for PhysicalDeviceRayTracingMaintenance1FeaturesKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR;
+}
+impl PhysicalDeviceRayTracingMaintenance1FeaturesKHR {
+    // Start a builder over a default-initialized struct.
+    pub fn builder<'a>() -> PhysicalDeviceRayTracingMaintenance1FeaturesKHRBuilder<'a> {
+        PhysicalDeviceRayTracingMaintenance1FeaturesKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper: same layout as the inner struct; PhantomData carries the
+// builder lifetime (unused here — the struct stores no borrowed pointers).
+#[repr(transparent)]
+pub struct PhysicalDeviceRayTracingMaintenance1FeaturesKHRBuilder<'a> {
+    inner: PhysicalDeviceRayTracingMaintenance1FeaturesKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid p_next extension of both PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceRayTracingMaintenance1FeaturesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRayTracingMaintenance1FeaturesKHR {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayTracingMaintenance1FeaturesKHRBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayTracingMaintenance1FeaturesKHR {}
+// Deref/DerefMut let a builder be used anywhere the raw struct is expected.
+impl<'a> ::std::ops::Deref for PhysicalDeviceRayTracingMaintenance1FeaturesKHRBuilder<'a> {
+    type Target = PhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceRayTracingMaintenance1FeaturesKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters; bool arguments are converted to Bool32 with .into().
+impl<'a> PhysicalDeviceRayTracingMaintenance1FeaturesKHRBuilder<'a> {
+    #[inline]
+    pub fn ray_tracing_maintenance1(mut self, ray_tracing_maintenance1: bool) -> Self {
+        self.inner.ray_tracing_maintenance1 = ray_tracing_maintenance1.into();
+        self
+    }
+    #[inline]
+    pub fn ray_tracing_pipeline_trace_rays_indirect2(
+        mut self,
+        ray_tracing_pipeline_trace_rays_indirect2: bool,
+    ) -> Self {
+        self.inner.ray_tracing_pipeline_trace_rays_indirect2 =
+            ray_tracing_pipeline_trace_rays_indirect2.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceRayTracingMaintenance1FeaturesKHR {
+        self.inner
+    }
+}
+// C-layout mirror of VkDrmFormatModifierPropertiesListEXT.
+// Holds a count + mutable pointer pair describing a caller-provided array of
+// DrmFormatModifierPropertiesEXT entries (mutable: the implementation writes into it).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDrmFormatModifierPropertiesListEXT.html>"]
+pub struct DrmFormatModifierPropertiesListEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub drm_format_modifier_count: u32,
+    pub p_drm_format_modifier_properties: *mut DrmFormatModifierPropertiesEXT,
+}
+// Chain-ready default: s_type pre-tagged, all pointers null, count zero.
+impl ::std::default::Default for DrmFormatModifierPropertiesListEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            drm_format_modifier_count: u32::default(),
+            p_drm_format_modifier_properties: ::std::ptr::null_mut(),
+        }
+    }
+}
+// Associates the struct with the VkStructureType tag that Default writes into s_type.
+unsafe impl TaggedStructure for DrmFormatModifierPropertiesListEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT;
+}
+impl DrmFormatModifierPropertiesListEXT {
+    // Start a builder over a default-initialized struct.
+    pub fn builder<'a>() -> DrmFormatModifierPropertiesListEXTBuilder<'a> {
+        DrmFormatModifierPropertiesListEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper; 'a ties the borrowed slice stored as a raw pointer
+// (see drm_format_modifier_properties) to the builder.
+#[repr(transparent)]
+pub struct DrmFormatModifierPropertiesListEXTBuilder<'a> {
+    inner: DrmFormatModifierPropertiesListEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid p_next extension of FormatProperties2.
+unsafe impl ExtendsFormatProperties2 for DrmFormatModifierPropertiesListEXTBuilder<'_> {}
+unsafe impl ExtendsFormatProperties2 for DrmFormatModifierPropertiesListEXT {}
+// Deref/DerefMut let a builder be used anywhere the raw struct is expected.
+impl<'a> ::std::ops::Deref for DrmFormatModifierPropertiesListEXTBuilder<'a> {
+    type Target = DrmFormatModifierPropertiesListEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DrmFormatModifierPropertiesListEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DrmFormatModifierPropertiesListEXTBuilder<'a> {
+    // Records both the length and the mutable pointer of the borrowed slice in one call;
+    // the 'a lifetime keeps the borrow alive for as long as the builder exists.
+    #[inline]
+    pub fn drm_format_modifier_properties(
+        mut self,
+        drm_format_modifier_properties: &'a mut [DrmFormatModifierPropertiesEXT],
+    ) -> Self {
+        self.inner.drm_format_modifier_count = drm_format_modifier_properties.len() as _;
+        self.inner.p_drm_format_modifier_properties = drm_format_modifier_properties.as_mut_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DrmFormatModifierPropertiesListEXT {
+        self.inner
+    }
+}
+// C-layout mirror of VkDrmFormatModifierPropertiesEXT: one DRM format modifier with its
+// plane count and supported format-feature flags. No s_type/p_next header, so Default derives.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDrmFormatModifierPropertiesEXT.html>"]
+pub struct DrmFormatModifierPropertiesEXT {
+    pub drm_format_modifier: u64,
+    pub drm_format_modifier_plane_count: u32,
+    pub drm_format_modifier_tiling_features: FormatFeatureFlags,
+}
+impl DrmFormatModifierPropertiesEXT {
+    // Start a builder over a default-initialized struct.
+    pub fn builder<'a>() -> DrmFormatModifierPropertiesEXTBuilder<'a> {
+        DrmFormatModifierPropertiesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper: same layout as the inner struct; PhantomData carries the
+// builder lifetime (unused here — the struct stores no borrowed pointers).
+#[repr(transparent)]
+pub struct DrmFormatModifierPropertiesEXTBuilder<'a> {
+    inner: DrmFormatModifierPropertiesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Deref/DerefMut let a builder be used anywhere the raw struct is expected.
+impl<'a> ::std::ops::Deref for DrmFormatModifierPropertiesEXTBuilder<'a> {
+    type Target = DrmFormatModifierPropertiesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DrmFormatModifierPropertiesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable by-value setters: each writes one field of `inner` and returns the builder.
+impl<'a> DrmFormatModifierPropertiesEXTBuilder<'a> {
+    #[inline]
+    pub fn drm_format_modifier(mut self, drm_format_modifier: u64) -> Self {
+        self.inner.drm_format_modifier = drm_format_modifier;
+        self
+    }
+    #[inline]
+    pub fn drm_format_modifier_plane_count(mut self, drm_format_modifier_plane_count: u32) -> Self {
+        self.inner.drm_format_modifier_plane_count = drm_format_modifier_plane_count;
+        self
+    }
+    #[inline]
+    pub fn drm_format_modifier_tiling_features(
+        mut self,
+        drm_format_modifier_tiling_features: FormatFeatureFlags,
+    ) -> Self {
+        self.inner.drm_format_modifier_tiling_features = drm_format_modifier_tiling_features;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DrmFormatModifierPropertiesEXT {
+        self.inner
+    }
+}
+// C-layout mirror of VkPhysicalDeviceImageDrmFormatModifierInfoEXT (input struct:
+// p_next and the queue-family array are const pointers). Pairs a DRM format modifier
+// with a sharing mode and an optional queue-family-index list.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceImageDrmFormatModifierInfoEXT.html>"]
+pub struct PhysicalDeviceImageDrmFormatModifierInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub drm_format_modifier: u64,
+    pub sharing_mode: SharingMode,
+    pub queue_family_index_count: u32,
+    pub p_queue_family_indices: *const u32,
+}
+// Chain-ready default: s_type pre-tagged, all pointers null, count zero.
+impl ::std::default::Default for PhysicalDeviceImageDrmFormatModifierInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            drm_format_modifier: u64::default(),
+            sharing_mode: SharingMode::default(),
+            queue_family_index_count: u32::default(),
+            p_queue_family_indices: ::std::ptr::null(),
+        }
+    }
+}
+// Associates the struct with the VkStructureType tag that Default writes into s_type.
+unsafe impl TaggedStructure for PhysicalDeviceImageDrmFormatModifierInfoEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT;
+}
+impl PhysicalDeviceImageDrmFormatModifierInfoEXT {
+    // Start a builder over a default-initialized struct.
+    pub fn builder<'a>() -> PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder<'a> {
+        PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper; 'a ties the borrowed queue-family slice (stored as a raw
+// pointer) to the builder.
+#[repr(transparent)]
+pub struct PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder<'a> {
+    inner: PhysicalDeviceImageDrmFormatModifierInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid p_next extension of PhysicalDeviceImageFormatInfo2.
+unsafe impl ExtendsPhysicalDeviceImageFormatInfo2
+    for PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for PhysicalDeviceImageDrmFormatModifierInfoEXT {}
+// Deref/DerefMut let a builder be used anywhere the raw struct is expected.
+impl<'a> ::std::ops::Deref for PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder<'a> {
+    type Target = PhysicalDeviceImageDrmFormatModifierInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters; the slice setter records length and pointer together.
+impl<'a> PhysicalDeviceImageDrmFormatModifierInfoEXTBuilder<'a> {
+    #[inline]
+    pub fn drm_format_modifier(mut self, drm_format_modifier: u64) -> Self {
+        self.inner.drm_format_modifier = drm_format_modifier;
+        self
+    }
+    #[inline]
+    pub fn sharing_mode(mut self, sharing_mode: SharingMode) -> Self {
+        self.inner.sharing_mode = sharing_mode;
+        self
+    }
+    // Sets queue_family_index_count and p_queue_family_indices from one borrowed slice;
+    // 'a keeps the borrow alive as long as the builder.
+    #[inline]
+    pub fn queue_family_indices(mut self, queue_family_indices: &'a [u32]) -> Self {
+        self.inner.queue_family_index_count = queue_family_indices.len() as _;
+        self.inner.p_queue_family_indices = queue_family_indices.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceImageDrmFormatModifierInfoEXT {
+        self.inner
+    }
+}
+// C-layout mirror of VkImageDrmFormatModifierListCreateInfoEXT (input struct:
+// const pointers). Carries a count + pointer pair over a list of u64 DRM format modifiers.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageDrmFormatModifierListCreateInfoEXT.html>"]
+pub struct ImageDrmFormatModifierListCreateInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub drm_format_modifier_count: u32,
+    pub p_drm_format_modifiers: *const u64,
+}
+// Chain-ready default: s_type pre-tagged, pointers null, count zero.
+impl ::std::default::Default for ImageDrmFormatModifierListCreateInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            drm_format_modifier_count: u32::default(),
+            p_drm_format_modifiers: ::std::ptr::null(),
+        }
+    }
+}
+// Associates the struct with the VkStructureType tag that Default writes into s_type.
+unsafe impl TaggedStructure for ImageDrmFormatModifierListCreateInfoEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT;
+}
+impl ImageDrmFormatModifierListCreateInfoEXT {
+    // Start a builder over a default-initialized struct.
+    pub fn builder<'a>() -> ImageDrmFormatModifierListCreateInfoEXTBuilder<'a> {
+        ImageDrmFormatModifierListCreateInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper; 'a ties the borrowed modifier slice (stored as a raw
+// pointer) to the builder.
+#[repr(transparent)]
+pub struct ImageDrmFormatModifierListCreateInfoEXTBuilder<'a> {
+    inner: ImageDrmFormatModifierListCreateInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid p_next extension of ImageCreateInfo.
+unsafe impl ExtendsImageCreateInfo for ImageDrmFormatModifierListCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsImageCreateInfo for ImageDrmFormatModifierListCreateInfoEXT {}
+// Deref/DerefMut let a builder be used anywhere the raw struct is expected.
+impl<'a> ::std::ops::Deref for ImageDrmFormatModifierListCreateInfoEXTBuilder<'a> {
+    type Target = ImageDrmFormatModifierListCreateInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for ImageDrmFormatModifierListCreateInfoEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> ImageDrmFormatModifierListCreateInfoEXTBuilder<'a> {
+    // Records both the length and the pointer of the borrowed slice in one call;
+    // 'a keeps the borrow alive as long as the builder.
+    #[inline]
+    pub fn drm_format_modifiers(mut self, drm_format_modifiers: &'a [u64]) -> Self {
+        self.inner.drm_format_modifier_count = drm_format_modifiers.len() as _;
+        self.inner.p_drm_format_modifiers = drm_format_modifiers.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> ImageDrmFormatModifierListCreateInfoEXT {
+        self.inner
+    }
+}
+// C-layout mirror of VkImageDrmFormatModifierExplicitCreateInfoEXT (input struct:
+// const pointers). Pairs one DRM format modifier with an explicit per-plane
+// SubresourceLayout array (count + pointer).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageDrmFormatModifierExplicitCreateInfoEXT.html>"]
+pub struct ImageDrmFormatModifierExplicitCreateInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub drm_format_modifier: u64,
+    pub drm_format_modifier_plane_count: u32,
+    pub p_plane_layouts: *const SubresourceLayout,
+}
+// Chain-ready default: s_type pre-tagged, pointers null, count zero.
+impl ::std::default::Default for ImageDrmFormatModifierExplicitCreateInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            drm_format_modifier: u64::default(),
+            drm_format_modifier_plane_count: u32::default(),
+            p_plane_layouts: ::std::ptr::null(),
+        }
+    }
+}
+// Associates the struct with the VkStructureType tag that Default writes into s_type.
+unsafe impl TaggedStructure for ImageDrmFormatModifierExplicitCreateInfoEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT;
+}
+impl ImageDrmFormatModifierExplicitCreateInfoEXT {
+    // Start a builder over a default-initialized struct.
+    pub fn builder<'a>() -> ImageDrmFormatModifierExplicitCreateInfoEXTBuilder<'a> {
+        ImageDrmFormatModifierExplicitCreateInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper; 'a ties the borrowed plane-layout slice (stored as a raw
+// pointer) to the builder.
+#[repr(transparent)]
+pub struct ImageDrmFormatModifierExplicitCreateInfoEXTBuilder<'a> {
+    inner: ImageDrmFormatModifierExplicitCreateInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid p_next extension of ImageCreateInfo.
+unsafe impl ExtendsImageCreateInfo for ImageDrmFormatModifierExplicitCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsImageCreateInfo for ImageDrmFormatModifierExplicitCreateInfoEXT {}
+// Deref/DerefMut let a builder be used anywhere the raw struct is expected.
+impl<'a> ::std::ops::Deref for ImageDrmFormatModifierExplicitCreateInfoEXTBuilder<'a> {
+    type Target = ImageDrmFormatModifierExplicitCreateInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for ImageDrmFormatModifierExplicitCreateInfoEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> ImageDrmFormatModifierExplicitCreateInfoEXTBuilder<'a> {
+    #[inline]
+    pub fn drm_format_modifier(mut self, drm_format_modifier: u64) -> Self {
+        self.inner.drm_format_modifier = drm_format_modifier;
+        self
+    }
+    // Sets drm_format_modifier_plane_count and p_plane_layouts from one borrowed slice;
+    // 'a keeps the borrow alive as long as the builder.
+    #[inline]
+    pub fn plane_layouts(mut self, plane_layouts: &'a [SubresourceLayout]) -> Self {
+        self.inner.drm_format_modifier_plane_count = plane_layouts.len() as _;
+        self.inner.p_plane_layouts = plane_layouts.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> ImageDrmFormatModifierExplicitCreateInfoEXT {
+        self.inner
+    }
+}
+// C-layout mirror of VkImageDrmFormatModifierPropertiesEXT: chain header plus a single
+// DRM format modifier value. Note: no Extends* impls — this struct is not a p_next
+// extension of any other struct in this file.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageDrmFormatModifierPropertiesEXT.html>"]
+pub struct ImageDrmFormatModifierPropertiesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub drm_format_modifier: u64,
+}
+// Chain-ready default: s_type pre-tagged, p_next null.
+impl ::std::default::Default for ImageDrmFormatModifierPropertiesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            drm_format_modifier: u64::default(),
+        }
+    }
+}
+// Associates the struct with the VkStructureType tag that Default writes into s_type.
+unsafe impl TaggedStructure for ImageDrmFormatModifierPropertiesEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT;
+}
+impl ImageDrmFormatModifierPropertiesEXT {
+    // Start a builder over a default-initialized struct.
+    pub fn builder<'a>() -> ImageDrmFormatModifierPropertiesEXTBuilder<'a> {
+        ImageDrmFormatModifierPropertiesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper: same layout as the inner struct; PhantomData carries the
+// builder lifetime (unused here — the struct stores no borrowed pointers).
+#[repr(transparent)]
+pub struct ImageDrmFormatModifierPropertiesEXTBuilder<'a> {
+    inner: ImageDrmFormatModifierPropertiesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Deref/DerefMut let a builder be used anywhere the raw struct is expected.
+impl<'a> ::std::ops::Deref for ImageDrmFormatModifierPropertiesEXTBuilder<'a> {
+    type Target = ImageDrmFormatModifierPropertiesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for ImageDrmFormatModifierPropertiesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> ImageDrmFormatModifierPropertiesEXTBuilder<'a> {
+    // Chainable by-value setter.
+    #[inline]
+    pub fn drm_format_modifier(mut self, drm_format_modifier: u64) -> Self {
+        self.inner.drm_format_modifier = drm_format_modifier;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> ImageDrmFormatModifierPropertiesEXT {
+        self.inner
+    }
+}
+// C-layout mirror of VkImageStencilUsageCreateInfo (input struct: const p_next).
+// Carries a separate ImageUsageFlags value for the stencil aspect.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageStencilUsageCreateInfo.html>"]
+pub struct ImageStencilUsageCreateInfo {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub stencil_usage: ImageUsageFlags,
+}
+// Chain-ready default: s_type pre-tagged, p_next null.
+impl ::std::default::Default for ImageStencilUsageCreateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            stencil_usage: ImageUsageFlags::default(),
+        }
+    }
+}
+// Associates the struct with the VkStructureType tag that Default writes into s_type.
+unsafe impl TaggedStructure for ImageStencilUsageCreateInfo {
+    const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_STENCIL_USAGE_CREATE_INFO;
+}
+impl ImageStencilUsageCreateInfo {
+    // Start a builder over a default-initialized struct.
+    pub fn builder<'a>() -> ImageStencilUsageCreateInfoBuilder<'a> {
+        ImageStencilUsageCreateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper: same layout as the inner struct; PhantomData carries the
+// builder lifetime (unused here — the struct stores no borrowed pointers).
+#[repr(transparent)]
+pub struct ImageStencilUsageCreateInfoBuilder<'a> {
+    inner: ImageStencilUsageCreateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid p_next extension of both ImageCreateInfo and PhysicalDeviceImageFormatInfo2.
+unsafe impl ExtendsImageCreateInfo for ImageStencilUsageCreateInfoBuilder<'_> {}
+unsafe impl ExtendsImageCreateInfo for ImageStencilUsageCreateInfo {}
+unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for ImageStencilUsageCreateInfoBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for ImageStencilUsageCreateInfo {}
+// Deref/DerefMut let a builder be used anywhere the raw struct is expected.
+impl<'a> ::std::ops::Deref for ImageStencilUsageCreateInfoBuilder<'a> {
+    type Target = ImageStencilUsageCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for ImageStencilUsageCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> ImageStencilUsageCreateInfoBuilder<'a> {
+    // Chainable by-value setter.
+    #[inline]
+    pub fn stencil_usage(mut self, stencil_usage: ImageUsageFlags) -> Self {
+        self.inner.stencil_usage = stencil_usage;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> ImageStencilUsageCreateInfo {
+        self.inner
+    }
+}
+// Generated FFI mirror of VkDeviceMemoryOverallocationCreateInfoAMD (spec link in the
+// #[doc] attribute below); #[repr(C)] keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceMemoryOverallocationCreateInfoAMD.html>"]
+pub struct DeviceMemoryOverallocationCreateInfoAMD {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub overallocation_behavior: MemoryOverallocationBehaviorAMD,
+}
+impl ::std::default::Default for DeviceMemoryOverallocationCreateInfoAMD {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ // s_type is pre-filled with the struct's tag; p_next starts as a null chain.
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ overallocation_behavior: MemoryOverallocationBehaviorAMD::default(),
+ }
+ }
+}
+// SAFETY: this constant is the VkStructureType tag written into `s_type` by `default()`.
+unsafe impl TaggedStructure for DeviceMemoryOverallocationCreateInfoAMD {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD;
+}
+impl DeviceMemoryOverallocationCreateInfoAMD {
+ pub fn builder<'a>() -> DeviceMemoryOverallocationCreateInfoAMDBuilder<'a> {
+ DeviceMemoryOverallocationCreateInfoAMDBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; `'a` is reserved for setters that borrow data.
+#[repr(transparent)]
+pub struct DeviceMemoryOverallocationCreateInfoAMDBuilder<'a> {
+ inner: DeviceMemoryOverallocationCreateInfoAMD,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// SAFETY: marks this struct (and its builder) as a legal `p_next` extension of
+// VkDeviceCreateInfo, per the spec page linked above.
+unsafe impl ExtendsDeviceCreateInfo for DeviceMemoryOverallocationCreateInfoAMDBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for DeviceMemoryOverallocationCreateInfoAMD {}
+impl<'a> ::std::ops::Deref for DeviceMemoryOverallocationCreateInfoAMDBuilder<'a> {
+ type Target = DeviceMemoryOverallocationCreateInfoAMD;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceMemoryOverallocationCreateInfoAMDBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceMemoryOverallocationCreateInfoAMDBuilder<'a> {
+ #[inline]
+ pub fn overallocation_behavior(
+ mut self,
+ overallocation_behavior: MemoryOverallocationBehaviorAMD,
+ ) -> Self {
+ self.inner.overallocation_behavior = overallocation_behavior;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceMemoryOverallocationCreateInfoAMD {
+ self.inner
+ }
+}
+// Generated FFI mirror of VkPhysicalDeviceFragmentDensityMapFeaturesEXT (spec link in the
+// #[doc] attribute below); #[repr(C)] keeps the field layout identical to the C struct.
+// Feature flags are Bool32 (the Vulkan 32-bit boolean), not Rust `bool`.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFragmentDensityMapFeaturesEXT.html>"]
+pub struct PhysicalDeviceFragmentDensityMapFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub fragment_density_map: Bool32,
+ pub fragment_density_map_dynamic: Bool32,
+ pub fragment_density_map_non_subsampled_images: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceFragmentDensityMapFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ // s_type is pre-filled with the struct's tag; p_next starts as a null chain.
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ fragment_density_map: Bool32::default(),
+ fragment_density_map_dynamic: Bool32::default(),
+ fragment_density_map_non_subsampled_images: Bool32::default(),
+ }
+ }
+}
+// SAFETY: this constant is the VkStructureType tag written into `s_type` by `default()`.
+unsafe impl TaggedStructure for PhysicalDeviceFragmentDensityMapFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT;
+}
+impl PhysicalDeviceFragmentDensityMapFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'a> {
+ PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; `'a` is reserved for setters that borrow data.
+#[repr(transparent)]
+pub struct PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceFragmentDensityMapFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// SAFETY: marks this struct (and its builder) as a legal `p_next` extension of
+// VkPhysicalDeviceFeatures2 and VkDeviceCreateInfo, per the spec page linked above.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFragmentDensityMapFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentDensityMapFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceFragmentDensityMapFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceFragmentDensityMapFeaturesEXTBuilder<'a> {
+ // Setters take Rust `bool` and convert to Bool32 via `.into()`.
+ #[inline]
+ pub fn fragment_density_map(mut self, fragment_density_map: bool) -> Self {
+ self.inner.fragment_density_map = fragment_density_map.into();
+ self
+ }
+ #[inline]
+ pub fn fragment_density_map_dynamic(mut self, fragment_density_map_dynamic: bool) -> Self {
+ self.inner.fragment_density_map_dynamic = fragment_density_map_dynamic.into();
+ self
+ }
+ #[inline]
+ pub fn fragment_density_map_non_subsampled_images(
+ mut self,
+ fragment_density_map_non_subsampled_images: bool,
+ ) -> Self {
+ self.inner.fragment_density_map_non_subsampled_images =
+ fragment_density_map_non_subsampled_images.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceFragmentDensityMapFeaturesEXT {
+ self.inner
+ }
+}
+// Generated FFI mirror of VkPhysicalDeviceFragmentDensityMap2FeaturesEXT (spec link in the
+// #[doc] attribute below); #[repr(C)] keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFragmentDensityMap2FeaturesEXT.html>"]
+pub struct PhysicalDeviceFragmentDensityMap2FeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub fragment_density_map_deferred: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceFragmentDensityMap2FeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ // s_type is pre-filled with the struct's tag; p_next starts as a null chain.
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ fragment_density_map_deferred: Bool32::default(),
+ }
+ }
+}
+// SAFETY: this constant is the VkStructureType tag written into `s_type` by `default()`.
+unsafe impl TaggedStructure for PhysicalDeviceFragmentDensityMap2FeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT;
+}
+impl PhysicalDeviceFragmentDensityMap2FeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceFragmentDensityMap2FeaturesEXTBuilder<'a> {
+ PhysicalDeviceFragmentDensityMap2FeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; `'a` is reserved for setters that borrow data.
+#[repr(transparent)]
+pub struct PhysicalDeviceFragmentDensityMap2FeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceFragmentDensityMap2FeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// SAFETY: marks this struct (and its builder) as a legal `p_next` extension of
+// VkPhysicalDeviceFeatures2 and VkDeviceCreateInfo, per the spec page linked above.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceFragmentDensityMap2FeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFragmentDensityMap2FeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentDensityMap2FeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentDensityMap2FeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentDensityMap2FeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceFragmentDensityMap2FeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentDensityMap2FeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceFragmentDensityMap2FeaturesEXTBuilder<'a> {
+ // Setter takes Rust `bool` and converts to Bool32 via `.into()`.
+ #[inline]
+ pub fn fragment_density_map_deferred(mut self, fragment_density_map_deferred: bool) -> Self {
+ self.inner.fragment_density_map_deferred = fragment_density_map_deferred.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceFragmentDensityMap2FeaturesEXT {
+ self.inner
+ }
+}
+// Generated FFI mirror of VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM (spec link in
+// the #[doc] attribute below); #[repr(C)] keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM.html>"]
+pub struct PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub fragment_density_map_offset: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ // s_type is pre-filled with the struct's tag; p_next starts as a null chain.
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ fragment_density_map_offset: Bool32::default(),
+ }
+ }
+}
+// SAFETY: this constant is the VkStructureType tag written into `s_type` by `default()`.
+unsafe impl TaggedStructure for PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM;
+}
+impl PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM {
+ pub fn builder<'a>() -> PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOMBuilder<'a> {
+ PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOMBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; `'a` is reserved for setters that borrow data.
+#[repr(transparent)]
+pub struct PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOMBuilder<'a> {
+ inner: PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// SAFETY: marks this struct (and its builder) as a legal `p_next` extension of
+// VkPhysicalDeviceFeatures2 and VkDeviceCreateInfo, per the spec page linked above.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOMBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM {}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOMBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOMBuilder<'a> {
+ type Target = PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOMBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOMBuilder<'a> {
+ // Setter takes Rust `bool` and converts to Bool32 via `.into()`.
+ #[inline]
+ pub fn fragment_density_map_offset(mut self, fragment_density_map_offset: bool) -> Self {
+ self.inner.fragment_density_map_offset = fragment_density_map_offset.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM {
+ self.inner
+ }
+}
+// Generated FFI mirror of VkPhysicalDeviceFragmentDensityMapPropertiesEXT (spec link in the
+// #[doc] attribute below); #[repr(C)] keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFragmentDensityMapPropertiesEXT.html>"]
+pub struct PhysicalDeviceFragmentDensityMapPropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub min_fragment_density_texel_size: Extent2D,
+ pub max_fragment_density_texel_size: Extent2D,
+ pub fragment_density_invocations: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceFragmentDensityMapPropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ // s_type is pre-filled with the struct's tag; p_next starts as a null chain.
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ min_fragment_density_texel_size: Extent2D::default(),
+ max_fragment_density_texel_size: Extent2D::default(),
+ fragment_density_invocations: Bool32::default(),
+ }
+ }
+}
+// SAFETY: this constant is the VkStructureType tag written into `s_type` by `default()`.
+unsafe impl TaggedStructure for PhysicalDeviceFragmentDensityMapPropertiesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT;
+}
+impl PhysicalDeviceFragmentDensityMapPropertiesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder<'a> {
+ PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; `'a` is reserved for setters that borrow data.
+#[repr(transparent)]
+pub struct PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder<'a> {
+ inner: PhysicalDeviceFragmentDensityMapPropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// SAFETY: marks this struct (and its builder) as a legal `p_next` extension of
+// VkPhysicalDeviceProperties2, per the spec page linked above.
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceFragmentDensityMapPropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder<'a> {
+ type Target = PhysicalDeviceFragmentDensityMapPropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceFragmentDensityMapPropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn min_fragment_density_texel_size(
+ mut self,
+ min_fragment_density_texel_size: Extent2D,
+ ) -> Self {
+ self.inner.min_fragment_density_texel_size = min_fragment_density_texel_size;
+ self
+ }
+ #[inline]
+ pub fn max_fragment_density_texel_size(
+ mut self,
+ max_fragment_density_texel_size: Extent2D,
+ ) -> Self {
+ self.inner.max_fragment_density_texel_size = max_fragment_density_texel_size;
+ self
+ }
+ #[inline]
+ pub fn fragment_density_invocations(mut self, fragment_density_invocations: bool) -> Self {
+ self.inner.fragment_density_invocations = fragment_density_invocations.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceFragmentDensityMapPropertiesEXT {
+ self.inner
+ }
+}
+// Generated FFI mirror of VkPhysicalDeviceFragmentDensityMap2PropertiesEXT (spec link in the
+// #[doc] attribute below); #[repr(C)] keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFragmentDensityMap2PropertiesEXT.html>"]
+pub struct PhysicalDeviceFragmentDensityMap2PropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub subsampled_loads: Bool32,
+ pub subsampled_coarse_reconstruction_early_access: Bool32,
+ pub max_subsampled_array_layers: u32,
+ pub max_descriptor_set_subsampled_samplers: u32,
+}
+impl ::std::default::Default for PhysicalDeviceFragmentDensityMap2PropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ // s_type is pre-filled with the struct's tag; p_next starts as a null chain.
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ subsampled_loads: Bool32::default(),
+ subsampled_coarse_reconstruction_early_access: Bool32::default(),
+ max_subsampled_array_layers: u32::default(),
+ max_descriptor_set_subsampled_samplers: u32::default(),
+ }
+ }
+}
+// SAFETY: this constant is the VkStructureType tag written into `s_type` by `default()`.
+unsafe impl TaggedStructure for PhysicalDeviceFragmentDensityMap2PropertiesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT;
+}
+impl PhysicalDeviceFragmentDensityMap2PropertiesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceFragmentDensityMap2PropertiesEXTBuilder<'a> {
+ PhysicalDeviceFragmentDensityMap2PropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; `'a` is reserved for setters that borrow data.
+#[repr(transparent)]
+pub struct PhysicalDeviceFragmentDensityMap2PropertiesEXTBuilder<'a> {
+ inner: PhysicalDeviceFragmentDensityMap2PropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// SAFETY: marks this struct (and its builder) as a legal `p_next` extension of
+// VkPhysicalDeviceProperties2, per the spec page linked above.
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceFragmentDensityMap2PropertiesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceFragmentDensityMap2PropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentDensityMap2PropertiesEXTBuilder<'a> {
+ type Target = PhysicalDeviceFragmentDensityMap2PropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentDensityMap2PropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceFragmentDensityMap2PropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn subsampled_loads(mut self, subsampled_loads: bool) -> Self {
+ self.inner.subsampled_loads = subsampled_loads.into();
+ self
+ }
+ #[inline]
+ pub fn subsampled_coarse_reconstruction_early_access(
+ mut self,
+ subsampled_coarse_reconstruction_early_access: bool,
+ ) -> Self {
+ self.inner.subsampled_coarse_reconstruction_early_access =
+ subsampled_coarse_reconstruction_early_access.into();
+ self
+ }
+ #[inline]
+ pub fn max_subsampled_array_layers(mut self, max_subsampled_array_layers: u32) -> Self {
+ self.inner.max_subsampled_array_layers = max_subsampled_array_layers;
+ self
+ }
+ #[inline]
+ pub fn max_descriptor_set_subsampled_samplers(
+ mut self,
+ max_descriptor_set_subsampled_samplers: u32,
+ ) -> Self {
+ self.inner.max_descriptor_set_subsampled_samplers = max_descriptor_set_subsampled_samplers;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceFragmentDensityMap2PropertiesEXT {
+ self.inner
+ }
+}
+// Generated FFI mirror of VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM (spec link in
+// the #[doc] attribute below); #[repr(C)] keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM.html>"]
+pub struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub fragment_density_offset_granularity: Extent2D,
+}
+impl ::std::default::Default for PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ // s_type is pre-filled with the struct's tag; p_next starts as a null chain.
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ fragment_density_offset_granularity: Extent2D::default(),
+ }
+ }
+}
+// SAFETY: this constant is the VkStructureType tag written into `s_type` by `default()`.
+unsafe impl TaggedStructure for PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM;
+}
+impl PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM {
+ pub fn builder<'a>() -> PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOMBuilder<'a> {
+ PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOMBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; `'a` is reserved for setters that borrow data.
+#[repr(transparent)]
+pub struct PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOMBuilder<'a> {
+ inner: PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// SAFETY: marks this struct (and its builder) as a legal `p_next` extension of
+// VkPhysicalDeviceProperties2, per the spec page linked above.
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOMBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM
+{
+}
+impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOMBuilder<'a> {
+ type Target = PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOMBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOMBuilder<'a> {
+ #[inline]
+ pub fn fragment_density_offset_granularity(
+ mut self,
+ fragment_density_offset_granularity: Extent2D,
+ ) -> Self {
+ self.inner.fragment_density_offset_granularity = fragment_density_offset_granularity;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM {
+ self.inner
+ }
+}
+// Generated FFI mirror of VkRenderPassFragmentDensityMapCreateInfoEXT (spec link in the
+// #[doc] attribute below); #[repr(C)] keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPassFragmentDensityMapCreateInfoEXT.html>"]
+pub struct RenderPassFragmentDensityMapCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub fragment_density_map_attachment: AttachmentReference,
+}
+impl ::std::default::Default for RenderPassFragmentDensityMapCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ // s_type is pre-filled with the struct's tag; p_next starts as a null chain.
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ fragment_density_map_attachment: AttachmentReference::default(),
+ }
+ }
+}
+// SAFETY: this constant is the VkStructureType tag written into `s_type` by `default()`.
+unsafe impl TaggedStructure for RenderPassFragmentDensityMapCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT;
+}
+impl RenderPassFragmentDensityMapCreateInfoEXT {
+ pub fn builder<'a>() -> RenderPassFragmentDensityMapCreateInfoEXTBuilder<'a> {
+ RenderPassFragmentDensityMapCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; `'a` is reserved for setters that borrow data.
+#[repr(transparent)]
+pub struct RenderPassFragmentDensityMapCreateInfoEXTBuilder<'a> {
+ inner: RenderPassFragmentDensityMapCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// SAFETY: marks this struct (and its builder) as a legal `p_next` extension of both
+// VkRenderPassCreateInfo and VkRenderPassCreateInfo2, per the spec page linked above.
+unsafe impl ExtendsRenderPassCreateInfo for RenderPassFragmentDensityMapCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsRenderPassCreateInfo for RenderPassFragmentDensityMapCreateInfoEXT {}
+unsafe impl ExtendsRenderPassCreateInfo2 for RenderPassFragmentDensityMapCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsRenderPassCreateInfo2 for RenderPassFragmentDensityMapCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for RenderPassFragmentDensityMapCreateInfoEXTBuilder<'a> {
+ type Target = RenderPassFragmentDensityMapCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for RenderPassFragmentDensityMapCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> RenderPassFragmentDensityMapCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn fragment_density_map_attachment(
+ mut self,
+ fragment_density_map_attachment: AttachmentReference,
+ ) -> Self {
+ self.inner.fragment_density_map_attachment = fragment_density_map_attachment;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> RenderPassFragmentDensityMapCreateInfoEXT {
+ self.inner
+ }
+}
+// Generated FFI mirror of VkSubpassFragmentDensityMapOffsetEndInfoQCOM (spec link in the
+// #[doc] attribute below); #[repr(C)] keeps the field layout identical to the C struct.
+// Holds a raw count + pointer pair describing an array of Offset2D values.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubpassFragmentDensityMapOffsetEndInfoQCOM.html>"]
+pub struct SubpassFragmentDensityMapOffsetEndInfoQCOM {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub fragment_density_offset_count: u32,
+ pub p_fragment_density_offsets: *const Offset2D,
+}
+impl ::std::default::Default for SubpassFragmentDensityMapOffsetEndInfoQCOM {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ // s_type is pre-filled with the struct's tag; pointers start null, count zero.
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ fragment_density_offset_count: u32::default(),
+ p_fragment_density_offsets: ::std::ptr::null(),
+ }
+ }
+}
+// SAFETY: this constant is the VkStructureType tag written into `s_type` by `default()`.
+unsafe impl TaggedStructure for SubpassFragmentDensityMapOffsetEndInfoQCOM {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM;
+}
+impl SubpassFragmentDensityMapOffsetEndInfoQCOM {
+ pub fn builder<'a>() -> SubpassFragmentDensityMapOffsetEndInfoQCOMBuilder<'a> {
+ SubpassFragmentDensityMapOffsetEndInfoQCOMBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; here `'a` ties the slice borrowed by
+// `fragment_density_offsets` to the builder so the raw pointer cannot dangle.
+#[repr(transparent)]
+pub struct SubpassFragmentDensityMapOffsetEndInfoQCOMBuilder<'a> {
+ inner: SubpassFragmentDensityMapOffsetEndInfoQCOM,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// SAFETY: marks this struct (and its builder) as a legal `p_next` extension of
+// VkSubpassEndInfo, per the spec page linked above.
+unsafe impl ExtendsSubpassEndInfo for SubpassFragmentDensityMapOffsetEndInfoQCOMBuilder<'_> {}
+unsafe impl ExtendsSubpassEndInfo for SubpassFragmentDensityMapOffsetEndInfoQCOM {}
+impl<'a> ::std::ops::Deref for SubpassFragmentDensityMapOffsetEndInfoQCOMBuilder<'a> {
+ type Target = SubpassFragmentDensityMapOffsetEndInfoQCOM;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SubpassFragmentDensityMapOffsetEndInfoQCOMBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SubpassFragmentDensityMapOffsetEndInfoQCOMBuilder<'a> {
+ // Sets both the count and the pointer from one borrowed slice, keeping them in sync.
+ #[inline]
+ pub fn fragment_density_offsets(mut self, fragment_density_offsets: &'a [Offset2D]) -> Self {
+ self.inner.fragment_density_offset_count = fragment_density_offsets.len() as _;
+ self.inner.p_fragment_density_offsets = fragment_density_offsets.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SubpassFragmentDensityMapOffsetEndInfoQCOM {
+ self.inner
+ }
+}
+// Generated FFI mirror of VkPhysicalDeviceScalarBlockLayoutFeatures (spec link in the
+// #[doc] attribute below); #[repr(C)] keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceScalarBlockLayoutFeatures.html>"]
+pub struct PhysicalDeviceScalarBlockLayoutFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub scalar_block_layout: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceScalarBlockLayoutFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ // s_type is pre-filled with the struct's tag; p_next starts as a null chain.
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ scalar_block_layout: Bool32::default(),
+ }
+ }
+}
+// SAFETY: this constant is the VkStructureType tag written into `s_type` by `default()`.
+unsafe impl TaggedStructure for PhysicalDeviceScalarBlockLayoutFeatures {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES;
+}
+impl PhysicalDeviceScalarBlockLayoutFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceScalarBlockLayoutFeaturesBuilder<'a> {
+ PhysicalDeviceScalarBlockLayoutFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; `'a` is reserved for setters that borrow data.
+#[repr(transparent)]
+pub struct PhysicalDeviceScalarBlockLayoutFeaturesBuilder<'a> {
+ inner: PhysicalDeviceScalarBlockLayoutFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// SAFETY: marks this struct (and its builder) as a legal `p_next` extension of
+// VkPhysicalDeviceFeatures2 and VkDeviceCreateInfo, per the spec page linked above.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceScalarBlockLayoutFeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceScalarBlockLayoutFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceScalarBlockLayoutFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceScalarBlockLayoutFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceScalarBlockLayoutFeaturesBuilder<'a> {
+ type Target = PhysicalDeviceScalarBlockLayoutFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceScalarBlockLayoutFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceScalarBlockLayoutFeaturesBuilder<'a> {
+ // Setter takes Rust `bool` and converts to Bool32 via `.into()`.
+ #[inline]
+ pub fn scalar_block_layout(mut self, scalar_block_layout: bool) -> Self {
+ self.inner.scalar_block_layout = scalar_block_layout.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceScalarBlockLayoutFeatures {
+ self.inner
+ }
+}
+// Generated FFI mirror of VkSurfaceProtectedCapabilitiesKHR (spec link in the
+// #[doc] attribute below); #[repr(C)] keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfaceProtectedCapabilitiesKHR.html>"]
+pub struct SurfaceProtectedCapabilitiesKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub supports_protected: Bool32,
+}
+impl ::std::default::Default for SurfaceProtectedCapabilitiesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ // s_type is pre-filled with the struct's tag; p_next starts as a null chain.
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ supports_protected: Bool32::default(),
+ }
+ }
+}
+// SAFETY: this constant is the VkStructureType tag written into `s_type` by `default()`.
+unsafe impl TaggedStructure for SurfaceProtectedCapabilitiesKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_PROTECTED_CAPABILITIES_KHR;
+}
+impl SurfaceProtectedCapabilitiesKHR {
+ pub fn builder<'a>() -> SurfaceProtectedCapabilitiesKHRBuilder<'a> {
+ SurfaceProtectedCapabilitiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; `'a` is reserved for setters that borrow data.
+#[repr(transparent)]
+pub struct SurfaceProtectedCapabilitiesKHRBuilder<'a> {
+ inner: SurfaceProtectedCapabilitiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// SAFETY: marks this struct (and its builder) as a legal `p_next` extension of
+// VkSurfaceCapabilities2KHR, per the spec page linked above.
+unsafe impl ExtendsSurfaceCapabilities2KHR for SurfaceProtectedCapabilitiesKHRBuilder<'_> {}
+unsafe impl ExtendsSurfaceCapabilities2KHR for SurfaceProtectedCapabilitiesKHR {}
+impl<'a> ::std::ops::Deref for SurfaceProtectedCapabilitiesKHRBuilder<'a> {
+ type Target = SurfaceProtectedCapabilitiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SurfaceProtectedCapabilitiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SurfaceProtectedCapabilitiesKHRBuilder<'a> {
+ // Setter takes Rust `bool` and converts to Bool32 via `.into()`.
+ #[inline]
+ pub fn supports_protected(mut self, supports_protected: bool) -> Self {
+ self.inner.supports_protected = supports_protected.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SurfaceProtectedCapabilitiesKHR {
+ self.inner
+ }
+}
+// Generated FFI mirror of VkPhysicalDeviceUniformBufferStandardLayoutFeatures (spec link in
+// the #[doc] attribute below); #[repr(C)] keeps the field layout identical to the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceUniformBufferStandardLayoutFeatures.html>"]
+pub struct PhysicalDeviceUniformBufferStandardLayoutFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub uniform_buffer_standard_layout: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceUniformBufferStandardLayoutFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ // s_type is pre-filled with the struct's tag; p_next starts as a null chain.
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ uniform_buffer_standard_layout: Bool32::default(),
+ }
+ }
+}
+// SAFETY: this constant is the VkStructureType tag written into `s_type` by `default()`.
+unsafe impl TaggedStructure for PhysicalDeviceUniformBufferStandardLayoutFeatures {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES;
+}
+impl PhysicalDeviceUniformBufferStandardLayoutFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceUniformBufferStandardLayoutFeaturesBuilder<'a> {
+ PhysicalDeviceUniformBufferStandardLayoutFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent wrapper over the struct; `'a` is reserved for setters that borrow data.
+#[repr(transparent)]
+pub struct PhysicalDeviceUniformBufferStandardLayoutFeaturesBuilder<'a> {
+ inner: PhysicalDeviceUniformBufferStandardLayoutFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// SAFETY: marks this struct (and its builder) as a legal `p_next` extension of
+// VkPhysicalDeviceFeatures2 and VkDeviceCreateInfo, per the spec page linked above.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceUniformBufferStandardLayoutFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceUniformBufferStandardLayoutFeatures {}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceUniformBufferStandardLayoutFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceUniformBufferStandardLayoutFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceUniformBufferStandardLayoutFeaturesBuilder<'a> {
+ type Target = PhysicalDeviceUniformBufferStandardLayoutFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceUniformBufferStandardLayoutFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceUniformBufferStandardLayoutFeaturesBuilder<'a> {
+ // Setter takes Rust `bool` and converts to Bool32 via `.into()`.
+ #[inline]
+ pub fn uniform_buffer_standard_layout(mut self, uniform_buffer_standard_layout: bool) -> Self {
+ self.inner.uniform_buffer_standard_layout = uniform_buffer_standard_layout.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceUniformBufferStandardLayoutFeatures {
+ self.inner
+ }
+}
+// Machine-generated #[repr(C)] binding for VkPhysicalDeviceDepthClipEnableFeaturesEXT
+// (spec link below). `p_next` is the Vulkan structure-chain pointer; `Bool32` is a
+// 32-bit VK boolean, set from a Rust `bool` via the builder's `.into()` conversion.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDepthClipEnableFeaturesEXT.html>"]
+pub struct PhysicalDeviceDepthClipEnableFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub depth_clip_enable: Bool32,
+}
+// Default pre-fills `s_type` with this struct's tag so the value is chain-ready.
+impl ::std::default::Default for PhysicalDeviceDepthClipEnableFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ depth_clip_enable: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceDepthClipEnableFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT;
+}
+impl PhysicalDeviceDepthClipEnableFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceDepthClipEnableFeaturesEXTBuilder<'a> {
+ PhysicalDeviceDepthClipEnableFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): builder is layout-identical to the wrapped struct (Deref below).
+#[repr(transparent)]
+pub struct PhysicalDeviceDepthClipEnableFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceDepthClipEnableFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chainable via p_next onto PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDepthClipEnableFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDepthClipEnableFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDepthClipEnableFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDepthClipEnableFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceDepthClipEnableFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceDepthClipEnableFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceDepthClipEnableFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceDepthClipEnableFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn depth_clip_enable(mut self, depth_clip_enable: bool) -> Self {
+ self.inner.depth_clip_enable = depth_clip_enable.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceDepthClipEnableFeaturesEXT {
+ self.inner
+ }
+}
+// Machine-generated #[repr(C)] binding for VkPipelineRasterizationDepthClipStateCreateInfoEXT
+// (spec link below). Read-only input struct: `p_next` is `*const` here, unlike the
+// `*mut` used by the feature/property query structs.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRasterizationDepthClipStateCreateInfoEXT.html>"]
+pub struct PipelineRasterizationDepthClipStateCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: PipelineRasterizationDepthClipStateCreateFlagsEXT,
+ pub depth_clip_enable: Bool32,
+}
+// Default pre-fills `s_type` with this struct's tag so the value is chain-ready.
+impl ::std::default::Default for PipelineRasterizationDepthClipStateCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: PipelineRasterizationDepthClipStateCreateFlagsEXT::default(),
+ depth_clip_enable: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineRasterizationDepthClipStateCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT;
+}
+impl PipelineRasterizationDepthClipStateCreateInfoEXT {
+ pub fn builder<'a>() -> PipelineRasterizationDepthClipStateCreateInfoEXTBuilder<'a> {
+ PipelineRasterizationDepthClipStateCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): builder is layout-identical to the wrapped struct (Deref below).
+#[repr(transparent)]
+pub struct PipelineRasterizationDepthClipStateCreateInfoEXTBuilder<'a> {
+ inner: PipelineRasterizationDepthClipStateCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chainable via p_next onto PipelineRasterizationStateCreateInfo.
+unsafe impl ExtendsPipelineRasterizationStateCreateInfo
+ for PipelineRasterizationDepthClipStateCreateInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPipelineRasterizationStateCreateInfo
+ for PipelineRasterizationDepthClipStateCreateInfoEXT
+{
+}
+impl<'a> ::std::ops::Deref for PipelineRasterizationDepthClipStateCreateInfoEXTBuilder<'a> {
+ type Target = PipelineRasterizationDepthClipStateCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineRasterizationDepthClipStateCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineRasterizationDepthClipStateCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: PipelineRasterizationDepthClipStateCreateFlagsEXT) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn depth_clip_enable(mut self, depth_clip_enable: bool) -> Self {
+ self.inner.depth_clip_enable = depth_clip_enable.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineRasterizationDepthClipStateCreateInfoEXT {
+ self.inner
+ }
+}
+// Machine-generated #[repr(C)] binding for VkPhysicalDeviceMemoryBudgetPropertiesEXT
+// (spec link below). Output/query struct (`p_next` is `*mut`); the fixed-size arrays
+// are indexed per memory heap, up to MAX_MEMORY_HEAPS entries.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMemoryBudgetPropertiesEXT.html>"]
+pub struct PhysicalDeviceMemoryBudgetPropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub heap_budget: [DeviceSize; MAX_MEMORY_HEAPS],
+ pub heap_usage: [DeviceSize; MAX_MEMORY_HEAPS],
+}
+impl ::std::default::Default for PhysicalDeviceMemoryBudgetPropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ // SAFETY: an all-zero bit pattern is a valid [DeviceSize; MAX_MEMORY_HEAPS].
+ heap_budget: unsafe { ::std::mem::zeroed() },
+ heap_usage: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceMemoryBudgetPropertiesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT;
+}
+impl PhysicalDeviceMemoryBudgetPropertiesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceMemoryBudgetPropertiesEXTBuilder<'a> {
+ PhysicalDeviceMemoryBudgetPropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): builder is layout-identical to the wrapped struct (Deref below).
+#[repr(transparent)]
+pub struct PhysicalDeviceMemoryBudgetPropertiesEXTBuilder<'a> {
+ inner: PhysicalDeviceMemoryBudgetPropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chainable via p_next onto PhysicalDeviceMemoryProperties2.
+unsafe impl ExtendsPhysicalDeviceMemoryProperties2
+ for PhysicalDeviceMemoryBudgetPropertiesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceMemoryProperties2 for PhysicalDeviceMemoryBudgetPropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceMemoryBudgetPropertiesEXTBuilder<'a> {
+ type Target = PhysicalDeviceMemoryBudgetPropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceMemoryBudgetPropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceMemoryBudgetPropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn heap_budget(mut self, heap_budget: [DeviceSize; MAX_MEMORY_HEAPS]) -> Self {
+ self.inner.heap_budget = heap_budget;
+ self
+ }
+ #[inline]
+ pub fn heap_usage(mut self, heap_usage: [DeviceSize; MAX_MEMORY_HEAPS]) -> Self {
+ self.inner.heap_usage = heap_usage;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceMemoryBudgetPropertiesEXT {
+ self.inner
+ }
+}
+// Machine-generated #[repr(C)] binding for VkPhysicalDeviceMemoryPriorityFeaturesEXT
+// (spec link below). `p_next` is the Vulkan structure-chain pointer; `Bool32` is a
+// 32-bit VK boolean, set from a Rust `bool` via the builder's `.into()` conversion.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMemoryPriorityFeaturesEXT.html>"]
+pub struct PhysicalDeviceMemoryPriorityFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub memory_priority: Bool32,
+}
+// Default pre-fills `s_type` with this struct's tag so the value is chain-ready.
+impl ::std::default::Default for PhysicalDeviceMemoryPriorityFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ memory_priority: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceMemoryPriorityFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT;
+}
+impl PhysicalDeviceMemoryPriorityFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceMemoryPriorityFeaturesEXTBuilder<'a> {
+ PhysicalDeviceMemoryPriorityFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): builder is layout-identical to the wrapped struct (Deref below).
+#[repr(transparent)]
+pub struct PhysicalDeviceMemoryPriorityFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceMemoryPriorityFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chainable via p_next onto PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMemoryPriorityFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMemoryPriorityFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMemoryPriorityFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMemoryPriorityFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceMemoryPriorityFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceMemoryPriorityFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceMemoryPriorityFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceMemoryPriorityFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn memory_priority(mut self, memory_priority: bool) -> Self {
+ self.inner.memory_priority = memory_priority.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceMemoryPriorityFeaturesEXT {
+ self.inner
+ }
+}
+// Machine-generated #[repr(C)] binding for VkMemoryPriorityAllocateInfoEXT (spec link
+// below). Read-only input struct (`p_next` is `*const`).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryPriorityAllocateInfoEXT.html>"]
+pub struct MemoryPriorityAllocateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub priority: f32,
+}
+// Default pre-fills `s_type` with this struct's tag so the value is chain-ready.
+impl ::std::default::Default for MemoryPriorityAllocateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ priority: f32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MemoryPriorityAllocateInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_PRIORITY_ALLOCATE_INFO_EXT;
+}
+impl MemoryPriorityAllocateInfoEXT {
+ pub fn builder<'a>() -> MemoryPriorityAllocateInfoEXTBuilder<'a> {
+ MemoryPriorityAllocateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): builder is layout-identical to the wrapped struct (Deref below).
+#[repr(transparent)]
+pub struct MemoryPriorityAllocateInfoEXTBuilder<'a> {
+ inner: MemoryPriorityAllocateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chainable via p_next onto MemoryAllocateInfo.
+unsafe impl ExtendsMemoryAllocateInfo for MemoryPriorityAllocateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for MemoryPriorityAllocateInfoEXT {}
+impl<'a> ::std::ops::Deref for MemoryPriorityAllocateInfoEXTBuilder<'a> {
+ type Target = MemoryPriorityAllocateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MemoryPriorityAllocateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MemoryPriorityAllocateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn priority(mut self, priority: f32) -> Self {
+ self.inner.priority = priority;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MemoryPriorityAllocateInfoEXT {
+ self.inner
+ }
+}
+// Machine-generated #[repr(C)] binding for VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT
+// (spec link below). `p_next` is the Vulkan structure-chain pointer; `Bool32` is a
+// 32-bit VK boolean, set from a Rust `bool` via the builder's `.into()` conversion.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT.html>"]
+pub struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub pageable_device_local_memory: Bool32,
+}
+// Default pre-fills `s_type` with this struct's tag so the value is chain-ready.
+impl ::std::default::Default for PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ pageable_device_local_memory: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT;
+}
+impl PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDevicePageableDeviceLocalMemoryFeaturesEXTBuilder<'a> {
+ PhysicalDevicePageableDeviceLocalMemoryFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): builder is layout-identical to the wrapped struct (Deref below).
+#[repr(transparent)]
+pub struct PhysicalDevicePageableDeviceLocalMemoryFeaturesEXTBuilder<'a> {
+ inner: PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chainable via p_next onto PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDevicePageableDeviceLocalMemoryFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDevicePageableDeviceLocalMemoryFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePageableDeviceLocalMemoryFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePageableDeviceLocalMemoryFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDevicePageableDeviceLocalMemoryFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn pageable_device_local_memory(mut self, pageable_device_local_memory: bool) -> Self {
+ self.inner.pageable_device_local_memory = pageable_device_local_memory.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT {
+ self.inner
+ }
+}
+// Machine-generated #[repr(C)] binding for VkPhysicalDeviceBufferDeviceAddressFeatures
+// (the core/KHR variant; an EXT variant with identical fields but a different s_type
+// tag is generated separately). `Bool32` fields are 32-bit VK booleans.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceBufferDeviceAddressFeatures.html>"]
+pub struct PhysicalDeviceBufferDeviceAddressFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub buffer_device_address: Bool32,
+ pub buffer_device_address_capture_replay: Bool32,
+ pub buffer_device_address_multi_device: Bool32,
+}
+// Default pre-fills `s_type` with this struct's tag so the value is chain-ready.
+impl ::std::default::Default for PhysicalDeviceBufferDeviceAddressFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ buffer_device_address: Bool32::default(),
+ buffer_device_address_capture_replay: Bool32::default(),
+ buffer_device_address_multi_device: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceBufferDeviceAddressFeatures {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES;
+}
+impl PhysicalDeviceBufferDeviceAddressFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceBufferDeviceAddressFeaturesBuilder<'a> {
+ PhysicalDeviceBufferDeviceAddressFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): builder is layout-identical to the wrapped struct (Deref below).
+#[repr(transparent)]
+pub struct PhysicalDeviceBufferDeviceAddressFeaturesBuilder<'a> {
+ inner: PhysicalDeviceBufferDeviceAddressFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chainable via p_next onto PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceBufferDeviceAddressFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceBufferDeviceAddressFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBufferDeviceAddressFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBufferDeviceAddressFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceBufferDeviceAddressFeaturesBuilder<'a> {
+ type Target = PhysicalDeviceBufferDeviceAddressFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceBufferDeviceAddressFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceBufferDeviceAddressFeaturesBuilder<'a> {
+ #[inline]
+ pub fn buffer_device_address(mut self, buffer_device_address: bool) -> Self {
+ self.inner.buffer_device_address = buffer_device_address.into();
+ self
+ }
+ #[inline]
+ pub fn buffer_device_address_capture_replay(
+ mut self,
+ buffer_device_address_capture_replay: bool,
+ ) -> Self {
+ self.inner.buffer_device_address_capture_replay =
+ buffer_device_address_capture_replay.into();
+ self
+ }
+ #[inline]
+ pub fn buffer_device_address_multi_device(
+ mut self,
+ buffer_device_address_multi_device: bool,
+ ) -> Self {
+ self.inner.buffer_device_address_multi_device = buffer_device_address_multi_device.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceBufferDeviceAddressFeatures {
+ self.inner
+ }
+}
+// Machine-generated #[repr(C)] binding for VkPhysicalDeviceBufferDeviceAddressFeaturesEXT.
+// Field-for-field identical to the core PhysicalDeviceBufferDeviceAddressFeatures
+// above, but carries the distinct ..._EXT structure-type tag.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceBufferDeviceAddressFeaturesEXT.html>"]
+pub struct PhysicalDeviceBufferDeviceAddressFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub buffer_device_address: Bool32,
+ pub buffer_device_address_capture_replay: Bool32,
+ pub buffer_device_address_multi_device: Bool32,
+}
+// Default pre-fills `s_type` with this struct's tag so the value is chain-ready.
+impl ::std::default::Default for PhysicalDeviceBufferDeviceAddressFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ buffer_device_address: Bool32::default(),
+ buffer_device_address_capture_replay: Bool32::default(),
+ buffer_device_address_multi_device: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceBufferDeviceAddressFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT;
+}
+impl PhysicalDeviceBufferDeviceAddressFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceBufferDeviceAddressFeaturesEXTBuilder<'a> {
+ PhysicalDeviceBufferDeviceAddressFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): builder is layout-identical to the wrapped struct (Deref below).
+#[repr(transparent)]
+pub struct PhysicalDeviceBufferDeviceAddressFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceBufferDeviceAddressFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chainable via p_next onto PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceBufferDeviceAddressFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceBufferDeviceAddressFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBufferDeviceAddressFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBufferDeviceAddressFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceBufferDeviceAddressFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceBufferDeviceAddressFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceBufferDeviceAddressFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn buffer_device_address(mut self, buffer_device_address: bool) -> Self {
+ self.inner.buffer_device_address = buffer_device_address.into();
+ self
+ }
+ #[inline]
+ pub fn buffer_device_address_capture_replay(
+ mut self,
+ buffer_device_address_capture_replay: bool,
+ ) -> Self {
+ self.inner.buffer_device_address_capture_replay =
+ buffer_device_address_capture_replay.into();
+ self
+ }
+ #[inline]
+ pub fn buffer_device_address_multi_device(
+ mut self,
+ buffer_device_address_multi_device: bool,
+ ) -> Self {
+ self.inner.buffer_device_address_multi_device = buffer_device_address_multi_device.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceBufferDeviceAddressFeaturesEXT {
+ self.inner
+ }
+}
+// Machine-generated #[repr(C)] binding for VkBufferDeviceAddressInfo (spec link below).
+// Read-only input struct (`p_next` is `*const`); no Extends* impls are generated for
+// it, so it is a chain root rather than a chain extension.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferDeviceAddressInfo.html>"]
+pub struct BufferDeviceAddressInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub buffer: Buffer,
+}
+// Default pre-fills `s_type` with this struct's tag so the value is chain-ready.
+impl ::std::default::Default for BufferDeviceAddressInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ buffer: Buffer::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for BufferDeviceAddressInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_DEVICE_ADDRESS_INFO;
+}
+impl BufferDeviceAddressInfo {
+ pub fn builder<'a>() -> BufferDeviceAddressInfoBuilder<'a> {
+ BufferDeviceAddressInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): builder is layout-identical to the wrapped struct (Deref below).
+#[repr(transparent)]
+pub struct BufferDeviceAddressInfoBuilder<'a> {
+ inner: BufferDeviceAddressInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BufferDeviceAddressInfoBuilder<'a> {
+ type Target = BufferDeviceAddressInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BufferDeviceAddressInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BufferDeviceAddressInfoBuilder<'a> {
+ #[inline]
+ pub fn buffer(mut self, buffer: Buffer) -> Self {
+ self.inner.buffer = buffer;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BufferDeviceAddressInfo {
+ self.inner
+ }
+}
+// Machine-generated #[repr(C)] binding for VkBufferOpaqueCaptureAddressCreateInfo
+// (spec link below). Read-only input struct (`p_next` is `*const`).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferOpaqueCaptureAddressCreateInfo.html>"]
+pub struct BufferOpaqueCaptureAddressCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub opaque_capture_address: u64,
+}
+// Default pre-fills `s_type` with this struct's tag so the value is chain-ready.
+impl ::std::default::Default for BufferOpaqueCaptureAddressCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ opaque_capture_address: u64::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for BufferOpaqueCaptureAddressCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO;
+}
+impl BufferOpaqueCaptureAddressCreateInfo {
+ pub fn builder<'a>() -> BufferOpaqueCaptureAddressCreateInfoBuilder<'a> {
+ BufferOpaqueCaptureAddressCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): builder is layout-identical to the wrapped struct (Deref below).
+#[repr(transparent)]
+pub struct BufferOpaqueCaptureAddressCreateInfoBuilder<'a> {
+ inner: BufferOpaqueCaptureAddressCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chainable via p_next onto BufferCreateInfo.
+unsafe impl ExtendsBufferCreateInfo for BufferOpaqueCaptureAddressCreateInfoBuilder<'_> {}
+unsafe impl ExtendsBufferCreateInfo for BufferOpaqueCaptureAddressCreateInfo {}
+impl<'a> ::std::ops::Deref for BufferOpaqueCaptureAddressCreateInfoBuilder<'a> {
+ type Target = BufferOpaqueCaptureAddressCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BufferOpaqueCaptureAddressCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BufferOpaqueCaptureAddressCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn opaque_capture_address(mut self, opaque_capture_address: u64) -> Self {
+ self.inner.opaque_capture_address = opaque_capture_address;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BufferOpaqueCaptureAddressCreateInfo {
+ self.inner
+ }
+}
+// Machine-generated #[repr(C)] binding for VkBufferDeviceAddressCreateInfoEXT
+// (spec link below). Read-only input struct (`p_next` is `*const`).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferDeviceAddressCreateInfoEXT.html>"]
+pub struct BufferDeviceAddressCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub device_address: DeviceAddress,
+}
+// Default pre-fills `s_type` with this struct's tag so the value is chain-ready.
+impl ::std::default::Default for BufferDeviceAddressCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ device_address: DeviceAddress::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for BufferDeviceAddressCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT;
+}
+impl BufferDeviceAddressCreateInfoEXT {
+ pub fn builder<'a>() -> BufferDeviceAddressCreateInfoEXTBuilder<'a> {
+ BufferDeviceAddressCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): builder is layout-identical to the wrapped struct (Deref below).
+#[repr(transparent)]
+pub struct BufferDeviceAddressCreateInfoEXTBuilder<'a> {
+ inner: BufferDeviceAddressCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chainable via p_next onto BufferCreateInfo.
+unsafe impl ExtendsBufferCreateInfo for BufferDeviceAddressCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsBufferCreateInfo for BufferDeviceAddressCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for BufferDeviceAddressCreateInfoEXTBuilder<'a> {
+ type Target = BufferDeviceAddressCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BufferDeviceAddressCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BufferDeviceAddressCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn device_address(mut self, device_address: DeviceAddress) -> Self {
+ self.inner.device_address = device_address;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BufferDeviceAddressCreateInfoEXT {
+ self.inner
+ }
+}
+// Machine-generated #[repr(C)] binding for VkPhysicalDeviceImageViewImageFormatInfoEXT
+// (spec link below). `p_next` is the Vulkan structure-chain pointer.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceImageViewImageFormatInfoEXT.html>"]
+pub struct PhysicalDeviceImageViewImageFormatInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub image_view_type: ImageViewType,
+}
+// Default pre-fills `s_type` with this struct's tag so the value is chain-ready.
+impl ::std::default::Default for PhysicalDeviceImageViewImageFormatInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ image_view_type: ImageViewType::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceImageViewImageFormatInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT;
+}
+impl PhysicalDeviceImageViewImageFormatInfoEXT {
+ pub fn builder<'a>() -> PhysicalDeviceImageViewImageFormatInfoEXTBuilder<'a> {
+ PhysicalDeviceImageViewImageFormatInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): builder is layout-identical to the wrapped struct (Deref below).
+#[repr(transparent)]
+pub struct PhysicalDeviceImageViewImageFormatInfoEXTBuilder<'a> {
+ inner: PhysicalDeviceImageViewImageFormatInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: chainable via p_next onto PhysicalDeviceImageFormatInfo2.
+unsafe impl ExtendsPhysicalDeviceImageFormatInfo2
+ for PhysicalDeviceImageViewImageFormatInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for PhysicalDeviceImageViewImageFormatInfoEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceImageViewImageFormatInfoEXTBuilder<'a> {
+ type Target = PhysicalDeviceImageViewImageFormatInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceImageViewImageFormatInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceImageViewImageFormatInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn image_view_type(mut self, image_view_type: ImageViewType) -> Self {
+ self.inner.image_view_type = image_view_type;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceImageViewImageFormatInfoEXT {
+ self.inner
+ }
+}
+// FilterCubicImageViewImageFormatPropertiesEXT: C-layout mirror of the Vulkan struct;
+// spec link in the #[doc] attribute below. Generated binding — do not hand-edit logic.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFilterCubicImageViewImageFormatPropertiesEXT.html>"]
+pub struct FilterCubicImageViewImageFormatPropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub filter_cubic: Bool32,
+ pub filter_cubic_minmax: Bool32,
+}
+// Default pre-fills s_type with this struct's tag and nulls p_next.
+impl ::std::default::Default for FilterCubicImageViewImageFormatPropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ filter_cubic: Bool32::default(),
+ filter_cubic_minmax: Bool32::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into s_type.
+unsafe impl TaggedStructure for FilterCubicImageViewImageFormatPropertiesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT;
+}
+impl FilterCubicImageViewImageFormatPropertiesEXT {
+ pub fn builder<'a>() -> FilterCubicImageViewImageFormatPropertiesEXTBuilder<'a> {
+ FilterCubicImageViewImageFormatPropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; 'a tracks borrows handed to the setters.
+#[repr(transparent)]
+pub struct FilterCubicImageViewImageFormatPropertiesEXTBuilder<'a> {
+ inner: FilterCubicImageViewImageFormatPropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marks this struct (and its builder) as a valid p_next extension of
+// ImageFormatProperties2.
+unsafe impl ExtendsImageFormatProperties2
+ for FilterCubicImageViewImageFormatPropertiesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsImageFormatProperties2 for FilterCubicImageViewImageFormatPropertiesEXT {}
+impl<'a> ::std::ops::Deref for FilterCubicImageViewImageFormatPropertiesEXTBuilder<'a> {
+ type Target = FilterCubicImageViewImageFormatPropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for FilterCubicImageViewImageFormatPropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Fluent setters; bool arguments are converted to the FFI Bool32 via .into().
+impl<'a> FilterCubicImageViewImageFormatPropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn filter_cubic(mut self, filter_cubic: bool) -> Self {
+ self.inner.filter_cubic = filter_cubic.into();
+ self
+ }
+ #[inline]
+ pub fn filter_cubic_minmax(mut self, filter_cubic_minmax: bool) -> Self {
+ self.inner.filter_cubic_minmax = filter_cubic_minmax.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> FilterCubicImageViewImageFormatPropertiesEXT {
+ self.inner
+ }
+}
+// PhysicalDeviceImagelessFramebufferFeatures: C-layout mirror of the Vulkan struct;
+// spec link in the #[doc] attribute below. Generated binding — do not hand-edit logic.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceImagelessFramebufferFeatures.html>"]
+pub struct PhysicalDeviceImagelessFramebufferFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub imageless_framebuffer: Bool32,
+}
+// Default pre-fills s_type with this struct's tag and nulls p_next.
+impl ::std::default::Default for PhysicalDeviceImagelessFramebufferFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ imageless_framebuffer: Bool32::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into s_type.
+unsafe impl TaggedStructure for PhysicalDeviceImagelessFramebufferFeatures {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES;
+}
+impl PhysicalDeviceImagelessFramebufferFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceImagelessFramebufferFeaturesBuilder<'a> {
+ PhysicalDeviceImagelessFramebufferFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; 'a tracks borrows handed to the setters.
+#[repr(transparent)]
+pub struct PhysicalDeviceImagelessFramebufferFeaturesBuilder<'a> {
+ inner: PhysicalDeviceImagelessFramebufferFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid p_next extension of both PhysicalDeviceFeatures2 and DeviceCreateInfo
+// (builder and plain forms).
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceImagelessFramebufferFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImagelessFramebufferFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImagelessFramebufferFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImagelessFramebufferFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceImagelessFramebufferFeaturesBuilder<'a> {
+ type Target = PhysicalDeviceImagelessFramebufferFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceImagelessFramebufferFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Fluent setters; bool arguments are converted to the FFI Bool32 via .into().
+impl<'a> PhysicalDeviceImagelessFramebufferFeaturesBuilder<'a> {
+ #[inline]
+ pub fn imageless_framebuffer(mut self, imageless_framebuffer: bool) -> Self {
+ self.inner.imageless_framebuffer = imageless_framebuffer.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceImagelessFramebufferFeatures {
+ self.inner
+ }
+}
+// FramebufferAttachmentsCreateInfo: C-layout mirror of the Vulkan struct;
+// spec link in the #[doc] attribute below. Generated binding — do not hand-edit logic.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFramebufferAttachmentsCreateInfo.html>"]
+pub struct FramebufferAttachmentsCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub attachment_image_info_count: u32,
+ pub p_attachment_image_infos: *const FramebufferAttachmentImageInfo,
+}
+// Default pre-fills s_type with this struct's tag and nulls both pointers.
+impl ::std::default::Default for FramebufferAttachmentsCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ attachment_image_info_count: u32::default(),
+ p_attachment_image_infos: ::std::ptr::null(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into s_type.
+unsafe impl TaggedStructure for FramebufferAttachmentsCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::FRAMEBUFFER_ATTACHMENTS_CREATE_INFO;
+}
+impl FramebufferAttachmentsCreateInfo {
+ pub fn builder<'a>() -> FramebufferAttachmentsCreateInfoBuilder<'a> {
+ FramebufferAttachmentsCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; 'a tracks the slice borrow taken by the setter.
+#[repr(transparent)]
+pub struct FramebufferAttachmentsCreateInfoBuilder<'a> {
+ inner: FramebufferAttachmentsCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid p_next extension of FramebufferCreateInfo (builder and plain forms).
+unsafe impl ExtendsFramebufferCreateInfo for FramebufferAttachmentsCreateInfoBuilder<'_> {}
+unsafe impl ExtendsFramebufferCreateInfo for FramebufferAttachmentsCreateInfo {}
+impl<'a> ::std::ops::Deref for FramebufferAttachmentsCreateInfoBuilder<'a> {
+ type Target = FramebufferAttachmentsCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for FramebufferAttachmentsCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> FramebufferAttachmentsCreateInfoBuilder<'a> {
+ // Sets both the count and the raw pointer from one borrowed slice; the 'a
+ // lifetime keeps the backing storage alive while the builder is used.
+ #[inline]
+ pub fn attachment_image_infos(
+ mut self,
+ attachment_image_infos: &'a [FramebufferAttachmentImageInfo],
+ ) -> Self {
+ self.inner.attachment_image_info_count = attachment_image_infos.len() as _;
+ self.inner.p_attachment_image_infos = attachment_image_infos.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> FramebufferAttachmentsCreateInfo {
+ self.inner
+ }
+}
+// FramebufferAttachmentImageInfo: C-layout mirror of the Vulkan struct;
+// spec link in the #[doc] attribute below. Generated binding — do not hand-edit logic.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFramebufferAttachmentImageInfo.html>"]
+pub struct FramebufferAttachmentImageInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: ImageCreateFlags,
+ pub usage: ImageUsageFlags,
+ pub width: u32,
+ pub height: u32,
+ pub layer_count: u32,
+ pub view_format_count: u32,
+ pub p_view_formats: *const Format,
+}
+// Default pre-fills s_type with this struct's tag and nulls both pointers.
+impl ::std::default::Default for FramebufferAttachmentImageInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: ImageCreateFlags::default(),
+ usage: ImageUsageFlags::default(),
+ width: u32::default(),
+ height: u32::default(),
+ layer_count: u32::default(),
+ view_format_count: u32::default(),
+ p_view_formats: ::std::ptr::null(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into s_type.
+unsafe impl TaggedStructure for FramebufferAttachmentImageInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::FRAMEBUFFER_ATTACHMENT_IMAGE_INFO;
+}
+impl FramebufferAttachmentImageInfo {
+ pub fn builder<'a>() -> FramebufferAttachmentImageInfoBuilder<'a> {
+ FramebufferAttachmentImageInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; 'a tracks the slice borrow taken by view_formats().
+#[repr(transparent)]
+pub struct FramebufferAttachmentImageInfoBuilder<'a> {
+ inner: FramebufferAttachmentImageInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for FramebufferAttachmentImageInfoBuilder<'a> {
+ type Target = FramebufferAttachmentImageInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for FramebufferAttachmentImageInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Fluent setters for the non-header fields.
+impl<'a> FramebufferAttachmentImageInfoBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: ImageCreateFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn usage(mut self, usage: ImageUsageFlags) -> Self {
+ self.inner.usage = usage;
+ self
+ }
+ #[inline]
+ pub fn width(mut self, width: u32) -> Self {
+ self.inner.width = width;
+ self
+ }
+ #[inline]
+ pub fn height(mut self, height: u32) -> Self {
+ self.inner.height = height;
+ self
+ }
+ #[inline]
+ pub fn layer_count(mut self, layer_count: u32) -> Self {
+ self.inner.layer_count = layer_count;
+ self
+ }
+ // Sets both the count and the raw pointer from one borrowed slice; the 'a
+ // lifetime keeps the backing storage alive while the builder is used.
+ #[inline]
+ pub fn view_formats(mut self, view_formats: &'a [Format]) -> Self {
+ self.inner.view_format_count = view_formats.len() as _;
+ self.inner.p_view_formats = view_formats.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> FramebufferAttachmentImageInfo {
+ self.inner
+ }
+}
+// RenderPassAttachmentBeginInfo: C-layout mirror of the Vulkan struct;
+// spec link in the #[doc] attribute below. Generated binding — do not hand-edit logic.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPassAttachmentBeginInfo.html>"]
+pub struct RenderPassAttachmentBeginInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub attachment_count: u32,
+ pub p_attachments: *const ImageView,
+}
+// Default pre-fills s_type with this struct's tag and nulls both pointers.
+impl ::std::default::Default for RenderPassAttachmentBeginInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ attachment_count: u32::default(),
+ p_attachments: ::std::ptr::null(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into s_type.
+unsafe impl TaggedStructure for RenderPassAttachmentBeginInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_ATTACHMENT_BEGIN_INFO;
+}
+impl RenderPassAttachmentBeginInfo {
+ pub fn builder<'a>() -> RenderPassAttachmentBeginInfoBuilder<'a> {
+ RenderPassAttachmentBeginInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; 'a tracks the slice borrow taken by attachments().
+#[repr(transparent)]
+pub struct RenderPassAttachmentBeginInfoBuilder<'a> {
+ inner: RenderPassAttachmentBeginInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid p_next extension of RenderPassBeginInfo (builder and plain forms).
+unsafe impl ExtendsRenderPassBeginInfo for RenderPassAttachmentBeginInfoBuilder<'_> {}
+unsafe impl ExtendsRenderPassBeginInfo for RenderPassAttachmentBeginInfo {}
+impl<'a> ::std::ops::Deref for RenderPassAttachmentBeginInfoBuilder<'a> {
+ type Target = RenderPassAttachmentBeginInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for RenderPassAttachmentBeginInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> RenderPassAttachmentBeginInfoBuilder<'a> {
+ // Sets both the count and the raw pointer from one borrowed slice; the 'a
+ // lifetime keeps the backing storage alive while the builder is used.
+ #[inline]
+ pub fn attachments(mut self, attachments: &'a [ImageView]) -> Self {
+ self.inner.attachment_count = attachments.len() as _;
+ self.inner.p_attachments = attachments.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> RenderPassAttachmentBeginInfo {
+ self.inner
+ }
+}
+// PhysicalDeviceTextureCompressionASTCHDRFeatures: C-layout mirror of the Vulkan struct;
+// spec link in the #[doc] attribute below. Generated binding — do not hand-edit logic.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceTextureCompressionASTCHDRFeatures.html>"]
+pub struct PhysicalDeviceTextureCompressionASTCHDRFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub texture_compression_astc_hdr: Bool32,
+}
+// Default pre-fills s_type with this struct's tag and nulls p_next.
+impl ::std::default::Default for PhysicalDeviceTextureCompressionASTCHDRFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ texture_compression_astc_hdr: Bool32::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into s_type.
+unsafe impl TaggedStructure for PhysicalDeviceTextureCompressionASTCHDRFeatures {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES;
+}
+impl PhysicalDeviceTextureCompressionASTCHDRFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceTextureCompressionASTCHDRFeaturesBuilder<'a> {
+ PhysicalDeviceTextureCompressionASTCHDRFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; 'a tracks borrows handed to the setters.
+#[repr(transparent)]
+pub struct PhysicalDeviceTextureCompressionASTCHDRFeaturesBuilder<'a> {
+ inner: PhysicalDeviceTextureCompressionASTCHDRFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid p_next extension of both PhysicalDeviceFeatures2 and DeviceCreateInfo
+// (builder and plain forms).
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceTextureCompressionASTCHDRFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceTextureCompressionASTCHDRFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTextureCompressionASTCHDRFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTextureCompressionASTCHDRFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceTextureCompressionASTCHDRFeaturesBuilder<'a> {
+ type Target = PhysicalDeviceTextureCompressionASTCHDRFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceTextureCompressionASTCHDRFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Fluent setters; bool arguments are converted to the FFI Bool32 via .into().
+impl<'a> PhysicalDeviceTextureCompressionASTCHDRFeaturesBuilder<'a> {
+ #[inline]
+ pub fn texture_compression_astc_hdr(mut self, texture_compression_astc_hdr: bool) -> Self {
+ self.inner.texture_compression_astc_hdr = texture_compression_astc_hdr.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceTextureCompressionASTCHDRFeatures {
+ self.inner
+ }
+}
+// PhysicalDeviceCooperativeMatrixFeaturesNV: C-layout mirror of the Vulkan struct;
+// spec link in the #[doc] attribute below. Generated binding — do not hand-edit logic.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceCooperativeMatrixFeaturesNV.html>"]
+pub struct PhysicalDeviceCooperativeMatrixFeaturesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub cooperative_matrix: Bool32,
+ pub cooperative_matrix_robust_buffer_access: Bool32,
+}
+// Default pre-fills s_type with this struct's tag and nulls p_next.
+impl ::std::default::Default for PhysicalDeviceCooperativeMatrixFeaturesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ cooperative_matrix: Bool32::default(),
+ cooperative_matrix_robust_buffer_access: Bool32::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into s_type.
+unsafe impl TaggedStructure for PhysicalDeviceCooperativeMatrixFeaturesNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV;
+}
+impl PhysicalDeviceCooperativeMatrixFeaturesNV {
+ pub fn builder<'a>() -> PhysicalDeviceCooperativeMatrixFeaturesNVBuilder<'a> {
+ PhysicalDeviceCooperativeMatrixFeaturesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; 'a tracks borrows handed to the setters.
+#[repr(transparent)]
+pub struct PhysicalDeviceCooperativeMatrixFeaturesNVBuilder<'a> {
+ inner: PhysicalDeviceCooperativeMatrixFeaturesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid p_next extension of both PhysicalDeviceFeatures2 and DeviceCreateInfo
+// (builder and plain forms).
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceCooperativeMatrixFeaturesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCooperativeMatrixFeaturesNV {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCooperativeMatrixFeaturesNVBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCooperativeMatrixFeaturesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceCooperativeMatrixFeaturesNVBuilder<'a> {
+ type Target = PhysicalDeviceCooperativeMatrixFeaturesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceCooperativeMatrixFeaturesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Fluent setters; bool arguments are converted to the FFI Bool32 via .into().
+impl<'a> PhysicalDeviceCooperativeMatrixFeaturesNVBuilder<'a> {
+ #[inline]
+ pub fn cooperative_matrix(mut self, cooperative_matrix: bool) -> Self {
+ self.inner.cooperative_matrix = cooperative_matrix.into();
+ self
+ }
+ #[inline]
+ pub fn cooperative_matrix_robust_buffer_access(
+ mut self,
+ cooperative_matrix_robust_buffer_access: bool,
+ ) -> Self {
+ self.inner.cooperative_matrix_robust_buffer_access =
+ cooperative_matrix_robust_buffer_access.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceCooperativeMatrixFeaturesNV {
+ self.inner
+ }
+}
+// PhysicalDeviceCooperativeMatrixPropertiesNV: C-layout mirror of the Vulkan struct;
+// spec link in the #[doc] attribute below. Generated binding — do not hand-edit logic.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceCooperativeMatrixPropertiesNV.html>"]
+pub struct PhysicalDeviceCooperativeMatrixPropertiesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub cooperative_matrix_supported_stages: ShaderStageFlags,
+}
+// Default pre-fills s_type with this struct's tag and nulls p_next.
+impl ::std::default::Default for PhysicalDeviceCooperativeMatrixPropertiesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ cooperative_matrix_supported_stages: ShaderStageFlags::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into s_type.
+unsafe impl TaggedStructure for PhysicalDeviceCooperativeMatrixPropertiesNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV;
+}
+impl PhysicalDeviceCooperativeMatrixPropertiesNV {
+ pub fn builder<'a>() -> PhysicalDeviceCooperativeMatrixPropertiesNVBuilder<'a> {
+ PhysicalDeviceCooperativeMatrixPropertiesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; 'a tracks borrows handed to the setters.
+#[repr(transparent)]
+pub struct PhysicalDeviceCooperativeMatrixPropertiesNVBuilder<'a> {
+ inner: PhysicalDeviceCooperativeMatrixPropertiesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid p_next extension of PhysicalDeviceProperties2 (builder and plain forms).
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceCooperativeMatrixPropertiesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceCooperativeMatrixPropertiesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceCooperativeMatrixPropertiesNVBuilder<'a> {
+ type Target = PhysicalDeviceCooperativeMatrixPropertiesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceCooperativeMatrixPropertiesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Fluent setters for the non-header fields.
+impl<'a> PhysicalDeviceCooperativeMatrixPropertiesNVBuilder<'a> {
+ #[inline]
+ pub fn cooperative_matrix_supported_stages(
+ mut self,
+ cooperative_matrix_supported_stages: ShaderStageFlags,
+ ) -> Self {
+ self.inner.cooperative_matrix_supported_stages = cooperative_matrix_supported_stages;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceCooperativeMatrixPropertiesNV {
+ self.inner
+ }
+}
+// CooperativeMatrixPropertiesNV: C-layout mirror of the Vulkan struct;
+// spec link in the #[doc] attribute below. Generated binding — do not hand-edit logic.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCooperativeMatrixPropertiesNV.html>"]
+pub struct CooperativeMatrixPropertiesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub m_size: u32,
+ pub n_size: u32,
+ pub k_size: u32,
+ pub a_type: ComponentTypeNV,
+ pub b_type: ComponentTypeNV,
+ pub c_type: ComponentTypeNV,
+ pub d_type: ComponentTypeNV,
+ pub scope: ScopeNV,
+}
+// Default pre-fills s_type with this struct's tag and nulls p_next.
+impl ::std::default::Default for CooperativeMatrixPropertiesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ m_size: u32::default(),
+ n_size: u32::default(),
+ k_size: u32::default(),
+ a_type: ComponentTypeNV::default(),
+ b_type: ComponentTypeNV::default(),
+ c_type: ComponentTypeNV::default(),
+ d_type: ComponentTypeNV::default(),
+ scope: ScopeNV::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into s_type.
+unsafe impl TaggedStructure for CooperativeMatrixPropertiesNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::COOPERATIVE_MATRIX_PROPERTIES_NV;
+}
+impl CooperativeMatrixPropertiesNV {
+ pub fn builder<'a>() -> CooperativeMatrixPropertiesNVBuilder<'a> {
+ CooperativeMatrixPropertiesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; 'a tracks borrows handed to the setters.
+#[repr(transparent)]
+pub struct CooperativeMatrixPropertiesNVBuilder<'a> {
+ inner: CooperativeMatrixPropertiesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CooperativeMatrixPropertiesNVBuilder<'a> {
+ type Target = CooperativeMatrixPropertiesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for CooperativeMatrixPropertiesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Fluent setters for the non-header fields.
+impl<'a> CooperativeMatrixPropertiesNVBuilder<'a> {
+ #[inline]
+ pub fn m_size(mut self, m_size: u32) -> Self {
+ self.inner.m_size = m_size;
+ self
+ }
+ #[inline]
+ pub fn n_size(mut self, n_size: u32) -> Self {
+ self.inner.n_size = n_size;
+ self
+ }
+ #[inline]
+ pub fn k_size(mut self, k_size: u32) -> Self {
+ self.inner.k_size = k_size;
+ self
+ }
+ #[inline]
+ pub fn a_type(mut self, a_type: ComponentTypeNV) -> Self {
+ self.inner.a_type = a_type;
+ self
+ }
+ #[inline]
+ pub fn b_type(mut self, b_type: ComponentTypeNV) -> Self {
+ self.inner.b_type = b_type;
+ self
+ }
+ #[inline]
+ pub fn c_type(mut self, c_type: ComponentTypeNV) -> Self {
+ self.inner.c_type = c_type;
+ self
+ }
+ #[inline]
+ pub fn d_type(mut self, d_type: ComponentTypeNV) -> Self {
+ self.inner.d_type = d_type;
+ self
+ }
+ #[inline]
+ pub fn scope(mut self, scope: ScopeNV) -> Self {
+ self.inner.scope = scope;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> CooperativeMatrixPropertiesNV {
+ self.inner
+ }
+}
+// PhysicalDeviceYcbcrImageArraysFeaturesEXT: C-layout mirror of the Vulkan struct;
+// spec link in the #[doc] attribute below. Generated binding — do not hand-edit logic.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceYcbcrImageArraysFeaturesEXT.html>"]
+pub struct PhysicalDeviceYcbcrImageArraysFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub ycbcr_image_arrays: Bool32,
+}
+// Default pre-fills s_type with this struct's tag and nulls p_next.
+impl ::std::default::Default for PhysicalDeviceYcbcrImageArraysFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ ycbcr_image_arrays: Bool32::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into s_type.
+unsafe impl TaggedStructure for PhysicalDeviceYcbcrImageArraysFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT;
+}
+impl PhysicalDeviceYcbcrImageArraysFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceYcbcrImageArraysFeaturesEXTBuilder<'a> {
+ PhysicalDeviceYcbcrImageArraysFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; 'a tracks borrows handed to the setters.
+#[repr(transparent)]
+pub struct PhysicalDeviceYcbcrImageArraysFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceYcbcrImageArraysFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid p_next extension of both PhysicalDeviceFeatures2 and DeviceCreateInfo
+// (builder and plain forms).
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceYcbcrImageArraysFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceYcbcrImageArraysFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceYcbcrImageArraysFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceYcbcrImageArraysFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceYcbcrImageArraysFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceYcbcrImageArraysFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceYcbcrImageArraysFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Fluent setters; bool arguments are converted to the FFI Bool32 via .into().
+impl<'a> PhysicalDeviceYcbcrImageArraysFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn ycbcr_image_arrays(mut self, ycbcr_image_arrays: bool) -> Self {
+ self.inner.ycbcr_image_arrays = ycbcr_image_arrays.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceYcbcrImageArraysFeaturesEXT {
+ self.inner
+ }
+}
+// ImageViewHandleInfoNVX: C-layout mirror of the Vulkan struct;
+// spec link in the #[doc] attribute below. Generated binding — do not hand-edit logic.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageViewHandleInfoNVX.html>"]
+pub struct ImageViewHandleInfoNVX {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub image_view: ImageView,
+ pub descriptor_type: DescriptorType,
+ pub sampler: Sampler,
+}
+// Default pre-fills s_type with this struct's tag and nulls p_next.
+impl ::std::default::Default for ImageViewHandleInfoNVX {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ image_view: ImageView::default(),
+ descriptor_type: DescriptorType::default(),
+ sampler: Sampler::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into s_type.
+unsafe impl TaggedStructure for ImageViewHandleInfoNVX {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_HANDLE_INFO_NVX;
+}
+impl ImageViewHandleInfoNVX {
+ pub fn builder<'a>() -> ImageViewHandleInfoNVXBuilder<'a> {
+ ImageViewHandleInfoNVXBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; 'a tracks borrows handed to the setters.
+#[repr(transparent)]
+pub struct ImageViewHandleInfoNVXBuilder<'a> {
+ inner: ImageViewHandleInfoNVX,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImageViewHandleInfoNVXBuilder<'a> {
+ type Target = ImageViewHandleInfoNVX;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageViewHandleInfoNVXBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Fluent setters for the non-header fields.
+impl<'a> ImageViewHandleInfoNVXBuilder<'a> {
+ #[inline]
+ pub fn image_view(mut self, image_view: ImageView) -> Self {
+ self.inner.image_view = image_view;
+ self
+ }
+ #[inline]
+ pub fn descriptor_type(mut self, descriptor_type: DescriptorType) -> Self {
+ self.inner.descriptor_type = descriptor_type;
+ self
+ }
+ #[inline]
+ pub fn sampler(mut self, sampler: Sampler) -> Self {
+ self.inner.sampler = sampler;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageViewHandleInfoNVX {
+ self.inner
+ }
+}
+// ImageViewAddressPropertiesNVX: C-layout mirror of the Vulkan struct;
+// spec link in the #[doc] attribute below. Generated binding — do not hand-edit logic.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageViewAddressPropertiesNVX.html>"]
+pub struct ImageViewAddressPropertiesNVX {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub device_address: DeviceAddress,
+ pub size: DeviceSize,
+}
+// Default pre-fills s_type with this struct's tag and nulls p_next.
+impl ::std::default::Default for ImageViewAddressPropertiesNVX {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ device_address: DeviceAddress::default(),
+ size: DeviceSize::default(),
+ }
+ }
+}
+// Associates the VkStructureType tag written into s_type.
+unsafe impl TaggedStructure for ImageViewAddressPropertiesNVX {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_ADDRESS_PROPERTIES_NVX;
+}
+impl ImageViewAddressPropertiesNVX {
+ pub fn builder<'a>() -> ImageViewAddressPropertiesNVXBuilder<'a> {
+ ImageViewAddressPropertiesNVXBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Transparent builder wrapper; 'a tracks borrows handed to the setters.
+#[repr(transparent)]
+pub struct ImageViewAddressPropertiesNVXBuilder<'a> {
+ inner: ImageViewAddressPropertiesNVX,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImageViewAddressPropertiesNVXBuilder<'a> {
+ type Target = ImageViewAddressPropertiesNVX;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageViewAddressPropertiesNVXBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Fluent setters for the non-header fields.
+impl<'a> ImageViewAddressPropertiesNVXBuilder<'a> {
+ #[inline]
+ pub fn device_address(mut self, device_address: DeviceAddress) -> Self {
+ self.inner.device_address = device_address;
+ self
+ }
+ #[inline]
+ pub fn size(mut self, size: DeviceSize) -> Self {
+ self.inner.size = size;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageViewAddressPropertiesNVX {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPresentFrameTokenGGP.html>"]
+pub struct PresentFrameTokenGGP {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub frame_token: GgpFrameToken,
+}
+impl ::std::default::Default for PresentFrameTokenGGP {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ frame_token: GgpFrameToken::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PresentFrameTokenGGP {
+ const STRUCTURE_TYPE: StructureType = StructureType::PRESENT_FRAME_TOKEN_GGP;
+}
+impl PresentFrameTokenGGP {
+ pub fn builder<'a>() -> PresentFrameTokenGGPBuilder<'a> {
+ PresentFrameTokenGGPBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PresentFrameTokenGGPBuilder<'a> {
+ inner: PresentFrameTokenGGP,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPresentInfoKHR for PresentFrameTokenGGPBuilder<'_> {}
+unsafe impl ExtendsPresentInfoKHR for PresentFrameTokenGGP {}
+impl<'a> ::std::ops::Deref for PresentFrameTokenGGPBuilder<'a> {
+ type Target = PresentFrameTokenGGP;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PresentFrameTokenGGPBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PresentFrameTokenGGPBuilder<'a> {
+ #[inline]
+ pub fn frame_token(mut self, frame_token: GgpFrameToken) -> Self {
+ self.inner.frame_token = frame_token;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PresentFrameTokenGGP {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCreationFeedback.html>"]
+pub struct PipelineCreationFeedback {
+ pub flags: PipelineCreationFeedbackFlags,
+ pub duration: u64,
+}
+impl PipelineCreationFeedback {
+ pub fn builder<'a>() -> PipelineCreationFeedbackBuilder<'a> {
+ PipelineCreationFeedbackBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineCreationFeedbackBuilder<'a> {
+ inner: PipelineCreationFeedback,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PipelineCreationFeedbackBuilder<'a> {
+ type Target = PipelineCreationFeedback;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineCreationFeedbackBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineCreationFeedbackBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: PipelineCreationFeedbackFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn duration(mut self, duration: u64) -> Self {
+ self.inner.duration = duration;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineCreationFeedback {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCreationFeedbackCreateInfo.html>"]
+pub struct PipelineCreationFeedbackCreateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub p_pipeline_creation_feedback: *mut PipelineCreationFeedback,
+ pub pipeline_stage_creation_feedback_count: u32,
+ pub p_pipeline_stage_creation_feedbacks: *mut PipelineCreationFeedback,
+}
+impl ::std::default::Default for PipelineCreationFeedbackCreateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ p_pipeline_creation_feedback: ::std::ptr::null_mut(),
+ pipeline_stage_creation_feedback_count: u32::default(),
+ p_pipeline_stage_creation_feedbacks: ::std::ptr::null_mut(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineCreationFeedbackCreateInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_CREATION_FEEDBACK_CREATE_INFO;
+}
+impl PipelineCreationFeedbackCreateInfo {
+ pub fn builder<'a>() -> PipelineCreationFeedbackCreateInfoBuilder<'a> {
+ PipelineCreationFeedbackCreateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineCreationFeedbackCreateInfoBuilder<'a> {
+ inner: PipelineCreationFeedbackCreateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineCreationFeedbackCreateInfoBuilder<'_> {}
+unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineCreationFeedbackCreateInfo {}
+unsafe impl ExtendsComputePipelineCreateInfo for PipelineCreationFeedbackCreateInfoBuilder<'_> {}
+unsafe impl ExtendsComputePipelineCreateInfo for PipelineCreationFeedbackCreateInfo {}
+unsafe impl ExtendsRayTracingPipelineCreateInfoNV
+ for PipelineCreationFeedbackCreateInfoBuilder<'_>
+{
+}
+unsafe impl ExtendsRayTracingPipelineCreateInfoNV for PipelineCreationFeedbackCreateInfo {}
+unsafe impl ExtendsRayTracingPipelineCreateInfoKHR
+ for PipelineCreationFeedbackCreateInfoBuilder<'_>
+{
+}
+unsafe impl ExtendsRayTracingPipelineCreateInfoKHR for PipelineCreationFeedbackCreateInfo {}
+impl<'a> ::std::ops::Deref for PipelineCreationFeedbackCreateInfoBuilder<'a> {
+ type Target = PipelineCreationFeedbackCreateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineCreationFeedbackCreateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineCreationFeedbackCreateInfoBuilder<'a> {
+ #[inline]
+ pub fn pipeline_creation_feedback(
+ mut self,
+ pipeline_creation_feedback: &'a mut PipelineCreationFeedback,
+ ) -> Self {
+ self.inner.p_pipeline_creation_feedback = pipeline_creation_feedback;
+ self
+ }
+ #[inline]
+ pub fn pipeline_stage_creation_feedbacks(
+ mut self,
+ pipeline_stage_creation_feedbacks: &'a mut [PipelineCreationFeedback],
+ ) -> Self {
+ self.inner.pipeline_stage_creation_feedback_count =
+ pipeline_stage_creation_feedbacks.len() as _;
+ self.inner.p_pipeline_stage_creation_feedbacks =
+ pipeline_stage_creation_feedbacks.as_mut_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineCreationFeedbackCreateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfaceFullScreenExclusiveInfoEXT.html>"]
+pub struct SurfaceFullScreenExclusiveInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub full_screen_exclusive: FullScreenExclusiveEXT,
+}
+impl ::std::default::Default for SurfaceFullScreenExclusiveInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ full_screen_exclusive: FullScreenExclusiveEXT::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SurfaceFullScreenExclusiveInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT;
+}
+impl SurfaceFullScreenExclusiveInfoEXT {
+ pub fn builder<'a>() -> SurfaceFullScreenExclusiveInfoEXTBuilder<'a> {
+ SurfaceFullScreenExclusiveInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SurfaceFullScreenExclusiveInfoEXTBuilder<'a> {
+ inner: SurfaceFullScreenExclusiveInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceSurfaceInfo2KHR for SurfaceFullScreenExclusiveInfoEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceSurfaceInfo2KHR for SurfaceFullScreenExclusiveInfoEXT {}
+unsafe impl ExtendsSwapchainCreateInfoKHR for SurfaceFullScreenExclusiveInfoEXTBuilder<'_> {}
+unsafe impl ExtendsSwapchainCreateInfoKHR for SurfaceFullScreenExclusiveInfoEXT {}
+impl<'a> ::std::ops::Deref for SurfaceFullScreenExclusiveInfoEXTBuilder<'a> {
+ type Target = SurfaceFullScreenExclusiveInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SurfaceFullScreenExclusiveInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SurfaceFullScreenExclusiveInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn full_screen_exclusive(mut self, full_screen_exclusive: FullScreenExclusiveEXT) -> Self {
+ self.inner.full_screen_exclusive = full_screen_exclusive;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SurfaceFullScreenExclusiveInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfaceFullScreenExclusiveWin32InfoEXT.html>"]
+pub struct SurfaceFullScreenExclusiveWin32InfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub hmonitor: HMONITOR,
+}
+impl ::std::default::Default for SurfaceFullScreenExclusiveWin32InfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ hmonitor: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for SurfaceFullScreenExclusiveWin32InfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT;
+}
+impl SurfaceFullScreenExclusiveWin32InfoEXT {
+ pub fn builder<'a>() -> SurfaceFullScreenExclusiveWin32InfoEXTBuilder<'a> {
+ SurfaceFullScreenExclusiveWin32InfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SurfaceFullScreenExclusiveWin32InfoEXTBuilder<'a> {
+ inner: SurfaceFullScreenExclusiveWin32InfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceSurfaceInfo2KHR
+ for SurfaceFullScreenExclusiveWin32InfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceSurfaceInfo2KHR for SurfaceFullScreenExclusiveWin32InfoEXT {}
+unsafe impl ExtendsSwapchainCreateInfoKHR for SurfaceFullScreenExclusiveWin32InfoEXTBuilder<'_> {}
+unsafe impl ExtendsSwapchainCreateInfoKHR for SurfaceFullScreenExclusiveWin32InfoEXT {}
+impl<'a> ::std::ops::Deref for SurfaceFullScreenExclusiveWin32InfoEXTBuilder<'a> {
+ type Target = SurfaceFullScreenExclusiveWin32InfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SurfaceFullScreenExclusiveWin32InfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SurfaceFullScreenExclusiveWin32InfoEXTBuilder<'a> {
+ #[inline]
+ pub fn hmonitor(mut self, hmonitor: HMONITOR) -> Self {
+ self.inner.hmonitor = hmonitor;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SurfaceFullScreenExclusiveWin32InfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfaceCapabilitiesFullScreenExclusiveEXT.html>"]
+pub struct SurfaceCapabilitiesFullScreenExclusiveEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub full_screen_exclusive_supported: Bool32,
+}
+impl ::std::default::Default for SurfaceCapabilitiesFullScreenExclusiveEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ full_screen_exclusive_supported: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SurfaceCapabilitiesFullScreenExclusiveEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT;
+}
+impl SurfaceCapabilitiesFullScreenExclusiveEXT {
+ pub fn builder<'a>() -> SurfaceCapabilitiesFullScreenExclusiveEXTBuilder<'a> {
+ SurfaceCapabilitiesFullScreenExclusiveEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SurfaceCapabilitiesFullScreenExclusiveEXTBuilder<'a> {
+ inner: SurfaceCapabilitiesFullScreenExclusiveEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsSurfaceCapabilities2KHR
+ for SurfaceCapabilitiesFullScreenExclusiveEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsSurfaceCapabilities2KHR for SurfaceCapabilitiesFullScreenExclusiveEXT {}
+impl<'a> ::std::ops::Deref for SurfaceCapabilitiesFullScreenExclusiveEXTBuilder<'a> {
+ type Target = SurfaceCapabilitiesFullScreenExclusiveEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SurfaceCapabilitiesFullScreenExclusiveEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SurfaceCapabilitiesFullScreenExclusiveEXTBuilder<'a> {
+ #[inline]
+ pub fn full_screen_exclusive_supported(
+ mut self,
+ full_screen_exclusive_supported: bool,
+ ) -> Self {
+ self.inner.full_screen_exclusive_supported = full_screen_exclusive_supported.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SurfaceCapabilitiesFullScreenExclusiveEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePresentBarrierFeaturesNV.html>"]
+pub struct PhysicalDevicePresentBarrierFeaturesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub present_barrier: Bool32,
+}
+impl ::std::default::Default for PhysicalDevicePresentBarrierFeaturesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ present_barrier: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDevicePresentBarrierFeaturesNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV;
+}
+impl PhysicalDevicePresentBarrierFeaturesNV {
+ pub fn builder<'a>() -> PhysicalDevicePresentBarrierFeaturesNVBuilder<'a> {
+ PhysicalDevicePresentBarrierFeaturesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDevicePresentBarrierFeaturesNVBuilder<'a> {
+ inner: PhysicalDevicePresentBarrierFeaturesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePresentBarrierFeaturesNVBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePresentBarrierFeaturesNV {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePresentBarrierFeaturesNVBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePresentBarrierFeaturesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePresentBarrierFeaturesNVBuilder<'a> {
+ type Target = PhysicalDevicePresentBarrierFeaturesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePresentBarrierFeaturesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDevicePresentBarrierFeaturesNVBuilder<'a> {
+ #[inline]
+ pub fn present_barrier(mut self, present_barrier: bool) -> Self {
+ self.inner.present_barrier = present_barrier.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDevicePresentBarrierFeaturesNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfaceCapabilitiesPresentBarrierNV.html>"]
+pub struct SurfaceCapabilitiesPresentBarrierNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub present_barrier_supported: Bool32,
+}
+impl ::std::default::Default for SurfaceCapabilitiesPresentBarrierNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ present_barrier_supported: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SurfaceCapabilitiesPresentBarrierNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_CAPABILITIES_PRESENT_BARRIER_NV;
+}
+impl SurfaceCapabilitiesPresentBarrierNV {
+ pub fn builder<'a>() -> SurfaceCapabilitiesPresentBarrierNVBuilder<'a> {
+ SurfaceCapabilitiesPresentBarrierNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SurfaceCapabilitiesPresentBarrierNVBuilder<'a> {
+ inner: SurfaceCapabilitiesPresentBarrierNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsSurfaceCapabilities2KHR for SurfaceCapabilitiesPresentBarrierNVBuilder<'_> {}
+unsafe impl ExtendsSurfaceCapabilities2KHR for SurfaceCapabilitiesPresentBarrierNV {}
+impl<'a> ::std::ops::Deref for SurfaceCapabilitiesPresentBarrierNVBuilder<'a> {
+ type Target = SurfaceCapabilitiesPresentBarrierNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SurfaceCapabilitiesPresentBarrierNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SurfaceCapabilitiesPresentBarrierNVBuilder<'a> {
+ #[inline]
+ pub fn present_barrier_supported(mut self, present_barrier_supported: bool) -> Self {
+ self.inner.present_barrier_supported = present_barrier_supported.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SurfaceCapabilitiesPresentBarrierNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSwapchainPresentBarrierCreateInfoNV.html>"]
+pub struct SwapchainPresentBarrierCreateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub present_barrier_enable: Bool32,
+}
+impl ::std::default::Default for SwapchainPresentBarrierCreateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ present_barrier_enable: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SwapchainPresentBarrierCreateInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV;
+}
+impl SwapchainPresentBarrierCreateInfoNV {
+ pub fn builder<'a>() -> SwapchainPresentBarrierCreateInfoNVBuilder<'a> {
+ SwapchainPresentBarrierCreateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SwapchainPresentBarrierCreateInfoNVBuilder<'a> {
+ inner: SwapchainPresentBarrierCreateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainPresentBarrierCreateInfoNVBuilder<'_> {}
+unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainPresentBarrierCreateInfoNV {}
+impl<'a> ::std::ops::Deref for SwapchainPresentBarrierCreateInfoNVBuilder<'a> {
+ type Target = SwapchainPresentBarrierCreateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SwapchainPresentBarrierCreateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SwapchainPresentBarrierCreateInfoNVBuilder<'a> {
+ #[inline]
+ pub fn present_barrier_enable(mut self, present_barrier_enable: bool) -> Self {
+ self.inner.present_barrier_enable = present_barrier_enable.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SwapchainPresentBarrierCreateInfoNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePerformanceQueryFeaturesKHR.html>"]
+pub struct PhysicalDevicePerformanceQueryFeaturesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub performance_counter_query_pools: Bool32,
+ pub performance_counter_multiple_query_pools: Bool32,
+}
+impl ::std::default::Default for PhysicalDevicePerformanceQueryFeaturesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ performance_counter_query_pools: Bool32::default(),
+ performance_counter_multiple_query_pools: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDevicePerformanceQueryFeaturesKHR {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR;
+}
+impl PhysicalDevicePerformanceQueryFeaturesKHR {
+ pub fn builder<'a>() -> PhysicalDevicePerformanceQueryFeaturesKHRBuilder<'a> {
+ PhysicalDevicePerformanceQueryFeaturesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDevicePerformanceQueryFeaturesKHRBuilder<'a> {
+ inner: PhysicalDevicePerformanceQueryFeaturesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDevicePerformanceQueryFeaturesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePerformanceQueryFeaturesKHR {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePerformanceQueryFeaturesKHRBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePerformanceQueryFeaturesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePerformanceQueryFeaturesKHRBuilder<'a> {
+ type Target = PhysicalDevicePerformanceQueryFeaturesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePerformanceQueryFeaturesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDevicePerformanceQueryFeaturesKHRBuilder<'a> {
+ #[inline]
+ pub fn performance_counter_query_pools(
+ mut self,
+ performance_counter_query_pools: bool,
+ ) -> Self {
+ self.inner.performance_counter_query_pools = performance_counter_query_pools.into();
+ self
+ }
+ #[inline]
+ pub fn performance_counter_multiple_query_pools(
+ mut self,
+ performance_counter_multiple_query_pools: bool,
+ ) -> Self {
+ self.inner.performance_counter_multiple_query_pools =
+ performance_counter_multiple_query_pools.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDevicePerformanceQueryFeaturesKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePerformanceQueryPropertiesKHR.html>"]
+pub struct PhysicalDevicePerformanceQueryPropertiesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub allow_command_buffer_query_copies: Bool32,
+}
+impl ::std::default::Default for PhysicalDevicePerformanceQueryPropertiesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ allow_command_buffer_query_copies: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDevicePerformanceQueryPropertiesKHR {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR;
+}
+impl PhysicalDevicePerformanceQueryPropertiesKHR {
+ pub fn builder<'a>() -> PhysicalDevicePerformanceQueryPropertiesKHRBuilder<'a> {
+ PhysicalDevicePerformanceQueryPropertiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDevicePerformanceQueryPropertiesKHRBuilder<'a> {
+ inner: PhysicalDevicePerformanceQueryPropertiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDevicePerformanceQueryPropertiesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePerformanceQueryPropertiesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePerformanceQueryPropertiesKHRBuilder<'a> {
+ type Target = PhysicalDevicePerformanceQueryPropertiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePerformanceQueryPropertiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDevicePerformanceQueryPropertiesKHRBuilder<'a> {
+ #[inline]
+ pub fn allow_command_buffer_query_copies(
+ mut self,
+ allow_command_buffer_query_copies: bool,
+ ) -> Self {
+ self.inner.allow_command_buffer_query_copies = allow_command_buffer_query_copies.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDevicePerformanceQueryPropertiesKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceCounterKHR.html>"]
+pub struct PerformanceCounterKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub unit: PerformanceCounterUnitKHR,
+ pub scope: PerformanceCounterScopeKHR,
+ pub storage: PerformanceCounterStorageKHR,
+ pub uuid: [u8; UUID_SIZE],
+}
+impl ::std::default::Default for PerformanceCounterKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ unit: PerformanceCounterUnitKHR::default(),
+ scope: PerformanceCounterScopeKHR::default(),
+ storage: PerformanceCounterStorageKHR::default(),
+ uuid: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for PerformanceCounterKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::PERFORMANCE_COUNTER_KHR;
+}
+impl PerformanceCounterKHR {
+ pub fn builder<'a>() -> PerformanceCounterKHRBuilder<'a> {
+ PerformanceCounterKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PerformanceCounterKHRBuilder<'a> {
+ inner: PerformanceCounterKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PerformanceCounterKHRBuilder<'a> {
+ type Target = PerformanceCounterKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PerformanceCounterKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PerformanceCounterKHRBuilder<'a> {
+ #[inline]
+ pub fn unit(mut self, unit: PerformanceCounterUnitKHR) -> Self {
+ self.inner.unit = unit;
+ self
+ }
+ #[inline]
+ pub fn scope(mut self, scope: PerformanceCounterScopeKHR) -> Self {
+ self.inner.scope = scope;
+ self
+ }
+ #[inline]
+ pub fn storage(mut self, storage: PerformanceCounterStorageKHR) -> Self {
+ self.inner.storage = storage;
+ self
+ }
+ #[inline]
+ pub fn uuid(mut self, uuid: [u8; UUID_SIZE]) -> Self {
+ self.inner.uuid = uuid;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PerformanceCounterKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceCounterDescriptionKHR.html>"]
+pub struct PerformanceCounterDescriptionKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub flags: PerformanceCounterDescriptionFlagsKHR,
+ pub name: [c_char; MAX_DESCRIPTION_SIZE],
+ pub category: [c_char; MAX_DESCRIPTION_SIZE],
+ pub description: [c_char; MAX_DESCRIPTION_SIZE],
+}
// Manual Debug impl (generated because the struct holds fixed-size `c_char`
// arrays): renders `name`/`category`/`description` as C strings rather than
// dumping raw byte arrays.
#[cfg(feature = "debug")]
impl fmt::Debug for PerformanceCounterDescriptionKHR {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("PerformanceCounterDescriptionKHR")
            .field("s_type", &self.s_type)
            .field("p_next", &self.p_next)
            .field("flags", &self.flags)
            // SAFETY(review): assumes each MAX_DESCRIPTION_SIZE array holds a
            // NUL-terminated string. Default below zero-fills the arrays (a
            // valid empty C string); for driver-filled values this relies on
            // the Vulkan implementation NUL-terminating — TODO confirm against
            // the VK_KHR_performance_query spec.
            .field("name", &unsafe {
                ::std::ffi::CStr::from_ptr(self.name.as_ptr())
            })
            .field("category", &unsafe {
                ::std::ffi::CStr::from_ptr(self.category.as_ptr())
            })
            .field("description", &unsafe {
                ::std::ffi::CStr::from_ptr(self.description.as_ptr())
            })
            .finish()
    }
}
// Default pre-tags `s_type` (via TaggedStructure below) so the struct is ready
// to be handed to Vulkan without further setup.
impl ::std::default::Default for PerformanceCounterDescriptionKHR {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            flags: PerformanceCounterDescriptionFlagsKHR::default(),
            // Zeroed c_char arrays are valid (empty) NUL-terminated strings.
            name: unsafe { ::std::mem::zeroed() },
            category: unsafe { ::std::mem::zeroed() },
            description: unsafe { ::std::mem::zeroed() },
        }
    }
}
// Binds this struct to its VkStructureType discriminant, used by Default and
// by p_next-chain machinery.
unsafe impl TaggedStructure for PerformanceCounterDescriptionKHR {
    const STRUCTURE_TYPE: StructureType = StructureType::PERFORMANCE_COUNTER_DESCRIPTION_KHR;
}
// Entry point for the generated builder type below.
impl PerformanceCounterDescriptionKHR {
    pub fn builder<'a>() -> PerformanceCounterDescriptionKHRBuilder<'a> {
        PerformanceCounterDescriptionKHRBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// `#[repr(transparent)]` guarantees the builder has the same layout as the
// wrapped struct, so a &Builder can be passed where a &struct is expected.
// The 'a lifetime is the standard generated slot for borrowed pointer fields
// (unused for this struct, which has none).
#[repr(transparent)]
pub struct PerformanceCounterDescriptionKHRBuilder<'a> {
    inner: PerformanceCounterDescriptionKHR,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PerformanceCounterDescriptionKHRBuilder<'a> {
    type Target = PerformanceCounterDescriptionKHR;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PerformanceCounterDescriptionKHRBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// By-value chainable setters; each consumes and returns the builder.
impl<'a> PerformanceCounterDescriptionKHRBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: PerformanceCounterDescriptionFlagsKHR) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn name(mut self, name: [c_char; MAX_DESCRIPTION_SIZE]) -> Self {
        self.inner.name = name;
        self
    }
    #[inline]
    pub fn category(mut self, category: [c_char; MAX_DESCRIPTION_SIZE]) -> Self {
        self.inner.category = category;
        self
    }
    #[inline]
    pub fn description(mut self, description: [c_char; MAX_DESCRIPTION_SIZE]) -> Self {
        self.inner.description = description;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PerformanceCounterDescriptionKHR {
        self.inner
    }
}
// FFI mirror of VkQueryPoolPerformanceCreateInfoKHR; #[repr(C)] keeps the
// field order/layout identical to the C struct.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueryPoolPerformanceCreateInfoKHR.html>"]
pub struct QueryPoolPerformanceCreateInfoKHR {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub queue_family_index: u32,
    // Length of the array behind `p_counter_indices`.
    pub counter_index_count: u32,
    pub p_counter_indices: *const u32,
}
// Default pre-tags s_type and nulls the pointers.
impl ::std::default::Default for QueryPoolPerformanceCreateInfoKHR {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            queue_family_index: u32::default(),
            counter_index_count: u32::default(),
            p_counter_indices: ::std::ptr::null(),
        }
    }
}
unsafe impl TaggedStructure for QueryPoolPerformanceCreateInfoKHR {
    const STRUCTURE_TYPE: StructureType = StructureType::QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR;
}
impl QueryPoolPerformanceCreateInfoKHR {
    pub fn builder<'a>() -> QueryPoolPerformanceCreateInfoKHRBuilder<'a> {
        QueryPoolPerformanceCreateInfoKHRBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder: layout-transparent wrapper; 'a ties the borrowed counter-index
// slice (see `counter_indices` below) to the builder's lifetime.
#[repr(transparent)]
pub struct QueryPoolPerformanceCreateInfoKHRBuilder<'a> {
    inner: QueryPoolPerformanceCreateInfoKHR,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct may appear on a VkQueryPoolCreateInfo p_next chain.
unsafe impl ExtendsQueryPoolCreateInfo for QueryPoolPerformanceCreateInfoKHRBuilder<'_> {}
unsafe impl ExtendsQueryPoolCreateInfo for QueryPoolPerformanceCreateInfoKHR {}
impl<'a> ::std::ops::Deref for QueryPoolPerformanceCreateInfoKHRBuilder<'a> {
    type Target = QueryPoolPerformanceCreateInfoKHR;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for QueryPoolPerformanceCreateInfoKHRBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> QueryPoolPerformanceCreateInfoKHRBuilder<'a> {
    #[inline]
    pub fn queue_family_index(mut self, queue_family_index: u32) -> Self {
        self.inner.queue_family_index = queue_family_index;
        self
    }
    // Slice setter: records both the count and the raw pointer; the slice must
    // outlive 'a (enforced by the borrow).
    #[inline]
    pub fn counter_indices(mut self, counter_indices: &'a [u32]) -> Self {
        self.inner.counter_index_count = counter_indices.len() as _;
        self.inner.p_counter_indices = counter_indices.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> QueryPoolPerformanceCreateInfoKHR {
        self.inner
    }
}
// C-layout union for a single performance-counter result; which variant is
// valid is indicated out-of-band by the counter's storage type.
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceCounterResultKHR.html>"]
pub union PerformanceCounterResultKHR {
    pub int32: i32,
    pub int64: i64,
    pub uint32: u32,
    pub uint64: u64,
    pub float32: f32,
    pub float64: f64,
}
impl ::std::default::Default for PerformanceCounterResultKHR {
    #[inline]
    fn default() -> Self {
        // All-zero bytes are a valid value for every variant of this union.
        unsafe { ::std::mem::zeroed() }
    }
}
// FFI mirror of VkAcquireProfilingLockInfoKHR (VK_KHR_performance_query).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAcquireProfilingLockInfoKHR.html>"]
pub struct AcquireProfilingLockInfoKHR {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: AcquireProfilingLockFlagsKHR,
    // Timeout in nanoseconds — presumably; verify against the Vulkan spec.
    pub timeout: u64,
}
impl ::std::default::Default for AcquireProfilingLockInfoKHR {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: AcquireProfilingLockFlagsKHR::default(),
            timeout: u64::default(),
        }
    }
}
unsafe impl TaggedStructure for AcquireProfilingLockInfoKHR {
    const STRUCTURE_TYPE: StructureType = StructureType::ACQUIRE_PROFILING_LOCK_INFO_KHR;
}
impl AcquireProfilingLockInfoKHR {
    pub fn builder<'a>() -> AcquireProfilingLockInfoKHRBuilder<'a> {
        AcquireProfilingLockInfoKHRBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-transparent builder; the 'a lifetime slot is unused here (no
// borrowed pointer fields).
#[repr(transparent)]
pub struct AcquireProfilingLockInfoKHRBuilder<'a> {
    inner: AcquireProfilingLockInfoKHR,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for AcquireProfilingLockInfoKHRBuilder<'a> {
    type Target = AcquireProfilingLockInfoKHR;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for AcquireProfilingLockInfoKHRBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> AcquireProfilingLockInfoKHRBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: AcquireProfilingLockFlagsKHR) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn timeout(mut self, timeout: u64) -> Self {
        self.inner.timeout = timeout;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> AcquireProfilingLockInfoKHR {
        self.inner
    }
}
// FFI mirror of VkPerformanceQuerySubmitInfoKHR: selects which counter pass a
// submission belongs to.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceQuerySubmitInfoKHR.html>"]
pub struct PerformanceQuerySubmitInfoKHR {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub counter_pass_index: u32,
}
impl ::std::default::Default for PerformanceQuerySubmitInfoKHR {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            counter_pass_index: u32::default(),
        }
    }
}
unsafe impl TaggedStructure for PerformanceQuerySubmitInfoKHR {
    const STRUCTURE_TYPE: StructureType = StructureType::PERFORMANCE_QUERY_SUBMIT_INFO_KHR;
}
impl PerformanceQuerySubmitInfoKHR {
    pub fn builder<'a>() -> PerformanceQuerySubmitInfoKHRBuilder<'a> {
        PerformanceQuerySubmitInfoKHRBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PerformanceQuerySubmitInfoKHRBuilder<'a> {
    inner: PerformanceQuerySubmitInfoKHR,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// May extend both VkSubmitInfo and VkSubmitInfo2 p_next chains.
unsafe impl ExtendsSubmitInfo for PerformanceQuerySubmitInfoKHRBuilder<'_> {}
unsafe impl ExtendsSubmitInfo for PerformanceQuerySubmitInfoKHR {}
unsafe impl ExtendsSubmitInfo2 for PerformanceQuerySubmitInfoKHRBuilder<'_> {}
unsafe impl ExtendsSubmitInfo2 for PerformanceQuerySubmitInfoKHR {}
impl<'a> ::std::ops::Deref for PerformanceQuerySubmitInfoKHRBuilder<'a> {
    type Target = PerformanceQuerySubmitInfoKHR;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PerformanceQuerySubmitInfoKHRBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PerformanceQuerySubmitInfoKHRBuilder<'a> {
    #[inline]
    pub fn counter_pass_index(mut self, counter_pass_index: u32) -> Self {
        self.inner.counter_pass_index = counter_pass_index;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PerformanceQuerySubmitInfoKHR {
        self.inner
    }
}
// FFI mirror of VkHeadlessSurfaceCreateInfoEXT (VK_EXT_headless_surface);
// `flags` is reserved and currently carries no meaning.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkHeadlessSurfaceCreateInfoEXT.html>"]
pub struct HeadlessSurfaceCreateInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: HeadlessSurfaceCreateFlagsEXT,
}
impl ::std::default::Default for HeadlessSurfaceCreateInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: HeadlessSurfaceCreateFlagsEXT::default(),
        }
    }
}
unsafe impl TaggedStructure for HeadlessSurfaceCreateInfoEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::HEADLESS_SURFACE_CREATE_INFO_EXT;
}
impl HeadlessSurfaceCreateInfoEXT {
    pub fn builder<'a>() -> HeadlessSurfaceCreateInfoEXTBuilder<'a> {
        HeadlessSurfaceCreateInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct HeadlessSurfaceCreateInfoEXTBuilder<'a> {
    inner: HeadlessSurfaceCreateInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for HeadlessSurfaceCreateInfoEXTBuilder<'a> {
    type Target = HeadlessSurfaceCreateInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for HeadlessSurfaceCreateInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> HeadlessSurfaceCreateInfoEXTBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: HeadlessSurfaceCreateFlagsEXT) -> Self {
        self.inner.flags = flags;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> HeadlessSurfaceCreateInfoEXT {
        self.inner
    }
}
// FFI mirror of VkPhysicalDeviceCoverageReductionModeFeaturesNV; a feature
// query/enable struct, hence the *mut p_next (the driver may write into it).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceCoverageReductionModeFeaturesNV.html>"]
pub struct PhysicalDeviceCoverageReductionModeFeaturesNV {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    // Bool32: VK_TRUE/VK_FALSE as u32, matching the C ABI.
    pub coverage_reduction_mode: Bool32,
}
impl ::std::default::Default for PhysicalDeviceCoverageReductionModeFeaturesNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            coverage_reduction_mode: Bool32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceCoverageReductionModeFeaturesNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV;
}
impl PhysicalDeviceCoverageReductionModeFeaturesNV {
    pub fn builder<'a>() -> PhysicalDeviceCoverageReductionModeFeaturesNVBuilder<'a> {
        PhysicalDeviceCoverageReductionModeFeaturesNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDeviceCoverageReductionModeFeaturesNVBuilder<'a> {
    inner: PhysicalDeviceCoverageReductionModeFeaturesNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Valid on VkPhysicalDeviceFeatures2 and VkDeviceCreateInfo p_next chains.
unsafe impl ExtendsPhysicalDeviceFeatures2
    for PhysicalDeviceCoverageReductionModeFeaturesNVBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCoverageReductionModeFeaturesNV {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCoverageReductionModeFeaturesNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCoverageReductionModeFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceCoverageReductionModeFeaturesNVBuilder<'a> {
    type Target = PhysicalDeviceCoverageReductionModeFeaturesNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceCoverageReductionModeFeaturesNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceCoverageReductionModeFeaturesNVBuilder<'a> {
    // Takes a Rust bool and converts to the FFI Bool32 representation.
    #[inline]
    pub fn coverage_reduction_mode(mut self, coverage_reduction_mode: bool) -> Self {
        self.inner.coverage_reduction_mode = coverage_reduction_mode.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceCoverageReductionModeFeaturesNV {
        self.inner
    }
}
// FFI mirror of VkPipelineCoverageReductionStateCreateInfoNV.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCoverageReductionStateCreateInfoNV.html>"]
pub struct PipelineCoverageReductionStateCreateInfoNV {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: PipelineCoverageReductionStateCreateFlagsNV,
    pub coverage_reduction_mode: CoverageReductionModeNV,
}
impl ::std::default::Default for PipelineCoverageReductionStateCreateInfoNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: PipelineCoverageReductionStateCreateFlagsNV::default(),
            coverage_reduction_mode: CoverageReductionModeNV::default(),
        }
    }
}
unsafe impl TaggedStructure for PipelineCoverageReductionStateCreateInfoNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV;
}
impl PipelineCoverageReductionStateCreateInfoNV {
    pub fn builder<'a>() -> PipelineCoverageReductionStateCreateInfoNVBuilder<'a> {
        PipelineCoverageReductionStateCreateInfoNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PipelineCoverageReductionStateCreateInfoNVBuilder<'a> {
    inner: PipelineCoverageReductionStateCreateInfoNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Valid on a VkPipelineMultisampleStateCreateInfo p_next chain.
unsafe impl ExtendsPipelineMultisampleStateCreateInfo
    for PipelineCoverageReductionStateCreateInfoNVBuilder<'_>
{
}
unsafe impl ExtendsPipelineMultisampleStateCreateInfo
    for PipelineCoverageReductionStateCreateInfoNV
{
}
impl<'a> ::std::ops::Deref for PipelineCoverageReductionStateCreateInfoNVBuilder<'a> {
    type Target = PipelineCoverageReductionStateCreateInfoNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PipelineCoverageReductionStateCreateInfoNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PipelineCoverageReductionStateCreateInfoNVBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: PipelineCoverageReductionStateCreateFlagsNV) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn coverage_reduction_mode(
        mut self,
        coverage_reduction_mode: CoverageReductionModeNV,
    ) -> Self {
        self.inner.coverage_reduction_mode = coverage_reduction_mode;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PipelineCoverageReductionStateCreateInfoNV {
        self.inner
    }
}
// FFI mirror of VkFramebufferMixedSamplesCombinationNV; returned by the
// driver (hence *mut p_next) to enumerate supported sample combinations.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFramebufferMixedSamplesCombinationNV.html>"]
pub struct FramebufferMixedSamplesCombinationNV {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub coverage_reduction_mode: CoverageReductionModeNV,
    pub rasterization_samples: SampleCountFlags,
    pub depth_stencil_samples: SampleCountFlags,
    pub color_samples: SampleCountFlags,
}
impl ::std::default::Default for FramebufferMixedSamplesCombinationNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            coverage_reduction_mode: CoverageReductionModeNV::default(),
            rasterization_samples: SampleCountFlags::default(),
            depth_stencil_samples: SampleCountFlags::default(),
            color_samples: SampleCountFlags::default(),
        }
    }
}
unsafe impl TaggedStructure for FramebufferMixedSamplesCombinationNV {
    const STRUCTURE_TYPE: StructureType = StructureType::FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV;
}
impl FramebufferMixedSamplesCombinationNV {
    pub fn builder<'a>() -> FramebufferMixedSamplesCombinationNVBuilder<'a> {
        FramebufferMixedSamplesCombinationNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct FramebufferMixedSamplesCombinationNVBuilder<'a> {
    inner: FramebufferMixedSamplesCombinationNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for FramebufferMixedSamplesCombinationNVBuilder<'a> {
    type Target = FramebufferMixedSamplesCombinationNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for FramebufferMixedSamplesCombinationNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> FramebufferMixedSamplesCombinationNVBuilder<'a> {
    #[inline]
    pub fn coverage_reduction_mode(
        mut self,
        coverage_reduction_mode: CoverageReductionModeNV,
    ) -> Self {
        self.inner.coverage_reduction_mode = coverage_reduction_mode;
        self
    }
    #[inline]
    pub fn rasterization_samples(mut self, rasterization_samples: SampleCountFlags) -> Self {
        self.inner.rasterization_samples = rasterization_samples;
        self
    }
    #[inline]
    pub fn depth_stencil_samples(mut self, depth_stencil_samples: SampleCountFlags) -> Self {
        self.inner.depth_stencil_samples = depth_stencil_samples;
        self
    }
    #[inline]
    pub fn color_samples(mut self, color_samples: SampleCountFlags) -> Self {
        self.inner.color_samples = color_samples;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> FramebufferMixedSamplesCombinationNV {
        self.inner
    }
}
// FFI mirror of VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
// feature query/enable struct (*mut p_next: driver may fill it in).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL.html>"]
pub struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub shader_integer_functions2: Bool32,
}
impl ::std::default::Default for PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            shader_integer_functions2: Bool32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL;
}
impl PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL {
    pub fn builder<'a>() -> PhysicalDeviceShaderIntegerFunctions2FeaturesINTELBuilder<'a> {
        PhysicalDeviceShaderIntegerFunctions2FeaturesINTELBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTELBuilder<'a> {
    inner: PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Valid on VkPhysicalDeviceFeatures2 and VkDeviceCreateInfo p_next chains.
unsafe impl ExtendsPhysicalDeviceFeatures2
    for PhysicalDeviceShaderIntegerFunctions2FeaturesINTELBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL {}
unsafe impl ExtendsDeviceCreateInfo
    for PhysicalDeviceShaderIntegerFunctions2FeaturesINTELBuilder<'_>
{
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL {}
impl<'a> ::std::ops::Deref for PhysicalDeviceShaderIntegerFunctions2FeaturesINTELBuilder<'a> {
    type Target = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderIntegerFunctions2FeaturesINTELBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceShaderIntegerFunctions2FeaturesINTELBuilder<'a> {
    // bool -> Bool32 conversion via Into.
    #[inline]
    pub fn shader_integer_functions2(mut self, shader_integer_functions2: bool) -> Self {
        self.inner.shader_integer_functions2 = shader_integer_functions2.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL {
        self.inner
    }
}
// C-layout union for an INTEL performance value payload; the active variant
// is selected by the companion PerformanceValueTypeINTEL tag.
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceValueDataINTEL.html>"]
pub union PerformanceValueDataINTEL {
    pub value32: u32,
    pub value64: u64,
    pub value_float: f32,
    pub value_bool: Bool32,
    // Borrowed C string; lifetime/ownership is governed by the Vulkan call
    // that produced it.
    pub value_string: *const c_char,
}
impl ::std::default::Default for PerformanceValueDataINTEL {
    #[inline]
    fn default() -> Self {
        // All-zero bytes are valid for every variant (null for value_string).
        unsafe { ::std::mem::zeroed() }
    }
}
// Tagged value: `ty` says which union variant in `data` is active. No s_type
// field — this struct is not part of a p_next chain.
#[repr(C)]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceValueINTEL.html>"]
pub struct PerformanceValueINTEL {
    pub ty: PerformanceValueTypeINTEL,
    pub data: PerformanceValueDataINTEL,
}
// Manual Debug: the union payload cannot be printed without inspecting `ty`,
// so it is rendered as the placeholder "union".
#[cfg(feature = "debug")]
impl fmt::Debug for PerformanceValueINTEL {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("PerformanceValueINTEL")
            .field("ty", &self.ty)
            .field("data", &"union")
            .finish()
    }
}
impl PerformanceValueINTEL {
    pub fn builder<'a>() -> PerformanceValueINTELBuilder<'a> {
        PerformanceValueINTELBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PerformanceValueINTELBuilder<'a> {
    inner: PerformanceValueINTEL,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PerformanceValueINTELBuilder<'a> {
    type Target = PerformanceValueINTEL;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PerformanceValueINTELBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PerformanceValueINTELBuilder<'a> {
    #[inline]
    pub fn ty(mut self, ty: PerformanceValueTypeINTEL) -> Self {
        self.inner.ty = ty;
        self
    }
    // Caller is responsible for keeping `ty` and `data` consistent.
    #[inline]
    pub fn data(mut self, data: PerformanceValueDataINTEL) -> Self {
        self.inner.data = data;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PerformanceValueINTEL {
        self.inner
    }
}
// FFI mirror of VkInitializePerformanceApiInfoINTEL; carries an opaque
// user-data pointer for the INTEL performance-query API.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkInitializePerformanceApiInfoINTEL.html>"]
pub struct InitializePerformanceApiInfoINTEL {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub p_user_data: *mut c_void,
}
impl ::std::default::Default for InitializePerformanceApiInfoINTEL {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            p_user_data: ::std::ptr::null_mut(),
        }
    }
}
unsafe impl TaggedStructure for InitializePerformanceApiInfoINTEL {
    const STRUCTURE_TYPE: StructureType = StructureType::INITIALIZE_PERFORMANCE_API_INFO_INTEL;
}
impl InitializePerformanceApiInfoINTEL {
    pub fn builder<'a>() -> InitializePerformanceApiInfoINTELBuilder<'a> {
        InitializePerformanceApiInfoINTELBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct InitializePerformanceApiInfoINTELBuilder<'a> {
    inner: InitializePerformanceApiInfoINTEL,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for InitializePerformanceApiInfoINTELBuilder<'a> {
    type Target = InitializePerformanceApiInfoINTEL;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for InitializePerformanceApiInfoINTELBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> InitializePerformanceApiInfoINTELBuilder<'a> {
    // Raw pointer setter: the pointee's lifetime is the caller's concern.
    #[inline]
    pub fn user_data(mut self, user_data: *mut c_void) -> Self {
        self.inner.p_user_data = user_data;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> InitializePerformanceApiInfoINTEL {
        self.inner
    }
}
// FFI mirror of VkQueryPoolPerformanceQueryCreateInfoINTEL.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueryPoolPerformanceQueryCreateInfoINTEL.html>"]
pub struct QueryPoolPerformanceQueryCreateInfoINTEL {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub performance_counters_sampling: QueryPoolSamplingModeINTEL,
}
impl ::std::default::Default for QueryPoolPerformanceQueryCreateInfoINTEL {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            performance_counters_sampling: QueryPoolSamplingModeINTEL::default(),
        }
    }
}
unsafe impl TaggedStructure for QueryPoolPerformanceQueryCreateInfoINTEL {
    const STRUCTURE_TYPE: StructureType =
        StructureType::QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL;
}
impl QueryPoolPerformanceQueryCreateInfoINTEL {
    pub fn builder<'a>() -> QueryPoolPerformanceQueryCreateInfoINTELBuilder<'a> {
        QueryPoolPerformanceQueryCreateInfoINTELBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct QueryPoolPerformanceQueryCreateInfoINTELBuilder<'a> {
    inner: QueryPoolPerformanceQueryCreateInfoINTEL,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Valid on a VkQueryPoolCreateInfo p_next chain.
unsafe impl ExtendsQueryPoolCreateInfo for QueryPoolPerformanceQueryCreateInfoINTELBuilder<'_> {}
unsafe impl ExtendsQueryPoolCreateInfo for QueryPoolPerformanceQueryCreateInfoINTEL {}
impl<'a> ::std::ops::Deref for QueryPoolPerformanceQueryCreateInfoINTELBuilder<'a> {
    type Target = QueryPoolPerformanceQueryCreateInfoINTEL;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for QueryPoolPerformanceQueryCreateInfoINTELBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> QueryPoolPerformanceQueryCreateInfoINTELBuilder<'a> {
    #[inline]
    pub fn performance_counters_sampling(
        mut self,
        performance_counters_sampling: QueryPoolSamplingModeINTEL,
    ) -> Self {
        self.inner.performance_counters_sampling = performance_counters_sampling;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> QueryPoolPerformanceQueryCreateInfoINTEL {
        self.inner
    }
}
// FFI mirror of VkPerformanceMarkerInfoINTEL: a 64-bit marker value tagged
// into the command stream.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceMarkerInfoINTEL.html>"]
pub struct PerformanceMarkerInfoINTEL {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub marker: u64,
}
impl ::std::default::Default for PerformanceMarkerInfoINTEL {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            marker: u64::default(),
        }
    }
}
unsafe impl TaggedStructure for PerformanceMarkerInfoINTEL {
    const STRUCTURE_TYPE: StructureType = StructureType::PERFORMANCE_MARKER_INFO_INTEL;
}
impl PerformanceMarkerInfoINTEL {
    pub fn builder<'a>() -> PerformanceMarkerInfoINTELBuilder<'a> {
        PerformanceMarkerInfoINTELBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PerformanceMarkerInfoINTELBuilder<'a> {
    inner: PerformanceMarkerInfoINTEL,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PerformanceMarkerInfoINTELBuilder<'a> {
    type Target = PerformanceMarkerInfoINTEL;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PerformanceMarkerInfoINTELBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PerformanceMarkerInfoINTELBuilder<'a> {
    #[inline]
    pub fn marker(mut self, marker: u64) -> Self {
        self.inner.marker = marker;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PerformanceMarkerInfoINTEL {
        self.inner
    }
}
// FFI mirror of VkPerformanceStreamMarkerInfoINTEL; like
// PerformanceMarkerInfoINTEL but with a 32-bit marker.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceStreamMarkerInfoINTEL.html>"]
pub struct PerformanceStreamMarkerInfoINTEL {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub marker: u32,
}
impl ::std::default::Default for PerformanceStreamMarkerInfoINTEL {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            marker: u32::default(),
        }
    }
}
unsafe impl TaggedStructure for PerformanceStreamMarkerInfoINTEL {
    const STRUCTURE_TYPE: StructureType = StructureType::PERFORMANCE_STREAM_MARKER_INFO_INTEL;
}
impl PerformanceStreamMarkerInfoINTEL {
    pub fn builder<'a>() -> PerformanceStreamMarkerInfoINTELBuilder<'a> {
        PerformanceStreamMarkerInfoINTELBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PerformanceStreamMarkerInfoINTELBuilder<'a> {
    inner: PerformanceStreamMarkerInfoINTEL,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for PerformanceStreamMarkerInfoINTELBuilder<'a> {
    type Target = PerformanceStreamMarkerInfoINTEL;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PerformanceStreamMarkerInfoINTELBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PerformanceStreamMarkerInfoINTELBuilder<'a> {
    #[inline]
    pub fn marker(mut self, marker: u32) -> Self {
        self.inner.marker = marker;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PerformanceStreamMarkerInfoINTEL {
        self.inner
    }
}
// FFI mirror of VkPerformanceOverrideInfoINTEL. #[repr(C)] pins the layout to
// the Vulkan C header; do not reorder fields.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceOverrideInfoINTEL.html>"]
pub struct PerformanceOverrideInfoINTEL {
    pub s_type: StructureType,           // structure tag; Default fills this in
    pub p_next: *const c_void,           // pNext chain pointer, null when unused
    pub ty: PerformanceOverrideTypeINTEL,
    pub enable: Bool32,                  // Vulkan boolean (u32), not Rust bool
    pub parameter: u64,
}
// Default pre-tags s_type and nulls the pNext chain.
impl ::std::default::Default for PerformanceOverrideInfoINTEL {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            ty: PerformanceOverrideTypeINTEL::default(),
            enable: Bool32::default(),
            parameter: u64::default(),
        }
    }
}
// Ties the struct to its sType discriminant.
unsafe impl TaggedStructure for PerformanceOverrideInfoINTEL {
    const STRUCTURE_TYPE: StructureType = StructureType::PERFORMANCE_OVERRIDE_INFO_INTEL;
}
impl PerformanceOverrideInfoINTEL {
    // Builder entry point, starting from Default::default().
    pub fn builder<'a>() -> PerformanceOverrideInfoINTELBuilder<'a> {
        PerformanceOverrideInfoINTELBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper: same layout as the inner struct, plus a PhantomData
// lifetime so borrowed resources referenced by setters stay alive.
#[repr(transparent)]
pub struct PerformanceOverrideInfoINTELBuilder<'a> {
    inner: PerformanceOverrideInfoINTEL,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Builders deref to the raw struct so they can be passed to Vulkan functions.
impl<'a> ::std::ops::Deref for PerformanceOverrideInfoINTELBuilder<'a> {
    type Target = PerformanceOverrideInfoINTEL;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PerformanceOverrideInfoINTELBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PerformanceOverrideInfoINTELBuilder<'a> {
    #[inline]
    pub fn ty(mut self, ty: PerformanceOverrideTypeINTEL) -> Self {
        self.inner.ty = ty;
        self
    }
    #[inline]
    pub fn enable(mut self, enable: bool) -> Self {
        // Accepts an ergonomic Rust bool and converts it to Vulkan's Bool32.
        self.inner.enable = enable.into();
        self
    }
    #[inline]
    pub fn parameter(mut self, parameter: u64) -> Self {
        self.inner.parameter = parameter;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PerformanceOverrideInfoINTEL {
        self.inner
    }
}
// FFI mirror of VkPerformanceConfigurationAcquireInfoINTEL; #[repr(C)] keeps
// the layout in lock-step with the Vulkan C header.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceConfigurationAcquireInfoINTEL.html>"]
pub struct PerformanceConfigurationAcquireInfoINTEL {
    pub s_type: StructureType,  // structure tag; Default fills this in
    pub p_next: *const c_void,  // pNext chain pointer, null when unused
    pub ty: PerformanceConfigurationTypeINTEL,
}
// Default pre-tags s_type and nulls the pNext chain.
impl ::std::default::Default for PerformanceConfigurationAcquireInfoINTEL {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            ty: PerformanceConfigurationTypeINTEL::default(),
        }
    }
}
// Ties the struct to its sType discriminant.
unsafe impl TaggedStructure for PerformanceConfigurationAcquireInfoINTEL {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL;
}
impl PerformanceConfigurationAcquireInfoINTEL {
    // Builder entry point, starting from Default::default().
    pub fn builder<'a>() -> PerformanceConfigurationAcquireInfoINTELBuilder<'a> {
        PerformanceConfigurationAcquireInfoINTELBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent, lifetime-carrying wrapper around the raw struct.
#[repr(transparent)]
pub struct PerformanceConfigurationAcquireInfoINTELBuilder<'a> {
    inner: PerformanceConfigurationAcquireInfoINTEL,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Deref/DerefMut let the builder be used wherever the raw struct is expected.
impl<'a> ::std::ops::Deref for PerformanceConfigurationAcquireInfoINTELBuilder<'a> {
    type Target = PerformanceConfigurationAcquireInfoINTEL;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PerformanceConfigurationAcquireInfoINTELBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PerformanceConfigurationAcquireInfoINTELBuilder<'a> {
    #[inline]
    pub fn ty(mut self, ty: PerformanceConfigurationTypeINTEL) -> Self {
        self.inner.ty = ty;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PerformanceConfigurationAcquireInfoINTEL {
        self.inner
    }
}
// FFI mirror of VkPhysicalDeviceShaderClockFeaturesKHR (a feature-query /
// feature-enable struct). #[repr(C)] pins the layout to the C header.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderClockFeaturesKHR.html>"]
pub struct PhysicalDeviceShaderClockFeaturesKHR {
    pub s_type: StructureType,  // structure tag; Default fills this in
    pub p_next: *mut c_void,    // mutable pNext: the driver writes through feature chains
    pub shader_subgroup_clock: Bool32,
    pub shader_device_clock: Bool32,
}
// Default pre-tags s_type and leaves the pNext chain empty.
impl ::std::default::Default for PhysicalDeviceShaderClockFeaturesKHR {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            shader_subgroup_clock: Bool32::default(),
            shader_device_clock: Bool32::default(),
        }
    }
}
// Ties the struct to its sType discriminant.
unsafe impl TaggedStructure for PhysicalDeviceShaderClockFeaturesKHR {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR;
}
impl PhysicalDeviceShaderClockFeaturesKHR {
    // Builder entry point, starting from Default::default().
    pub fn builder<'a>() -> PhysicalDeviceShaderClockFeaturesKHRBuilder<'a> {
        PhysicalDeviceShaderClockFeaturesKHRBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent, lifetime-carrying wrapper around the raw struct.
#[repr(transparent)]
pub struct PhysicalDeviceShaderClockFeaturesKHRBuilder<'a> {
    inner: PhysicalDeviceShaderClockFeaturesKHR,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct (and its builder) may be chained into the pNext
// list of PhysicalDeviceFeatures2 and DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderClockFeaturesKHRBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderClockFeaturesKHR {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderClockFeaturesKHRBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderClockFeaturesKHR {}
// Deref/DerefMut let the builder be used wherever the raw struct is expected.
impl<'a> ::std::ops::Deref for PhysicalDeviceShaderClockFeaturesKHRBuilder<'a> {
    type Target = PhysicalDeviceShaderClockFeaturesKHR;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderClockFeaturesKHRBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceShaderClockFeaturesKHRBuilder<'a> {
    #[inline]
    pub fn shader_subgroup_clock(mut self, shader_subgroup_clock: bool) -> Self {
        // Rust bool in, Vulkan Bool32 stored.
        self.inner.shader_subgroup_clock = shader_subgroup_clock.into();
        self
    }
    #[inline]
    pub fn shader_device_clock(mut self, shader_device_clock: bool) -> Self {
        self.inner.shader_device_clock = shader_device_clock.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceShaderClockFeaturesKHR {
        self.inner
    }
}
// FFI mirror of VkPhysicalDeviceIndexTypeUint8FeaturesEXT; #[repr(C)] pins the
// layout to the Vulkan C header.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceIndexTypeUint8FeaturesEXT.html>"]
pub struct PhysicalDeviceIndexTypeUint8FeaturesEXT {
    pub s_type: StructureType,  // structure tag; Default fills this in
    pub p_next: *mut c_void,    // mutable pNext: feature structs are written by the driver
    pub index_type_uint8: Bool32,
}
// Default pre-tags s_type and leaves the pNext chain empty.
impl ::std::default::Default for PhysicalDeviceIndexTypeUint8FeaturesEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            index_type_uint8: Bool32::default(),
        }
    }
}
// Ties the struct to its sType discriminant.
unsafe impl TaggedStructure for PhysicalDeviceIndexTypeUint8FeaturesEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT;
}
impl PhysicalDeviceIndexTypeUint8FeaturesEXT {
    // Builder entry point, starting from Default::default().
    pub fn builder<'a>() -> PhysicalDeviceIndexTypeUint8FeaturesEXTBuilder<'a> {
        PhysicalDeviceIndexTypeUint8FeaturesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent, lifetime-carrying wrapper around the raw struct.
#[repr(transparent)]
pub struct PhysicalDeviceIndexTypeUint8FeaturesEXTBuilder<'a> {
    inner: PhysicalDeviceIndexTypeUint8FeaturesEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: chainable into PhysicalDeviceFeatures2 and DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceIndexTypeUint8FeaturesEXTBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceIndexTypeUint8FeaturesEXT {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceIndexTypeUint8FeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceIndexTypeUint8FeaturesEXT {}
// Deref/DerefMut let the builder be used wherever the raw struct is expected.
impl<'a> ::std::ops::Deref for PhysicalDeviceIndexTypeUint8FeaturesEXTBuilder<'a> {
    type Target = PhysicalDeviceIndexTypeUint8FeaturesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceIndexTypeUint8FeaturesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceIndexTypeUint8FeaturesEXTBuilder<'a> {
    #[inline]
    pub fn index_type_uint8(mut self, index_type_uint8: bool) -> Self {
        // Rust bool in, Vulkan Bool32 stored.
        self.inner.index_type_uint8 = index_type_uint8.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceIndexTypeUint8FeaturesEXT {
        self.inner
    }
}
// FFI mirror of VkPhysicalDeviceShaderSMBuiltinsPropertiesNV (a properties
// struct, filled in by the driver); #[repr(C)] pins the layout to the C header.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderSMBuiltinsPropertiesNV.html>"]
pub struct PhysicalDeviceShaderSMBuiltinsPropertiesNV {
    pub s_type: StructureType,  // structure tag; Default fills this in
    pub p_next: *mut c_void,    // mutable pNext: the driver writes into property chains
    pub shader_sm_count: u32,
    pub shader_warps_per_sm: u32,
}
// Default pre-tags s_type and zeroes the output fields.
impl ::std::default::Default for PhysicalDeviceShaderSMBuiltinsPropertiesNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            shader_sm_count: u32::default(),
            shader_warps_per_sm: u32::default(),
        }
    }
}
// Ties the struct to its sType discriminant.
unsafe impl TaggedStructure for PhysicalDeviceShaderSMBuiltinsPropertiesNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV;
}
impl PhysicalDeviceShaderSMBuiltinsPropertiesNV {
    // Builder entry point, starting from Default::default().
    pub fn builder<'a>() -> PhysicalDeviceShaderSMBuiltinsPropertiesNVBuilder<'a> {
        PhysicalDeviceShaderSMBuiltinsPropertiesNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent, lifetime-carrying wrapper around the raw struct.
#[repr(transparent)]
pub struct PhysicalDeviceShaderSMBuiltinsPropertiesNVBuilder<'a> {
    inner: PhysicalDeviceShaderSMBuiltinsPropertiesNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: chainable into the pNext of PhysicalDeviceProperties2 only
// (properties structs are not DeviceCreateInfo extensions).
unsafe impl ExtendsPhysicalDeviceProperties2
    for PhysicalDeviceShaderSMBuiltinsPropertiesNVBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderSMBuiltinsPropertiesNV {}
// Deref/DerefMut let the builder be used wherever the raw struct is expected.
impl<'a> ::std::ops::Deref for PhysicalDeviceShaderSMBuiltinsPropertiesNVBuilder<'a> {
    type Target = PhysicalDeviceShaderSMBuiltinsPropertiesNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderSMBuiltinsPropertiesNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceShaderSMBuiltinsPropertiesNVBuilder<'a> {
    #[inline]
    pub fn shader_sm_count(mut self, shader_sm_count: u32) -> Self {
        self.inner.shader_sm_count = shader_sm_count;
        self
    }
    #[inline]
    pub fn shader_warps_per_sm(mut self, shader_warps_per_sm: u32) -> Self {
        self.inner.shader_warps_per_sm = shader_warps_per_sm;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceShaderSMBuiltinsPropertiesNV {
        self.inner
    }
}
// FFI mirror of VkPhysicalDeviceShaderSMBuiltinsFeaturesNV; #[repr(C)] pins
// the layout to the Vulkan C header.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderSMBuiltinsFeaturesNV.html>"]
pub struct PhysicalDeviceShaderSMBuiltinsFeaturesNV {
    pub s_type: StructureType,  // structure tag; Default fills this in
    pub p_next: *mut c_void,    // mutable pNext: feature structs are written by the driver
    pub shader_sm_builtins: Bool32,
}
// Default pre-tags s_type and leaves the pNext chain empty.
impl ::std::default::Default for PhysicalDeviceShaderSMBuiltinsFeaturesNV {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            shader_sm_builtins: Bool32::default(),
        }
    }
}
// Ties the struct to its sType discriminant.
unsafe impl TaggedStructure for PhysicalDeviceShaderSMBuiltinsFeaturesNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV;
}
impl PhysicalDeviceShaderSMBuiltinsFeaturesNV {
    // Builder entry point, starting from Default::default().
    pub fn builder<'a>() -> PhysicalDeviceShaderSMBuiltinsFeaturesNVBuilder<'a> {
        PhysicalDeviceShaderSMBuiltinsFeaturesNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent, lifetime-carrying wrapper around the raw struct.
#[repr(transparent)]
pub struct PhysicalDeviceShaderSMBuiltinsFeaturesNVBuilder<'a> {
    inner: PhysicalDeviceShaderSMBuiltinsFeaturesNV,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: chainable into PhysicalDeviceFeatures2 and DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderSMBuiltinsFeaturesNVBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderSMBuiltinsFeaturesNV {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderSMBuiltinsFeaturesNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderSMBuiltinsFeaturesNV {}
// Deref/DerefMut let the builder be used wherever the raw struct is expected.
impl<'a> ::std::ops::Deref for PhysicalDeviceShaderSMBuiltinsFeaturesNVBuilder<'a> {
    type Target = PhysicalDeviceShaderSMBuiltinsFeaturesNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderSMBuiltinsFeaturesNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceShaderSMBuiltinsFeaturesNVBuilder<'a> {
    #[inline]
    pub fn shader_sm_builtins(mut self, shader_sm_builtins: bool) -> Self {
        // Rust bool in, Vulkan Bool32 stored.
        self.inner.shader_sm_builtins = shader_sm_builtins.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceShaderSMBuiltinsFeaturesNV {
        self.inner
    }
}
// FFI mirror of VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT;
// #[repr(C)] pins the layout to the Vulkan C header.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT.html>"]
pub struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT {
    pub s_type: StructureType,  // structure tag; Default fills this in
    pub p_next: *mut c_void,    // mutable pNext: feature structs are written by the driver
    pub fragment_shader_sample_interlock: Bool32,
    pub fragment_shader_pixel_interlock: Bool32,
    pub fragment_shader_shading_rate_interlock: Bool32,
}
// Default pre-tags s_type and leaves the pNext chain empty.
impl ::std::default::Default for PhysicalDeviceFragmentShaderInterlockFeaturesEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            fragment_shader_sample_interlock: Bool32::default(),
            fragment_shader_pixel_interlock: Bool32::default(),
            fragment_shader_shading_rate_interlock: Bool32::default(),
        }
    }
}
// Ties the struct to its sType discriminant.
unsafe impl TaggedStructure for PhysicalDeviceFragmentShaderInterlockFeaturesEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT;
}
impl PhysicalDeviceFragmentShaderInterlockFeaturesEXT {
    // Builder entry point, starting from Default::default().
    pub fn builder<'a>() -> PhysicalDeviceFragmentShaderInterlockFeaturesEXTBuilder<'a> {
        PhysicalDeviceFragmentShaderInterlockFeaturesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent, lifetime-carrying wrapper around the raw struct.
#[repr(transparent)]
pub struct PhysicalDeviceFragmentShaderInterlockFeaturesEXTBuilder<'a> {
    inner: PhysicalDeviceFragmentShaderInterlockFeaturesEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: chainable into PhysicalDeviceFeatures2 and DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2
    for PhysicalDeviceFragmentShaderInterlockFeaturesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFragmentShaderInterlockFeaturesEXT {}
unsafe impl ExtendsDeviceCreateInfo
    for PhysicalDeviceFragmentShaderInterlockFeaturesEXTBuilder<'_>
{
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentShaderInterlockFeaturesEXT {}
// Deref/DerefMut let the builder be used wherever the raw struct is expected.
impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentShaderInterlockFeaturesEXTBuilder<'a> {
    type Target = PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentShaderInterlockFeaturesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceFragmentShaderInterlockFeaturesEXTBuilder<'a> {
    #[inline]
    pub fn fragment_shader_sample_interlock(
        mut self,
        fragment_shader_sample_interlock: bool,
    ) -> Self {
        // Rust bool in, Vulkan Bool32 stored (same for the setters below).
        self.inner.fragment_shader_sample_interlock = fragment_shader_sample_interlock.into();
        self
    }
    #[inline]
    pub fn fragment_shader_pixel_interlock(
        mut self,
        fragment_shader_pixel_interlock: bool,
    ) -> Self {
        self.inner.fragment_shader_pixel_interlock = fragment_shader_pixel_interlock.into();
        self
    }
    #[inline]
    pub fn fragment_shader_shading_rate_interlock(
        mut self,
        fragment_shader_shading_rate_interlock: bool,
    ) -> Self {
        self.inner.fragment_shader_shading_rate_interlock =
            fragment_shader_shading_rate_interlock.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceFragmentShaderInterlockFeaturesEXT {
        self.inner
    }
}
// FFI mirror of VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures;
// #[repr(C)] pins the layout to the Vulkan C header.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures.html>"]
pub struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures {
    pub s_type: StructureType,  // structure tag; Default fills this in
    pub p_next: *mut c_void,    // mutable pNext: feature structs are written by the driver
    pub separate_depth_stencil_layouts: Bool32,
}
// Default pre-tags s_type and leaves the pNext chain empty.
impl ::std::default::Default for PhysicalDeviceSeparateDepthStencilLayoutsFeatures {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            separate_depth_stencil_layouts: Bool32::default(),
        }
    }
}
// Ties the struct to its sType discriminant.
unsafe impl TaggedStructure for PhysicalDeviceSeparateDepthStencilLayoutsFeatures {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES;
}
impl PhysicalDeviceSeparateDepthStencilLayoutsFeatures {
    // Builder entry point, starting from Default::default().
    pub fn builder<'a>() -> PhysicalDeviceSeparateDepthStencilLayoutsFeaturesBuilder<'a> {
        PhysicalDeviceSeparateDepthStencilLayoutsFeaturesBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent, lifetime-carrying wrapper around the raw struct.
#[repr(transparent)]
pub struct PhysicalDeviceSeparateDepthStencilLayoutsFeaturesBuilder<'a> {
    inner: PhysicalDeviceSeparateDepthStencilLayoutsFeatures,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: chainable into PhysicalDeviceFeatures2 and DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2
    for PhysicalDeviceSeparateDepthStencilLayoutsFeaturesBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSeparateDepthStencilLayoutsFeatures {}
unsafe impl ExtendsDeviceCreateInfo
    for PhysicalDeviceSeparateDepthStencilLayoutsFeaturesBuilder<'_>
{
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSeparateDepthStencilLayoutsFeatures {}
// Deref/DerefMut let the builder be used wherever the raw struct is expected.
impl<'a> ::std::ops::Deref for PhysicalDeviceSeparateDepthStencilLayoutsFeaturesBuilder<'a> {
    type Target = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceSeparateDepthStencilLayoutsFeaturesBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceSeparateDepthStencilLayoutsFeaturesBuilder<'a> {
    #[inline]
    pub fn separate_depth_stencil_layouts(mut self, separate_depth_stencil_layouts: bool) -> Self {
        // Rust bool in, Vulkan Bool32 stored.
        self.inner.separate_depth_stencil_layouts = separate_depth_stencil_layouts.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceSeparateDepthStencilLayoutsFeatures {
        self.inner
    }
}
// FFI mirror of VkAttachmentReferenceStencilLayout; #[repr(C)] pins the layout
// to the Vulkan C header.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAttachmentReferenceStencilLayout.html>"]
pub struct AttachmentReferenceStencilLayout {
    pub s_type: StructureType,  // structure tag; Default fills this in
    pub p_next: *mut c_void,    // pNext chain pointer, null when unused
    pub stencil_layout: ImageLayout,
}
// Default pre-tags s_type and leaves the pNext chain empty.
impl ::std::default::Default for AttachmentReferenceStencilLayout {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            stencil_layout: ImageLayout::default(),
        }
    }
}
// Ties the struct to its sType discriminant.
unsafe impl TaggedStructure for AttachmentReferenceStencilLayout {
    const STRUCTURE_TYPE: StructureType = StructureType::ATTACHMENT_REFERENCE_STENCIL_LAYOUT;
}
impl AttachmentReferenceStencilLayout {
    // Builder entry point, starting from Default::default().
    pub fn builder<'a>() -> AttachmentReferenceStencilLayoutBuilder<'a> {
        AttachmentReferenceStencilLayoutBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent, lifetime-carrying wrapper around the raw struct.
#[repr(transparent)]
pub struct AttachmentReferenceStencilLayoutBuilder<'a> {
    inner: AttachmentReferenceStencilLayout,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: chainable into the pNext of AttachmentReference2.
unsafe impl ExtendsAttachmentReference2 for AttachmentReferenceStencilLayoutBuilder<'_> {}
unsafe impl ExtendsAttachmentReference2 for AttachmentReferenceStencilLayout {}
// Deref/DerefMut let the builder be used wherever the raw struct is expected.
impl<'a> ::std::ops::Deref for AttachmentReferenceStencilLayoutBuilder<'a> {
    type Target = AttachmentReferenceStencilLayout;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for AttachmentReferenceStencilLayoutBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> AttachmentReferenceStencilLayoutBuilder<'a> {
    #[inline]
    pub fn stencil_layout(mut self, stencil_layout: ImageLayout) -> Self {
        self.inner.stencil_layout = stencil_layout;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> AttachmentReferenceStencilLayout {
        self.inner
    }
}
// FFI mirror of VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
// #[repr(C)] pins the layout to the Vulkan C header.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT.html>"]
pub struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT {
    pub s_type: StructureType,  // structure tag; Default fills this in
    pub p_next: *mut c_void,    // mutable pNext: feature structs are written by the driver
    pub primitive_topology_list_restart: Bool32,
    pub primitive_topology_patch_list_restart: Bool32,
}
// Default pre-tags s_type and leaves the pNext chain empty.
impl ::std::default::Default for PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            primitive_topology_list_restart: Bool32::default(),
            primitive_topology_patch_list_restart: Bool32::default(),
        }
    }
}
// Ties the struct to its sType discriminant.
unsafe impl TaggedStructure for PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT;
}
impl PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT {
    // Builder entry point, starting from Default::default().
    pub fn builder<'a>() -> PhysicalDevicePrimitiveTopologyListRestartFeaturesEXTBuilder<'a> {
        PhysicalDevicePrimitiveTopologyListRestartFeaturesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent, lifetime-carrying wrapper around the raw struct.
#[repr(transparent)]
pub struct PhysicalDevicePrimitiveTopologyListRestartFeaturesEXTBuilder<'a> {
    inner: PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: chainable into PhysicalDeviceFeatures2 and DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2
    for PhysicalDevicePrimitiveTopologyListRestartFeaturesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceFeatures2
    for PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT
{
}
unsafe impl ExtendsDeviceCreateInfo
    for PhysicalDevicePrimitiveTopologyListRestartFeaturesEXTBuilder<'_>
{
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT {}
// Deref/DerefMut let the builder be used wherever the raw struct is expected.
impl<'a> ::std::ops::Deref for PhysicalDevicePrimitiveTopologyListRestartFeaturesEXTBuilder<'a> {
    type Target = PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDevicePrimitiveTopologyListRestartFeaturesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDevicePrimitiveTopologyListRestartFeaturesEXTBuilder<'a> {
    #[inline]
    pub fn primitive_topology_list_restart(
        mut self,
        primitive_topology_list_restart: bool,
    ) -> Self {
        // Rust bool in, Vulkan Bool32 stored (same for the setter below).
        self.inner.primitive_topology_list_restart = primitive_topology_list_restart.into();
        self
    }
    #[inline]
    pub fn primitive_topology_patch_list_restart(
        mut self,
        primitive_topology_patch_list_restart: bool,
    ) -> Self {
        self.inner.primitive_topology_patch_list_restart =
            primitive_topology_patch_list_restart.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT {
        self.inner
    }
}
// FFI mirror of VkAttachmentDescriptionStencilLayout; #[repr(C)] pins the
// layout to the Vulkan C header.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAttachmentDescriptionStencilLayout.html>"]
pub struct AttachmentDescriptionStencilLayout {
    pub s_type: StructureType,  // structure tag; Default fills this in
    pub p_next: *mut c_void,    // pNext chain pointer, null when unused
    pub stencil_initial_layout: ImageLayout,
    pub stencil_final_layout: ImageLayout,
}
// Default pre-tags s_type and leaves the pNext chain empty.
impl ::std::default::Default for AttachmentDescriptionStencilLayout {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            stencil_initial_layout: ImageLayout::default(),
            stencil_final_layout: ImageLayout::default(),
        }
    }
}
// Ties the struct to its sType discriminant.
unsafe impl TaggedStructure for AttachmentDescriptionStencilLayout {
    const STRUCTURE_TYPE: StructureType = StructureType::ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT;
}
impl AttachmentDescriptionStencilLayout {
    // Builder entry point, starting from Default::default().
    pub fn builder<'a>() -> AttachmentDescriptionStencilLayoutBuilder<'a> {
        AttachmentDescriptionStencilLayoutBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent, lifetime-carrying wrapper around the raw struct.
#[repr(transparent)]
pub struct AttachmentDescriptionStencilLayoutBuilder<'a> {
    inner: AttachmentDescriptionStencilLayout,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: chainable into the pNext of AttachmentDescription2.
unsafe impl ExtendsAttachmentDescription2 for AttachmentDescriptionStencilLayoutBuilder<'_> {}
unsafe impl ExtendsAttachmentDescription2 for AttachmentDescriptionStencilLayout {}
// Deref/DerefMut let the builder be used wherever the raw struct is expected.
impl<'a> ::std::ops::Deref for AttachmentDescriptionStencilLayoutBuilder<'a> {
    type Target = AttachmentDescriptionStencilLayout;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for AttachmentDescriptionStencilLayoutBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> AttachmentDescriptionStencilLayoutBuilder<'a> {
    #[inline]
    pub fn stencil_initial_layout(mut self, stencil_initial_layout: ImageLayout) -> Self {
        self.inner.stencil_initial_layout = stencil_initial_layout;
        self
    }
    #[inline]
    pub fn stencil_final_layout(mut self, stencil_final_layout: ImageLayout) -> Self {
        self.inner.stencil_final_layout = stencil_final_layout;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> AttachmentDescriptionStencilLayout {
        self.inner
    }
}
// FFI mirror of VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
// #[repr(C)] pins the layout to the Vulkan C header.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR.html>"]
pub struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR {
    pub s_type: StructureType,  // structure tag; Default fills this in
    pub p_next: *mut c_void,    // mutable pNext: feature structs are written by the driver
    pub pipeline_executable_info: Bool32,
}
// Default pre-tags s_type and leaves the pNext chain empty.
impl ::std::default::Default for PhysicalDevicePipelineExecutablePropertiesFeaturesKHR {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            pipeline_executable_info: Bool32::default(),
        }
    }
}
// Ties the struct to its sType discriminant.
unsafe impl TaggedStructure for PhysicalDevicePipelineExecutablePropertiesFeaturesKHR {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR;
}
impl PhysicalDevicePipelineExecutablePropertiesFeaturesKHR {
    // Builder entry point, starting from Default::default().
    pub fn builder<'a>() -> PhysicalDevicePipelineExecutablePropertiesFeaturesKHRBuilder<'a> {
        PhysicalDevicePipelineExecutablePropertiesFeaturesKHRBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent, lifetime-carrying wrapper around the raw struct.
#[repr(transparent)]
pub struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHRBuilder<'a> {
    inner: PhysicalDevicePipelineExecutablePropertiesFeaturesKHR,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: chainable into PhysicalDeviceFeatures2 and DeviceCreateInfo.
unsafe impl ExtendsPhysicalDeviceFeatures2
    for PhysicalDevicePipelineExecutablePropertiesFeaturesKHRBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceFeatures2
    for PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
{
}
unsafe impl ExtendsDeviceCreateInfo
    for PhysicalDevicePipelineExecutablePropertiesFeaturesKHRBuilder<'_>
{
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePipelineExecutablePropertiesFeaturesKHR {}
// Deref/DerefMut let the builder be used wherever the raw struct is expected.
impl<'a> ::std::ops::Deref for PhysicalDevicePipelineExecutablePropertiesFeaturesKHRBuilder<'a> {
    type Target = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDevicePipelineExecutablePropertiesFeaturesKHRBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDevicePipelineExecutablePropertiesFeaturesKHRBuilder<'a> {
    #[inline]
    pub fn pipeline_executable_info(mut self, pipeline_executable_info: bool) -> Self {
        // Rust bool in, Vulkan Bool32 stored.
        self.inner.pipeline_executable_info = pipeline_executable_info.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDevicePipelineExecutablePropertiesFeaturesKHR {
        self.inner
    }
}
+// --- VkPipelineInfoKHR: input struct selecting the pipeline to query ---
+// (generated binding; comments only, code unchanged)
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineInfoKHR.html>"]
+pub struct PipelineInfoKHR {
+    pub s_type: StructureType,
+    // *const: this is a read-only input struct, unlike the *mut output structs below.
+    pub p_next: *const c_void,
+    pub pipeline: Pipeline,
+}
+impl ::std::default::Default for PipelineInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            pipeline: Pipeline::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PipelineInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_INFO_KHR;
+}
+impl PipelineInfoKHR {
+    pub fn builder<'a>() -> PipelineInfoKHRBuilder<'a> {
+        PipelineInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PipelineInfoKHRBuilder<'a> {
+    inner: PipelineInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PipelineInfoKHRBuilder<'a> {
+    type Target = PipelineInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PipelineInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn pipeline(mut self, pipeline: Pipeline) -> Self {
+        self.inner.pipeline = pipeline;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineInfoKHR {
+        self.inner
+    }
+}
+// --- VkPipelineExecutablePropertiesKHR: per-executable output struct ---
+// Debug is hand-written (not derived) because `name`/`description` are raw
+// c_char arrays that should print as C strings. (generated binding; comments only)
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineExecutablePropertiesKHR.html>"]
+pub struct PipelineExecutablePropertiesKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub stages: ShaderStageFlags,
+    pub name: [c_char; MAX_DESCRIPTION_SIZE],
+    pub description: [c_char; MAX_DESCRIPTION_SIZE],
+    pub subgroup_size: u32,
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for PipelineExecutablePropertiesKHR {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.debug_struct("PipelineExecutablePropertiesKHR")
+            .field("s_type", &self.s_type)
+            .field("p_next", &self.p_next)
+            .field("stages", &self.stages)
+            // SAFETY-ish: assumes the array is NUL-terminated, as Vulkan
+            // specifies for these fixed-size string fields — uninitialized
+            // structs are zeroed by Default, which also terminates.
+            .field("name", &unsafe {
+                ::std::ffi::CStr::from_ptr(self.name.as_ptr())
+            })
+            .field("description", &unsafe {
+                ::std::ffi::CStr::from_ptr(self.description.as_ptr())
+            })
+            .field("subgroup_size", &self.subgroup_size)
+            .finish()
+    }
+}
+impl ::std::default::Default for PipelineExecutablePropertiesKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            stages: ShaderStageFlags::default(),
+            // zeroed() is sound here: an all-zero c_char array is a valid
+            // (empty, NUL-terminated) string buffer.
+            name: unsafe { ::std::mem::zeroed() },
+            description: unsafe { ::std::mem::zeroed() },
+            subgroup_size: u32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PipelineExecutablePropertiesKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_EXECUTABLE_PROPERTIES_KHR;
+}
+impl PipelineExecutablePropertiesKHR {
+    pub fn builder<'a>() -> PipelineExecutablePropertiesKHRBuilder<'a> {
+        PipelineExecutablePropertiesKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PipelineExecutablePropertiesKHRBuilder<'a> {
+    inner: PipelineExecutablePropertiesKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PipelineExecutablePropertiesKHRBuilder<'a> {
+    type Target = PipelineExecutablePropertiesKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineExecutablePropertiesKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PipelineExecutablePropertiesKHRBuilder<'a> {
+    #[inline]
+    pub fn stages(mut self, stages: ShaderStageFlags) -> Self {
+        self.inner.stages = stages;
+        self
+    }
+    #[inline]
+    pub fn name(mut self, name: [c_char; MAX_DESCRIPTION_SIZE]) -> Self {
+        self.inner.name = name;
+        self
+    }
+    #[inline]
+    pub fn description(mut self, description: [c_char; MAX_DESCRIPTION_SIZE]) -> Self {
+        self.inner.description = description;
+        self
+    }
+    #[inline]
+    pub fn subgroup_size(mut self, subgroup_size: u32) -> Self {
+        self.inner.subgroup_size = subgroup_size;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineExecutablePropertiesKHR {
+        self.inner
+    }
+}
+// --- VkPipelineExecutableInfoKHR: selects (pipeline, executable_index) to query ---
+// (generated binding; comments only, code unchanged)
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineExecutableInfoKHR.html>"]
+pub struct PipelineExecutableInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub pipeline: Pipeline,
+    pub executable_index: u32,
+}
+impl ::std::default::Default for PipelineExecutableInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            pipeline: Pipeline::default(),
+            executable_index: u32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PipelineExecutableInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_EXECUTABLE_INFO_KHR;
+}
+impl PipelineExecutableInfoKHR {
+    pub fn builder<'a>() -> PipelineExecutableInfoKHRBuilder<'a> {
+        PipelineExecutableInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PipelineExecutableInfoKHRBuilder<'a> {
+    inner: PipelineExecutableInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PipelineExecutableInfoKHRBuilder<'a> {
+    type Target = PipelineExecutableInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineExecutableInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PipelineExecutableInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn pipeline(mut self, pipeline: Pipeline) -> Self {
+        self.inner.pipeline = pipeline;
+        self
+    }
+    #[inline]
+    pub fn executable_index(mut self, executable_index: u32) -> Self {
+        self.inner.executable_index = executable_index;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineExecutableInfoKHR {
+        self.inner
+    }
+}
+// --- VkPipelineExecutableStatisticValueKHR: C union of statistic payloads ---
+// The active variant is selected by the sibling `format` field of
+// PipelineExecutableStatisticKHR, mirroring the C API.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineExecutableStatisticValueKHR.html>"]
+pub union PipelineExecutableStatisticValueKHR {
+    pub b32: Bool32,
+    pub i64: i64,
+    pub u64: u64,
+    pub f64: f64,
+}
+impl ::std::default::Default for PipelineExecutableStatisticValueKHR {
+    #[inline]
+    fn default() -> Self {
+        // All variants are plain integer/float types for which the all-zero
+        // bit pattern is valid, so zeroed() is a sound default.
+        unsafe { ::std::mem::zeroed() }
+    }
+}
+// --- VkPipelineExecutableStatisticKHR: one named statistic of an executable ---
+// Debug is hand-written: c_char arrays print as C strings, and the untagged
+// `value` union cannot be safely formatted, so it prints as the literal "union".
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineExecutableStatisticKHR.html>"]
+pub struct PipelineExecutableStatisticKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub name: [c_char; MAX_DESCRIPTION_SIZE],
+    pub description: [c_char; MAX_DESCRIPTION_SIZE],
+    // `format` tells the reader which union variant of `value` is active.
+    pub format: PipelineExecutableStatisticFormatKHR,
+    pub value: PipelineExecutableStatisticValueKHR,
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for PipelineExecutableStatisticKHR {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.debug_struct("PipelineExecutableStatisticKHR")
+            .field("s_type", &self.s_type)
+            .field("p_next", &self.p_next)
+            // Assumes NUL-terminated contents, per the Vulkan spec for these fields.
+            .field("name", &unsafe {
+                ::std::ffi::CStr::from_ptr(self.name.as_ptr())
+            })
+            .field("description", &unsafe {
+                ::std::ffi::CStr::from_ptr(self.description.as_ptr())
+            })
+            .field("format", &self.format)
+            .field("value", &"union")
+            .finish()
+    }
+}
+impl ::std::default::Default for PipelineExecutableStatisticKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            name: unsafe { ::std::mem::zeroed() },
+            description: unsafe { ::std::mem::zeroed() },
+            format: PipelineExecutableStatisticFormatKHR::default(),
+            value: PipelineExecutableStatisticValueKHR::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PipelineExecutableStatisticKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_EXECUTABLE_STATISTIC_KHR;
+}
+impl PipelineExecutableStatisticKHR {
+    pub fn builder<'a>() -> PipelineExecutableStatisticKHRBuilder<'a> {
+        PipelineExecutableStatisticKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PipelineExecutableStatisticKHRBuilder<'a> {
+    inner: PipelineExecutableStatisticKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PipelineExecutableStatisticKHRBuilder<'a> {
+    type Target = PipelineExecutableStatisticKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineExecutableStatisticKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PipelineExecutableStatisticKHRBuilder<'a> {
+    #[inline]
+    pub fn name(mut self, name: [c_char; MAX_DESCRIPTION_SIZE]) -> Self {
+        self.inner.name = name;
+        self
+    }
+    #[inline]
+    pub fn description(mut self, description: [c_char; MAX_DESCRIPTION_SIZE]) -> Self {
+        self.inner.description = description;
+        self
+    }
+    #[inline]
+    pub fn format(mut self, format: PipelineExecutableStatisticFormatKHR) -> Self {
+        self.inner.format = format;
+        self
+    }
+    #[inline]
+    pub fn value(mut self, value: PipelineExecutableStatisticValueKHR) -> Self {
+        self.inner.value = value;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineExecutableStatisticKHR {
+        self.inner
+    }
+}
+// --- VkPipelineExecutableInternalRepresentationKHR: (data_size, p_data) blob out-struct ---
+// (generated binding; comments only, code unchanged)
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineExecutableInternalRepresentationKHR.html>"]
+pub struct PipelineExecutableInternalRepresentationKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub name: [c_char; MAX_DESCRIPTION_SIZE],
+    pub description: [c_char; MAX_DESCRIPTION_SIZE],
+    pub is_text: Bool32,
+    // Length/pointer pair describing the representation blob; see `data()` below.
+    pub data_size: usize,
+    pub p_data: *mut c_void,
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for PipelineExecutableInternalRepresentationKHR {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.debug_struct("PipelineExecutableInternalRepresentationKHR")
+            .field("s_type", &self.s_type)
+            .field("p_next", &self.p_next)
+            // Assumes NUL-terminated contents, per the Vulkan spec for these fields.
+            .field("name", &unsafe {
+                ::std::ffi::CStr::from_ptr(self.name.as_ptr())
+            })
+            .field("description", &unsafe {
+                ::std::ffi::CStr::from_ptr(self.description.as_ptr())
+            })
+            .field("is_text", &self.is_text)
+            .field("data_size", &self.data_size)
+            .field("p_data", &self.p_data)
+            .finish()
+    }
+}
+impl ::std::default::Default for PipelineExecutableInternalRepresentationKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            name: unsafe { ::std::mem::zeroed() },
+            description: unsafe { ::std::mem::zeroed() },
+            is_text: Bool32::default(),
+            data_size: usize::default(),
+            p_data: ::std::ptr::null_mut(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PipelineExecutableInternalRepresentationKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR;
+}
+impl PipelineExecutableInternalRepresentationKHR {
+    pub fn builder<'a>() -> PipelineExecutableInternalRepresentationKHRBuilder<'a> {
+        PipelineExecutableInternalRepresentationKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PipelineExecutableInternalRepresentationKHRBuilder<'a> {
+    inner: PipelineExecutableInternalRepresentationKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PipelineExecutableInternalRepresentationKHRBuilder<'a> {
+    type Target = PipelineExecutableInternalRepresentationKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineExecutableInternalRepresentationKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PipelineExecutableInternalRepresentationKHRBuilder<'a> {
+    #[inline]
+    pub fn name(mut self, name: [c_char; MAX_DESCRIPTION_SIZE]) -> Self {
+        self.inner.name = name;
+        self
+    }
+    #[inline]
+    pub fn description(mut self, description: [c_char; MAX_DESCRIPTION_SIZE]) -> Self {
+        self.inner.description = description;
+        self
+    }
+    #[inline]
+    pub fn is_text(mut self, is_text: bool) -> Self {
+        self.inner.is_text = is_text.into();
+        self
+    }
+    #[inline]
+    pub fn data(mut self, data: &'a mut [u8]) -> Self {
+        // Captures the slice as a raw (len, ptr) pair; the builder lifetime 'a
+        // keeps the borrow alive while the builder is used (lost on build()).
+        self.inner.data_size = data.len();
+        self.inner.p_data = data.as_mut_ptr().cast();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineExecutableInternalRepresentationKHR {
+        self.inner
+    }
+}
+// --- VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures (core 1.3 feature struct) ---
+// (generated binding; comments only, code unchanged)
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures.html>"]
+pub struct PhysicalDeviceShaderDemoteToHelperInvocationFeatures {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub shader_demote_to_helper_invocation: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceShaderDemoteToHelperInvocationFeatures {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            shader_demote_to_helper_invocation: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceShaderDemoteToHelperInvocationFeatures {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES;
+}
+impl PhysicalDeviceShaderDemoteToHelperInvocationFeatures {
+    pub fn builder<'a>() -> PhysicalDeviceShaderDemoteToHelperInvocationFeaturesBuilder<'a> {
+        PhysicalDeviceShaderDemoteToHelperInvocationFeaturesBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesBuilder<'a> {
+    inner: PhysicalDeviceShaderDemoteToHelperInvocationFeatures,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// May extend VkPhysicalDeviceFeatures2 (query) and VkDeviceCreateInfo (enable).
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceShaderDemoteToHelperInvocationFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceShaderDemoteToHelperInvocationFeatures
+{
+}
+unsafe impl ExtendsDeviceCreateInfo
+    for PhysicalDeviceShaderDemoteToHelperInvocationFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderDemoteToHelperInvocationFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderDemoteToHelperInvocationFeaturesBuilder<'a> {
+    type Target = PhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderDemoteToHelperInvocationFeaturesBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceShaderDemoteToHelperInvocationFeaturesBuilder<'a> {
+    #[inline]
+    pub fn shader_demote_to_helper_invocation(
+        mut self,
+        shader_demote_to_helper_invocation: bool,
+    ) -> Self {
+        self.inner.shader_demote_to_helper_invocation = shader_demote_to_helper_invocation.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceShaderDemoteToHelperInvocationFeatures {
+        self.inner
+    }
+}
+// --- VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT (feature struct) ---
+// (generated binding; comments only, code unchanged)
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT.html>"]
+pub struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub texel_buffer_alignment: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceTexelBufferAlignmentFeaturesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            texel_buffer_alignment: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceTexelBufferAlignmentFeaturesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT;
+}
+impl PhysicalDeviceTexelBufferAlignmentFeaturesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceTexelBufferAlignmentFeaturesEXTBuilder<'a> {
+        PhysicalDeviceTexelBufferAlignmentFeaturesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceTexelBufferAlignmentFeaturesEXTBuilder<'a> {
+    inner: PhysicalDeviceTexelBufferAlignmentFeaturesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// May extend VkPhysicalDeviceFeatures2 (query) and VkDeviceCreateInfo (enable).
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceTexelBufferAlignmentFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceTexelBufferAlignmentFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTexelBufferAlignmentFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTexelBufferAlignmentFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceTexelBufferAlignmentFeaturesEXTBuilder<'a> {
+    type Target = PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceTexelBufferAlignmentFeaturesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceTexelBufferAlignmentFeaturesEXTBuilder<'a> {
+    #[inline]
+    pub fn texel_buffer_alignment(mut self, texel_buffer_alignment: bool) -> Self {
+        self.inner.texel_buffer_alignment = texel_buffer_alignment.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceTexelBufferAlignmentFeaturesEXT {
+        self.inner
+    }
+}
+// --- VkPhysicalDeviceTexelBufferAlignmentProperties (properties out-struct) ---
+// Extends VkPhysicalDeviceProperties2 only (read-only query result).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceTexelBufferAlignmentProperties.html>"]
+pub struct PhysicalDeviceTexelBufferAlignmentProperties {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub storage_texel_buffer_offset_alignment_bytes: DeviceSize,
+    pub storage_texel_buffer_offset_single_texel_alignment: Bool32,
+    pub uniform_texel_buffer_offset_alignment_bytes: DeviceSize,
+    pub uniform_texel_buffer_offset_single_texel_alignment: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceTexelBufferAlignmentProperties {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            storage_texel_buffer_offset_alignment_bytes: DeviceSize::default(),
+            storage_texel_buffer_offset_single_texel_alignment: Bool32::default(),
+            uniform_texel_buffer_offset_alignment_bytes: DeviceSize::default(),
+            uniform_texel_buffer_offset_single_texel_alignment: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceTexelBufferAlignmentProperties {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES;
+}
+impl PhysicalDeviceTexelBufferAlignmentProperties {
+    pub fn builder<'a>() -> PhysicalDeviceTexelBufferAlignmentPropertiesBuilder<'a> {
+        PhysicalDeviceTexelBufferAlignmentPropertiesBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceTexelBufferAlignmentPropertiesBuilder<'a> {
+    inner: PhysicalDeviceTexelBufferAlignmentProperties,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+    for PhysicalDeviceTexelBufferAlignmentPropertiesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceTexelBufferAlignmentProperties {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceTexelBufferAlignmentPropertiesBuilder<'a> {
+    type Target = PhysicalDeviceTexelBufferAlignmentProperties;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceTexelBufferAlignmentPropertiesBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceTexelBufferAlignmentPropertiesBuilder<'a> {
+    #[inline]
+    pub fn storage_texel_buffer_offset_alignment_bytes(
+        mut self,
+        storage_texel_buffer_offset_alignment_bytes: DeviceSize,
+    ) -> Self {
+        self.inner.storage_texel_buffer_offset_alignment_bytes =
+            storage_texel_buffer_offset_alignment_bytes;
+        self
+    }
+    #[inline]
+    pub fn storage_texel_buffer_offset_single_texel_alignment(
+        mut self,
+        storage_texel_buffer_offset_single_texel_alignment: bool,
+    ) -> Self {
+        self.inner
+            .storage_texel_buffer_offset_single_texel_alignment =
+            storage_texel_buffer_offset_single_texel_alignment.into();
+        self
+    }
+    #[inline]
+    pub fn uniform_texel_buffer_offset_alignment_bytes(
+        mut self,
+        uniform_texel_buffer_offset_alignment_bytes: DeviceSize,
+    ) -> Self {
+        self.inner.uniform_texel_buffer_offset_alignment_bytes =
+            uniform_texel_buffer_offset_alignment_bytes;
+        self
+    }
+    #[inline]
+    pub fn uniform_texel_buffer_offset_single_texel_alignment(
+        mut self,
+        uniform_texel_buffer_offset_single_texel_alignment: bool,
+    ) -> Self {
+        self.inner
+            .uniform_texel_buffer_offset_single_texel_alignment =
+            uniform_texel_buffer_offset_single_texel_alignment.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceTexelBufferAlignmentProperties {
+        self.inner
+    }
+}
+// --- VkPhysicalDeviceSubgroupSizeControlFeatures (core 1.3 feature struct) ---
+// (generated binding; comments only, code unchanged)
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceSubgroupSizeControlFeatures.html>"]
+pub struct PhysicalDeviceSubgroupSizeControlFeatures {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub subgroup_size_control: Bool32,
+    pub compute_full_subgroups: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceSubgroupSizeControlFeatures {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            subgroup_size_control: Bool32::default(),
+            compute_full_subgroups: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceSubgroupSizeControlFeatures {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES;
+}
+impl PhysicalDeviceSubgroupSizeControlFeatures {
+    pub fn builder<'a>() -> PhysicalDeviceSubgroupSizeControlFeaturesBuilder<'a> {
+        PhysicalDeviceSubgroupSizeControlFeaturesBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceSubgroupSizeControlFeaturesBuilder<'a> {
+    inner: PhysicalDeviceSubgroupSizeControlFeatures,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// May extend VkPhysicalDeviceFeatures2 (query) and VkDeviceCreateInfo (enable).
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceSubgroupSizeControlFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSubgroupSizeControlFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSubgroupSizeControlFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSubgroupSizeControlFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceSubgroupSizeControlFeaturesBuilder<'a> {
+    type Target = PhysicalDeviceSubgroupSizeControlFeatures;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceSubgroupSizeControlFeaturesBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceSubgroupSizeControlFeaturesBuilder<'a> {
+    #[inline]
+    pub fn subgroup_size_control(mut self, subgroup_size_control: bool) -> Self {
+        self.inner.subgroup_size_control = subgroup_size_control.into();
+        self
+    }
+    #[inline]
+    pub fn compute_full_subgroups(mut self, compute_full_subgroups: bool) -> Self {
+        self.inner.compute_full_subgroups = compute_full_subgroups.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceSubgroupSizeControlFeatures {
+        self.inner
+    }
+}
+// --- VkPhysicalDeviceSubgroupSizeControlProperties (properties out-struct) ---
+// Extends VkPhysicalDeviceProperties2 only (read-only query result).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceSubgroupSizeControlProperties.html>"]
+pub struct PhysicalDeviceSubgroupSizeControlProperties {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub min_subgroup_size: u32,
+    pub max_subgroup_size: u32,
+    pub max_compute_workgroup_subgroups: u32,
+    pub required_subgroup_size_stages: ShaderStageFlags,
+}
+impl ::std::default::Default for PhysicalDeviceSubgroupSizeControlProperties {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            min_subgroup_size: u32::default(),
+            max_subgroup_size: u32::default(),
+            max_compute_workgroup_subgroups: u32::default(),
+            required_subgroup_size_stages: ShaderStageFlags::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceSubgroupSizeControlProperties {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES;
+}
+impl PhysicalDeviceSubgroupSizeControlProperties {
+    pub fn builder<'a>() -> PhysicalDeviceSubgroupSizeControlPropertiesBuilder<'a> {
+        PhysicalDeviceSubgroupSizeControlPropertiesBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceSubgroupSizeControlPropertiesBuilder<'a> {
+    inner: PhysicalDeviceSubgroupSizeControlProperties,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+    for PhysicalDeviceSubgroupSizeControlPropertiesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSubgroupSizeControlProperties {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceSubgroupSizeControlPropertiesBuilder<'a> {
+    type Target = PhysicalDeviceSubgroupSizeControlProperties;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceSubgroupSizeControlPropertiesBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceSubgroupSizeControlPropertiesBuilder<'a> {
+    #[inline]
+    pub fn min_subgroup_size(mut self, min_subgroup_size: u32) -> Self {
+        self.inner.min_subgroup_size = min_subgroup_size;
+        self
+    }
+    #[inline]
+    pub fn max_subgroup_size(mut self, max_subgroup_size: u32) -> Self {
+        self.inner.max_subgroup_size = max_subgroup_size;
+        self
+    }
+    #[inline]
+    pub fn max_compute_workgroup_subgroups(mut self, max_compute_workgroup_subgroups: u32) -> Self {
+        self.inner.max_compute_workgroup_subgroups = max_compute_workgroup_subgroups;
+        self
+    }
+    #[inline]
+    pub fn required_subgroup_size_stages(
+        mut self,
+        required_subgroup_size_stages: ShaderStageFlags,
+    ) -> Self {
+        self.inner.required_subgroup_size_stages = required_subgroup_size_stages;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceSubgroupSizeControlProperties {
+        self.inner
+    }
+}
+// --- VkPipelineShaderStageRequiredSubgroupSizeCreateInfo ---
+// Chained onto VkPipelineShaderStageCreateInfo to pin a stage's subgroup size.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineShaderStageRequiredSubgroupSizeCreateInfo.html>"]
+pub struct PipelineShaderStageRequiredSubgroupSizeCreateInfo {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub required_subgroup_size: u32,
+}
+impl ::std::default::Default for PipelineShaderStageRequiredSubgroupSizeCreateInfo {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            required_subgroup_size: u32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PipelineShaderStageRequiredSubgroupSizeCreateInfo {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO;
+}
+impl PipelineShaderStageRequiredSubgroupSizeCreateInfo {
+    pub fn builder<'a>() -> PipelineShaderStageRequiredSubgroupSizeCreateInfoBuilder<'a> {
+        PipelineShaderStageRequiredSubgroupSizeCreateInfoBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PipelineShaderStageRequiredSubgroupSizeCreateInfoBuilder<'a> {
+    inner: PipelineShaderStageRequiredSubgroupSizeCreateInfo,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPipelineShaderStageCreateInfo
+    for PipelineShaderStageRequiredSubgroupSizeCreateInfoBuilder<'_>
+{
+}
+unsafe impl ExtendsPipelineShaderStageCreateInfo
+    for PipelineShaderStageRequiredSubgroupSizeCreateInfo
+{
+}
+impl<'a> ::std::ops::Deref for PipelineShaderStageRequiredSubgroupSizeCreateInfoBuilder<'a> {
+    type Target = PipelineShaderStageRequiredSubgroupSizeCreateInfo;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineShaderStageRequiredSubgroupSizeCreateInfoBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PipelineShaderStageRequiredSubgroupSizeCreateInfoBuilder<'a> {
+    #[inline]
+    pub fn required_subgroup_size(mut self, required_subgroup_size: u32) -> Self {
+        self.inner.required_subgroup_size = required_subgroup_size;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineShaderStageRequiredSubgroupSizeCreateInfo {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubpassShadingPipelineCreateInfoHUAWEI.html>"]
+pub struct SubpassShadingPipelineCreateInfoHUAWEI {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub render_pass: RenderPass,
+ pub subpass: u32,
+}
+impl ::std::default::Default for SubpassShadingPipelineCreateInfoHUAWEI {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ render_pass: RenderPass::default(),
+ subpass: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SubpassShadingPipelineCreateInfoHUAWEI {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI;
+}
+impl SubpassShadingPipelineCreateInfoHUAWEI {
+ pub fn builder<'a>() -> SubpassShadingPipelineCreateInfoHUAWEIBuilder<'a> {
+ SubpassShadingPipelineCreateInfoHUAWEIBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SubpassShadingPipelineCreateInfoHUAWEIBuilder<'a> {
+ inner: SubpassShadingPipelineCreateInfoHUAWEI,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsComputePipelineCreateInfo for SubpassShadingPipelineCreateInfoHUAWEIBuilder<'_> {}
+unsafe impl ExtendsComputePipelineCreateInfo for SubpassShadingPipelineCreateInfoHUAWEI {}
+impl<'a> ::std::ops::Deref for SubpassShadingPipelineCreateInfoHUAWEIBuilder<'a> {
+ type Target = SubpassShadingPipelineCreateInfoHUAWEI;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SubpassShadingPipelineCreateInfoHUAWEIBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SubpassShadingPipelineCreateInfoHUAWEIBuilder<'a> {
+ #[inline]
+ pub fn render_pass(mut self, render_pass: RenderPass) -> Self {
+ self.inner.render_pass = render_pass;
+ self
+ }
+ #[inline]
+ pub fn subpass(mut self, subpass: u32) -> Self {
+ self.inner.subpass = subpass;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SubpassShadingPipelineCreateInfoHUAWEI {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceSubpassShadingPropertiesHUAWEI.html>"]
+pub struct PhysicalDeviceSubpassShadingPropertiesHUAWEI {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub max_subpass_shading_workgroup_size_aspect_ratio: u32,
+}
+impl ::std::default::Default for PhysicalDeviceSubpassShadingPropertiesHUAWEI {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ max_subpass_shading_workgroup_size_aspect_ratio: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceSubpassShadingPropertiesHUAWEI {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI;
+}
+impl PhysicalDeviceSubpassShadingPropertiesHUAWEI {
+ pub fn builder<'a>() -> PhysicalDeviceSubpassShadingPropertiesHUAWEIBuilder<'a> {
+ PhysicalDeviceSubpassShadingPropertiesHUAWEIBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceSubpassShadingPropertiesHUAWEIBuilder<'a> {
+ inner: PhysicalDeviceSubpassShadingPropertiesHUAWEI,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceSubpassShadingPropertiesHUAWEIBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceSubpassShadingPropertiesHUAWEI {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceSubpassShadingPropertiesHUAWEIBuilder<'a> {
+ type Target = PhysicalDeviceSubpassShadingPropertiesHUAWEI;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceSubpassShadingPropertiesHUAWEIBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceSubpassShadingPropertiesHUAWEIBuilder<'a> {
+ #[inline]
+ pub fn max_subpass_shading_workgroup_size_aspect_ratio(
+ mut self,
+ max_subpass_shading_workgroup_size_aspect_ratio: u32,
+ ) -> Self {
+ self.inner.max_subpass_shading_workgroup_size_aspect_ratio =
+ max_subpass_shading_workgroup_size_aspect_ratio;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceSubpassShadingPropertiesHUAWEI {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryOpaqueCaptureAddressAllocateInfo.html>"]
+pub struct MemoryOpaqueCaptureAddressAllocateInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub opaque_capture_address: u64,
+}
+impl ::std::default::Default for MemoryOpaqueCaptureAddressAllocateInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ opaque_capture_address: u64::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MemoryOpaqueCaptureAddressAllocateInfo {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO;
+}
+impl MemoryOpaqueCaptureAddressAllocateInfo {
+ pub fn builder<'a>() -> MemoryOpaqueCaptureAddressAllocateInfoBuilder<'a> {
+ MemoryOpaqueCaptureAddressAllocateInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MemoryOpaqueCaptureAddressAllocateInfoBuilder<'a> {
+ inner: MemoryOpaqueCaptureAddressAllocateInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsMemoryAllocateInfo for MemoryOpaqueCaptureAddressAllocateInfoBuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for MemoryOpaqueCaptureAddressAllocateInfo {}
+impl<'a> ::std::ops::Deref for MemoryOpaqueCaptureAddressAllocateInfoBuilder<'a> {
+ type Target = MemoryOpaqueCaptureAddressAllocateInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MemoryOpaqueCaptureAddressAllocateInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MemoryOpaqueCaptureAddressAllocateInfoBuilder<'a> {
+ #[inline]
+ pub fn opaque_capture_address(mut self, opaque_capture_address: u64) -> Self {
+ self.inner.opaque_capture_address = opaque_capture_address;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MemoryOpaqueCaptureAddressAllocateInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceMemoryOpaqueCaptureAddressInfo.html>"]
+pub struct DeviceMemoryOpaqueCaptureAddressInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub memory: DeviceMemory,
+}
+impl ::std::default::Default for DeviceMemoryOpaqueCaptureAddressInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ memory: DeviceMemory::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DeviceMemoryOpaqueCaptureAddressInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO;
+}
+impl DeviceMemoryOpaqueCaptureAddressInfo {
+ pub fn builder<'a>() -> DeviceMemoryOpaqueCaptureAddressInfoBuilder<'a> {
+ DeviceMemoryOpaqueCaptureAddressInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DeviceMemoryOpaqueCaptureAddressInfoBuilder<'a> {
+ inner: DeviceMemoryOpaqueCaptureAddressInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DeviceMemoryOpaqueCaptureAddressInfoBuilder<'a> {
+ type Target = DeviceMemoryOpaqueCaptureAddressInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceMemoryOpaqueCaptureAddressInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceMemoryOpaqueCaptureAddressInfoBuilder<'a> {
+ #[inline]
+ pub fn memory(mut self, memory: DeviceMemory) -> Self {
+ self.inner.memory = memory;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceMemoryOpaqueCaptureAddressInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceLineRasterizationFeaturesEXT.html>"]
+pub struct PhysicalDeviceLineRasterizationFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub rectangular_lines: Bool32,
+ pub bresenham_lines: Bool32,
+ pub smooth_lines: Bool32,
+ pub stippled_rectangular_lines: Bool32,
+ pub stippled_bresenham_lines: Bool32,
+ pub stippled_smooth_lines: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceLineRasterizationFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ rectangular_lines: Bool32::default(),
+ bresenham_lines: Bool32::default(),
+ smooth_lines: Bool32::default(),
+ stippled_rectangular_lines: Bool32::default(),
+ stippled_bresenham_lines: Bool32::default(),
+ stippled_smooth_lines: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceLineRasterizationFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT;
+}
+impl PhysicalDeviceLineRasterizationFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceLineRasterizationFeaturesEXTBuilder<'a> {
+ PhysicalDeviceLineRasterizationFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceLineRasterizationFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceLineRasterizationFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceLineRasterizationFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceLineRasterizationFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceLineRasterizationFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceLineRasterizationFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceLineRasterizationFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceLineRasterizationFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceLineRasterizationFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceLineRasterizationFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn rectangular_lines(mut self, rectangular_lines: bool) -> Self {
+ self.inner.rectangular_lines = rectangular_lines.into();
+ self
+ }
+ #[inline]
+ pub fn bresenham_lines(mut self, bresenham_lines: bool) -> Self {
+ self.inner.bresenham_lines = bresenham_lines.into();
+ self
+ }
+ #[inline]
+ pub fn smooth_lines(mut self, smooth_lines: bool) -> Self {
+ self.inner.smooth_lines = smooth_lines.into();
+ self
+ }
+ #[inline]
+ pub fn stippled_rectangular_lines(mut self, stippled_rectangular_lines: bool) -> Self {
+ self.inner.stippled_rectangular_lines = stippled_rectangular_lines.into();
+ self
+ }
+ #[inline]
+ pub fn stippled_bresenham_lines(mut self, stippled_bresenham_lines: bool) -> Self {
+ self.inner.stippled_bresenham_lines = stippled_bresenham_lines.into();
+ self
+ }
+ #[inline]
+ pub fn stippled_smooth_lines(mut self, stippled_smooth_lines: bool) -> Self {
+ self.inner.stippled_smooth_lines = stippled_smooth_lines.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceLineRasterizationFeaturesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceLineRasterizationPropertiesEXT.html>"]
+pub struct PhysicalDeviceLineRasterizationPropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub line_sub_pixel_precision_bits: u32,
+}
+impl ::std::default::Default for PhysicalDeviceLineRasterizationPropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ line_sub_pixel_precision_bits: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceLineRasterizationPropertiesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT;
+}
+impl PhysicalDeviceLineRasterizationPropertiesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceLineRasterizationPropertiesEXTBuilder<'a> {
+ PhysicalDeviceLineRasterizationPropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceLineRasterizationPropertiesEXTBuilder<'a> {
+ inner: PhysicalDeviceLineRasterizationPropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceLineRasterizationPropertiesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceLineRasterizationPropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceLineRasterizationPropertiesEXTBuilder<'a> {
+ type Target = PhysicalDeviceLineRasterizationPropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceLineRasterizationPropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceLineRasterizationPropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn line_sub_pixel_precision_bits(mut self, line_sub_pixel_precision_bits: u32) -> Self {
+ self.inner.line_sub_pixel_precision_bits = line_sub_pixel_precision_bits;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceLineRasterizationPropertiesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRasterizationLineStateCreateInfoEXT.html>"]
+pub struct PipelineRasterizationLineStateCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub line_rasterization_mode: LineRasterizationModeEXT,
+ pub stippled_line_enable: Bool32,
+ pub line_stipple_factor: u32,
+ pub line_stipple_pattern: u16,
+}
+impl ::std::default::Default for PipelineRasterizationLineStateCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ line_rasterization_mode: LineRasterizationModeEXT::default(),
+ stippled_line_enable: Bool32::default(),
+ line_stipple_factor: u32::default(),
+ line_stipple_pattern: u16::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineRasterizationLineStateCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT;
+}
+impl PipelineRasterizationLineStateCreateInfoEXT {
+ pub fn builder<'a>() -> PipelineRasterizationLineStateCreateInfoEXTBuilder<'a> {
+ PipelineRasterizationLineStateCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineRasterizationLineStateCreateInfoEXTBuilder<'a> {
+ inner: PipelineRasterizationLineStateCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPipelineRasterizationStateCreateInfo
+ for PipelineRasterizationLineStateCreateInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPipelineRasterizationStateCreateInfo
+ for PipelineRasterizationLineStateCreateInfoEXT
+{
+}
+impl<'a> ::std::ops::Deref for PipelineRasterizationLineStateCreateInfoEXTBuilder<'a> {
+ type Target = PipelineRasterizationLineStateCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineRasterizationLineStateCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineRasterizationLineStateCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn line_rasterization_mode(
+ mut self,
+ line_rasterization_mode: LineRasterizationModeEXT,
+ ) -> Self {
+ self.inner.line_rasterization_mode = line_rasterization_mode;
+ self
+ }
+ #[inline]
+ pub fn stippled_line_enable(mut self, stippled_line_enable: bool) -> Self {
+ self.inner.stippled_line_enable = stippled_line_enable.into();
+ self
+ }
+ #[inline]
+ pub fn line_stipple_factor(mut self, line_stipple_factor: u32) -> Self {
+ self.inner.line_stipple_factor = line_stipple_factor;
+ self
+ }
+ #[inline]
+ pub fn line_stipple_pattern(mut self, line_stipple_pattern: u16) -> Self {
+ self.inner.line_stipple_pattern = line_stipple_pattern;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineRasterizationLineStateCreateInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePipelineCreationCacheControlFeatures.html>"]
+pub struct PhysicalDevicePipelineCreationCacheControlFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub pipeline_creation_cache_control: Bool32,
+}
+impl ::std::default::Default for PhysicalDevicePipelineCreationCacheControlFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ pipeline_creation_cache_control: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDevicePipelineCreationCacheControlFeatures {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES;
+}
+impl PhysicalDevicePipelineCreationCacheControlFeatures {
+ pub fn builder<'a>() -> PhysicalDevicePipelineCreationCacheControlFeaturesBuilder<'a> {
+ PhysicalDevicePipelineCreationCacheControlFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDevicePipelineCreationCacheControlFeaturesBuilder<'a> {
+ inner: PhysicalDevicePipelineCreationCacheControlFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDevicePipelineCreationCacheControlFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePipelineCreationCacheControlFeatures {}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDevicePipelineCreationCacheControlFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePipelineCreationCacheControlFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePipelineCreationCacheControlFeaturesBuilder<'a> {
+ type Target = PhysicalDevicePipelineCreationCacheControlFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePipelineCreationCacheControlFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDevicePipelineCreationCacheControlFeaturesBuilder<'a> {
+ #[inline]
+ pub fn pipeline_creation_cache_control(
+ mut self,
+ pipeline_creation_cache_control: bool,
+ ) -> Self {
+ self.inner.pipeline_creation_cache_control = pipeline_creation_cache_control.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDevicePipelineCreationCacheControlFeatures {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceVulkan11Features.html>"]
+pub struct PhysicalDeviceVulkan11Features {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub storage_buffer16_bit_access: Bool32,
+ pub uniform_and_storage_buffer16_bit_access: Bool32,
+ pub storage_push_constant16: Bool32,
+ pub storage_input_output16: Bool32,
+ pub multiview: Bool32,
+ pub multiview_geometry_shader: Bool32,
+ pub multiview_tessellation_shader: Bool32,
+ pub variable_pointers_storage_buffer: Bool32,
+ pub variable_pointers: Bool32,
+ pub protected_memory: Bool32,
+ pub sampler_ycbcr_conversion: Bool32,
+ pub shader_draw_parameters: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceVulkan11Features {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ storage_buffer16_bit_access: Bool32::default(),
+ uniform_and_storage_buffer16_bit_access: Bool32::default(),
+ storage_push_constant16: Bool32::default(),
+ storage_input_output16: Bool32::default(),
+ multiview: Bool32::default(),
+ multiview_geometry_shader: Bool32::default(),
+ multiview_tessellation_shader: Bool32::default(),
+ variable_pointers_storage_buffer: Bool32::default(),
+ variable_pointers: Bool32::default(),
+ protected_memory: Bool32::default(),
+ sampler_ycbcr_conversion: Bool32::default(),
+ shader_draw_parameters: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceVulkan11Features {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
+}
+impl PhysicalDeviceVulkan11Features {
+ pub fn builder<'a>() -> PhysicalDeviceVulkan11FeaturesBuilder<'a> {
+ PhysicalDeviceVulkan11FeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceVulkan11FeaturesBuilder<'a> {
+ inner: PhysicalDeviceVulkan11Features,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVulkan11FeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVulkan11Features {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVulkan11FeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVulkan11Features {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceVulkan11FeaturesBuilder<'a> {
+ type Target = PhysicalDeviceVulkan11Features;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceVulkan11FeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceVulkan11FeaturesBuilder<'a> {
+ #[inline]
+ pub fn storage_buffer16_bit_access(mut self, storage_buffer16_bit_access: bool) -> Self {
+ self.inner.storage_buffer16_bit_access = storage_buffer16_bit_access.into();
+ self
+ }
+ #[inline]
+ pub fn uniform_and_storage_buffer16_bit_access(
+ mut self,
+ uniform_and_storage_buffer16_bit_access: bool,
+ ) -> Self {
+ self.inner.uniform_and_storage_buffer16_bit_access =
+ uniform_and_storage_buffer16_bit_access.into();
+ self
+ }
+ #[inline]
+ pub fn storage_push_constant16(mut self, storage_push_constant16: bool) -> Self {
+ self.inner.storage_push_constant16 = storage_push_constant16.into();
+ self
+ }
+ #[inline]
+ pub fn storage_input_output16(mut self, storage_input_output16: bool) -> Self {
+ self.inner.storage_input_output16 = storage_input_output16.into();
+ self
+ }
+ #[inline]
+ pub fn multiview(mut self, multiview: bool) -> Self {
+ self.inner.multiview = multiview.into();
+ self
+ }
+ #[inline]
+ pub fn multiview_geometry_shader(mut self, multiview_geometry_shader: bool) -> Self {
+ self.inner.multiview_geometry_shader = multiview_geometry_shader.into();
+ self
+ }
+ #[inline]
+ pub fn multiview_tessellation_shader(mut self, multiview_tessellation_shader: bool) -> Self {
+ self.inner.multiview_tessellation_shader = multiview_tessellation_shader.into();
+ self
+ }
+ #[inline]
+ pub fn variable_pointers_storage_buffer(
+ mut self,
+ variable_pointers_storage_buffer: bool,
+ ) -> Self {
+ self.inner.variable_pointers_storage_buffer = variable_pointers_storage_buffer.into();
+ self
+ }
+ #[inline]
+ pub fn variable_pointers(mut self, variable_pointers: bool) -> Self {
+ self.inner.variable_pointers = variable_pointers.into();
+ self
+ }
+ #[inline]
+ pub fn protected_memory(mut self, protected_memory: bool) -> Self {
+ self.inner.protected_memory = protected_memory.into();
+ self
+ }
+ #[inline]
+ pub fn sampler_ycbcr_conversion(mut self, sampler_ycbcr_conversion: bool) -> Self {
+ self.inner.sampler_ycbcr_conversion = sampler_ycbcr_conversion.into();
+ self
+ }
+ #[inline]
+ pub fn shader_draw_parameters(mut self, shader_draw_parameters: bool) -> Self {
+ self.inner.shader_draw_parameters = shader_draw_parameters.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceVulkan11Features {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceVulkan11Properties.html>"]
+pub struct PhysicalDeviceVulkan11Properties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub device_uuid: [u8; UUID_SIZE],
+ pub driver_uuid: [u8; UUID_SIZE],
+ pub device_luid: [u8; LUID_SIZE],
+ pub device_node_mask: u32,
+ pub device_luid_valid: Bool32,
+ pub subgroup_size: u32,
+ pub subgroup_supported_stages: ShaderStageFlags,
+ pub subgroup_supported_operations: SubgroupFeatureFlags,
+ pub subgroup_quad_operations_in_all_stages: Bool32,
+ pub point_clipping_behavior: PointClippingBehavior,
+ pub max_multiview_view_count: u32,
+ pub max_multiview_instance_index: u32,
+ pub protected_no_fault: Bool32,
+ pub max_per_set_descriptors: u32,
+ pub max_memory_allocation_size: DeviceSize,
+}
+impl ::std::default::Default for PhysicalDeviceVulkan11Properties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ device_uuid: unsafe { ::std::mem::zeroed() },
+ driver_uuid: unsafe { ::std::mem::zeroed() },
+ device_luid: unsafe { ::std::mem::zeroed() },
+ device_node_mask: u32::default(),
+ device_luid_valid: Bool32::default(),
+ subgroup_size: u32::default(),
+ subgroup_supported_stages: ShaderStageFlags::default(),
+ subgroup_supported_operations: SubgroupFeatureFlags::default(),
+ subgroup_quad_operations_in_all_stages: Bool32::default(),
+ point_clipping_behavior: PointClippingBehavior::default(),
+ max_multiview_view_count: u32::default(),
+ max_multiview_instance_index: u32::default(),
+ protected_no_fault: Bool32::default(),
+ max_per_set_descriptors: u32::default(),
+ max_memory_allocation_size: DeviceSize::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceVulkan11Properties {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES;
+}
+impl PhysicalDeviceVulkan11Properties {
+ pub fn builder<'a>() -> PhysicalDeviceVulkan11PropertiesBuilder<'a> {
+ PhysicalDeviceVulkan11PropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceVulkan11PropertiesBuilder<'a> {
+ inner: PhysicalDeviceVulkan11Properties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceVulkan11PropertiesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceVulkan11Properties {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceVulkan11PropertiesBuilder<'a> {
+ type Target = PhysicalDeviceVulkan11Properties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceVulkan11PropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
// Chainable by-value setters, one per struct field. `bool` parameters are
// converted to the FFI `Bool32` via `.into()`. (This is an output/properties
// struct, so the setters mainly matter for tests and mock data.)
impl<'a> PhysicalDeviceVulkan11PropertiesBuilder<'a> {
    #[inline]
    pub fn device_uuid(mut self, device_uuid: [u8; UUID_SIZE]) -> Self {
        self.inner.device_uuid = device_uuid;
        self
    }
    #[inline]
    pub fn driver_uuid(mut self, driver_uuid: [u8; UUID_SIZE]) -> Self {
        self.inner.driver_uuid = driver_uuid;
        self
    }
    #[inline]
    pub fn device_luid(mut self, device_luid: [u8; LUID_SIZE]) -> Self {
        self.inner.device_luid = device_luid;
        self
    }
    #[inline]
    pub fn device_node_mask(mut self, device_node_mask: u32) -> Self {
        self.inner.device_node_mask = device_node_mask;
        self
    }
    #[inline]
    pub fn device_luid_valid(mut self, device_luid_valid: bool) -> Self {
        self.inner.device_luid_valid = device_luid_valid.into();
        self
    }
    #[inline]
    pub fn subgroup_size(mut self, subgroup_size: u32) -> Self {
        self.inner.subgroup_size = subgroup_size;
        self
    }
    #[inline]
    pub fn subgroup_supported_stages(
        mut self,
        subgroup_supported_stages: ShaderStageFlags,
    ) -> Self {
        self.inner.subgroup_supported_stages = subgroup_supported_stages;
        self
    }
    #[inline]
    pub fn subgroup_supported_operations(
        mut self,
        subgroup_supported_operations: SubgroupFeatureFlags,
    ) -> Self {
        self.inner.subgroup_supported_operations = subgroup_supported_operations;
        self
    }
    #[inline]
    pub fn subgroup_quad_operations_in_all_stages(
        mut self,
        subgroup_quad_operations_in_all_stages: bool,
    ) -> Self {
        self.inner.subgroup_quad_operations_in_all_stages =
            subgroup_quad_operations_in_all_stages.into();
        self
    }
    #[inline]
    pub fn point_clipping_behavior(
        mut self,
        point_clipping_behavior: PointClippingBehavior,
    ) -> Self {
        self.inner.point_clipping_behavior = point_clipping_behavior;
        self
    }
    #[inline]
    pub fn max_multiview_view_count(mut self, max_multiview_view_count: u32) -> Self {
        self.inner.max_multiview_view_count = max_multiview_view_count;
        self
    }
    #[inline]
    pub fn max_multiview_instance_index(mut self, max_multiview_instance_index: u32) -> Self {
        self.inner.max_multiview_instance_index = max_multiview_instance_index;
        self
    }
    #[inline]
    pub fn protected_no_fault(mut self, protected_no_fault: bool) -> Self {
        self.inner.protected_no_fault = protected_no_fault.into();
        self
    }
    #[inline]
    pub fn max_per_set_descriptors(mut self, max_per_set_descriptors: u32) -> Self {
        self.inner.max_per_set_descriptors = max_per_set_descriptors;
        self
    }
    #[inline]
    pub fn max_memory_allocation_size(mut self, max_memory_allocation_size: DeviceSize) -> Self {
        self.inner.max_memory_allocation_size = max_memory_allocation_size;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceVulkan11Properties {
        self.inner
    }
}
// FFI mirror of `VkPhysicalDeviceVulkan12Features`. `#[repr(C)]` field order
// is ABI: it must match the C struct exactly, so fields must never be
// reordered. All feature flags are `Bool32` (a C `VkBool32`), not `bool`.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceVulkan12Features.html>"]
pub struct PhysicalDeviceVulkan12Features {
    // Structure-type tag + extension chain pointer, common to all Vulkan structs.
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub sampler_mirror_clamp_to_edge: Bool32,
    pub draw_indirect_count: Bool32,
    pub storage_buffer8_bit_access: Bool32,
    pub uniform_and_storage_buffer8_bit_access: Bool32,
    pub storage_push_constant8: Bool32,
    pub shader_buffer_int64_atomics: Bool32,
    pub shader_shared_int64_atomics: Bool32,
    pub shader_float16: Bool32,
    pub shader_int8: Bool32,
    pub descriptor_indexing: Bool32,
    pub shader_input_attachment_array_dynamic_indexing: Bool32,
    pub shader_uniform_texel_buffer_array_dynamic_indexing: Bool32,
    pub shader_storage_texel_buffer_array_dynamic_indexing: Bool32,
    pub shader_uniform_buffer_array_non_uniform_indexing: Bool32,
    pub shader_sampled_image_array_non_uniform_indexing: Bool32,
    pub shader_storage_buffer_array_non_uniform_indexing: Bool32,
    pub shader_storage_image_array_non_uniform_indexing: Bool32,
    pub shader_input_attachment_array_non_uniform_indexing: Bool32,
    pub shader_uniform_texel_buffer_array_non_uniform_indexing: Bool32,
    pub shader_storage_texel_buffer_array_non_uniform_indexing: Bool32,
    pub descriptor_binding_uniform_buffer_update_after_bind: Bool32,
    pub descriptor_binding_sampled_image_update_after_bind: Bool32,
    pub descriptor_binding_storage_image_update_after_bind: Bool32,
    pub descriptor_binding_storage_buffer_update_after_bind: Bool32,
    pub descriptor_binding_uniform_texel_buffer_update_after_bind: Bool32,
    pub descriptor_binding_storage_texel_buffer_update_after_bind: Bool32,
    pub descriptor_binding_update_unused_while_pending: Bool32,
    pub descriptor_binding_partially_bound: Bool32,
    pub descriptor_binding_variable_descriptor_count: Bool32,
    pub runtime_descriptor_array: Bool32,
    pub sampler_filter_minmax: Bool32,
    pub scalar_block_layout: Bool32,
    pub imageless_framebuffer: Bool32,
    pub uniform_buffer_standard_layout: Bool32,
    pub shader_subgroup_extended_types: Bool32,
    pub separate_depth_stencil_layouts: Bool32,
    pub host_query_reset: Bool32,
    pub timeline_semaphore: Bool32,
    pub buffer_device_address: Bool32,
    pub buffer_device_address_capture_replay: Bool32,
    pub buffer_device_address_multi_device: Bool32,
    pub vulkan_memory_model: Bool32,
    pub vulkan_memory_model_device_scope: Bool32,
    pub vulkan_memory_model_availability_visibility_chains: Bool32,
    pub shader_output_viewport_index: Bool32,
    pub shader_output_layer: Bool32,
    pub subgroup_broadcast_dynamic_id: Bool32,
}
// Default: correct `s_type`, null `p_next`, every feature flag zeroed
// (`Bool32::default()` == FALSE). Hand-written (not derived) because `s_type`
// must be pre-tagged rather than zero.
impl ::std::default::Default for PhysicalDeviceVulkan12Features {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            sampler_mirror_clamp_to_edge: Bool32::default(),
            draw_indirect_count: Bool32::default(),
            storage_buffer8_bit_access: Bool32::default(),
            uniform_and_storage_buffer8_bit_access: Bool32::default(),
            storage_push_constant8: Bool32::default(),
            shader_buffer_int64_atomics: Bool32::default(),
            shader_shared_int64_atomics: Bool32::default(),
            shader_float16: Bool32::default(),
            shader_int8: Bool32::default(),
            descriptor_indexing: Bool32::default(),
            shader_input_attachment_array_dynamic_indexing: Bool32::default(),
            shader_uniform_texel_buffer_array_dynamic_indexing: Bool32::default(),
            shader_storage_texel_buffer_array_dynamic_indexing: Bool32::default(),
            shader_uniform_buffer_array_non_uniform_indexing: Bool32::default(),
            shader_sampled_image_array_non_uniform_indexing: Bool32::default(),
            shader_storage_buffer_array_non_uniform_indexing: Bool32::default(),
            shader_storage_image_array_non_uniform_indexing: Bool32::default(),
            shader_input_attachment_array_non_uniform_indexing: Bool32::default(),
            shader_uniform_texel_buffer_array_non_uniform_indexing: Bool32::default(),
            shader_storage_texel_buffer_array_non_uniform_indexing: Bool32::default(),
            descriptor_binding_uniform_buffer_update_after_bind: Bool32::default(),
            descriptor_binding_sampled_image_update_after_bind: Bool32::default(),
            descriptor_binding_storage_image_update_after_bind: Bool32::default(),
            descriptor_binding_storage_buffer_update_after_bind: Bool32::default(),
            descriptor_binding_uniform_texel_buffer_update_after_bind: Bool32::default(),
            descriptor_binding_storage_texel_buffer_update_after_bind: Bool32::default(),
            descriptor_binding_update_unused_while_pending: Bool32::default(),
            descriptor_binding_partially_bound: Bool32::default(),
            descriptor_binding_variable_descriptor_count: Bool32::default(),
            runtime_descriptor_array: Bool32::default(),
            sampler_filter_minmax: Bool32::default(),
            scalar_block_layout: Bool32::default(),
            imageless_framebuffer: Bool32::default(),
            uniform_buffer_standard_layout: Bool32::default(),
            shader_subgroup_extended_types: Bool32::default(),
            separate_depth_stencil_layouts: Bool32::default(),
            host_query_reset: Bool32::default(),
            timeline_semaphore: Bool32::default(),
            buffer_device_address: Bool32::default(),
            buffer_device_address_capture_replay: Bool32::default(),
            buffer_device_address_multi_device: Bool32::default(),
            vulkan_memory_model: Bool32::default(),
            vulkan_memory_model_device_scope: Bool32::default(),
            vulkan_memory_model_availability_visibility_chains: Bool32::default(),
            shader_output_viewport_index: Bool32::default(),
            shader_output_layer: Bool32::default(),
            subgroup_broadcast_dynamic_id: Bool32::default(),
        }
    }
}
// Associates the struct with its VkStructureType discriminant, used to fill
// `s_type` and to type-check `p_next` chains at compile time.
unsafe impl TaggedStructure for PhysicalDeviceVulkan12Features {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
}
impl PhysicalDeviceVulkan12Features {
    // Creates a builder over a default-initialised struct (`s_type` tagged,
    // `p_next` null, all features FALSE).
    pub fn builder<'a>() -> PhysicalDeviceVulkan12FeaturesBuilder<'a> {
        PhysicalDeviceVulkan12FeaturesBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder for `PhysicalDeviceVulkan12Features`; `#[repr(transparent)]` keeps
// its layout identical to the raw struct so `&builder` is FFI-compatible.
#[repr(transparent)]
pub struct PhysicalDeviceVulkan12FeaturesBuilder<'a> {
    inner: PhysicalDeviceVulkan12Features,
    // Lifetime carrier only; no data is stored for `'a`.
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Generated markers: this struct may extend both
// `VkPhysicalDeviceFeatures2` (queries) and `VkDeviceCreateInfo` (enabling
// features at device creation) via their `p_next` chains.
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVulkan12FeaturesBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVulkan12Features {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVulkan12FeaturesBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVulkan12Features {}
// Deref/DerefMut expose the raw struct's fields without calling `build()`.
impl<'a> ::std::ops::Deref for PhysicalDeviceVulkan12FeaturesBuilder<'a> {
    type Target = PhysicalDeviceVulkan12Features;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceVulkan12FeaturesBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Chainable by-value setters, one per feature flag; each takes a Rust `bool`
// and stores it as `Bool32` via `.into()`.
impl<'a> PhysicalDeviceVulkan12FeaturesBuilder<'a> {
    #[inline]
    pub fn sampler_mirror_clamp_to_edge(mut self, sampler_mirror_clamp_to_edge: bool) -> Self {
        self.inner.sampler_mirror_clamp_to_edge = sampler_mirror_clamp_to_edge.into();
        self
    }
    #[inline]
    pub fn draw_indirect_count(mut self, draw_indirect_count: bool) -> Self {
        self.inner.draw_indirect_count = draw_indirect_count.into();
        self
    }
    #[inline]
    pub fn storage_buffer8_bit_access(mut self, storage_buffer8_bit_access: bool) -> Self {
        self.inner.storage_buffer8_bit_access = storage_buffer8_bit_access.into();
        self
    }
    #[inline]
    pub fn uniform_and_storage_buffer8_bit_access(
        mut self,
        uniform_and_storage_buffer8_bit_access: bool,
    ) -> Self {
        self.inner.uniform_and_storage_buffer8_bit_access =
            uniform_and_storage_buffer8_bit_access.into();
        self
    }
    #[inline]
    pub fn storage_push_constant8(mut self, storage_push_constant8: bool) -> Self {
        self.inner.storage_push_constant8 = storage_push_constant8.into();
        self
    }
    #[inline]
    pub fn shader_buffer_int64_atomics(mut self, shader_buffer_int64_atomics: bool) -> Self {
        self.inner.shader_buffer_int64_atomics = shader_buffer_int64_atomics.into();
        self
    }
    #[inline]
    pub fn shader_shared_int64_atomics(mut self, shader_shared_int64_atomics: bool) -> Self {
        self.inner.shader_shared_int64_atomics = shader_shared_int64_atomics.into();
        self
    }
    #[inline]
    pub fn shader_float16(mut self, shader_float16: bool) -> Self {
        self.inner.shader_float16 = shader_float16.into();
        self
    }
    #[inline]
    pub fn shader_int8(mut self, shader_int8: bool) -> Self {
        self.inner.shader_int8 = shader_int8.into();
        self
    }
    #[inline]
    pub fn descriptor_indexing(mut self, descriptor_indexing: bool) -> Self {
        self.inner.descriptor_indexing = descriptor_indexing.into();
        self
    }
    #[inline]
    pub fn shader_input_attachment_array_dynamic_indexing(
        mut self,
        shader_input_attachment_array_dynamic_indexing: bool,
    ) -> Self {
        self.inner.shader_input_attachment_array_dynamic_indexing =
            shader_input_attachment_array_dynamic_indexing.into();
        self
    }
    #[inline]
    pub fn shader_uniform_texel_buffer_array_dynamic_indexing(
        mut self,
        shader_uniform_texel_buffer_array_dynamic_indexing: bool,
    ) -> Self {
        self.inner
            .shader_uniform_texel_buffer_array_dynamic_indexing =
            shader_uniform_texel_buffer_array_dynamic_indexing.into();
        self
    }
    #[inline]
    pub fn shader_storage_texel_buffer_array_dynamic_indexing(
        mut self,
        shader_storage_texel_buffer_array_dynamic_indexing: bool,
    ) -> Self {
        self.inner
            .shader_storage_texel_buffer_array_dynamic_indexing =
            shader_storage_texel_buffer_array_dynamic_indexing.into();
        self
    }
    #[inline]
    pub fn shader_uniform_buffer_array_non_uniform_indexing(
        mut self,
        shader_uniform_buffer_array_non_uniform_indexing: bool,
    ) -> Self {
        self.inner.shader_uniform_buffer_array_non_uniform_indexing =
            shader_uniform_buffer_array_non_uniform_indexing.into();
        self
    }
    #[inline]
    pub fn shader_sampled_image_array_non_uniform_indexing(
        mut self,
        shader_sampled_image_array_non_uniform_indexing: bool,
    ) -> Self {
        self.inner.shader_sampled_image_array_non_uniform_indexing =
            shader_sampled_image_array_non_uniform_indexing.into();
        self
    }
    #[inline]
    pub fn shader_storage_buffer_array_non_uniform_indexing(
        mut self,
        shader_storage_buffer_array_non_uniform_indexing: bool,
    ) -> Self {
        self.inner.shader_storage_buffer_array_non_uniform_indexing =
            shader_storage_buffer_array_non_uniform_indexing.into();
        self
    }
    #[inline]
    pub fn shader_storage_image_array_non_uniform_indexing(
        mut self,
        shader_storage_image_array_non_uniform_indexing: bool,
    ) -> Self {
        self.inner.shader_storage_image_array_non_uniform_indexing =
            shader_storage_image_array_non_uniform_indexing.into();
        self
    }
    #[inline]
    pub fn shader_input_attachment_array_non_uniform_indexing(
        mut self,
        shader_input_attachment_array_non_uniform_indexing: bool,
    ) -> Self {
        self.inner
            .shader_input_attachment_array_non_uniform_indexing =
            shader_input_attachment_array_non_uniform_indexing.into();
        self
    }
    #[inline]
    pub fn shader_uniform_texel_buffer_array_non_uniform_indexing(
        mut self,
        shader_uniform_texel_buffer_array_non_uniform_indexing: bool,
    ) -> Self {
        self.inner
            .shader_uniform_texel_buffer_array_non_uniform_indexing =
            shader_uniform_texel_buffer_array_non_uniform_indexing.into();
        self
    }
    #[inline]
    pub fn shader_storage_texel_buffer_array_non_uniform_indexing(
        mut self,
        shader_storage_texel_buffer_array_non_uniform_indexing: bool,
    ) -> Self {
        self.inner
            .shader_storage_texel_buffer_array_non_uniform_indexing =
            shader_storage_texel_buffer_array_non_uniform_indexing.into();
        self
    }
    #[inline]
    pub fn descriptor_binding_uniform_buffer_update_after_bind(
        mut self,
        descriptor_binding_uniform_buffer_update_after_bind: bool,
    ) -> Self {
        self.inner
            .descriptor_binding_uniform_buffer_update_after_bind =
            descriptor_binding_uniform_buffer_update_after_bind.into();
        self
    }
    #[inline]
    pub fn descriptor_binding_sampled_image_update_after_bind(
        mut self,
        descriptor_binding_sampled_image_update_after_bind: bool,
    ) -> Self {
        self.inner
            .descriptor_binding_sampled_image_update_after_bind =
            descriptor_binding_sampled_image_update_after_bind.into();
        self
    }
    #[inline]
    pub fn descriptor_binding_storage_image_update_after_bind(
        mut self,
        descriptor_binding_storage_image_update_after_bind: bool,
    ) -> Self {
        self.inner
            .descriptor_binding_storage_image_update_after_bind =
            descriptor_binding_storage_image_update_after_bind.into();
        self
    }
    #[inline]
    pub fn descriptor_binding_storage_buffer_update_after_bind(
        mut self,
        descriptor_binding_storage_buffer_update_after_bind: bool,
    ) -> Self {
        self.inner
            .descriptor_binding_storage_buffer_update_after_bind =
            descriptor_binding_storage_buffer_update_after_bind.into();
        self
    }
    #[inline]
    pub fn descriptor_binding_uniform_texel_buffer_update_after_bind(
        mut self,
        descriptor_binding_uniform_texel_buffer_update_after_bind: bool,
    ) -> Self {
        self.inner
            .descriptor_binding_uniform_texel_buffer_update_after_bind =
            descriptor_binding_uniform_texel_buffer_update_after_bind.into();
        self
    }
    #[inline]
    pub fn descriptor_binding_storage_texel_buffer_update_after_bind(
        mut self,
        descriptor_binding_storage_texel_buffer_update_after_bind: bool,
    ) -> Self {
        self.inner
            .descriptor_binding_storage_texel_buffer_update_after_bind =
            descriptor_binding_storage_texel_buffer_update_after_bind.into();
        self
    }
    #[inline]
    pub fn descriptor_binding_update_unused_while_pending(
        mut self,
        descriptor_binding_update_unused_while_pending: bool,
    ) -> Self {
        self.inner.descriptor_binding_update_unused_while_pending =
            descriptor_binding_update_unused_while_pending.into();
        self
    }
    #[inline]
    pub fn descriptor_binding_partially_bound(
        mut self,
        descriptor_binding_partially_bound: bool,
    ) -> Self {
        self.inner.descriptor_binding_partially_bound = descriptor_binding_partially_bound.into();
        self
    }
    #[inline]
    pub fn descriptor_binding_variable_descriptor_count(
        mut self,
        descriptor_binding_variable_descriptor_count: bool,
    ) -> Self {
        self.inner.descriptor_binding_variable_descriptor_count =
            descriptor_binding_variable_descriptor_count.into();
        self
    }
    #[inline]
    pub fn runtime_descriptor_array(mut self, runtime_descriptor_array: bool) -> Self {
        self.inner.runtime_descriptor_array = runtime_descriptor_array.into();
        self
    }
    #[inline]
    pub fn sampler_filter_minmax(mut self, sampler_filter_minmax: bool) -> Self {
        self.inner.sampler_filter_minmax = sampler_filter_minmax.into();
        self
    }
    #[inline]
    pub fn scalar_block_layout(mut self, scalar_block_layout: bool) -> Self {
        self.inner.scalar_block_layout = scalar_block_layout.into();
        self
    }
    #[inline]
    pub fn imageless_framebuffer(mut self, imageless_framebuffer: bool) -> Self {
        self.inner.imageless_framebuffer = imageless_framebuffer.into();
        self
    }
    #[inline]
    pub fn uniform_buffer_standard_layout(mut self, uniform_buffer_standard_layout: bool) -> Self {
        self.inner.uniform_buffer_standard_layout = uniform_buffer_standard_layout.into();
        self
    }
    #[inline]
    pub fn shader_subgroup_extended_types(mut self, shader_subgroup_extended_types: bool) -> Self {
        self.inner.shader_subgroup_extended_types = shader_subgroup_extended_types.into();
        self
    }
    #[inline]
    pub fn separate_depth_stencil_layouts(mut self, separate_depth_stencil_layouts: bool) -> Self {
        self.inner.separate_depth_stencil_layouts = separate_depth_stencil_layouts.into();
        self
    }
    #[inline]
    pub fn host_query_reset(mut self, host_query_reset: bool) -> Self {
        self.inner.host_query_reset = host_query_reset.into();
        self
    }
    #[inline]
    pub fn timeline_semaphore(mut self, timeline_semaphore: bool) -> Self {
        self.inner.timeline_semaphore = timeline_semaphore.into();
        self
    }
    #[inline]
    pub fn buffer_device_address(mut self, buffer_device_address: bool) -> Self {
        self.inner.buffer_device_address = buffer_device_address.into();
        self
    }
    #[inline]
    pub fn buffer_device_address_capture_replay(
        mut self,
        buffer_device_address_capture_replay: bool,
    ) -> Self {
        self.inner.buffer_device_address_capture_replay =
            buffer_device_address_capture_replay.into();
        self
    }
    #[inline]
    pub fn buffer_device_address_multi_device(
        mut self,
        buffer_device_address_multi_device: bool,
    ) -> Self {
        self.inner.buffer_device_address_multi_device = buffer_device_address_multi_device.into();
        self
    }
    #[inline]
    pub fn vulkan_memory_model(mut self, vulkan_memory_model: bool) -> Self {
        self.inner.vulkan_memory_model = vulkan_memory_model.into();
        self
    }
    #[inline]
    pub fn vulkan_memory_model_device_scope(
        mut self,
        vulkan_memory_model_device_scope: bool,
    ) -> Self {
        self.inner.vulkan_memory_model_device_scope = vulkan_memory_model_device_scope.into();
        self
    }
    #[inline]
    pub fn vulkan_memory_model_availability_visibility_chains(
        mut self,
        vulkan_memory_model_availability_visibility_chains: bool,
    ) -> Self {
        self.inner
            .vulkan_memory_model_availability_visibility_chains =
            vulkan_memory_model_availability_visibility_chains.into();
        self
    }
    #[inline]
    pub fn shader_output_viewport_index(mut self, shader_output_viewport_index: bool) -> Self {
        self.inner.shader_output_viewport_index = shader_output_viewport_index.into();
        self
    }
    #[inline]
    pub fn shader_output_layer(mut self, shader_output_layer: bool) -> Self {
        self.inner.shader_output_layer = shader_output_layer.into();
        self
    }
    #[inline]
    pub fn subgroup_broadcast_dynamic_id(mut self, subgroup_broadcast_dynamic_id: bool) -> Self {
        self.inner.subgroup_broadcast_dynamic_id = subgroup_broadcast_dynamic_id.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceVulkan12Features {
        self.inner
    }
}
// FFI mirror of `VkPhysicalDeviceVulkan12Properties`. `#[repr(C)]` field order
// is ABI and must never change. Unlike most structs here, `Debug` is not
// derived: a manual impl (further down in this file) renders the `c_char`
// name/info arrays as C strings instead of raw byte arrays.
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceVulkan12Properties.html>"]
pub struct PhysicalDeviceVulkan12Properties {
    // Structure-type tag + extension chain pointer, common to all Vulkan structs.
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub driver_id: DriverId,
    // Fixed-size NUL-terminated C strings (hence the manual Debug impl).
    pub driver_name: [c_char; MAX_DRIVER_NAME_SIZE],
    pub driver_info: [c_char; MAX_DRIVER_INFO_SIZE],
    pub conformance_version: ConformanceVersion,
    pub denorm_behavior_independence: ShaderFloatControlsIndependence,
    pub rounding_mode_independence: ShaderFloatControlsIndependence,
    pub shader_signed_zero_inf_nan_preserve_float16: Bool32,
    pub shader_signed_zero_inf_nan_preserve_float32: Bool32,
    pub shader_signed_zero_inf_nan_preserve_float64: Bool32,
    pub shader_denorm_preserve_float16: Bool32,
    pub shader_denorm_preserve_float32: Bool32,
    pub shader_denorm_preserve_float64: Bool32,
    pub shader_denorm_flush_to_zero_float16: Bool32,
    pub shader_denorm_flush_to_zero_float32: Bool32,
    pub shader_denorm_flush_to_zero_float64: Bool32,
    pub shader_rounding_mode_rte_float16: Bool32,
    pub shader_rounding_mode_rte_float32: Bool32,
    pub shader_rounding_mode_rte_float64: Bool32,
    pub shader_rounding_mode_rtz_float16: Bool32,
    pub shader_rounding_mode_rtz_float32: Bool32,
    pub shader_rounding_mode_rtz_float64: Bool32,
    pub max_update_after_bind_descriptors_in_all_pools: u32,
    pub shader_uniform_buffer_array_non_uniform_indexing_native: Bool32,
    pub shader_sampled_image_array_non_uniform_indexing_native: Bool32,
    pub shader_storage_buffer_array_non_uniform_indexing_native: Bool32,
    pub shader_storage_image_array_non_uniform_indexing_native: Bool32,
    pub shader_input_attachment_array_non_uniform_indexing_native: Bool32,
    pub robust_buffer_access_update_after_bind: Bool32,
    pub quad_divergent_implicit_lod: Bool32,
    pub max_per_stage_descriptor_update_after_bind_samplers: u32,
    pub max_per_stage_descriptor_update_after_bind_uniform_buffers: u32,
    pub max_per_stage_descriptor_update_after_bind_storage_buffers: u32,
    pub max_per_stage_descriptor_update_after_bind_sampled_images: u32,
    pub max_per_stage_descriptor_update_after_bind_storage_images: u32,
    pub max_per_stage_descriptor_update_after_bind_input_attachments: u32,
    pub max_per_stage_update_after_bind_resources: u32,
    pub max_descriptor_set_update_after_bind_samplers: u32,
    pub max_descriptor_set_update_after_bind_uniform_buffers: u32,
    pub max_descriptor_set_update_after_bind_uniform_buffers_dynamic: u32,
    pub max_descriptor_set_update_after_bind_storage_buffers: u32,
    pub max_descriptor_set_update_after_bind_storage_buffers_dynamic: u32,
    pub max_descriptor_set_update_after_bind_sampled_images: u32,
    pub max_descriptor_set_update_after_bind_storage_images: u32,
    pub max_descriptor_set_update_after_bind_input_attachments: u32,
    pub supported_depth_resolve_modes: ResolveModeFlags,
    pub supported_stencil_resolve_modes: ResolveModeFlags,
    pub independent_resolve_none: Bool32,
    pub independent_resolve: Bool32,
    pub filter_minmax_single_component_formats: Bool32,
    pub filter_minmax_image_component_mapping: Bool32,
    pub max_timeline_semaphore_value_difference: u64,
    pub framebuffer_integer_color_sample_counts: SampleCountFlags,
}
// Manual Debug (only under the "debug" feature): identical to a derive except
// that `driver_name`/`driver_info` are printed as C strings.
// SAFETY of `CStr::from_ptr`: assumes the driver wrote NUL-terminated data
// into these fixed-size arrays, as the Vulkan spec requires for
// VkPhysicalDeviceVulkan12Properties — NOTE(review): a zeroed/default struct
// satisfies this too (all-zero array), so the unsafe reads stay in bounds.
#[cfg(feature = "debug")]
impl fmt::Debug for PhysicalDeviceVulkan12Properties {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("PhysicalDeviceVulkan12Properties")
            .field("s_type", &self.s_type)
            .field("p_next", &self.p_next)
            .field("driver_id", &self.driver_id)
            .field("driver_name", &unsafe {
                ::std::ffi::CStr::from_ptr(self.driver_name.as_ptr())
            })
            .field("driver_info", &unsafe {
                ::std::ffi::CStr::from_ptr(self.driver_info.as_ptr())
            })
            .field("conformance_version", &self.conformance_version)
            .field("denorm_behavior_independence", &self.denorm_behavior_independence)
            .field("rounding_mode_independence", &self.rounding_mode_independence)
            .field(
                "shader_signed_zero_inf_nan_preserve_float16",
                &self.shader_signed_zero_inf_nan_preserve_float16,
            )
            .field(
                "shader_signed_zero_inf_nan_preserve_float32",
                &self.shader_signed_zero_inf_nan_preserve_float32,
            )
            .field(
                "shader_signed_zero_inf_nan_preserve_float64",
                &self.shader_signed_zero_inf_nan_preserve_float64,
            )
            .field("shader_denorm_preserve_float16", &self.shader_denorm_preserve_float16)
            .field("shader_denorm_preserve_float32", &self.shader_denorm_preserve_float32)
            .field("shader_denorm_preserve_float64", &self.shader_denorm_preserve_float64)
            .field(
                "shader_denorm_flush_to_zero_float16",
                &self.shader_denorm_flush_to_zero_float16,
            )
            .field(
                "shader_denorm_flush_to_zero_float32",
                &self.shader_denorm_flush_to_zero_float32,
            )
            .field(
                "shader_denorm_flush_to_zero_float64",
                &self.shader_denorm_flush_to_zero_float64,
            )
            .field("shader_rounding_mode_rte_float16", &self.shader_rounding_mode_rte_float16)
            .field("shader_rounding_mode_rte_float32", &self.shader_rounding_mode_rte_float32)
            .field("shader_rounding_mode_rte_float64", &self.shader_rounding_mode_rte_float64)
            .field("shader_rounding_mode_rtz_float16", &self.shader_rounding_mode_rtz_float16)
            .field("shader_rounding_mode_rtz_float32", &self.shader_rounding_mode_rtz_float32)
            .field("shader_rounding_mode_rtz_float64", &self.shader_rounding_mode_rtz_float64)
            .field(
                "max_update_after_bind_descriptors_in_all_pools",
                &self.max_update_after_bind_descriptors_in_all_pools,
            )
            .field(
                "shader_uniform_buffer_array_non_uniform_indexing_native",
                &self.shader_uniform_buffer_array_non_uniform_indexing_native,
            )
            .field(
                "shader_sampled_image_array_non_uniform_indexing_native",
                &self.shader_sampled_image_array_non_uniform_indexing_native,
            )
            .field(
                "shader_storage_buffer_array_non_uniform_indexing_native",
                &self.shader_storage_buffer_array_non_uniform_indexing_native,
            )
            .field(
                "shader_storage_image_array_non_uniform_indexing_native",
                &self.shader_storage_image_array_non_uniform_indexing_native,
            )
            .field(
                "shader_input_attachment_array_non_uniform_indexing_native",
                &self.shader_input_attachment_array_non_uniform_indexing_native,
            )
            .field(
                "robust_buffer_access_update_after_bind",
                &self.robust_buffer_access_update_after_bind,
            )
            .field("quad_divergent_implicit_lod", &self.quad_divergent_implicit_lod)
            .field(
                "max_per_stage_descriptor_update_after_bind_samplers",
                &self.max_per_stage_descriptor_update_after_bind_samplers,
            )
            .field(
                "max_per_stage_descriptor_update_after_bind_uniform_buffers",
                &self.max_per_stage_descriptor_update_after_bind_uniform_buffers,
            )
            .field(
                "max_per_stage_descriptor_update_after_bind_storage_buffers",
                &self.max_per_stage_descriptor_update_after_bind_storage_buffers,
            )
            .field(
                "max_per_stage_descriptor_update_after_bind_sampled_images",
                &self.max_per_stage_descriptor_update_after_bind_sampled_images,
            )
            .field(
                "max_per_stage_descriptor_update_after_bind_storage_images",
                &self.max_per_stage_descriptor_update_after_bind_storage_images,
            )
            .field(
                "max_per_stage_descriptor_update_after_bind_input_attachments",
                &self.max_per_stage_descriptor_update_after_bind_input_attachments,
            )
            .field(
                "max_per_stage_update_after_bind_resources",
                &self.max_per_stage_update_after_bind_resources,
            )
            .field(
                "max_descriptor_set_update_after_bind_samplers",
                &self.max_descriptor_set_update_after_bind_samplers,
            )
            .field(
                "max_descriptor_set_update_after_bind_uniform_buffers",
                &self.max_descriptor_set_update_after_bind_uniform_buffers,
            )
            .field(
                "max_descriptor_set_update_after_bind_uniform_buffers_dynamic",
                &self.max_descriptor_set_update_after_bind_uniform_buffers_dynamic,
            )
            .field(
                "max_descriptor_set_update_after_bind_storage_buffers",
                &self.max_descriptor_set_update_after_bind_storage_buffers,
            )
            .field(
                "max_descriptor_set_update_after_bind_storage_buffers_dynamic",
                &self.max_descriptor_set_update_after_bind_storage_buffers_dynamic,
            )
            .field(
                "max_descriptor_set_update_after_bind_sampled_images",
                &self.max_descriptor_set_update_after_bind_sampled_images,
            )
            .field(
                "max_descriptor_set_update_after_bind_storage_images",
                &self.max_descriptor_set_update_after_bind_storage_images,
            )
            .field(
                "max_descriptor_set_update_after_bind_input_attachments",
                &self.max_descriptor_set_update_after_bind_input_attachments,
            )
            .field("supported_depth_resolve_modes", &self.supported_depth_resolve_modes)
            .field("supported_stencil_resolve_modes", &self.supported_stencil_resolve_modes)
            .field("independent_resolve_none", &self.independent_resolve_none)
            .field("independent_resolve", &self.independent_resolve)
            .field(
                "filter_minmax_single_component_formats",
                &self.filter_minmax_single_component_formats,
            )
            .field(
                "filter_minmax_image_component_mapping",
                &self.filter_minmax_image_component_mapping,
            )
            .field(
                "max_timeline_semaphore_value_difference",
                &self.max_timeline_semaphore_value_difference,
            )
            .field(
                "framebuffer_integer_color_sample_counts",
                &self.framebuffer_integer_color_sample_counts,
            )
            .finish()
    }
}
+impl ::std::default::Default for PhysicalDeviceVulkan12Properties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ driver_id: DriverId::default(),
+ driver_name: unsafe { ::std::mem::zeroed() },
+ driver_info: unsafe { ::std::mem::zeroed() },
+ conformance_version: ConformanceVersion::default(),
+ denorm_behavior_independence: ShaderFloatControlsIndependence::default(),
+ rounding_mode_independence: ShaderFloatControlsIndependence::default(),
+ shader_signed_zero_inf_nan_preserve_float16: Bool32::default(),
+ shader_signed_zero_inf_nan_preserve_float32: Bool32::default(),
+ shader_signed_zero_inf_nan_preserve_float64: Bool32::default(),
+ shader_denorm_preserve_float16: Bool32::default(),
+ shader_denorm_preserve_float32: Bool32::default(),
+ shader_denorm_preserve_float64: Bool32::default(),
+ shader_denorm_flush_to_zero_float16: Bool32::default(),
+ shader_denorm_flush_to_zero_float32: Bool32::default(),
+ shader_denorm_flush_to_zero_float64: Bool32::default(),
+ shader_rounding_mode_rte_float16: Bool32::default(),
+ shader_rounding_mode_rte_float32: Bool32::default(),
+ shader_rounding_mode_rte_float64: Bool32::default(),
+ shader_rounding_mode_rtz_float16: Bool32::default(),
+ shader_rounding_mode_rtz_float32: Bool32::default(),
+ shader_rounding_mode_rtz_float64: Bool32::default(),
+ max_update_after_bind_descriptors_in_all_pools: u32::default(),
+ shader_uniform_buffer_array_non_uniform_indexing_native: Bool32::default(),
+ shader_sampled_image_array_non_uniform_indexing_native: Bool32::default(),
+ shader_storage_buffer_array_non_uniform_indexing_native: Bool32::default(),
+ shader_storage_image_array_non_uniform_indexing_native: Bool32::default(),
+ shader_input_attachment_array_non_uniform_indexing_native: Bool32::default(),
+ robust_buffer_access_update_after_bind: Bool32::default(),
+ quad_divergent_implicit_lod: Bool32::default(),
+ max_per_stage_descriptor_update_after_bind_samplers: u32::default(),
+ max_per_stage_descriptor_update_after_bind_uniform_buffers: u32::default(),
+ max_per_stage_descriptor_update_after_bind_storage_buffers: u32::default(),
+ max_per_stage_descriptor_update_after_bind_sampled_images: u32::default(),
+ max_per_stage_descriptor_update_after_bind_storage_images: u32::default(),
+ max_per_stage_descriptor_update_after_bind_input_attachments: u32::default(),
+ max_per_stage_update_after_bind_resources: u32::default(),
+ max_descriptor_set_update_after_bind_samplers: u32::default(),
+ max_descriptor_set_update_after_bind_uniform_buffers: u32::default(),
+ max_descriptor_set_update_after_bind_uniform_buffers_dynamic: u32::default(),
+ max_descriptor_set_update_after_bind_storage_buffers: u32::default(),
+ max_descriptor_set_update_after_bind_storage_buffers_dynamic: u32::default(),
+ max_descriptor_set_update_after_bind_sampled_images: u32::default(),
+ max_descriptor_set_update_after_bind_storage_images: u32::default(),
+ max_descriptor_set_update_after_bind_input_attachments: u32::default(),
+ supported_depth_resolve_modes: ResolveModeFlags::default(),
+ supported_stencil_resolve_modes: ResolveModeFlags::default(),
+ independent_resolve_none: Bool32::default(),
+ independent_resolve: Bool32::default(),
+ filter_minmax_single_component_formats: Bool32::default(),
+ filter_minmax_image_component_mapping: Bool32::default(),
+ max_timeline_semaphore_value_difference: u64::default(),
+ framebuffer_integer_color_sample_counts: SampleCountFlags::default(),
+ }
+ }
+}
+// SAFETY: the tag names the VK_STRUCTURE_TYPE_* constant that matches this
+// struct, so writing it into `s_type` is sound for pNext-chain traversal.
+unsafe impl TaggedStructure for PhysicalDeviceVulkan12Properties {
+    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES;
+}
+impl PhysicalDeviceVulkan12Properties {
+    /// Creates a builder wrapping `Self::default()`. The builder derefs to the
+    /// wrapped struct, so `&builder` can be passed straight to Vulkan calls.
+    pub fn builder<'a>() -> PhysicalDeviceVulkan12PropertiesBuilder<'a> {
+        PhysicalDeviceVulkan12PropertiesBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+/// Lifetime-carrying builder for [`PhysicalDeviceVulkan12Properties`].
+/// `repr(transparent)` guarantees it is layout-identical to the wrapped struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceVulkan12PropertiesBuilder<'a> {
+    inner: PhysicalDeviceVulkan12Properties,
+    // Ties the builder to lifetime 'a without storing any borrowed data itself.
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// SAFETY: the Vulkan spec lists VkPhysicalDeviceVulkan12Properties as a valid
+// pNext extension of VkPhysicalDeviceProperties2; builder and struct share the
+// same layout (repr(transparent)), so both may be pushed onto that chain.
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceVulkan12PropertiesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceVulkan12Properties {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceVulkan12PropertiesBuilder<'a> {
+    type Target = PhysicalDeviceVulkan12Properties;
+    // Allows a &builder to be used wherever a &struct reference is expected.
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceVulkan12PropertiesBuilder<'a> {
+    // Mutable counterpart of Deref: exposes the wrapped struct for in-place edits.
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Fluent setters for every VkPhysicalDeviceVulkan12Properties field. Each method
+// consumes the builder by value and returns it; `bool` arguments are widened to
+// Bool32 via `.into()`. NOTE(review): properties structs are normally filled by
+// the implementation when queried, so these setters mainly serve tests/mocks —
+// confirm against callers before relying on them.
+impl<'a> PhysicalDeviceVulkan12PropertiesBuilder<'a> {
+    #[inline]
+    pub fn driver_id(mut self, driver_id: DriverId) -> Self {
+        self.inner.driver_id = driver_id;
+        self
+    }
+    #[inline]
+    pub fn driver_name(mut self, driver_name: [c_char; MAX_DRIVER_NAME_SIZE]) -> Self {
+        self.inner.driver_name = driver_name;
+        self
+    }
+    #[inline]
+    pub fn driver_info(mut self, driver_info: [c_char; MAX_DRIVER_INFO_SIZE]) -> Self {
+        self.inner.driver_info = driver_info;
+        self
+    }
+    #[inline]
+    pub fn conformance_version(mut self, conformance_version: ConformanceVersion) -> Self {
+        self.inner.conformance_version = conformance_version;
+        self
+    }
+    #[inline]
+    pub fn denorm_behavior_independence(
+        mut self,
+        denorm_behavior_independence: ShaderFloatControlsIndependence,
+    ) -> Self {
+        self.inner.denorm_behavior_independence = denorm_behavior_independence;
+        self
+    }
+    #[inline]
+    pub fn rounding_mode_independence(
+        mut self,
+        rounding_mode_independence: ShaderFloatControlsIndependence,
+    ) -> Self {
+        self.inner.rounding_mode_independence = rounding_mode_independence;
+        self
+    }
+    #[inline]
+    pub fn shader_signed_zero_inf_nan_preserve_float16(
+        mut self,
+        shader_signed_zero_inf_nan_preserve_float16: bool,
+    ) -> Self {
+        self.inner.shader_signed_zero_inf_nan_preserve_float16 =
+            shader_signed_zero_inf_nan_preserve_float16.into();
+        self
+    }
+    #[inline]
+    pub fn shader_signed_zero_inf_nan_preserve_float32(
+        mut self,
+        shader_signed_zero_inf_nan_preserve_float32: bool,
+    ) -> Self {
+        self.inner.shader_signed_zero_inf_nan_preserve_float32 =
+            shader_signed_zero_inf_nan_preserve_float32.into();
+        self
+    }
+    #[inline]
+    pub fn shader_signed_zero_inf_nan_preserve_float64(
+        mut self,
+        shader_signed_zero_inf_nan_preserve_float64: bool,
+    ) -> Self {
+        self.inner.shader_signed_zero_inf_nan_preserve_float64 =
+            shader_signed_zero_inf_nan_preserve_float64.into();
+        self
+    }
+    #[inline]
+    pub fn shader_denorm_preserve_float16(mut self, shader_denorm_preserve_float16: bool) -> Self {
+        self.inner.shader_denorm_preserve_float16 = shader_denorm_preserve_float16.into();
+        self
+    }
+    #[inline]
+    pub fn shader_denorm_preserve_float32(mut self, shader_denorm_preserve_float32: bool) -> Self {
+        self.inner.shader_denorm_preserve_float32 = shader_denorm_preserve_float32.into();
+        self
+    }
+    #[inline]
+    pub fn shader_denorm_preserve_float64(mut self, shader_denorm_preserve_float64: bool) -> Self {
+        self.inner.shader_denorm_preserve_float64 = shader_denorm_preserve_float64.into();
+        self
+    }
+    #[inline]
+    pub fn shader_denorm_flush_to_zero_float16(
+        mut self,
+        shader_denorm_flush_to_zero_float16: bool,
+    ) -> Self {
+        self.inner.shader_denorm_flush_to_zero_float16 = shader_denorm_flush_to_zero_float16.into();
+        self
+    }
+    #[inline]
+    pub fn shader_denorm_flush_to_zero_float32(
+        mut self,
+        shader_denorm_flush_to_zero_float32: bool,
+    ) -> Self {
+        self.inner.shader_denorm_flush_to_zero_float32 = shader_denorm_flush_to_zero_float32.into();
+        self
+    }
+    #[inline]
+    pub fn shader_denorm_flush_to_zero_float64(
+        mut self,
+        shader_denorm_flush_to_zero_float64: bool,
+    ) -> Self {
+        self.inner.shader_denorm_flush_to_zero_float64 = shader_denorm_flush_to_zero_float64.into();
+        self
+    }
+    #[inline]
+    pub fn shader_rounding_mode_rte_float16(
+        mut self,
+        shader_rounding_mode_rte_float16: bool,
+    ) -> Self {
+        self.inner.shader_rounding_mode_rte_float16 = shader_rounding_mode_rte_float16.into();
+        self
+    }
+    #[inline]
+    pub fn shader_rounding_mode_rte_float32(
+        mut self,
+        shader_rounding_mode_rte_float32: bool,
+    ) -> Self {
+        self.inner.shader_rounding_mode_rte_float32 = shader_rounding_mode_rte_float32.into();
+        self
+    }
+    #[inline]
+    pub fn shader_rounding_mode_rte_float64(
+        mut self,
+        shader_rounding_mode_rte_float64: bool,
+    ) -> Self {
+        self.inner.shader_rounding_mode_rte_float64 = shader_rounding_mode_rte_float64.into();
+        self
+    }
+    #[inline]
+    pub fn shader_rounding_mode_rtz_float16(
+        mut self,
+        shader_rounding_mode_rtz_float16: bool,
+    ) -> Self {
+        self.inner.shader_rounding_mode_rtz_float16 = shader_rounding_mode_rtz_float16.into();
+        self
+    }
+    #[inline]
+    pub fn shader_rounding_mode_rtz_float32(
+        mut self,
+        shader_rounding_mode_rtz_float32: bool,
+    ) -> Self {
+        self.inner.shader_rounding_mode_rtz_float32 = shader_rounding_mode_rtz_float32.into();
+        self
+    }
+    #[inline]
+    pub fn shader_rounding_mode_rtz_float64(
+        mut self,
+        shader_rounding_mode_rtz_float64: bool,
+    ) -> Self {
+        self.inner.shader_rounding_mode_rtz_float64 = shader_rounding_mode_rtz_float64.into();
+        self
+    }
+    #[inline]
+    pub fn max_update_after_bind_descriptors_in_all_pools(
+        mut self,
+        max_update_after_bind_descriptors_in_all_pools: u32,
+    ) -> Self {
+        self.inner.max_update_after_bind_descriptors_in_all_pools =
+            max_update_after_bind_descriptors_in_all_pools;
+        self
+    }
+    #[inline]
+    pub fn shader_uniform_buffer_array_non_uniform_indexing_native(
+        mut self,
+        shader_uniform_buffer_array_non_uniform_indexing_native: bool,
+    ) -> Self {
+        self.inner
+            .shader_uniform_buffer_array_non_uniform_indexing_native =
+            shader_uniform_buffer_array_non_uniform_indexing_native.into();
+        self
+    }
+    #[inline]
+    pub fn shader_sampled_image_array_non_uniform_indexing_native(
+        mut self,
+        shader_sampled_image_array_non_uniform_indexing_native: bool,
+    ) -> Self {
+        self.inner
+            .shader_sampled_image_array_non_uniform_indexing_native =
+            shader_sampled_image_array_non_uniform_indexing_native.into();
+        self
+    }
+    #[inline]
+    pub fn shader_storage_buffer_array_non_uniform_indexing_native(
+        mut self,
+        shader_storage_buffer_array_non_uniform_indexing_native: bool,
+    ) -> Self {
+        self.inner
+            .shader_storage_buffer_array_non_uniform_indexing_native =
+            shader_storage_buffer_array_non_uniform_indexing_native.into();
+        self
+    }
+    #[inline]
+    pub fn shader_storage_image_array_non_uniform_indexing_native(
+        mut self,
+        shader_storage_image_array_non_uniform_indexing_native: bool,
+    ) -> Self {
+        self.inner
+            .shader_storage_image_array_non_uniform_indexing_native =
+            shader_storage_image_array_non_uniform_indexing_native.into();
+        self
+    }
+    #[inline]
+    pub fn shader_input_attachment_array_non_uniform_indexing_native(
+        mut self,
+        shader_input_attachment_array_non_uniform_indexing_native: bool,
+    ) -> Self {
+        self.inner
+            .shader_input_attachment_array_non_uniform_indexing_native =
+            shader_input_attachment_array_non_uniform_indexing_native.into();
+        self
+    }
+    #[inline]
+    pub fn robust_buffer_access_update_after_bind(
+        mut self,
+        robust_buffer_access_update_after_bind: bool,
+    ) -> Self {
+        self.inner.robust_buffer_access_update_after_bind =
+            robust_buffer_access_update_after_bind.into();
+        self
+    }
+    #[inline]
+    pub fn quad_divergent_implicit_lod(mut self, quad_divergent_implicit_lod: bool) -> Self {
+        self.inner.quad_divergent_implicit_lod = quad_divergent_implicit_lod.into();
+        self
+    }
+    #[inline]
+    pub fn max_per_stage_descriptor_update_after_bind_samplers(
+        mut self,
+        max_per_stage_descriptor_update_after_bind_samplers: u32,
+    ) -> Self {
+        self.inner
+            .max_per_stage_descriptor_update_after_bind_samplers =
+            max_per_stage_descriptor_update_after_bind_samplers;
+        self
+    }
+    #[inline]
+    pub fn max_per_stage_descriptor_update_after_bind_uniform_buffers(
+        mut self,
+        max_per_stage_descriptor_update_after_bind_uniform_buffers: u32,
+    ) -> Self {
+        self.inner
+            .max_per_stage_descriptor_update_after_bind_uniform_buffers =
+            max_per_stage_descriptor_update_after_bind_uniform_buffers;
+        self
+    }
+    #[inline]
+    pub fn max_per_stage_descriptor_update_after_bind_storage_buffers(
+        mut self,
+        max_per_stage_descriptor_update_after_bind_storage_buffers: u32,
+    ) -> Self {
+        self.inner
+            .max_per_stage_descriptor_update_after_bind_storage_buffers =
+            max_per_stage_descriptor_update_after_bind_storage_buffers;
+        self
+    }
+    #[inline]
+    pub fn max_per_stage_descriptor_update_after_bind_sampled_images(
+        mut self,
+        max_per_stage_descriptor_update_after_bind_sampled_images: u32,
+    ) -> Self {
+        self.inner
+            .max_per_stage_descriptor_update_after_bind_sampled_images =
+            max_per_stage_descriptor_update_after_bind_sampled_images;
+        self
+    }
+    #[inline]
+    pub fn max_per_stage_descriptor_update_after_bind_storage_images(
+        mut self,
+        max_per_stage_descriptor_update_after_bind_storage_images: u32,
+    ) -> Self {
+        self.inner
+            .max_per_stage_descriptor_update_after_bind_storage_images =
+            max_per_stage_descriptor_update_after_bind_storage_images;
+        self
+    }
+    #[inline]
+    pub fn max_per_stage_descriptor_update_after_bind_input_attachments(
+        mut self,
+        max_per_stage_descriptor_update_after_bind_input_attachments: u32,
+    ) -> Self {
+        self.inner
+            .max_per_stage_descriptor_update_after_bind_input_attachments =
+            max_per_stage_descriptor_update_after_bind_input_attachments;
+        self
+    }
+    #[inline]
+    pub fn max_per_stage_update_after_bind_resources(
+        mut self,
+        max_per_stage_update_after_bind_resources: u32,
+    ) -> Self {
+        self.inner.max_per_stage_update_after_bind_resources =
+            max_per_stage_update_after_bind_resources;
+        self
+    }
+    #[inline]
+    pub fn max_descriptor_set_update_after_bind_samplers(
+        mut self,
+        max_descriptor_set_update_after_bind_samplers: u32,
+    ) -> Self {
+        self.inner.max_descriptor_set_update_after_bind_samplers =
+            max_descriptor_set_update_after_bind_samplers;
+        self
+    }
+    #[inline]
+    pub fn max_descriptor_set_update_after_bind_uniform_buffers(
+        mut self,
+        max_descriptor_set_update_after_bind_uniform_buffers: u32,
+    ) -> Self {
+        self.inner
+            .max_descriptor_set_update_after_bind_uniform_buffers =
+            max_descriptor_set_update_after_bind_uniform_buffers;
+        self
+    }
+    #[inline]
+    pub fn max_descriptor_set_update_after_bind_uniform_buffers_dynamic(
+        mut self,
+        max_descriptor_set_update_after_bind_uniform_buffers_dynamic: u32,
+    ) -> Self {
+        self.inner
+            .max_descriptor_set_update_after_bind_uniform_buffers_dynamic =
+            max_descriptor_set_update_after_bind_uniform_buffers_dynamic;
+        self
+    }
+    #[inline]
+    pub fn max_descriptor_set_update_after_bind_storage_buffers(
+        mut self,
+        max_descriptor_set_update_after_bind_storage_buffers: u32,
+    ) -> Self {
+        self.inner
+            .max_descriptor_set_update_after_bind_storage_buffers =
+            max_descriptor_set_update_after_bind_storage_buffers;
+        self
+    }
+    #[inline]
+    pub fn max_descriptor_set_update_after_bind_storage_buffers_dynamic(
+        mut self,
+        max_descriptor_set_update_after_bind_storage_buffers_dynamic: u32,
+    ) -> Self {
+        self.inner
+            .max_descriptor_set_update_after_bind_storage_buffers_dynamic =
+            max_descriptor_set_update_after_bind_storage_buffers_dynamic;
+        self
+    }
+    #[inline]
+    pub fn max_descriptor_set_update_after_bind_sampled_images(
+        mut self,
+        max_descriptor_set_update_after_bind_sampled_images: u32,
+    ) -> Self {
+        self.inner
+            .max_descriptor_set_update_after_bind_sampled_images =
+            max_descriptor_set_update_after_bind_sampled_images;
+        self
+    }
+    #[inline]
+    pub fn max_descriptor_set_update_after_bind_storage_images(
+        mut self,
+        max_descriptor_set_update_after_bind_storage_images: u32,
+    ) -> Self {
+        self.inner
+            .max_descriptor_set_update_after_bind_storage_images =
+            max_descriptor_set_update_after_bind_storage_images;
+        self
+    }
+    #[inline]
+    pub fn max_descriptor_set_update_after_bind_input_attachments(
+        mut self,
+        max_descriptor_set_update_after_bind_input_attachments: u32,
+    ) -> Self {
+        self.inner
+            .max_descriptor_set_update_after_bind_input_attachments =
+            max_descriptor_set_update_after_bind_input_attachments;
+        self
+    }
+    #[inline]
+    pub fn supported_depth_resolve_modes(
+        mut self,
+        supported_depth_resolve_modes: ResolveModeFlags,
+    ) -> Self {
+        self.inner.supported_depth_resolve_modes = supported_depth_resolve_modes;
+        self
+    }
+    #[inline]
+    pub fn supported_stencil_resolve_modes(
+        mut self,
+        supported_stencil_resolve_modes: ResolveModeFlags,
+    ) -> Self {
+        self.inner.supported_stencil_resolve_modes = supported_stencil_resolve_modes;
+        self
+    }
+    #[inline]
+    pub fn independent_resolve_none(mut self, independent_resolve_none: bool) -> Self {
+        self.inner.independent_resolve_none = independent_resolve_none.into();
+        self
+    }
+    #[inline]
+    pub fn independent_resolve(mut self, independent_resolve: bool) -> Self {
+        self.inner.independent_resolve = independent_resolve.into();
+        self
+    }
+    #[inline]
+    pub fn filter_minmax_single_component_formats(
+        mut self,
+        filter_minmax_single_component_formats: bool,
+    ) -> Self {
+        self.inner.filter_minmax_single_component_formats =
+            filter_minmax_single_component_formats.into();
+        self
+    }
+    #[inline]
+    pub fn filter_minmax_image_component_mapping(
+        mut self,
+        filter_minmax_image_component_mapping: bool,
+    ) -> Self {
+        self.inner.filter_minmax_image_component_mapping =
+            filter_minmax_image_component_mapping.into();
+        self
+    }
+    #[inline]
+    pub fn max_timeline_semaphore_value_difference(
+        mut self,
+        max_timeline_semaphore_value_difference: u64,
+    ) -> Self {
+        self.inner.max_timeline_semaphore_value_difference =
+            max_timeline_semaphore_value_difference;
+        self
+    }
+    #[inline]
+    pub fn framebuffer_integer_color_sample_counts(
+        mut self,
+        framebuffer_integer_color_sample_counts: SampleCountFlags,
+    ) -> Self {
+        self.inner.framebuffer_integer_color_sample_counts =
+            framebuffer_integer_color_sample_counts;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceVulkan12Properties {
+        self.inner
+    }
+}
+// FFI mirror of VkPhysicalDeviceVulkan13Features. Every feature field is a
+// Bool32 toggle; the struct extends VkPhysicalDeviceFeatures2 (for querying)
+// and VkDeviceCreateInfo (for enabling) via its p_next chain — see the
+// Extends* impls below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceVulkan13Features.html>"]
+pub struct PhysicalDeviceVulkan13Features {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub robust_image_access: Bool32,
+    pub inline_uniform_block: Bool32,
+    pub descriptor_binding_inline_uniform_block_update_after_bind: Bool32,
+    pub pipeline_creation_cache_control: Bool32,
+    pub private_data: Bool32,
+    pub shader_demote_to_helper_invocation: Bool32,
+    pub shader_terminate_invocation: Bool32,
+    pub subgroup_size_control: Bool32,
+    pub compute_full_subgroups: Bool32,
+    pub synchronization2: Bool32,
+    pub texture_compression_astc_hdr: Bool32,
+    pub shader_zero_initialize_workgroup_memory: Bool32,
+    pub dynamic_rendering: Bool32,
+    pub shader_integer_dot_product: Bool32,
+    pub maintenance4: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceVulkan13Features {
+    #[inline]
+    // Pre-tags s_type and nulls p_next so the value is chain-ready; all feature
+    // flags start at Bool32::default() (i.e. zero / not enabled).
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            robust_image_access: Bool32::default(),
+            inline_uniform_block: Bool32::default(),
+            descriptor_binding_inline_uniform_block_update_after_bind: Bool32::default(),
+            pipeline_creation_cache_control: Bool32::default(),
+            private_data: Bool32::default(),
+            shader_demote_to_helper_invocation: Bool32::default(),
+            shader_terminate_invocation: Bool32::default(),
+            subgroup_size_control: Bool32::default(),
+            compute_full_subgroups: Bool32::default(),
+            synchronization2: Bool32::default(),
+            texture_compression_astc_hdr: Bool32::default(),
+            shader_zero_initialize_workgroup_memory: Bool32::default(),
+            dynamic_rendering: Bool32::default(),
+            shader_integer_dot_product: Bool32::default(),
+            maintenance4: Bool32::default(),
+        }
+    }
+}
+// SAFETY: the tag names the VK_STRUCTURE_TYPE_* constant that matches this
+// struct, so writing it into `s_type` is sound for pNext-chain traversal.
+unsafe impl TaggedStructure for PhysicalDeviceVulkan13Features {
+    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VULKAN_1_3_FEATURES;
+}
+impl PhysicalDeviceVulkan13Features {
+    /// Creates a builder wrapping `Self::default()`. The builder derefs to the
+    /// wrapped struct, so `&builder` can be passed straight to Vulkan calls.
+    pub fn builder<'a>() -> PhysicalDeviceVulkan13FeaturesBuilder<'a> {
+        PhysicalDeviceVulkan13FeaturesBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+/// Lifetime-carrying builder for [`PhysicalDeviceVulkan13Features`].
+/// `repr(transparent)` guarantees it is layout-identical to the wrapped struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceVulkan13FeaturesBuilder<'a> {
+    inner: PhysicalDeviceVulkan13Features,
+    // Ties the builder to lifetime 'a without storing any borrowed data itself.
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// SAFETY: the Vulkan spec lists VkPhysicalDeviceVulkan13Features as a valid
+// pNext extension of both VkPhysicalDeviceFeatures2 (query path) and
+// VkDeviceCreateInfo (enable path); builder and struct share the same layout.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVulkan13FeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVulkan13Features {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVulkan13FeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVulkan13Features {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceVulkan13FeaturesBuilder<'a> {
+    type Target = PhysicalDeviceVulkan13Features;
+    // Allows a &builder to be used wherever a &struct reference is expected.
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceVulkan13FeaturesBuilder<'a> {
+    // Mutable counterpart of Deref: exposes the wrapped struct for in-place edits.
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Fluent setters for every VkPhysicalDeviceVulkan13Features flag. Each method
+// consumes the builder by value and returns it; `bool` arguments are widened to
+// Bool32 via `.into()`.
+impl<'a> PhysicalDeviceVulkan13FeaturesBuilder<'a> {
+    #[inline]
+    pub fn robust_image_access(mut self, robust_image_access: bool) -> Self {
+        self.inner.robust_image_access = robust_image_access.into();
+        self
+    }
+    #[inline]
+    pub fn inline_uniform_block(mut self, inline_uniform_block: bool) -> Self {
+        self.inner.inline_uniform_block = inline_uniform_block.into();
+        self
+    }
+    #[inline]
+    pub fn descriptor_binding_inline_uniform_block_update_after_bind(
+        mut self,
+        descriptor_binding_inline_uniform_block_update_after_bind: bool,
+    ) -> Self {
+        self.inner
+            .descriptor_binding_inline_uniform_block_update_after_bind =
+            descriptor_binding_inline_uniform_block_update_after_bind.into();
+        self
+    }
+    #[inline]
+    pub fn pipeline_creation_cache_control(
+        mut self,
+        pipeline_creation_cache_control: bool,
+    ) -> Self {
+        self.inner.pipeline_creation_cache_control = pipeline_creation_cache_control.into();
+        self
+    }
+    #[inline]
+    pub fn private_data(mut self, private_data: bool) -> Self {
+        self.inner.private_data = private_data.into();
+        self
+    }
+    #[inline]
+    pub fn shader_demote_to_helper_invocation(
+        mut self,
+        shader_demote_to_helper_invocation: bool,
+    ) -> Self {
+        self.inner.shader_demote_to_helper_invocation = shader_demote_to_helper_invocation.into();
+        self
+    }
+    #[inline]
+    pub fn shader_terminate_invocation(mut self, shader_terminate_invocation: bool) -> Self {
+        self.inner.shader_terminate_invocation = shader_terminate_invocation.into();
+        self
+    }
+    #[inline]
+    pub fn subgroup_size_control(mut self, subgroup_size_control: bool) -> Self {
+        self.inner.subgroup_size_control = subgroup_size_control.into();
+        self
+    }
+    #[inline]
+    pub fn compute_full_subgroups(mut self, compute_full_subgroups: bool) -> Self {
+        self.inner.compute_full_subgroups = compute_full_subgroups.into();
+        self
+    }
+    #[inline]
+    pub fn synchronization2(mut self, synchronization2: bool) -> Self {
+        self.inner.synchronization2 = synchronization2.into();
+        self
+    }
+    #[inline]
+    pub fn texture_compression_astc_hdr(mut self, texture_compression_astc_hdr: bool) -> Self {
+        self.inner.texture_compression_astc_hdr = texture_compression_astc_hdr.into();
+        self
+    }
+    #[inline]
+    pub fn shader_zero_initialize_workgroup_memory(
+        mut self,
+        shader_zero_initialize_workgroup_memory: bool,
+    ) -> Self {
+        self.inner.shader_zero_initialize_workgroup_memory =
+            shader_zero_initialize_workgroup_memory.into();
+        self
+    }
+    #[inline]
+    pub fn dynamic_rendering(mut self, dynamic_rendering: bool) -> Self {
+        self.inner.dynamic_rendering = dynamic_rendering.into();
+        self
+    }
+    #[inline]
+    pub fn shader_integer_dot_product(mut self, shader_integer_dot_product: bool) -> Self {
+        self.inner.shader_integer_dot_product = shader_integer_dot_product.into();
+        self
+    }
+    #[inline]
+    pub fn maintenance4(mut self, maintenance4: bool) -> Self {
+        self.inner.maintenance4 = maintenance4.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceVulkan13Features {
+        self.inner
+    }
+}
+// FFI mirror of VkPhysicalDeviceVulkan13Properties: implementation limits and
+// capability flags reported for Vulkan 1.3. Queried through the p_next chain of
+// VkPhysicalDeviceProperties2 (see the ExtendsPhysicalDeviceProperties2 impls
+// below).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceVulkan13Properties.html>"]
+pub struct PhysicalDeviceVulkan13Properties {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub min_subgroup_size: u32,
+    pub max_subgroup_size: u32,
+    pub max_compute_workgroup_subgroups: u32,
+    pub required_subgroup_size_stages: ShaderStageFlags,
+    pub max_inline_uniform_block_size: u32,
+    pub max_per_stage_descriptor_inline_uniform_blocks: u32,
+    pub max_per_stage_descriptor_update_after_bind_inline_uniform_blocks: u32,
+    pub max_descriptor_set_inline_uniform_blocks: u32,
+    pub max_descriptor_set_update_after_bind_inline_uniform_blocks: u32,
+    pub max_inline_uniform_total_size: u32,
+    pub integer_dot_product8_bit_unsigned_accelerated: Bool32,
+    pub integer_dot_product8_bit_signed_accelerated: Bool32,
+    pub integer_dot_product8_bit_mixed_signedness_accelerated: Bool32,
+    pub integer_dot_product4x8_bit_packed_unsigned_accelerated: Bool32,
+    pub integer_dot_product4x8_bit_packed_signed_accelerated: Bool32,
+    pub integer_dot_product4x8_bit_packed_mixed_signedness_accelerated: Bool32,
+    pub integer_dot_product16_bit_unsigned_accelerated: Bool32,
+    pub integer_dot_product16_bit_signed_accelerated: Bool32,
+    pub integer_dot_product16_bit_mixed_signedness_accelerated: Bool32,
+    pub integer_dot_product32_bit_unsigned_accelerated: Bool32,
+    pub integer_dot_product32_bit_signed_accelerated: Bool32,
+    pub integer_dot_product32_bit_mixed_signedness_accelerated: Bool32,
+    pub integer_dot_product64_bit_unsigned_accelerated: Bool32,
+    pub integer_dot_product64_bit_signed_accelerated: Bool32,
+    pub integer_dot_product64_bit_mixed_signedness_accelerated: Bool32,
+    pub integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated: Bool32,
+    pub integer_dot_product_accumulating_saturating8_bit_signed_accelerated: Bool32,
+    pub integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated: Bool32,
+    pub integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated: Bool32,
+    pub integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated: Bool32,
+    pub integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated:
+        Bool32,
+    pub integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated: Bool32,
+    pub integer_dot_product_accumulating_saturating16_bit_signed_accelerated: Bool32,
+    pub integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated: Bool32,
+    pub integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated: Bool32,
+    pub integer_dot_product_accumulating_saturating32_bit_signed_accelerated: Bool32,
+    pub integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated: Bool32,
+    pub integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated: Bool32,
+    pub integer_dot_product_accumulating_saturating64_bit_signed_accelerated: Bool32,
+    pub integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated: Bool32,
+    pub storage_texel_buffer_offset_alignment_bytes: DeviceSize,
+    pub storage_texel_buffer_offset_single_texel_alignment: Bool32,
+    pub uniform_texel_buffer_offset_alignment_bytes: DeviceSize,
+    pub uniform_texel_buffer_offset_single_texel_alignment: Bool32,
+    pub max_buffer_size: DeviceSize,
+}
+impl ::std::default::Default for PhysicalDeviceVulkan13Properties {
+    #[inline]
+    // Pre-tags s_type and nulls p_next so the value is chain-ready; every limit
+    // and capability field starts at its type's default (zero). Reformatted
+    // field-per-line for consistency with the other generated Default impls —
+    // the previous single unformatted line was also broken mid-token by a lost
+    // diff prefix.
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            min_subgroup_size: u32::default(),
+            max_subgroup_size: u32::default(),
+            max_compute_workgroup_subgroups: u32::default(),
+            required_subgroup_size_stages: ShaderStageFlags::default(),
+            max_inline_uniform_block_size: u32::default(),
+            max_per_stage_descriptor_inline_uniform_blocks: u32::default(),
+            max_per_stage_descriptor_update_after_bind_inline_uniform_blocks: u32::default(),
+            max_descriptor_set_inline_uniform_blocks: u32::default(),
+            max_descriptor_set_update_after_bind_inline_uniform_blocks: u32::default(),
+            max_inline_uniform_total_size: u32::default(),
+            integer_dot_product8_bit_unsigned_accelerated: Bool32::default(),
+            integer_dot_product8_bit_signed_accelerated: Bool32::default(),
+            integer_dot_product8_bit_mixed_signedness_accelerated: Bool32::default(),
+            integer_dot_product4x8_bit_packed_unsigned_accelerated: Bool32::default(),
+            integer_dot_product4x8_bit_packed_signed_accelerated: Bool32::default(),
+            integer_dot_product4x8_bit_packed_mixed_signedness_accelerated: Bool32::default(),
+            integer_dot_product16_bit_unsigned_accelerated: Bool32::default(),
+            integer_dot_product16_bit_signed_accelerated: Bool32::default(),
+            integer_dot_product16_bit_mixed_signedness_accelerated: Bool32::default(),
+            integer_dot_product32_bit_unsigned_accelerated: Bool32::default(),
+            integer_dot_product32_bit_signed_accelerated: Bool32::default(),
+            integer_dot_product32_bit_mixed_signedness_accelerated: Bool32::default(),
+            integer_dot_product64_bit_unsigned_accelerated: Bool32::default(),
+            integer_dot_product64_bit_signed_accelerated: Bool32::default(),
+            integer_dot_product64_bit_mixed_signedness_accelerated: Bool32::default(),
+            integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated:
+                Bool32::default(),
+            integer_dot_product_accumulating_saturating8_bit_signed_accelerated:
+                Bool32::default(),
+            integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated:
+                Bool32::default(),
+            integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated:
+                Bool32::default(),
+            integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated:
+                Bool32::default(),
+            integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated:
+                Bool32::default(),
+            integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated:
+                Bool32::default(),
+            integer_dot_product_accumulating_saturating16_bit_signed_accelerated:
+                Bool32::default(),
+            integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated:
+                Bool32::default(),
+            integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated:
+                Bool32::default(),
+            integer_dot_product_accumulating_saturating32_bit_signed_accelerated:
+                Bool32::default(),
+            integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated:
+                Bool32::default(),
+            integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated:
+                Bool32::default(),
+            integer_dot_product_accumulating_saturating64_bit_signed_accelerated:
+                Bool32::default(),
+            integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated:
+                Bool32::default(),
+            storage_texel_buffer_offset_alignment_bytes: DeviceSize::default(),
+            storage_texel_buffer_offset_single_texel_alignment: Bool32::default(),
+            uniform_texel_buffer_offset_alignment_bytes: DeviceSize::default(),
+            uniform_texel_buffer_offset_single_texel_alignment: Bool32::default(),
+            max_buffer_size: DeviceSize::default(),
+        }
+    }
+}
+// SAFETY: the tag names the VK_STRUCTURE_TYPE_* constant that matches this
+// struct, so writing it into `s_type` is sound for pNext-chain traversal.
+unsafe impl TaggedStructure for PhysicalDeviceVulkan13Properties {
+    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES;
+}
+impl PhysicalDeviceVulkan13Properties {
+    /// Creates a builder wrapping `Self::default()`. The builder derefs to the
+    /// wrapped struct, so `&builder` can be passed straight to Vulkan calls.
+    pub fn builder<'a>() -> PhysicalDeviceVulkan13PropertiesBuilder<'a> {
+        PhysicalDeviceVulkan13PropertiesBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+/// Lifetime-carrying builder for [`PhysicalDeviceVulkan13Properties`].
+/// `repr(transparent)` guarantees it is layout-identical to the wrapped struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceVulkan13PropertiesBuilder<'a> {
+    inner: PhysicalDeviceVulkan13Properties,
+    // Ties the builder to lifetime 'a without storing any borrowed data itself.
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// SAFETY: the Vulkan spec lists VkPhysicalDeviceVulkan13Properties as a valid
+// pNext extension of VkPhysicalDeviceProperties2; builder and struct share the
+// same layout (repr(transparent)), so both may be pushed onto that chain.
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceVulkan13PropertiesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceVulkan13Properties {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceVulkan13PropertiesBuilder<'a> {
+    type Target = PhysicalDeviceVulkan13Properties;
+    // Allows a &builder to be used wherever a &struct reference is expected.
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceVulkan13PropertiesBuilder<'a> {
+    // Mutable counterpart of Deref: exposes the wrapped struct for in-place edits.
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters for `PhysicalDeviceVulkan13PropertiesBuilder`. Each method
+// consumes `self`, writes the argument into the wrapped C struct, and returns
+// `self`; `bool` arguments are converted to the FFI `Bool32` via `.into()`.
+// NOTE(review): this block appears machine-generated (one setter per registry
+// field) — prefer regenerating over hand-editing.
+impl<'a> PhysicalDeviceVulkan13PropertiesBuilder<'a> {
+ #[inline]
+ pub fn min_subgroup_size(mut self, min_subgroup_size: u32) -> Self {
+ self.inner.min_subgroup_size = min_subgroup_size;
+ self
+ }
+ #[inline]
+ pub fn max_subgroup_size(mut self, max_subgroup_size: u32) -> Self {
+ self.inner.max_subgroup_size = max_subgroup_size;
+ self
+ }
+ #[inline]
+ pub fn max_compute_workgroup_subgroups(mut self, max_compute_workgroup_subgroups: u32) -> Self {
+ self.inner.max_compute_workgroup_subgroups = max_compute_workgroup_subgroups;
+ self
+ }
+ #[inline]
+ pub fn required_subgroup_size_stages(
+ mut self,
+ required_subgroup_size_stages: ShaderStageFlags,
+ ) -> Self {
+ self.inner.required_subgroup_size_stages = required_subgroup_size_stages;
+ self
+ }
+ #[inline]
+ pub fn max_inline_uniform_block_size(mut self, max_inline_uniform_block_size: u32) -> Self {
+ self.inner.max_inline_uniform_block_size = max_inline_uniform_block_size;
+ self
+ }
+ #[inline]
+ pub fn max_per_stage_descriptor_inline_uniform_blocks(
+ mut self,
+ max_per_stage_descriptor_inline_uniform_blocks: u32,
+ ) -> Self {
+ self.inner.max_per_stage_descriptor_inline_uniform_blocks =
+ max_per_stage_descriptor_inline_uniform_blocks;
+ self
+ }
+ #[inline]
+ pub fn max_per_stage_descriptor_update_after_bind_inline_uniform_blocks(
+ mut self,
+ max_per_stage_descriptor_update_after_bind_inline_uniform_blocks: u32,
+ ) -> Self {
+ self.inner
+ .max_per_stage_descriptor_update_after_bind_inline_uniform_blocks =
+ max_per_stage_descriptor_update_after_bind_inline_uniform_blocks;
+ self
+ }
+ #[inline]
+ pub fn max_descriptor_set_inline_uniform_blocks(
+ mut self,
+ max_descriptor_set_inline_uniform_blocks: u32,
+ ) -> Self {
+ self.inner.max_descriptor_set_inline_uniform_blocks =
+ max_descriptor_set_inline_uniform_blocks;
+ self
+ }
+ #[inline]
+ pub fn max_descriptor_set_update_after_bind_inline_uniform_blocks(
+ mut self,
+ max_descriptor_set_update_after_bind_inline_uniform_blocks: u32,
+ ) -> Self {
+ self.inner
+ .max_descriptor_set_update_after_bind_inline_uniform_blocks =
+ max_descriptor_set_update_after_bind_inline_uniform_blocks;
+ self
+ }
+ #[inline]
+ pub fn max_inline_uniform_total_size(mut self, max_inline_uniform_total_size: u32) -> Self {
+ self.inner.max_inline_uniform_total_size = max_inline_uniform_total_size;
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product8_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product8_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product8_bit_unsigned_accelerated =
+ integer_dot_product8_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product8_bit_signed_accelerated(
+ mut self,
+ integer_dot_product8_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product8_bit_signed_accelerated =
+ integer_dot_product8_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product8_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product8_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product8_bit_mixed_signedness_accelerated =
+ integer_dot_product8_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product4x8_bit_packed_unsigned_accelerated(
+ mut self,
+ integer_dot_product4x8_bit_packed_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product4x8_bit_packed_unsigned_accelerated =
+ integer_dot_product4x8_bit_packed_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product4x8_bit_packed_signed_accelerated(
+ mut self,
+ integer_dot_product4x8_bit_packed_signed_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product4x8_bit_packed_signed_accelerated =
+ integer_dot_product4x8_bit_packed_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product4x8_bit_packed_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product4x8_bit_packed_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product4x8_bit_packed_mixed_signedness_accelerated =
+ integer_dot_product4x8_bit_packed_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product16_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product16_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product16_bit_unsigned_accelerated =
+ integer_dot_product16_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product16_bit_signed_accelerated(
+ mut self,
+ integer_dot_product16_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product16_bit_signed_accelerated =
+ integer_dot_product16_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product16_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product16_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product16_bit_mixed_signedness_accelerated =
+ integer_dot_product16_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product32_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product32_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product32_bit_unsigned_accelerated =
+ integer_dot_product32_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product32_bit_signed_accelerated(
+ mut self,
+ integer_dot_product32_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product32_bit_signed_accelerated =
+ integer_dot_product32_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product32_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product32_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product32_bit_mixed_signedness_accelerated =
+ integer_dot_product32_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product64_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product64_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product64_bit_unsigned_accelerated =
+ integer_dot_product64_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product64_bit_signed_accelerated(
+ mut self,
+ integer_dot_product64_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product64_bit_signed_accelerated =
+ integer_dot_product64_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product64_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product64_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product64_bit_mixed_signedness_accelerated =
+ integer_dot_product64_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated =
+ integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating8_bit_signed_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating8_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating8_bit_signed_accelerated =
+ integer_dot_product_accumulating_saturating8_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated =
+ integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated =
+ integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated =
+ integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated : bool,
+ ) -> Self {
+ self . inner . integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated = integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated . into () ;
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated =
+ integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating16_bit_signed_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating16_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating16_bit_signed_accelerated =
+ integer_dot_product_accumulating_saturating16_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated =
+ integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated =
+ integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating32_bit_signed_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating32_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating32_bit_signed_accelerated =
+ integer_dot_product_accumulating_saturating32_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated =
+ integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated =
+ integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating64_bit_signed_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating64_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating64_bit_signed_accelerated =
+ integer_dot_product_accumulating_saturating64_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated =
+ integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn storage_texel_buffer_offset_alignment_bytes(
+ mut self,
+ storage_texel_buffer_offset_alignment_bytes: DeviceSize,
+ ) -> Self {
+ self.inner.storage_texel_buffer_offset_alignment_bytes =
+ storage_texel_buffer_offset_alignment_bytes;
+ self
+ }
+ #[inline]
+ pub fn storage_texel_buffer_offset_single_texel_alignment(
+ mut self,
+ storage_texel_buffer_offset_single_texel_alignment: bool,
+ ) -> Self {
+ self.inner
+ .storage_texel_buffer_offset_single_texel_alignment =
+ storage_texel_buffer_offset_single_texel_alignment.into();
+ self
+ }
+ #[inline]
+ pub fn uniform_texel_buffer_offset_alignment_bytes(
+ mut self,
+ uniform_texel_buffer_offset_alignment_bytes: DeviceSize,
+ ) -> Self {
+ self.inner.uniform_texel_buffer_offset_alignment_bytes =
+ uniform_texel_buffer_offset_alignment_bytes;
+ self
+ }
+ #[inline]
+ pub fn uniform_texel_buffer_offset_single_texel_alignment(
+ mut self,
+ uniform_texel_buffer_offset_single_texel_alignment: bool,
+ ) -> Self {
+ self.inner
+ .uniform_texel_buffer_offset_single_texel_alignment =
+ uniform_texel_buffer_offset_single_texel_alignment.into();
+ self
+ }
+ #[inline]
+ pub fn max_buffer_size(mut self, max_buffer_size: DeviceSize) -> Self {
+ self.inner.max_buffer_size = max_buffer_size;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceVulkan13Properties {
+ self.inner
+ }
+}
+// FFI mirror of `VkPipelineCompilerControlCreateInfoAMD`; `#[repr(C)]` keeps the
+// field layout identical to the C struct so it can be passed across the Vulkan ABI.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCompilerControlCreateInfoAMD.html>"]
+pub struct PipelineCompilerControlCreateInfoAMD {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub compiler_control_flags: PipelineCompilerControlFlagsAMD,
+}
+// Default pre-fills `s_type` with the matching structure tag and nulls `p_next`.
+impl ::std::default::Default for PipelineCompilerControlCreateInfoAMD {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ compiler_control_flags: PipelineCompilerControlFlagsAMD::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineCompilerControlCreateInfoAMD {
+ const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD;
+}
+impl PipelineCompilerControlCreateInfoAMD {
+ pub fn builder<'a>() -> PipelineCompilerControlCreateInfoAMDBuilder<'a> {
+ PipelineCompilerControlCreateInfoAMDBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// `#[repr(transparent)]`: the builder has the same layout as the raw struct, so a
+// reference to it can be handed straight to Vulkan entry points.
+#[repr(transparent)]
+pub struct PipelineCompilerControlCreateInfoAMDBuilder<'a> {
+ inner: PipelineCompilerControlCreateInfoAMD,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls allowing this struct to be chained into the `p_next` list of
+// graphics/compute pipeline create infos.
+unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineCompilerControlCreateInfoAMDBuilder<'_> {}
+unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineCompilerControlCreateInfoAMD {}
+unsafe impl ExtendsComputePipelineCreateInfo for PipelineCompilerControlCreateInfoAMDBuilder<'_> {}
+unsafe impl ExtendsComputePipelineCreateInfo for PipelineCompilerControlCreateInfoAMD {}
+impl<'a> ::std::ops::Deref for PipelineCompilerControlCreateInfoAMDBuilder<'a> {
+ type Target = PipelineCompilerControlCreateInfoAMD;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineCompilerControlCreateInfoAMDBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineCompilerControlCreateInfoAMDBuilder<'a> {
+ #[inline]
+ pub fn compiler_control_flags(
+ mut self,
+ compiler_control_flags: PipelineCompilerControlFlagsAMD,
+ ) -> Self {
+ self.inner.compiler_control_flags = compiler_control_flags;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineCompilerControlCreateInfoAMD {
+ self.inner
+ }
+}
+// FFI mirror of `VkPhysicalDeviceCoherentMemoryFeaturesAMD`. Note `p_next` is
+// `*mut` here (feature structs are written by the implementation during queries).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceCoherentMemoryFeaturesAMD.html>"]
+pub struct PhysicalDeviceCoherentMemoryFeaturesAMD {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub device_coherent_memory: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceCoherentMemoryFeaturesAMD {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ device_coherent_memory: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceCoherentMemoryFeaturesAMD {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD;
+}
+impl PhysicalDeviceCoherentMemoryFeaturesAMD {
+ pub fn builder<'a>() -> PhysicalDeviceCoherentMemoryFeaturesAMDBuilder<'a> {
+ PhysicalDeviceCoherentMemoryFeaturesAMDBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceCoherentMemoryFeaturesAMDBuilder<'a> {
+ inner: PhysicalDeviceCoherentMemoryFeaturesAMD,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Chainable into `PhysicalDeviceFeatures2` and `DeviceCreateInfo` `p_next` lists.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCoherentMemoryFeaturesAMDBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCoherentMemoryFeaturesAMD {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCoherentMemoryFeaturesAMDBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCoherentMemoryFeaturesAMD {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceCoherentMemoryFeaturesAMDBuilder<'a> {
+ type Target = PhysicalDeviceCoherentMemoryFeaturesAMD;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceCoherentMemoryFeaturesAMDBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceCoherentMemoryFeaturesAMDBuilder<'a> {
+ #[inline]
+ pub fn device_coherent_memory(mut self, device_coherent_memory: bool) -> Self {
+ self.inner.device_coherent_memory = device_coherent_memory.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceCoherentMemoryFeaturesAMD {
+ self.inner
+ }
+}
+// FFI mirror of `VkPhysicalDeviceToolProperties`. `Debug` is hand-written below
+// (rather than derived) so the fixed-size `c_char` arrays print as C strings.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceToolProperties.html>"]
+pub struct PhysicalDeviceToolProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub name: [c_char; MAX_EXTENSION_NAME_SIZE],
+ pub version: [c_char; MAX_EXTENSION_NAME_SIZE],
+ pub purposes: ToolPurposeFlags,
+ pub description: [c_char; MAX_DESCRIPTION_SIZE],
+ pub layer: [c_char; MAX_EXTENSION_NAME_SIZE],
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for PhysicalDeviceToolProperties {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("PhysicalDeviceToolProperties")
+ .field("s_type", &self.s_type)
+ .field("p_next", &self.p_next)
+ // SAFETY-style note: `CStr::from_ptr` assumes each array holds a
+ // NUL-terminated string — presumably guaranteed by the Vulkan spec for
+ // these fields; TODO(review) confirm against the registry.
+ .field("name", &unsafe {
+ ::std::ffi::CStr::from_ptr(self.name.as_ptr())
+ })
+ .field("version", &unsafe {
+ ::std::ffi::CStr::from_ptr(self.version.as_ptr())
+ })
+ .field("purposes", &self.purposes)
+ .field("description", &unsafe {
+ ::std::ffi::CStr::from_ptr(self.description.as_ptr())
+ })
+ .field("layer", &unsafe {
+ ::std::ffi::CStr::from_ptr(self.layer.as_ptr())
+ })
+ .finish()
+ }
+}
+// Default zero-fills the char arrays (all-NUL ⇒ empty C strings) and tags `s_type`.
+impl ::std::default::Default for PhysicalDeviceToolProperties {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ name: unsafe { ::std::mem::zeroed() },
+ version: unsafe { ::std::mem::zeroed() },
+ purposes: ToolPurposeFlags::default(),
+ description: unsafe { ::std::mem::zeroed() },
+ layer: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceToolProperties {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_TOOL_PROPERTIES;
+}
+impl PhysicalDeviceToolProperties {
+ pub fn builder<'a>() -> PhysicalDeviceToolPropertiesBuilder<'a> {
+ PhysicalDeviceToolPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceToolPropertiesBuilder<'a> {
+ inner: PhysicalDeviceToolProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PhysicalDeviceToolPropertiesBuilder<'a> {
+ type Target = PhysicalDeviceToolProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceToolPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceToolPropertiesBuilder<'a> {
+ #[inline]
+ pub fn name(mut self, name: [c_char; MAX_EXTENSION_NAME_SIZE]) -> Self {
+ self.inner.name = name;
+ self
+ }
+ #[inline]
+ pub fn version(mut self, version: [c_char; MAX_EXTENSION_NAME_SIZE]) -> Self {
+ self.inner.version = version;
+ self
+ }
+ #[inline]
+ pub fn purposes(mut self, purposes: ToolPurposeFlags) -> Self {
+ self.inner.purposes = purposes;
+ self
+ }
+ #[inline]
+ pub fn description(mut self, description: [c_char; MAX_DESCRIPTION_SIZE]) -> Self {
+ self.inner.description = description;
+ self
+ }
+ #[inline]
+ pub fn layer(mut self, layer: [c_char; MAX_EXTENSION_NAME_SIZE]) -> Self {
+ self.inner.layer = layer;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceToolProperties {
+ self.inner
+ }
+}
+// FFI mirror of `VkSamplerCustomBorderColorCreateInfoEXT`. `Debug` is hand-written
+// because `custom_border_color` is a C union (`ClearColorValue`), which cannot
+// derive `Debug`; it is printed as the placeholder string "union".
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSamplerCustomBorderColorCreateInfoEXT.html>"]
+pub struct SamplerCustomBorderColorCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub custom_border_color: ClearColorValue,
+ pub format: Format,
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for SamplerCustomBorderColorCreateInfoEXT {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("SamplerCustomBorderColorCreateInfoEXT")
+ .field("s_type", &self.s_type)
+ .field("p_next", &self.p_next)
+ .field("custom_border_color", &"union")
+ .field("format", &self.format)
+ .finish()
+ }
+}
+impl ::std::default::Default for SamplerCustomBorderColorCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ custom_border_color: ClearColorValue::default(),
+ format: Format::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SamplerCustomBorderColorCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT;
+}
+impl SamplerCustomBorderColorCreateInfoEXT {
+ pub fn builder<'a>() -> SamplerCustomBorderColorCreateInfoEXTBuilder<'a> {
+ SamplerCustomBorderColorCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SamplerCustomBorderColorCreateInfoEXTBuilder<'a> {
+ inner: SamplerCustomBorderColorCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Chainable into a `SamplerCreateInfo` `p_next` list.
+unsafe impl ExtendsSamplerCreateInfo for SamplerCustomBorderColorCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsSamplerCreateInfo for SamplerCustomBorderColorCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for SamplerCustomBorderColorCreateInfoEXTBuilder<'a> {
+ type Target = SamplerCustomBorderColorCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SamplerCustomBorderColorCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SamplerCustomBorderColorCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn custom_border_color(mut self, custom_border_color: ClearColorValue) -> Self {
+ self.inner.custom_border_color = custom_border_color;
+ self
+ }
+ #[inline]
+ pub fn format(mut self, format: Format) -> Self {
+ self.inner.format = format;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SamplerCustomBorderColorCreateInfoEXT {
+ self.inner
+ }
+}
+// FFI mirror of `VkPhysicalDeviceCustomBorderColorPropertiesEXT` plus its builder.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceCustomBorderColorPropertiesEXT.html>"]
+pub struct PhysicalDeviceCustomBorderColorPropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub max_custom_border_color_samplers: u32,
+}
+impl ::std::default::Default for PhysicalDeviceCustomBorderColorPropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ max_custom_border_color_samplers: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceCustomBorderColorPropertiesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT;
+}
+impl PhysicalDeviceCustomBorderColorPropertiesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceCustomBorderColorPropertiesEXTBuilder<'a> {
+ PhysicalDeviceCustomBorderColorPropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceCustomBorderColorPropertiesEXTBuilder<'a> {
+ inner: PhysicalDeviceCustomBorderColorPropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Chainable into a `PhysicalDeviceProperties2` `p_next` list.
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceCustomBorderColorPropertiesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceCustomBorderColorPropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceCustomBorderColorPropertiesEXTBuilder<'a> {
+ type Target = PhysicalDeviceCustomBorderColorPropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceCustomBorderColorPropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceCustomBorderColorPropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn max_custom_border_color_samplers(
+ mut self,
+ max_custom_border_color_samplers: u32,
+ ) -> Self {
+ self.inner.max_custom_border_color_samplers = max_custom_border_color_samplers;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceCustomBorderColorPropertiesEXT {
+ self.inner
+ }
+}
+// FFI mirror of `VkPhysicalDeviceCustomBorderColorFeaturesEXT` plus its builder.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceCustomBorderColorFeaturesEXT.html>"]
+pub struct PhysicalDeviceCustomBorderColorFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub custom_border_colors: Bool32,
+ pub custom_border_color_without_format: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceCustomBorderColorFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ custom_border_colors: Bool32::default(),
+ custom_border_color_without_format: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceCustomBorderColorFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT;
+}
+impl PhysicalDeviceCustomBorderColorFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceCustomBorderColorFeaturesEXTBuilder<'a> {
+ PhysicalDeviceCustomBorderColorFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceCustomBorderColorFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceCustomBorderColorFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Chainable into `PhysicalDeviceFeatures2` and `DeviceCreateInfo` `p_next` lists.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceCustomBorderColorFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceCustomBorderColorFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCustomBorderColorFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceCustomBorderColorFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceCustomBorderColorFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceCustomBorderColorFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceCustomBorderColorFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceCustomBorderColorFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn custom_border_colors(mut self, custom_border_colors: bool) -> Self {
+ self.inner.custom_border_colors = custom_border_colors.into();
+ self
+ }
+ #[inline]
+ pub fn custom_border_color_without_format(
+ mut self,
+ custom_border_color_without_format: bool,
+ ) -> Self {
+ self.inner.custom_border_color_without_format = custom_border_color_without_format.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceCustomBorderColorFeaturesEXT {
+ self.inner
+ }
+}
+// FFI mirror of `VkSamplerBorderColorComponentMappingCreateInfoEXT` plus builder.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSamplerBorderColorComponentMappingCreateInfoEXT.html>"]
+pub struct SamplerBorderColorComponentMappingCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub components: ComponentMapping,
+ pub srgb: Bool32,
+}
+impl ::std::default::Default for SamplerBorderColorComponentMappingCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ components: ComponentMapping::default(),
+ srgb: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SamplerBorderColorComponentMappingCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT;
+}
+impl SamplerBorderColorComponentMappingCreateInfoEXT {
+ pub fn builder<'a>() -> SamplerBorderColorComponentMappingCreateInfoEXTBuilder<'a> {
+ SamplerBorderColorComponentMappingCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SamplerBorderColorComponentMappingCreateInfoEXTBuilder<'a> {
+ inner: SamplerBorderColorComponentMappingCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Chainable into a `SamplerCreateInfo` `p_next` list.
+unsafe impl ExtendsSamplerCreateInfo
+ for SamplerBorderColorComponentMappingCreateInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsSamplerCreateInfo for SamplerBorderColorComponentMappingCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for SamplerBorderColorComponentMappingCreateInfoEXTBuilder<'a> {
+ type Target = SamplerBorderColorComponentMappingCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SamplerBorderColorComponentMappingCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SamplerBorderColorComponentMappingCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn components(mut self, components: ComponentMapping) -> Self {
+ self.inner.components = components;
+ self
+ }
+ #[inline]
+ pub fn srgb(mut self, srgb: bool) -> Self {
+ self.inner.srgb = srgb.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SamplerBorderColorComponentMappingCreateInfoEXT {
+ self.inner
+ }
+}
+// FFI mirror of `VkPhysicalDeviceBorderColorSwizzleFeaturesEXT` plus its builder.
+// (The builder's `DerefMut`/setter impls continue past this point in the file.)
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceBorderColorSwizzleFeaturesEXT.html>"]
+pub struct PhysicalDeviceBorderColorSwizzleFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub border_color_swizzle: Bool32,
+ pub border_color_swizzle_from_image: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceBorderColorSwizzleFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ border_color_swizzle: Bool32::default(),
+ border_color_swizzle_from_image: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceBorderColorSwizzleFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT;
+}
+impl PhysicalDeviceBorderColorSwizzleFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceBorderColorSwizzleFeaturesEXTBuilder<'a> {
+ PhysicalDeviceBorderColorSwizzleFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceBorderColorSwizzleFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceBorderColorSwizzleFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Chainable into `PhysicalDeviceFeatures2` and `DeviceCreateInfo` `p_next` lists.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceBorderColorSwizzleFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceBorderColorSwizzleFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBorderColorSwizzleFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceBorderColorSwizzleFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceBorderColorSwizzleFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceBorderColorSwizzleFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceBorderColorSwizzleFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceBorderColorSwizzleFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn border_color_swizzle(mut self, border_color_swizzle: bool) -> Self {
+ self.inner.border_color_swizzle = border_color_swizzle.into();
+ self
+ }
+ #[inline]
+ pub fn border_color_swizzle_from_image(
+ mut self,
+ border_color_swizzle_from_image: bool,
+ ) -> Self {
+ self.inner.border_color_swizzle_from_image = border_color_swizzle_from_image.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceBorderColorSwizzleFeaturesEXT {
+ self.inner
+ }
+}
+// Untagged C union: either a GPU device address or a host pointer. Which member
+// is active is determined by the API call it is passed to, not by the union itself.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceOrHostAddressKHR.html>"]
+pub union DeviceOrHostAddressKHR {
+    pub device_address: DeviceAddress,
+    pub host_address: *mut c_void,
+}
+impl ::std::default::Default for DeviceOrHostAddressKHR {
+    #[inline]
+    fn default() -> Self {
+        // All-zero bytes are a valid representation for both members
+        // (address 0 / null pointer).
+        unsafe { ::std::mem::zeroed() }
+    }
+}
+// Const variant of the union above: host side is a *const pointer.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceOrHostAddressConstKHR.html>"]
+pub union DeviceOrHostAddressConstKHR {
+    pub device_address: DeviceAddress,
+    pub host_address: *const c_void,
+}
+impl ::std::default::Default for DeviceOrHostAddressConstKHR {
+    #[inline]
+    fn default() -> Self {
+        // All-zero bytes are a valid representation for both members.
+        unsafe { ::std::mem::zeroed() }
+    }
+}
+// Generated binding for VkAccelerationStructureGeometryTrianglesDataKHR:
+// triangle-geometry input for acceleration-structure builds.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureGeometryTrianglesDataKHR.html>"]
+pub struct AccelerationStructureGeometryTrianglesDataKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub vertex_format: Format,
+    pub vertex_data: DeviceOrHostAddressConstKHR,
+    pub vertex_stride: DeviceSize,
+    pub max_vertex: u32,
+    pub index_type: IndexType,
+    pub index_data: DeviceOrHostAddressConstKHR,
+    pub transform_data: DeviceOrHostAddressConstKHR,
+}
+// Manual Debug impl because the union-typed fields cannot derive Debug; they are
+// printed as the placeholder string "union".
+#[cfg(feature = "debug")]
+impl fmt::Debug for AccelerationStructureGeometryTrianglesDataKHR {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.debug_struct("AccelerationStructureGeometryTrianglesDataKHR")
+            .field("s_type", &self.s_type)
+            .field("p_next", &self.p_next)
+            .field("vertex_format", &self.vertex_format)
+            .field("vertex_data", &"union")
+            .field("vertex_stride", &self.vertex_stride)
+            .field("max_vertex", &self.max_vertex)
+            .field("index_type", &self.index_type)
+            .field("index_data", &"union")
+            .field("transform_data", &"union")
+            .finish()
+    }
+}
+// Default pre-fills the spec-mandated `s_type` and a null `p_next`.
+impl ::std::default::Default for AccelerationStructureGeometryTrianglesDataKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            vertex_format: Format::default(),
+            vertex_data: DeviceOrHostAddressConstKHR::default(),
+            vertex_stride: DeviceSize::default(),
+            max_vertex: u32::default(),
+            index_type: IndexType::default(),
+            index_data: DeviceOrHostAddressConstKHR::default(),
+            transform_data: DeviceOrHostAddressConstKHR::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for AccelerationStructureGeometryTrianglesDataKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR;
+}
+impl AccelerationStructureGeometryTrianglesDataKHR {
+    // Entry point of the builder pattern; starts from `Self::default()`.
+    pub fn builder<'a>() -> AccelerationStructureGeometryTrianglesDataKHRBuilder<'a> {
+        AccelerationStructureGeometryTrianglesDataKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper; layout-identical to the wrapped struct.
+#[repr(transparent)]
+pub struct AccelerationStructureGeometryTrianglesDataKHRBuilder<'a> {
+    inner: AccelerationStructureGeometryTrianglesDataKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait implemented by structs allowed in this struct's p_next chain.
+pub unsafe trait ExtendsAccelerationStructureGeometryTrianglesDataKHR {}
+impl<'a> ::std::ops::Deref for AccelerationStructureGeometryTrianglesDataKHRBuilder<'a> {
+    type Target = AccelerationStructureGeometryTrianglesDataKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureGeometryTrianglesDataKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters; each consumes and returns the builder.
+impl<'a> AccelerationStructureGeometryTrianglesDataKHRBuilder<'a> {
+    #[inline]
+    pub fn vertex_format(mut self, vertex_format: Format) -> Self {
+        self.inner.vertex_format = vertex_format;
+        self
+    }
+    #[inline]
+    pub fn vertex_data(mut self, vertex_data: DeviceOrHostAddressConstKHR) -> Self {
+        self.inner.vertex_data = vertex_data;
+        self
+    }
+    #[inline]
+    pub fn vertex_stride(mut self, vertex_stride: DeviceSize) -> Self {
+        self.inner.vertex_stride = vertex_stride;
+        self
+    }
+    #[inline]
+    pub fn max_vertex(mut self, max_vertex: u32) -> Self {
+        self.inner.max_vertex = max_vertex;
+        self
+    }
+    #[inline]
+    pub fn index_type(mut self, index_type: IndexType) -> Self {
+        self.inner.index_type = index_type;
+        self
+    }
+    #[inline]
+    pub fn index_data(mut self, index_data: DeviceOrHostAddressConstKHR) -> Self {
+        self.inner.index_data = index_data;
+        self
+    }
+    #[inline]
+    pub fn transform_data(mut self, transform_data: DeviceOrHostAddressConstKHR) -> Self {
+        self.inner.transform_data = transform_data;
+        self
+    }
+    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+    #[doc = r" valid extension structs can be pushed into the chain."]
+    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+    #[doc = r" chain will look like `A -> D -> B -> C`."]
+    pub fn push_next<T: ExtendsAccelerationStructureGeometryTrianglesDataKHR>(
+        mut self,
+        next: &'a mut T,
+    ) -> Self {
+        unsafe {
+            // Walk `next`'s own p_next chain to its tail, attach the current
+            // chain there, then install `next` at the front of this builder's
+            // chain. `&'a mut T` keeps the pushed struct alive and exclusively
+            // borrowed for the builder's lifetime.
+            let next_ptr = <*const T>::cast(next);
+            let last_next = ptr_chain_iter(next).last().unwrap();
+            (*last_next).p_next = self.inner.p_next as _;
+            self.inner.p_next = next_ptr;
+        }
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> AccelerationStructureGeometryTrianglesDataKHR {
+        self.inner
+    }
+}
+// Generated binding for VkAccelerationStructureGeometryAabbsDataKHR:
+// axis-aligned-bounding-box geometry input for acceleration-structure builds.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureGeometryAabbsDataKHR.html>"]
+pub struct AccelerationStructureGeometryAabbsDataKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub data: DeviceOrHostAddressConstKHR,
+    pub stride: DeviceSize,
+}
+// Manual Debug impl because the union-typed `data` field cannot derive Debug.
+#[cfg(feature = "debug")]
+impl fmt::Debug for AccelerationStructureGeometryAabbsDataKHR {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.debug_struct("AccelerationStructureGeometryAabbsDataKHR")
+            .field("s_type", &self.s_type)
+            .field("p_next", &self.p_next)
+            .field("data", &"union")
+            .field("stride", &self.stride)
+            .finish()
+    }
+}
+// Default pre-fills the spec-mandated `s_type` and a null `p_next`.
+impl ::std::default::Default for AccelerationStructureGeometryAabbsDataKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            data: DeviceOrHostAddressConstKHR::default(),
+            stride: DeviceSize::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for AccelerationStructureGeometryAabbsDataKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR;
+}
+impl AccelerationStructureGeometryAabbsDataKHR {
+    // Entry point of the builder pattern; starts from `Self::default()`.
+    pub fn builder<'a>() -> AccelerationStructureGeometryAabbsDataKHRBuilder<'a> {
+        AccelerationStructureGeometryAabbsDataKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper; layout-identical to the wrapped struct.
+#[repr(transparent)]
+pub struct AccelerationStructureGeometryAabbsDataKHRBuilder<'a> {
+    inner: AccelerationStructureGeometryAabbsDataKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AccelerationStructureGeometryAabbsDataKHRBuilder<'a> {
+    type Target = AccelerationStructureGeometryAabbsDataKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureGeometryAabbsDataKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters; each consumes and returns the builder.
+impl<'a> AccelerationStructureGeometryAabbsDataKHRBuilder<'a> {
+    #[inline]
+    pub fn data(mut self, data: DeviceOrHostAddressConstKHR) -> Self {
+        self.inner.data = data;
+        self
+    }
+    #[inline]
+    pub fn stride(mut self, stride: DeviceSize) -> Self {
+        self.inner.stride = stride;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> AccelerationStructureGeometryAabbsDataKHR {
+        self.inner
+    }
+}
+// Generated binding for VkAccelerationStructureGeometryInstancesDataKHR:
+// instance-geometry input for top-level acceleration-structure builds.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureGeometryInstancesDataKHR.html>"]
+pub struct AccelerationStructureGeometryInstancesDataKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub array_of_pointers: Bool32,
+    pub data: DeviceOrHostAddressConstKHR,
+}
+// Manual Debug impl because the union-typed `data` field cannot derive Debug.
+#[cfg(feature = "debug")]
+impl fmt::Debug for AccelerationStructureGeometryInstancesDataKHR {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.debug_struct("AccelerationStructureGeometryInstancesDataKHR")
+            .field("s_type", &self.s_type)
+            .field("p_next", &self.p_next)
+            .field("array_of_pointers", &self.array_of_pointers)
+            .field("data", &"union")
+            .finish()
+    }
+}
+// Default pre-fills the spec-mandated `s_type` and a null `p_next`.
+impl ::std::default::Default for AccelerationStructureGeometryInstancesDataKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            array_of_pointers: Bool32::default(),
+            data: DeviceOrHostAddressConstKHR::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for AccelerationStructureGeometryInstancesDataKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR;
+}
+impl AccelerationStructureGeometryInstancesDataKHR {
+    // Entry point of the builder pattern; starts from `Self::default()`.
+    pub fn builder<'a>() -> AccelerationStructureGeometryInstancesDataKHRBuilder<'a> {
+        AccelerationStructureGeometryInstancesDataKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper; layout-identical to the wrapped struct.
+#[repr(transparent)]
+pub struct AccelerationStructureGeometryInstancesDataKHRBuilder<'a> {
+    inner: AccelerationStructureGeometryInstancesDataKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AccelerationStructureGeometryInstancesDataKHRBuilder<'a> {
+    type Target = AccelerationStructureGeometryInstancesDataKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureGeometryInstancesDataKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters; `bool` arguments are converted to Bool32 via `.into()`.
+impl<'a> AccelerationStructureGeometryInstancesDataKHRBuilder<'a> {
+    #[inline]
+    pub fn array_of_pointers(mut self, array_of_pointers: bool) -> Self {
+        self.inner.array_of_pointers = array_of_pointers.into();
+        self
+    }
+    #[inline]
+    pub fn data(mut self, data: DeviceOrHostAddressConstKHR) -> Self {
+        self.inner.data = data;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> AccelerationStructureGeometryInstancesDataKHR {
+        self.inner
+    }
+}
+// Untagged C union over the three geometry payload structs; the active member
+// is selected by the `geometry_type` field of the enclosing
+// AccelerationStructureGeometryKHR, not by the union itself.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureGeometryDataKHR.html>"]
+pub union AccelerationStructureGeometryDataKHR {
+    pub triangles: AccelerationStructureGeometryTrianglesDataKHR,
+    pub aabbs: AccelerationStructureGeometryAabbsDataKHR,
+    pub instances: AccelerationStructureGeometryInstancesDataKHR,
+}
+impl ::std::default::Default for AccelerationStructureGeometryDataKHR {
+    #[inline]
+    fn default() -> Self {
+        // Zeroed placeholder (including a zero s_type inside the variants);
+        // callers are expected to overwrite it with a fully initialized variant.
+        unsafe { ::std::mem::zeroed() }
+    }
+}
+// Generated binding for VkAccelerationStructureGeometryKHR: one geometry entry
+// of a build; `geometry_type` selects the active member of the `geometry` union.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureGeometryKHR.html>"]
+pub struct AccelerationStructureGeometryKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub geometry_type: GeometryTypeKHR,
+    pub geometry: AccelerationStructureGeometryDataKHR,
+    pub flags: GeometryFlagsKHR,
+}
+// Manual Debug impl because the union-typed `geometry` field cannot derive Debug.
+#[cfg(feature = "debug")]
+impl fmt::Debug for AccelerationStructureGeometryKHR {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.debug_struct("AccelerationStructureGeometryKHR")
+            .field("s_type", &self.s_type)
+            .field("p_next", &self.p_next)
+            .field("geometry_type", &self.geometry_type)
+            .field("geometry", &"union")
+            .field("flags", &self.flags)
+            .finish()
+    }
+}
+// Default pre-fills the spec-mandated `s_type` and a null `p_next`.
+impl ::std::default::Default for AccelerationStructureGeometryKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            geometry_type: GeometryTypeKHR::default(),
+            geometry: AccelerationStructureGeometryDataKHR::default(),
+            flags: GeometryFlagsKHR::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for AccelerationStructureGeometryKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_GEOMETRY_KHR;
+}
+impl AccelerationStructureGeometryKHR {
+    // Entry point of the builder pattern; starts from `Self::default()`.
+    pub fn builder<'a>() -> AccelerationStructureGeometryKHRBuilder<'a> {
+        AccelerationStructureGeometryKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper; layout-identical to the wrapped struct.
+#[repr(transparent)]
+pub struct AccelerationStructureGeometryKHRBuilder<'a> {
+    inner: AccelerationStructureGeometryKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AccelerationStructureGeometryKHRBuilder<'a> {
+    type Target = AccelerationStructureGeometryKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureGeometryKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters; each consumes and returns the builder.
+impl<'a> AccelerationStructureGeometryKHRBuilder<'a> {
+    #[inline]
+    pub fn geometry_type(mut self, geometry_type: GeometryTypeKHR) -> Self {
+        self.inner.geometry_type = geometry_type;
+        self
+    }
+    #[inline]
+    pub fn geometry(mut self, geometry: AccelerationStructureGeometryDataKHR) -> Self {
+        self.inner.geometry = geometry;
+        self
+    }
+    #[inline]
+    pub fn flags(mut self, flags: GeometryFlagsKHR) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> AccelerationStructureGeometryKHR {
+        self.inner
+    }
+}
+// Generated binding for VkAccelerationStructureBuildGeometryInfoKHR: top-level
+// parameters of an acceleration-structure build. `p_geometries` and
+// `pp_geometries` are alternative encodings of the same `geometry_count` entries.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureBuildGeometryInfoKHR.html>"]
+pub struct AccelerationStructureBuildGeometryInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub ty: AccelerationStructureTypeKHR,
+    pub flags: BuildAccelerationStructureFlagsKHR,
+    pub mode: BuildAccelerationStructureModeKHR,
+    pub src_acceleration_structure: AccelerationStructureKHR,
+    pub dst_acceleration_structure: AccelerationStructureKHR,
+    pub geometry_count: u32,
+    pub p_geometries: *const AccelerationStructureGeometryKHR,
+    pub pp_geometries: *const *const AccelerationStructureGeometryKHR,
+    pub scratch_data: DeviceOrHostAddressKHR,
+}
+// Manual Debug impl because the union-typed `scratch_data` field cannot derive Debug.
+#[cfg(feature = "debug")]
+impl fmt::Debug for AccelerationStructureBuildGeometryInfoKHR {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.debug_struct("AccelerationStructureBuildGeometryInfoKHR")
+            .field("s_type", &self.s_type)
+            .field("p_next", &self.p_next)
+            .field("ty", &self.ty)
+            .field("flags", &self.flags)
+            .field("mode", &self.mode)
+            .field(
+                "src_acceleration_structure",
+                &self.src_acceleration_structure,
+            )
+            .field(
+                "dst_acceleration_structure",
+                &self.dst_acceleration_structure,
+            )
+            .field("geometry_count", &self.geometry_count)
+            .field("p_geometries", &self.p_geometries)
+            .field("pp_geometries", &self.pp_geometries)
+            .field("scratch_data", &"union")
+            .finish()
+    }
+}
+// Default pre-fills the spec-mandated `s_type`, null `p_next`, and null
+// geometry pointers.
+impl ::std::default::Default for AccelerationStructureBuildGeometryInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            ty: AccelerationStructureTypeKHR::default(),
+            flags: BuildAccelerationStructureFlagsKHR::default(),
+            mode: BuildAccelerationStructureModeKHR::default(),
+            src_acceleration_structure: AccelerationStructureKHR::default(),
+            dst_acceleration_structure: AccelerationStructureKHR::default(),
+            geometry_count: u32::default(),
+            p_geometries: ::std::ptr::null(),
+            pp_geometries: ::std::ptr::null(),
+            scratch_data: DeviceOrHostAddressKHR::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for AccelerationStructureBuildGeometryInfoKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR;
+}
+impl AccelerationStructureBuildGeometryInfoKHR {
+    // Entry point of the builder pattern; starts from `Self::default()`.
+    pub fn builder<'a>() -> AccelerationStructureBuildGeometryInfoKHRBuilder<'a> {
+        AccelerationStructureBuildGeometryInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper; the 'a lifetime pins the borrowed geometry slices.
+#[repr(transparent)]
+pub struct AccelerationStructureBuildGeometryInfoKHRBuilder<'a> {
+    inner: AccelerationStructureBuildGeometryInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AccelerationStructureBuildGeometryInfoKHRBuilder<'a> {
+    type Target = AccelerationStructureBuildGeometryInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureBuildGeometryInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters; slice setters record both the pointer and the length.
+impl<'a> AccelerationStructureBuildGeometryInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn ty(mut self, ty: AccelerationStructureTypeKHR) -> Self {
+        self.inner.ty = ty;
+        self
+    }
+    #[inline]
+    pub fn flags(mut self, flags: BuildAccelerationStructureFlagsKHR) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn mode(mut self, mode: BuildAccelerationStructureModeKHR) -> Self {
+        self.inner.mode = mode;
+        self
+    }
+    #[inline]
+    pub fn src_acceleration_structure(
+        mut self,
+        src_acceleration_structure: AccelerationStructureKHR,
+    ) -> Self {
+        self.inner.src_acceleration_structure = src_acceleration_structure;
+        self
+    }
+    #[inline]
+    pub fn dst_acceleration_structure(
+        mut self,
+        dst_acceleration_structure: AccelerationStructureKHR,
+    ) -> Self {
+        self.inner.dst_acceleration_structure = dst_acceleration_structure;
+        self
+    }
+    // Dense-array form: fills geometry_count + p_geometries.
+    // NOTE(review): calling both geometries() and geometries_ptrs() leaves both
+    // pointers set with one shared count — callers must use only one form.
+    #[inline]
+    pub fn geometries(mut self, geometries: &'a [AccelerationStructureGeometryKHR]) -> Self {
+        self.inner.geometry_count = geometries.len() as _;
+        self.inner.p_geometries = geometries.as_ptr();
+        self
+    }
+    // Pointer-array form: fills geometry_count + pp_geometries. The cast relies
+    // on &T and *const T sharing representation.
+    #[inline]
+    pub fn geometries_ptrs(
+        mut self,
+        geometries_ptrs: &'a [&'a AccelerationStructureGeometryKHR],
+    ) -> Self {
+        self.inner.geometry_count = geometries_ptrs.len() as _;
+        self.inner.pp_geometries = geometries_ptrs.as_ptr().cast();
+        self
+    }
+    #[inline]
+    pub fn scratch_data(mut self, scratch_data: DeviceOrHostAddressKHR) -> Self {
+        self.inner.scratch_data = scratch_data;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> AccelerationStructureBuildGeometryInfoKHR {
+        self.inner
+    }
+}
+// Generated binding for VkAccelerationStructureBuildRangeInfoKHR: per-geometry
+// primitive range of a build. Plain-data struct (no s_type/p_next), so Default
+// and Debug can be derived.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureBuildRangeInfoKHR.html>"]
+pub struct AccelerationStructureBuildRangeInfoKHR {
+    pub primitive_count: u32,
+    pub primitive_offset: u32,
+    pub first_vertex: u32,
+    pub transform_offset: u32,
+}
+impl AccelerationStructureBuildRangeInfoKHR {
+    // Entry point of the builder pattern; starts from `Self::default()`.
+    pub fn builder<'a>() -> AccelerationStructureBuildRangeInfoKHRBuilder<'a> {
+        AccelerationStructureBuildRangeInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper; layout-identical to the wrapped struct.
+#[repr(transparent)]
+pub struct AccelerationStructureBuildRangeInfoKHRBuilder<'a> {
+    inner: AccelerationStructureBuildRangeInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AccelerationStructureBuildRangeInfoKHRBuilder<'a> {
+    type Target = AccelerationStructureBuildRangeInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureBuildRangeInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters; each consumes and returns the builder.
+impl<'a> AccelerationStructureBuildRangeInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn primitive_count(mut self, primitive_count: u32) -> Self {
+        self.inner.primitive_count = primitive_count;
+        self
+    }
+    #[inline]
+    pub fn primitive_offset(mut self, primitive_offset: u32) -> Self {
+        self.inner.primitive_offset = primitive_offset;
+        self
+    }
+    #[inline]
+    pub fn first_vertex(mut self, first_vertex: u32) -> Self {
+        self.inner.first_vertex = first_vertex;
+        self
+    }
+    #[inline]
+    pub fn transform_offset(mut self, transform_offset: u32) -> Self {
+        self.inner.transform_offset = transform_offset;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> AccelerationStructureBuildRangeInfoKHR {
+        self.inner
+    }
+}
+// Generated binding for VkAccelerationStructureCreateInfoKHR: parameters for
+// vkCreateAccelerationStructureKHR (backing buffer, offset, size, type).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureCreateInfoKHR.html>"]
+pub struct AccelerationStructureCreateInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub create_flags: AccelerationStructureCreateFlagsKHR,
+    pub buffer: Buffer,
+    pub offset: DeviceSize,
+    pub size: DeviceSize,
+    pub ty: AccelerationStructureTypeKHR,
+    pub device_address: DeviceAddress,
+}
+// Default pre-fills the spec-mandated `s_type` and a null `p_next`.
+impl ::std::default::Default for AccelerationStructureCreateInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            create_flags: AccelerationStructureCreateFlagsKHR::default(),
+            buffer: Buffer::default(),
+            offset: DeviceSize::default(),
+            size: DeviceSize::default(),
+            ty: AccelerationStructureTypeKHR::default(),
+            device_address: DeviceAddress::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for AccelerationStructureCreateInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_CREATE_INFO_KHR;
+}
+impl AccelerationStructureCreateInfoKHR {
+    // Entry point of the builder pattern; starts from `Self::default()`.
+    pub fn builder<'a>() -> AccelerationStructureCreateInfoKHRBuilder<'a> {
+        AccelerationStructureCreateInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper; layout-identical to the wrapped struct.
+#[repr(transparent)]
+pub struct AccelerationStructureCreateInfoKHRBuilder<'a> {
+    inner: AccelerationStructureCreateInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait implemented by structs allowed in this struct's p_next chain.
+pub unsafe trait ExtendsAccelerationStructureCreateInfoKHR {}
+impl<'a> ::std::ops::Deref for AccelerationStructureCreateInfoKHRBuilder<'a> {
+    type Target = AccelerationStructureCreateInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureCreateInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters; each consumes and returns the builder.
+impl<'a> AccelerationStructureCreateInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn create_flags(mut self, create_flags: AccelerationStructureCreateFlagsKHR) -> Self {
+        self.inner.create_flags = create_flags;
+        self
+    }
+    #[inline]
+    pub fn buffer(mut self, buffer: Buffer) -> Self {
+        self.inner.buffer = buffer;
+        self
+    }
+    #[inline]
+    pub fn offset(mut self, offset: DeviceSize) -> Self {
+        self.inner.offset = offset;
+        self
+    }
+    #[inline]
+    pub fn size(mut self, size: DeviceSize) -> Self {
+        self.inner.size = size;
+        self
+    }
+    #[inline]
+    pub fn ty(mut self, ty: AccelerationStructureTypeKHR) -> Self {
+        self.inner.ty = ty;
+        self
+    }
+    #[inline]
+    pub fn device_address(mut self, device_address: DeviceAddress) -> Self {
+        self.inner.device_address = device_address;
+        self
+    }
+    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+    #[doc = r" valid extension structs can be pushed into the chain."]
+    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+    #[doc = r" chain will look like `A -> D -> B -> C`."]
+    pub fn push_next<T: ExtendsAccelerationStructureCreateInfoKHR>(
+        mut self,
+        next: &'a mut T,
+    ) -> Self {
+        unsafe {
+            // Walk `next`'s own p_next chain to its tail, attach the current
+            // chain there, then install `next` at the front of this builder's
+            // chain.
+            let next_ptr = <*const T>::cast(next);
+            let last_next = ptr_chain_iter(next).last().unwrap();
+            (*last_next).p_next = self.inner.p_next as _;
+            self.inner.p_next = next_ptr;
+        }
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> AccelerationStructureCreateInfoKHR {
+        self.inner
+    }
+}
+// Generated binding for VkAabbPositionsKHR: min/max corners of an axis-aligned
+// bounding box. Plain-data struct, so Default and Debug can be derived.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAabbPositionsKHR.html>"]
+pub struct AabbPositionsKHR {
+    pub min_x: f32,
+    pub min_y: f32,
+    pub min_z: f32,
+    pub max_x: f32,
+    pub max_y: f32,
+    pub max_z: f32,
+}
+impl AabbPositionsKHR {
+    // Entry point of the builder pattern; starts from `Self::default()`.
+    pub fn builder<'a>() -> AabbPositionsKHRBuilder<'a> {
+        AabbPositionsKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper; layout-identical to the wrapped struct.
+#[repr(transparent)]
+pub struct AabbPositionsKHRBuilder<'a> {
+    inner: AabbPositionsKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AabbPositionsKHRBuilder<'a> {
+    type Target = AabbPositionsKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for AabbPositionsKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters; each consumes and returns the builder.
+impl<'a> AabbPositionsKHRBuilder<'a> {
+    #[inline]
+    pub fn min_x(mut self, min_x: f32) -> Self {
+        self.inner.min_x = min_x;
+        self
+    }
+    #[inline]
+    pub fn min_y(mut self, min_y: f32) -> Self {
+        self.inner.min_y = min_y;
+        self
+    }
+    #[inline]
+    pub fn min_z(mut self, min_z: f32) -> Self {
+        self.inner.min_z = min_z;
+        self
+    }
+    #[inline]
+    pub fn max_x(mut self, max_x: f32) -> Self {
+        self.inner.max_x = max_x;
+        self
+    }
+    #[inline]
+    pub fn max_y(mut self, max_y: f32) -> Self {
+        self.inner.max_y = max_y;
+        self
+    }
+    #[inline]
+    pub fn max_z(mut self, max_z: f32) -> Self {
+        self.inner.max_z = max_z;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> AabbPositionsKHR {
+        self.inner
+    }
+}
+// VkTransformMatrixKHR: 3x4 row-major transform stored as a flat [f32; 12].
+#[repr(C)]
+#[derive(Copy, Clone)]
+pub struct TransformMatrixKHR {
+    pub matrix: [f32; 12],
+}
+// Untagged C union: the referenced acceleration structure, either as a device
+// address or as a host handle; the active member depends on the build type.
+#[repr(C)]
+#[derive(Copy, Clone)]
+pub union AccelerationStructureReferenceKHR {
+    pub device_handle: DeviceAddress,
+    pub host_handle: AccelerationStructureKHR,
+}
+// VkAccelerationStructureInstanceKHR: one TLAS instance entry. The two
+// Packed24_8 fields each pack a 24-bit value with an 8-bit value, mirroring the
+// C bitfields; construct them via Packed24_8::new as the field docs describe.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureInstanceKHR.html>"]
+pub struct AccelerationStructureInstanceKHR {
+    pub transform: TransformMatrixKHR,
+    #[doc = r" Use [`Packed24_8::new(instance_custom_index, mask)`][Packed24_8::new()] to construct this field"]
+    pub instance_custom_index_and_mask: Packed24_8,
+    #[doc = r" Use [`Packed24_8::new(instance_shader_binding_table_record_offset, flags)`][Packed24_8::new()] to construct this field"]
+    pub instance_shader_binding_table_record_offset_and_flags: Packed24_8,
+    pub acceleration_structure_reference: AccelerationStructureReferenceKHR,
+}
+// Generated binding for VkAccelerationStructureDeviceAddressInfoKHR: input to
+// vkGetAccelerationStructureDeviceAddressKHR.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureDeviceAddressInfoKHR.html>"]
+pub struct AccelerationStructureDeviceAddressInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub acceleration_structure: AccelerationStructureKHR,
+}
+// Default pre-fills the spec-mandated `s_type` and a null `p_next`.
+impl ::std::default::Default for AccelerationStructureDeviceAddressInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            acceleration_structure: AccelerationStructureKHR::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for AccelerationStructureDeviceAddressInfoKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR;
+}
+impl AccelerationStructureDeviceAddressInfoKHR {
+    // Entry point of the builder pattern; starts from `Self::default()`.
+    pub fn builder<'a>() -> AccelerationStructureDeviceAddressInfoKHRBuilder<'a> {
+        AccelerationStructureDeviceAddressInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper; layout-identical to the wrapped struct.
+#[repr(transparent)]
+pub struct AccelerationStructureDeviceAddressInfoKHRBuilder<'a> {
+    inner: AccelerationStructureDeviceAddressInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AccelerationStructureDeviceAddressInfoKHRBuilder<'a> {
+    type Target = AccelerationStructureDeviceAddressInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureDeviceAddressInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setter for the single payload field.
+impl<'a> AccelerationStructureDeviceAddressInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn acceleration_structure(
+        mut self,
+        acceleration_structure: AccelerationStructureKHR,
+    ) -> Self {
+        self.inner.acceleration_structure = acceleration_structure;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> AccelerationStructureDeviceAddressInfoKHR {
+        self.inner
+    }
+}
+// Holds a pointer to 2 * UUID_SIZE version/compatibility bytes; the builder's
+// `'a` lifetime ties the struct to the borrowed byte array it points at.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureVersionInfoKHR.html>"]
+pub struct AccelerationStructureVersionInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub p_version_data: *const [u8; 2 * UUID_SIZE],
+}
+impl ::std::default::Default for AccelerationStructureVersionInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            // Null until version_data() is supplied via the builder.
+            p_version_data: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for AccelerationStructureVersionInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_VERSION_INFO_KHR;
+}
+impl AccelerationStructureVersionInfoKHR {
+    pub fn builder<'a>() -> AccelerationStructureVersionInfoKHRBuilder<'a> {
+        AccelerationStructureVersionInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct AccelerationStructureVersionInfoKHRBuilder<'a> {
+    inner: AccelerationStructureVersionInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AccelerationStructureVersionInfoKHRBuilder<'a> {
+    type Target = AccelerationStructureVersionInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureVersionInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> AccelerationStructureVersionInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn version_data(mut self, version_data: &'a [u8; 2 * UUID_SIZE]) -> Self {
+        // The reference coerces to *const; 'a keeps the borrow alive in the builder.
+        self.inner.p_version_data = version_data;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> AccelerationStructureVersionInfoKHR {
+        self.inner
+    }
+}
+// Parameters for an acceleration-structure-to-acceleration-structure copy:
+// src handle, dst handle, and the copy mode.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyAccelerationStructureInfoKHR.html>"]
+pub struct CopyAccelerationStructureInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub src: AccelerationStructureKHR,
+    pub dst: AccelerationStructureKHR,
+    pub mode: CopyAccelerationStructureModeKHR,
+}
+impl ::std::default::Default for CopyAccelerationStructureInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            src: AccelerationStructureKHR::default(),
+            dst: AccelerationStructureKHR::default(),
+            mode: CopyAccelerationStructureModeKHR::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for CopyAccelerationStructureInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::COPY_ACCELERATION_STRUCTURE_INFO_KHR;
+}
+impl CopyAccelerationStructureInfoKHR {
+    pub fn builder<'a>() -> CopyAccelerationStructureInfoKHRBuilder<'a> {
+        CopyAccelerationStructureInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct CopyAccelerationStructureInfoKHRBuilder<'a> {
+    inner: CopyAccelerationStructureInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CopyAccelerationStructureInfoKHRBuilder<'a> {
+    type Target = CopyAccelerationStructureInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for CopyAccelerationStructureInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> CopyAccelerationStructureInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn src(mut self, src: AccelerationStructureKHR) -> Self {
+        self.inner.src = src;
+        self
+    }
+    #[inline]
+    pub fn dst(mut self, dst: AccelerationStructureKHR) -> Self {
+        self.inner.dst = dst;
+        self
+    }
+    #[inline]
+    pub fn mode(mut self, mode: CopyAccelerationStructureModeKHR) -> Self {
+        self.inner.mode = mode;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> CopyAccelerationStructureInfoKHR {
+        self.inner
+    }
+}
+// Copy from an acceleration structure into caller-provided memory. `dst` is a
+// union (DeviceOrHostAddressKHR), so Debug must be written by hand below
+// instead of derived.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyAccelerationStructureToMemoryInfoKHR.html>"]
+pub struct CopyAccelerationStructureToMemoryInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub src: AccelerationStructureKHR,
+    pub dst: DeviceOrHostAddressKHR,
+    pub mode: CopyAccelerationStructureModeKHR,
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for CopyAccelerationStructureToMemoryInfoKHR {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.debug_struct("CopyAccelerationStructureToMemoryInfoKHR")
+            .field("s_type", &self.s_type)
+            .field("p_next", &self.p_next)
+            .field("src", &self.src)
+            // Reading a union member would require unsafe; print a placeholder.
+            .field("dst", &"union")
+            .field("mode", &self.mode)
+            .finish()
+    }
+}
+impl ::std::default::Default for CopyAccelerationStructureToMemoryInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            src: AccelerationStructureKHR::default(),
+            dst: DeviceOrHostAddressKHR::default(),
+            mode: CopyAccelerationStructureModeKHR::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for CopyAccelerationStructureToMemoryInfoKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR;
+}
+impl CopyAccelerationStructureToMemoryInfoKHR {
+    pub fn builder<'a>() -> CopyAccelerationStructureToMemoryInfoKHRBuilder<'a> {
+        CopyAccelerationStructureToMemoryInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct CopyAccelerationStructureToMemoryInfoKHRBuilder<'a> {
+    inner: CopyAccelerationStructureToMemoryInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CopyAccelerationStructureToMemoryInfoKHRBuilder<'a> {
+    type Target = CopyAccelerationStructureToMemoryInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for CopyAccelerationStructureToMemoryInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> CopyAccelerationStructureToMemoryInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn src(mut self, src: AccelerationStructureKHR) -> Self {
+        self.inner.src = src;
+        self
+    }
+    #[inline]
+    pub fn dst(mut self, dst: DeviceOrHostAddressKHR) -> Self {
+        self.inner.dst = dst;
+        self
+    }
+    #[inline]
+    pub fn mode(mut self, mode: CopyAccelerationStructureModeKHR) -> Self {
+        self.inner.mode = mode;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> CopyAccelerationStructureToMemoryInfoKHR {
+        self.inner
+    }
+}
+// Inverse of the struct above: copy from memory into an acceleration
+// structure. Here `src` is the union (DeviceOrHostAddressConstKHR), so the
+// hand-written Debug prints a placeholder for it.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyMemoryToAccelerationStructureInfoKHR.html>"]
+pub struct CopyMemoryToAccelerationStructureInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub src: DeviceOrHostAddressConstKHR,
+    pub dst: AccelerationStructureKHR,
+    pub mode: CopyAccelerationStructureModeKHR,
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for CopyMemoryToAccelerationStructureInfoKHR {
+    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+        fmt.debug_struct("CopyMemoryToAccelerationStructureInfoKHR")
+            .field("s_type", &self.s_type)
+            .field("p_next", &self.p_next)
+            // Reading a union member would require unsafe; print a placeholder.
+            .field("src", &"union")
+            .field("dst", &self.dst)
+            .field("mode", &self.mode)
+            .finish()
+    }
+}
+impl ::std::default::Default for CopyMemoryToAccelerationStructureInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            src: DeviceOrHostAddressConstKHR::default(),
+            dst: AccelerationStructureKHR::default(),
+            mode: CopyAccelerationStructureModeKHR::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for CopyMemoryToAccelerationStructureInfoKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR;
+}
+impl CopyMemoryToAccelerationStructureInfoKHR {
+    pub fn builder<'a>() -> CopyMemoryToAccelerationStructureInfoKHRBuilder<'a> {
+        CopyMemoryToAccelerationStructureInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct CopyMemoryToAccelerationStructureInfoKHRBuilder<'a> {
+    inner: CopyMemoryToAccelerationStructureInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CopyMemoryToAccelerationStructureInfoKHRBuilder<'a> {
+    type Target = CopyMemoryToAccelerationStructureInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for CopyMemoryToAccelerationStructureInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> CopyMemoryToAccelerationStructureInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn src(mut self, src: DeviceOrHostAddressConstKHR) -> Self {
+        self.inner.src = src;
+        self
+    }
+    #[inline]
+    pub fn dst(mut self, dst: AccelerationStructureKHR) -> Self {
+        self.inner.dst = dst;
+        self
+    }
+    #[inline]
+    pub fn mode(mut self, mode: CopyAccelerationStructureModeKHR) -> Self {
+        self.inner.mode = mode;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> CopyMemoryToAccelerationStructureInfoKHR {
+        self.inner
+    }
+}
+// Size limits (in bytes, per the u32 fields) for ray payload and hit
+// attributes used when creating ray-tracing pipelines.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRayTracingPipelineInterfaceCreateInfoKHR.html>"]
+pub struct RayTracingPipelineInterfaceCreateInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub max_pipeline_ray_payload_size: u32,
+    pub max_pipeline_ray_hit_attribute_size: u32,
+}
+impl ::std::default::Default for RayTracingPipelineInterfaceCreateInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            max_pipeline_ray_payload_size: u32::default(),
+            max_pipeline_ray_hit_attribute_size: u32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for RayTracingPipelineInterfaceCreateInfoKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR;
+}
+impl RayTracingPipelineInterfaceCreateInfoKHR {
+    pub fn builder<'a>() -> RayTracingPipelineInterfaceCreateInfoKHRBuilder<'a> {
+        RayTracingPipelineInterfaceCreateInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct RayTracingPipelineInterfaceCreateInfoKHRBuilder<'a> {
+    inner: RayTracingPipelineInterfaceCreateInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for RayTracingPipelineInterfaceCreateInfoKHRBuilder<'a> {
+    type Target = RayTracingPipelineInterfaceCreateInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for RayTracingPipelineInterfaceCreateInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> RayTracingPipelineInterfaceCreateInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn max_pipeline_ray_payload_size(mut self, max_pipeline_ray_payload_size: u32) -> Self {
+        self.inner.max_pipeline_ray_payload_size = max_pipeline_ray_payload_size;
+        self
+    }
+    #[inline]
+    pub fn max_pipeline_ray_hit_attribute_size(
+        mut self,
+        max_pipeline_ray_hit_attribute_size: u32,
+    ) -> Self {
+        self.inner.max_pipeline_ray_hit_attribute_size = max_pipeline_ray_hit_attribute_size;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> RayTracingPipelineInterfaceCreateInfoKHR {
+        self.inner
+    }
+}
+// Carries a count + pointer pair over an array of pipeline-library handles;
+// the builder's libraries() setter keeps the two fields in sync from a slice.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineLibraryCreateInfoKHR.html>"]
+pub struct PipelineLibraryCreateInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub library_count: u32,
+    pub p_libraries: *const Pipeline,
+}
+impl ::std::default::Default for PipelineLibraryCreateInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            library_count: u32::default(),
+            p_libraries: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PipelineLibraryCreateInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_LIBRARY_CREATE_INFO_KHR;
+}
+impl PipelineLibraryCreateInfoKHR {
+    pub fn builder<'a>() -> PipelineLibraryCreateInfoKHRBuilder<'a> {
+        PipelineLibraryCreateInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PipelineLibraryCreateInfoKHRBuilder<'a> {
+    inner: PipelineLibraryCreateInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// This struct may be chained into a GraphicsPipelineCreateInfo p_next chain;
+// both the builder and the raw struct are accepted.
+unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineLibraryCreateInfoKHRBuilder<'_> {}
+unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineLibraryCreateInfoKHR {}
+impl<'a> ::std::ops::Deref for PipelineLibraryCreateInfoKHRBuilder<'a> {
+    type Target = PipelineLibraryCreateInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineLibraryCreateInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PipelineLibraryCreateInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn libraries(mut self, libraries: &'a [Pipeline]) -> Self {
+        // Count and pointer are set together so they can never disagree.
+        self.inner.library_count = libraries.len() as _;
+        self.inner.p_libraries = libraries.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineLibraryCreateInfoKHR {
+        self.inner
+    }
+}
+// Feature-query/enable struct for VK_EXT_extended_dynamic_state. p_next is
+// *mut (not *const) because the implementation writes feature support back
+// through chained structs during queries.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceExtendedDynamicStateFeaturesEXT.html>"]
+pub struct PhysicalDeviceExtendedDynamicStateFeaturesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub extended_dynamic_state: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceExtendedDynamicStateFeaturesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            extended_dynamic_state: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceExtendedDynamicStateFeaturesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT;
+}
+impl PhysicalDeviceExtendedDynamicStateFeaturesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceExtendedDynamicStateFeaturesEXTBuilder<'a> {
+        PhysicalDeviceExtendedDynamicStateFeaturesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceExtendedDynamicStateFeaturesEXTBuilder<'a> {
+    inner: PhysicalDeviceExtendedDynamicStateFeaturesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Chainable into both PhysicalDeviceFeatures2 (query) and DeviceCreateInfo
+// (enable) p_next chains.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceExtendedDynamicStateFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceExtendedDynamicStateFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExtendedDynamicStateFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExtendedDynamicStateFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceExtendedDynamicStateFeaturesEXTBuilder<'a> {
+    type Target = PhysicalDeviceExtendedDynamicStateFeaturesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceExtendedDynamicStateFeaturesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceExtendedDynamicStateFeaturesEXTBuilder<'a> {
+    #[inline]
+    pub fn extended_dynamic_state(mut self, extended_dynamic_state: bool) -> Self {
+        // bool -> Bool32 conversion via From/Into.
+        self.inner.extended_dynamic_state = extended_dynamic_state.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceExtendedDynamicStateFeaturesEXT {
+        self.inner
+    }
+}
+// Feature struct for VK_EXT_extended_dynamic_state2: three independent
+// Bool32 feature toggles, each with a bool-taking builder setter.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceExtendedDynamicState2FeaturesEXT.html>"]
+pub struct PhysicalDeviceExtendedDynamicState2FeaturesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub extended_dynamic_state2: Bool32,
+    pub extended_dynamic_state2_logic_op: Bool32,
+    pub extended_dynamic_state2_patch_control_points: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceExtendedDynamicState2FeaturesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            extended_dynamic_state2: Bool32::default(),
+            extended_dynamic_state2_logic_op: Bool32::default(),
+            extended_dynamic_state2_patch_control_points: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceExtendedDynamicState2FeaturesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT;
+}
+impl PhysicalDeviceExtendedDynamicState2FeaturesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceExtendedDynamicState2FeaturesEXTBuilder<'a> {
+        PhysicalDeviceExtendedDynamicState2FeaturesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceExtendedDynamicState2FeaturesEXTBuilder<'a> {
+    inner: PhysicalDeviceExtendedDynamicState2FeaturesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Chainable into PhysicalDeviceFeatures2 and DeviceCreateInfo p_next chains.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceExtendedDynamicState2FeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceExtendedDynamicState2FeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExtendedDynamicState2FeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExtendedDynamicState2FeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceExtendedDynamicState2FeaturesEXTBuilder<'a> {
+    type Target = PhysicalDeviceExtendedDynamicState2FeaturesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceExtendedDynamicState2FeaturesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceExtendedDynamicState2FeaturesEXTBuilder<'a> {
+    #[inline]
+    pub fn extended_dynamic_state2(mut self, extended_dynamic_state2: bool) -> Self {
+        self.inner.extended_dynamic_state2 = extended_dynamic_state2.into();
+        self
+    }
+    #[inline]
+    pub fn extended_dynamic_state2_logic_op(
+        mut self,
+        extended_dynamic_state2_logic_op: bool,
+    ) -> Self {
+        self.inner.extended_dynamic_state2_logic_op = extended_dynamic_state2_logic_op.into();
+        self
+    }
+    #[inline]
+    pub fn extended_dynamic_state2_patch_control_points(
+        mut self,
+        extended_dynamic_state2_patch_control_points: bool,
+    ) -> Self {
+        self.inner.extended_dynamic_state2_patch_control_points =
+            extended_dynamic_state2_patch_control_points.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceExtendedDynamicState2FeaturesEXT {
+        self.inner
+    }
+}
+// Feature struct for VK_EXT_extended_dynamic_state3: one Bool32 per dynamic
+// state the extension can expose (31 toggles), all defaulting to false/0.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceExtendedDynamicState3FeaturesEXT.html>"]
+pub struct PhysicalDeviceExtendedDynamicState3FeaturesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub extended_dynamic_state3_tessellation_domain_origin: Bool32,
+    pub extended_dynamic_state3_depth_clamp_enable: Bool32,
+    pub extended_dynamic_state3_polygon_mode: Bool32,
+    pub extended_dynamic_state3_rasterization_samples: Bool32,
+    pub extended_dynamic_state3_sample_mask: Bool32,
+    pub extended_dynamic_state3_alpha_to_coverage_enable: Bool32,
+    pub extended_dynamic_state3_alpha_to_one_enable: Bool32,
+    pub extended_dynamic_state3_logic_op_enable: Bool32,
+    pub extended_dynamic_state3_color_blend_enable: Bool32,
+    pub extended_dynamic_state3_color_blend_equation: Bool32,
+    pub extended_dynamic_state3_color_write_mask: Bool32,
+    pub extended_dynamic_state3_rasterization_stream: Bool32,
+    pub extended_dynamic_state3_conservative_rasterization_mode: Bool32,
+    pub extended_dynamic_state3_extra_primitive_overestimation_size: Bool32,
+    pub extended_dynamic_state3_depth_clip_enable: Bool32,
+    pub extended_dynamic_state3_sample_locations_enable: Bool32,
+    pub extended_dynamic_state3_color_blend_advanced: Bool32,
+    pub extended_dynamic_state3_provoking_vertex_mode: Bool32,
+    pub extended_dynamic_state3_line_rasterization_mode: Bool32,
+    pub extended_dynamic_state3_line_stipple_enable: Bool32,
+    pub extended_dynamic_state3_depth_clip_negative_one_to_one: Bool32,
+    pub extended_dynamic_state3_viewport_w_scaling_enable: Bool32,
+    pub extended_dynamic_state3_viewport_swizzle: Bool32,
+    pub extended_dynamic_state3_coverage_to_color_enable: Bool32,
+    pub extended_dynamic_state3_coverage_to_color_location: Bool32,
+    pub extended_dynamic_state3_coverage_modulation_mode: Bool32,
+    pub extended_dynamic_state3_coverage_modulation_table_enable: Bool32,
+    pub extended_dynamic_state3_coverage_modulation_table: Bool32,
+    pub extended_dynamic_state3_coverage_reduction_mode: Bool32,
+    pub extended_dynamic_state3_representative_fragment_test_enable: Bool32,
+    pub extended_dynamic_state3_shading_rate_image_enable: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceExtendedDynamicState3FeaturesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            extended_dynamic_state3_tessellation_domain_origin: Bool32::default(),
+            extended_dynamic_state3_depth_clamp_enable: Bool32::default(),
+            extended_dynamic_state3_polygon_mode: Bool32::default(),
+            extended_dynamic_state3_rasterization_samples: Bool32::default(),
+            extended_dynamic_state3_sample_mask: Bool32::default(),
+            extended_dynamic_state3_alpha_to_coverage_enable: Bool32::default(),
+            extended_dynamic_state3_alpha_to_one_enable: Bool32::default(),
+            extended_dynamic_state3_logic_op_enable: Bool32::default(),
+            extended_dynamic_state3_color_blend_enable: Bool32::default(),
+            extended_dynamic_state3_color_blend_equation: Bool32::default(),
+            extended_dynamic_state3_color_write_mask: Bool32::default(),
+            extended_dynamic_state3_rasterization_stream: Bool32::default(),
+            extended_dynamic_state3_conservative_rasterization_mode: Bool32::default(),
+            extended_dynamic_state3_extra_primitive_overestimation_size: Bool32::default(),
+            extended_dynamic_state3_depth_clip_enable: Bool32::default(),
+            extended_dynamic_state3_sample_locations_enable: Bool32::default(),
+            extended_dynamic_state3_color_blend_advanced: Bool32::default(),
+            extended_dynamic_state3_provoking_vertex_mode: Bool32::default(),
+            extended_dynamic_state3_line_rasterization_mode: Bool32::default(),
+            extended_dynamic_state3_line_stipple_enable: Bool32::default(),
+            extended_dynamic_state3_depth_clip_negative_one_to_one: Bool32::default(),
+            extended_dynamic_state3_viewport_w_scaling_enable: Bool32::default(),
+            extended_dynamic_state3_viewport_swizzle: Bool32::default(),
+            extended_dynamic_state3_coverage_to_color_enable: Bool32::default(),
+            extended_dynamic_state3_coverage_to_color_location: Bool32::default(),
+            extended_dynamic_state3_coverage_modulation_mode: Bool32::default(),
+            extended_dynamic_state3_coverage_modulation_table_enable: Bool32::default(),
+            extended_dynamic_state3_coverage_modulation_table: Bool32::default(),
+            extended_dynamic_state3_coverage_reduction_mode: Bool32::default(),
+            extended_dynamic_state3_representative_fragment_test_enable: Bool32::default(),
+            extended_dynamic_state3_shading_rate_image_enable: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceExtendedDynamicState3FeaturesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT;
+}
+impl PhysicalDeviceExtendedDynamicState3FeaturesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceExtendedDynamicState3FeaturesEXTBuilder<'a> {
+        PhysicalDeviceExtendedDynamicState3FeaturesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceExtendedDynamicState3FeaturesEXTBuilder<'a> {
+    inner: PhysicalDeviceExtendedDynamicState3FeaturesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Chainable into PhysicalDeviceFeatures2 and DeviceCreateInfo p_next chains.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceExtendedDynamicState3FeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceExtendedDynamicState3FeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExtendedDynamicState3FeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExtendedDynamicState3FeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceExtendedDynamicState3FeaturesEXTBuilder<'a> {
+    type Target = PhysicalDeviceExtendedDynamicState3FeaturesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceExtendedDynamicState3FeaturesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceExtendedDynamicState3FeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn extended_dynamic_state3_tessellation_domain_origin(
+ mut self,
+ extended_dynamic_state3_tessellation_domain_origin: bool,
+ ) -> Self {
+ self.inner
+ .extended_dynamic_state3_tessellation_domain_origin =
+ extended_dynamic_state3_tessellation_domain_origin.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_depth_clamp_enable(
+ mut self,
+ extended_dynamic_state3_depth_clamp_enable: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_depth_clamp_enable =
+ extended_dynamic_state3_depth_clamp_enable.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_polygon_mode(
+ mut self,
+ extended_dynamic_state3_polygon_mode: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_polygon_mode =
+ extended_dynamic_state3_polygon_mode.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_rasterization_samples(
+ mut self,
+ extended_dynamic_state3_rasterization_samples: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_rasterization_samples =
+ extended_dynamic_state3_rasterization_samples.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_sample_mask(
+ mut self,
+ extended_dynamic_state3_sample_mask: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_sample_mask = extended_dynamic_state3_sample_mask.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_alpha_to_coverage_enable(
+ mut self,
+ extended_dynamic_state3_alpha_to_coverage_enable: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_alpha_to_coverage_enable =
+ extended_dynamic_state3_alpha_to_coverage_enable.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_alpha_to_one_enable(
+ mut self,
+ extended_dynamic_state3_alpha_to_one_enable: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_alpha_to_one_enable =
+ extended_dynamic_state3_alpha_to_one_enable.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_logic_op_enable(
+ mut self,
+ extended_dynamic_state3_logic_op_enable: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_logic_op_enable =
+ extended_dynamic_state3_logic_op_enable.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_color_blend_enable(
+ mut self,
+ extended_dynamic_state3_color_blend_enable: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_color_blend_enable =
+ extended_dynamic_state3_color_blend_enable.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_color_blend_equation(
+ mut self,
+ extended_dynamic_state3_color_blend_equation: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_color_blend_equation =
+ extended_dynamic_state3_color_blend_equation.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_color_write_mask(
+ mut self,
+ extended_dynamic_state3_color_write_mask: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_color_write_mask =
+ extended_dynamic_state3_color_write_mask.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_rasterization_stream(
+ mut self,
+ extended_dynamic_state3_rasterization_stream: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_rasterization_stream =
+ extended_dynamic_state3_rasterization_stream.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_conservative_rasterization_mode(
+ mut self,
+ extended_dynamic_state3_conservative_rasterization_mode: bool,
+ ) -> Self {
+ self.inner
+ .extended_dynamic_state3_conservative_rasterization_mode =
+ extended_dynamic_state3_conservative_rasterization_mode.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_extra_primitive_overestimation_size(
+ mut self,
+ extended_dynamic_state3_extra_primitive_overestimation_size: bool,
+ ) -> Self {
+ self.inner
+ .extended_dynamic_state3_extra_primitive_overestimation_size =
+ extended_dynamic_state3_extra_primitive_overestimation_size.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_depth_clip_enable(
+ mut self,
+ extended_dynamic_state3_depth_clip_enable: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_depth_clip_enable =
+ extended_dynamic_state3_depth_clip_enable.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_sample_locations_enable(
+ mut self,
+ extended_dynamic_state3_sample_locations_enable: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_sample_locations_enable =
+ extended_dynamic_state3_sample_locations_enable.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_color_blend_advanced(
+ mut self,
+ extended_dynamic_state3_color_blend_advanced: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_color_blend_advanced =
+ extended_dynamic_state3_color_blend_advanced.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_provoking_vertex_mode(
+ mut self,
+ extended_dynamic_state3_provoking_vertex_mode: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_provoking_vertex_mode =
+ extended_dynamic_state3_provoking_vertex_mode.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_line_rasterization_mode(
+ mut self,
+ extended_dynamic_state3_line_rasterization_mode: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_line_rasterization_mode =
+ extended_dynamic_state3_line_rasterization_mode.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_line_stipple_enable(
+ mut self,
+ extended_dynamic_state3_line_stipple_enable: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_line_stipple_enable =
+ extended_dynamic_state3_line_stipple_enable.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_depth_clip_negative_one_to_one(
+ mut self,
+ extended_dynamic_state3_depth_clip_negative_one_to_one: bool,
+ ) -> Self {
+ self.inner
+ .extended_dynamic_state3_depth_clip_negative_one_to_one =
+ extended_dynamic_state3_depth_clip_negative_one_to_one.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_viewport_w_scaling_enable(
+ mut self,
+ extended_dynamic_state3_viewport_w_scaling_enable: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_viewport_w_scaling_enable =
+ extended_dynamic_state3_viewport_w_scaling_enable.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_viewport_swizzle(
+ mut self,
+ extended_dynamic_state3_viewport_swizzle: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_viewport_swizzle =
+ extended_dynamic_state3_viewport_swizzle.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_coverage_to_color_enable(
+ mut self,
+ extended_dynamic_state3_coverage_to_color_enable: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_coverage_to_color_enable =
+ extended_dynamic_state3_coverage_to_color_enable.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_coverage_to_color_location(
+ mut self,
+ extended_dynamic_state3_coverage_to_color_location: bool,
+ ) -> Self {
+ self.inner
+ .extended_dynamic_state3_coverage_to_color_location =
+ extended_dynamic_state3_coverage_to_color_location.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_coverage_modulation_mode(
+ mut self,
+ extended_dynamic_state3_coverage_modulation_mode: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_coverage_modulation_mode =
+ extended_dynamic_state3_coverage_modulation_mode.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_coverage_modulation_table_enable(
+ mut self,
+ extended_dynamic_state3_coverage_modulation_table_enable: bool,
+ ) -> Self {
+ self.inner
+ .extended_dynamic_state3_coverage_modulation_table_enable =
+ extended_dynamic_state3_coverage_modulation_table_enable.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_coverage_modulation_table(
+ mut self,
+ extended_dynamic_state3_coverage_modulation_table: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_coverage_modulation_table =
+ extended_dynamic_state3_coverage_modulation_table.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_coverage_reduction_mode(
+ mut self,
+ extended_dynamic_state3_coverage_reduction_mode: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_coverage_reduction_mode =
+ extended_dynamic_state3_coverage_reduction_mode.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_representative_fragment_test_enable(
+ mut self,
+ extended_dynamic_state3_representative_fragment_test_enable: bool,
+ ) -> Self {
+ self.inner
+ .extended_dynamic_state3_representative_fragment_test_enable =
+ extended_dynamic_state3_representative_fragment_test_enable.into();
+ self
+ }
+ #[inline]
+ pub fn extended_dynamic_state3_shading_rate_image_enable(
+ mut self,
+ extended_dynamic_state3_shading_rate_image_enable: bool,
+ ) -> Self {
+ self.inner.extended_dynamic_state3_shading_rate_image_enable =
+ extended_dynamic_state3_shading_rate_image_enable.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceExtendedDynamicState3FeaturesEXT {
+ self.inner
+ }
+}
// FFI mirror of VkPhysicalDeviceExtendedDynamicState3PropertiesEXT.
// `repr(C)` keeps field order/layout identical to the C struct so a pointer to
// this value can be handed straight to the Vulkan loader.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceExtendedDynamicState3PropertiesEXT.html>"]
pub struct PhysicalDeviceExtendedDynamicState3PropertiesEXT {
    pub s_type: StructureType, // structure tag; pre-filled by `Default`
    pub p_next: *mut c_void,   // extension chain pointer; null by default
    pub dynamic_primitive_topology_unrestricted: Bool32,
}
impl ::std::default::Default for PhysicalDeviceExtendedDynamicState3PropertiesEXT {
    #[inline]
    fn default() -> Self {
        // Pre-set `s_type` from the `TaggedStructure` impl and null the chain
        // pointer; payload field is zeroed (`Bool32::default()`).
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            dynamic_primitive_topology_unrestricted: Bool32::default(),
        }
    }
}
// Binds the struct to its VkStructureType discriminant.
unsafe impl TaggedStructure for PhysicalDeviceExtendedDynamicState3PropertiesEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT;
}
impl PhysicalDeviceExtendedDynamicState3PropertiesEXT {
    // Entry point of the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> PhysicalDeviceExtendedDynamicState3PropertiesEXTBuilder<'a> {
        PhysicalDeviceExtendedDynamicState3PropertiesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// `repr(transparent)`: the builder is layout-identical to the wrapped struct,
// so `&Builder` can be used wherever `&PhysicalDeviceExtendedDynamicState3PropertiesEXT`
// is expected (via the `Deref` impl below).
#[repr(transparent)]
pub struct PhysicalDeviceExtendedDynamicState3PropertiesEXTBuilder<'a> {
    inner: PhysicalDeviceExtendedDynamicState3PropertiesEXT,
    marker: ::std::marker::PhantomData<&'a ()>, // carries the otherwise-unused lifetime
}
// Marker impls: this type is a valid `p_next` extension of
// VkPhysicalDeviceProperties2 (for both builder and raw struct).
unsafe impl ExtendsPhysicalDeviceProperties2
    for PhysicalDeviceExtendedDynamicState3PropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceExtendedDynamicState3PropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceExtendedDynamicState3PropertiesEXTBuilder<'a> {
    type Target = PhysicalDeviceExtendedDynamicState3PropertiesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceExtendedDynamicState3PropertiesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceExtendedDynamicState3PropertiesEXTBuilder<'a> {
    #[inline]
    pub fn dynamic_primitive_topology_unrestricted(
        mut self,
        dynamic_primitive_topology_unrestricted: bool,
    ) -> Self {
        // `bool` -> `Bool32` conversion via `Into`.
        self.inner.dynamic_primitive_topology_unrestricted =
            dynamic_primitive_topology_unrestricted.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceExtendedDynamicState3PropertiesEXT {
        self.inner
    }
}
// FFI mirror of VkColorBlendEquationEXT: plain-old-data blend-equation
// parameters (no `s_type`/`p_next`, so `Default` can simply be derived).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkColorBlendEquationEXT.html>"]
pub struct ColorBlendEquationEXT {
    pub src_color_blend_factor: BlendFactor,
    pub dst_color_blend_factor: BlendFactor,
    pub color_blend_op: BlendOp,
    pub src_alpha_blend_factor: BlendFactor,
    pub dst_alpha_blend_factor: BlendFactor,
    pub alpha_blend_op: BlendOp,
}
impl ColorBlendEquationEXT {
    // Entry point of the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> ColorBlendEquationEXTBuilder<'a> {
        ColorBlendEquationEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-identical wrapper; `Deref` below lets a `&Builder` stand in for the
// raw struct.
#[repr(transparent)]
pub struct ColorBlendEquationEXTBuilder<'a> {
    inner: ColorBlendEquationEXT,
    marker: ::std::marker::PhantomData<&'a ()>, // carries the otherwise-unused lifetime
}
impl<'a> ::std::ops::Deref for ColorBlendEquationEXTBuilder<'a> {
    type Target = ColorBlendEquationEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ColorBlendEquationEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// One chainable setter per field; each moves `self` and returns it.
impl<'a> ColorBlendEquationEXTBuilder<'a> {
    #[inline]
    pub fn src_color_blend_factor(mut self, src_color_blend_factor: BlendFactor) -> Self {
        self.inner.src_color_blend_factor = src_color_blend_factor;
        self
    }
    #[inline]
    pub fn dst_color_blend_factor(mut self, dst_color_blend_factor: BlendFactor) -> Self {
        self.inner.dst_color_blend_factor = dst_color_blend_factor;
        self
    }
    #[inline]
    pub fn color_blend_op(mut self, color_blend_op: BlendOp) -> Self {
        self.inner.color_blend_op = color_blend_op;
        self
    }
    #[inline]
    pub fn src_alpha_blend_factor(mut self, src_alpha_blend_factor: BlendFactor) -> Self {
        self.inner.src_alpha_blend_factor = src_alpha_blend_factor;
        self
    }
    #[inline]
    pub fn dst_alpha_blend_factor(mut self, dst_alpha_blend_factor: BlendFactor) -> Self {
        self.inner.dst_alpha_blend_factor = dst_alpha_blend_factor;
        self
    }
    #[inline]
    pub fn alpha_blend_op(mut self, alpha_blend_op: BlendOp) -> Self {
        self.inner.alpha_blend_op = alpha_blend_op;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ColorBlendEquationEXT {
        self.inner
    }
}
// FFI mirror of VkColorBlendAdvancedEXT: plain-old-data advanced-blend
// parameters (no `s_type`/`p_next`, so `Default` can simply be derived).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkColorBlendAdvancedEXT.html>"]
pub struct ColorBlendAdvancedEXT {
    pub advanced_blend_op: BlendOp,
    pub src_premultiplied: Bool32,
    pub dst_premultiplied: Bool32,
    pub blend_overlap: BlendOverlapEXT,
    pub clamp_results: Bool32,
}
impl ColorBlendAdvancedEXT {
    // Entry point of the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> ColorBlendAdvancedEXTBuilder<'a> {
        ColorBlendAdvancedEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-identical wrapper; `Deref` below lets a `&Builder` stand in for the
// raw struct.
#[repr(transparent)]
pub struct ColorBlendAdvancedEXTBuilder<'a> {
    inner: ColorBlendAdvancedEXT,
    marker: ::std::marker::PhantomData<&'a ()>, // carries the otherwise-unused lifetime
}
impl<'a> ::std::ops::Deref for ColorBlendAdvancedEXTBuilder<'a> {
    type Target = ColorBlendAdvancedEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for ColorBlendAdvancedEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Chainable setters; boolean fields convert `bool` -> `Bool32` via `Into`.
impl<'a> ColorBlendAdvancedEXTBuilder<'a> {
    #[inline]
    pub fn advanced_blend_op(mut self, advanced_blend_op: BlendOp) -> Self {
        self.inner.advanced_blend_op = advanced_blend_op;
        self
    }
    #[inline]
    pub fn src_premultiplied(mut self, src_premultiplied: bool) -> Self {
        self.inner.src_premultiplied = src_premultiplied.into();
        self
    }
    #[inline]
    pub fn dst_premultiplied(mut self, dst_premultiplied: bool) -> Self {
        self.inner.dst_premultiplied = dst_premultiplied.into();
        self
    }
    #[inline]
    pub fn blend_overlap(mut self, blend_overlap: BlendOverlapEXT) -> Self {
        self.inner.blend_overlap = blend_overlap;
        self
    }
    #[inline]
    pub fn clamp_results(mut self, clamp_results: bool) -> Self {
        self.inner.clamp_results = clamp_results.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> ColorBlendAdvancedEXT {
        self.inner
    }
}
// FFI mirror of VkRenderPassTransformBeginInfoQCOM.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPassTransformBeginInfoQCOM.html>"]
pub struct RenderPassTransformBeginInfoQCOM {
    pub s_type: StructureType, // structure tag; pre-filled by `Default`
    pub p_next: *mut c_void,   // extension chain pointer; null by default
    pub transform: SurfaceTransformFlagsKHR,
}
impl ::std::default::Default for RenderPassTransformBeginInfoQCOM {
    #[inline]
    fn default() -> Self {
        // Pre-set `s_type` from the `TaggedStructure` impl; null the chain.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            transform: SurfaceTransformFlagsKHR::default(),
        }
    }
}
// Binds the struct to its VkStructureType discriminant.
unsafe impl TaggedStructure for RenderPassTransformBeginInfoQCOM {
    const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM;
}
impl RenderPassTransformBeginInfoQCOM {
    // Entry point of the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> RenderPassTransformBeginInfoQCOMBuilder<'a> {
        RenderPassTransformBeginInfoQCOMBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-identical wrapper; `Deref` below lets a `&Builder` stand in for the
// raw struct.
#[repr(transparent)]
pub struct RenderPassTransformBeginInfoQCOMBuilder<'a> {
    inner: RenderPassTransformBeginInfoQCOM,
    marker: ::std::marker::PhantomData<&'a ()>, // carries the otherwise-unused lifetime
}
// Marker impls: valid `p_next` extension of VkRenderPassBeginInfo.
unsafe impl ExtendsRenderPassBeginInfo for RenderPassTransformBeginInfoQCOMBuilder<'_> {}
unsafe impl ExtendsRenderPassBeginInfo for RenderPassTransformBeginInfoQCOM {}
impl<'a> ::std::ops::Deref for RenderPassTransformBeginInfoQCOMBuilder<'a> {
    type Target = RenderPassTransformBeginInfoQCOM;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for RenderPassTransformBeginInfoQCOMBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> RenderPassTransformBeginInfoQCOMBuilder<'a> {
    #[inline]
    pub fn transform(mut self, transform: SurfaceTransformFlagsKHR) -> Self {
        self.inner.transform = transform;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> RenderPassTransformBeginInfoQCOM {
        self.inner
    }
}
// FFI mirror of VkCopyCommandTransformInfoQCOM.
// Note `p_next` is `*const` here (read-only input chain), unlike the `*mut`
// chains on the query-style structs.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyCommandTransformInfoQCOM.html>"]
pub struct CopyCommandTransformInfoQCOM {
    pub s_type: StructureType, // structure tag; pre-filled by `Default`
    pub p_next: *const c_void, // extension chain pointer; null by default
    pub transform: SurfaceTransformFlagsKHR,
}
impl ::std::default::Default for CopyCommandTransformInfoQCOM {
    #[inline]
    fn default() -> Self {
        // Pre-set `s_type` from the `TaggedStructure` impl; null the chain.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            transform: SurfaceTransformFlagsKHR::default(),
        }
    }
}
// Binds the struct to its VkStructureType discriminant.
unsafe impl TaggedStructure for CopyCommandTransformInfoQCOM {
    const STRUCTURE_TYPE: StructureType = StructureType::COPY_COMMAND_TRANSFORM_INFO_QCOM;
}
impl CopyCommandTransformInfoQCOM {
    // Entry point of the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> CopyCommandTransformInfoQCOMBuilder<'a> {
        CopyCommandTransformInfoQCOMBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-identical wrapper; `Deref` below lets a `&Builder` stand in for the
// raw struct.
#[repr(transparent)]
pub struct CopyCommandTransformInfoQCOMBuilder<'a> {
    inner: CopyCommandTransformInfoQCOM,
    marker: ::std::marker::PhantomData<&'a ()>, // carries the otherwise-unused lifetime
}
// Marker impls: valid `p_next` extension of both VkBufferImageCopy2 and
// VkImageBlit2.
unsafe impl ExtendsBufferImageCopy2 for CopyCommandTransformInfoQCOMBuilder<'_> {}
unsafe impl ExtendsBufferImageCopy2 for CopyCommandTransformInfoQCOM {}
unsafe impl ExtendsImageBlit2 for CopyCommandTransformInfoQCOMBuilder<'_> {}
unsafe impl ExtendsImageBlit2 for CopyCommandTransformInfoQCOM {}
impl<'a> ::std::ops::Deref for CopyCommandTransformInfoQCOMBuilder<'a> {
    type Target = CopyCommandTransformInfoQCOM;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for CopyCommandTransformInfoQCOMBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> CopyCommandTransformInfoQCOMBuilder<'a> {
    #[inline]
    pub fn transform(mut self, transform: SurfaceTransformFlagsKHR) -> Self {
        self.inner.transform = transform;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> CopyCommandTransformInfoQCOM {
        self.inner
    }
}
// FFI mirror of VkCommandBufferInheritanceRenderPassTransformInfoQCOM.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandBufferInheritanceRenderPassTransformInfoQCOM.html>"]
pub struct CommandBufferInheritanceRenderPassTransformInfoQCOM {
    pub s_type: StructureType, // structure tag; pre-filled by `Default`
    pub p_next: *mut c_void,   // extension chain pointer; null by default
    pub transform: SurfaceTransformFlagsKHR,
    pub render_area: Rect2D,
}
impl ::std::default::Default for CommandBufferInheritanceRenderPassTransformInfoQCOM {
    #[inline]
    fn default() -> Self {
        // Pre-set `s_type` from the `TaggedStructure` impl; null the chain;
        // zero the payload fields.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            transform: SurfaceTransformFlagsKHR::default(),
            render_area: Rect2D::default(),
        }
    }
}
// Binds the struct to its VkStructureType discriminant.
unsafe impl TaggedStructure for CommandBufferInheritanceRenderPassTransformInfoQCOM {
    const STRUCTURE_TYPE: StructureType =
        StructureType::COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM;
}
impl CommandBufferInheritanceRenderPassTransformInfoQCOM {
    // Entry point of the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> CommandBufferInheritanceRenderPassTransformInfoQCOMBuilder<'a> {
        CommandBufferInheritanceRenderPassTransformInfoQCOMBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-identical wrapper; `Deref` below lets a `&Builder` stand in for the
// raw struct.
#[repr(transparent)]
pub struct CommandBufferInheritanceRenderPassTransformInfoQCOMBuilder<'a> {
    inner: CommandBufferInheritanceRenderPassTransformInfoQCOM,
    marker: ::std::marker::PhantomData<&'a ()>, // carries the otherwise-unused lifetime
}
// Marker impls: valid `p_next` extension of VkCommandBufferInheritanceInfo.
unsafe impl ExtendsCommandBufferInheritanceInfo
    for CommandBufferInheritanceRenderPassTransformInfoQCOMBuilder<'_>
{
}
unsafe impl ExtendsCommandBufferInheritanceInfo
    for CommandBufferInheritanceRenderPassTransformInfoQCOM
{
}
impl<'a> ::std::ops::Deref for CommandBufferInheritanceRenderPassTransformInfoQCOMBuilder<'a> {
    type Target = CommandBufferInheritanceRenderPassTransformInfoQCOM;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for CommandBufferInheritanceRenderPassTransformInfoQCOMBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> CommandBufferInheritanceRenderPassTransformInfoQCOMBuilder<'a> {
    #[inline]
    pub fn transform(mut self, transform: SurfaceTransformFlagsKHR) -> Self {
        self.inner.transform = transform;
        self
    }
    #[inline]
    pub fn render_area(mut self, render_area: Rect2D) -> Self {
        self.inner.render_area = render_area;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> CommandBufferInheritanceRenderPassTransformInfoQCOM {
        self.inner
    }
}
// FFI mirror of VkPhysicalDeviceDiagnosticsConfigFeaturesNV.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDiagnosticsConfigFeaturesNV.html>"]
pub struct PhysicalDeviceDiagnosticsConfigFeaturesNV {
    pub s_type: StructureType, // structure tag; pre-filled by `Default`
    pub p_next: *mut c_void,   // extension chain pointer; null by default
    pub diagnostics_config: Bool32,
}
impl ::std::default::Default for PhysicalDeviceDiagnosticsConfigFeaturesNV {
    #[inline]
    fn default() -> Self {
        // Pre-set `s_type` from the `TaggedStructure` impl; null the chain.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            diagnostics_config: Bool32::default(),
        }
    }
}
// Binds the struct to its VkStructureType discriminant.
unsafe impl TaggedStructure for PhysicalDeviceDiagnosticsConfigFeaturesNV {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV;
}
impl PhysicalDeviceDiagnosticsConfigFeaturesNV {
    // Entry point of the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> PhysicalDeviceDiagnosticsConfigFeaturesNVBuilder<'a> {
        PhysicalDeviceDiagnosticsConfigFeaturesNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-identical wrapper; `Deref` below lets a `&Builder` stand in for the
// raw struct.
#[repr(transparent)]
pub struct PhysicalDeviceDiagnosticsConfigFeaturesNVBuilder<'a> {
    inner: PhysicalDeviceDiagnosticsConfigFeaturesNV,
    marker: ::std::marker::PhantomData<&'a ()>, // carries the otherwise-unused lifetime
}
// Marker impls: valid `p_next` extension of VkPhysicalDeviceFeatures2 and of
// VkDeviceCreateInfo (the usual pattern for feature structs).
unsafe impl ExtendsPhysicalDeviceFeatures2
    for PhysicalDeviceDiagnosticsConfigFeaturesNVBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDiagnosticsConfigFeaturesNV {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDiagnosticsConfigFeaturesNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDiagnosticsConfigFeaturesNV {}
impl<'a> ::std::ops::Deref for PhysicalDeviceDiagnosticsConfigFeaturesNVBuilder<'a> {
    type Target = PhysicalDeviceDiagnosticsConfigFeaturesNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceDiagnosticsConfigFeaturesNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceDiagnosticsConfigFeaturesNVBuilder<'a> {
    #[inline]
    pub fn diagnostics_config(mut self, diagnostics_config: bool) -> Self {
        // `bool` -> `Bool32` conversion via `Into`.
        self.inner.diagnostics_config = diagnostics_config.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceDiagnosticsConfigFeaturesNV {
        self.inner
    }
}
// FFI mirror of VkDeviceDiagnosticsConfigCreateInfoNV.
// `p_next` is `*const` (read-only input chain for create-info structs).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceDiagnosticsConfigCreateInfoNV.html>"]
pub struct DeviceDiagnosticsConfigCreateInfoNV {
    pub s_type: StructureType, // structure tag; pre-filled by `Default`
    pub p_next: *const c_void, // extension chain pointer; null by default
    pub flags: DeviceDiagnosticsConfigFlagsNV,
}
impl ::std::default::Default for DeviceDiagnosticsConfigCreateInfoNV {
    #[inline]
    fn default() -> Self {
        // Pre-set `s_type` from the `TaggedStructure` impl; null the chain.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: DeviceDiagnosticsConfigFlagsNV::default(),
        }
    }
}
// Binds the struct to its VkStructureType discriminant.
unsafe impl TaggedStructure for DeviceDiagnosticsConfigCreateInfoNV {
    const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV;
}
impl DeviceDiagnosticsConfigCreateInfoNV {
    // Entry point of the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> DeviceDiagnosticsConfigCreateInfoNVBuilder<'a> {
        DeviceDiagnosticsConfigCreateInfoNVBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-identical wrapper; `Deref` below lets a `&Builder` stand in for the
// raw struct.
#[repr(transparent)]
pub struct DeviceDiagnosticsConfigCreateInfoNVBuilder<'a> {
    inner: DeviceDiagnosticsConfigCreateInfoNV,
    marker: ::std::marker::PhantomData<&'a ()>, // carries the otherwise-unused lifetime
}
// Marker impls: valid `p_next` extension of VkDeviceCreateInfo.
unsafe impl ExtendsDeviceCreateInfo for DeviceDiagnosticsConfigCreateInfoNVBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for DeviceDiagnosticsConfigCreateInfoNV {}
impl<'a> ::std::ops::Deref for DeviceDiagnosticsConfigCreateInfoNVBuilder<'a> {
    type Target = DeviceDiagnosticsConfigCreateInfoNV;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for DeviceDiagnosticsConfigCreateInfoNVBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> DeviceDiagnosticsConfigCreateInfoNVBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: DeviceDiagnosticsConfigFlagsNV) -> Self {
        self.inner.flags = flags;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> DeviceDiagnosticsConfigCreateInfoNV {
        self.inner
    }
}
// FFI mirror of VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures.html>"]
pub struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures {
    pub s_type: StructureType, // structure tag; pre-filled by `Default`
    pub p_next: *mut c_void,   // extension chain pointer; null by default
    pub shader_zero_initialize_workgroup_memory: Bool32,
}
impl ::std::default::Default for PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures {
    #[inline]
    fn default() -> Self {
        // Pre-set `s_type` from the `TaggedStructure` impl; null the chain.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            shader_zero_initialize_workgroup_memory: Bool32::default(),
        }
    }
}
// Binds the struct to its VkStructureType discriminant.
unsafe impl TaggedStructure for PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES;
}
impl PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures {
    // Entry point of the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesBuilder<'a> {
        PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-identical wrapper; `Deref` below lets a `&Builder` stand in for the
// raw struct.
#[repr(transparent)]
pub struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesBuilder<'a> {
    inner: PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures,
    marker: ::std::marker::PhantomData<&'a ()>, // carries the otherwise-unused lifetime
}
// Marker impls: valid `p_next` extension of VkPhysicalDeviceFeatures2 and of
// VkDeviceCreateInfo (the usual pattern for feature structs).
unsafe impl ExtendsPhysicalDeviceFeatures2
    for PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures {}
unsafe impl ExtendsDeviceCreateInfo
    for PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesBuilder<'_>
{
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures {}
impl<'a> ::std::ops::Deref for PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesBuilder<'a> {
    type Target = PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesBuilder<'a> {
    #[inline]
    pub fn shader_zero_initialize_workgroup_memory(
        mut self,
        shader_zero_initialize_workgroup_memory: bool,
    ) -> Self {
        // `bool` -> `Bool32` conversion via `Into`.
        self.inner.shader_zero_initialize_workgroup_memory =
            shader_zero_initialize_workgroup_memory.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures {
        self.inner
    }
}
// FFI mirror of VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR.html>"]
pub struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR {
    pub s_type: StructureType, // structure tag; pre-filled by `Default`
    pub p_next: *mut c_void,   // extension chain pointer; null by default
    pub shader_subgroup_uniform_control_flow: Bool32,
}
impl ::std::default::Default for PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR {
    #[inline]
    fn default() -> Self {
        // Pre-set `s_type` from the `TaggedStructure` impl; null the chain.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            shader_subgroup_uniform_control_flow: Bool32::default(),
        }
    }
}
// Binds the struct to its VkStructureType discriminant.
unsafe impl TaggedStructure for PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR;
}
impl PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR {
    // Entry point of the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHRBuilder<'a> {
        PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHRBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-identical wrapper; `Deref` below lets a `&Builder` stand in for the
// raw struct.
#[repr(transparent)]
pub struct PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHRBuilder<'a> {
    inner: PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR,
    marker: ::std::marker::PhantomData<&'a ()>, // carries the otherwise-unused lifetime
}
// Marker impls: valid `p_next` extension of VkPhysicalDeviceFeatures2 and of
// VkDeviceCreateInfo (the usual pattern for feature structs).
unsafe impl ExtendsPhysicalDeviceFeatures2
    for PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHRBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceFeatures2
    for PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR
{
}
unsafe impl ExtendsDeviceCreateInfo
    for PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHRBuilder<'_>
{
}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR {}
impl<'a> ::std::ops::Deref
    for PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHRBuilder<'a>
{
    type Target = PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut
    for PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHRBuilder<'a>
{
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHRBuilder<'a> {
    #[inline]
    pub fn shader_subgroup_uniform_control_flow(
        mut self,
        shader_subgroup_uniform_control_flow: bool,
    ) -> Self {
        // `bool` -> `Bool32` conversion via `Into`.
        self.inner.shader_subgroup_uniform_control_flow =
            shader_subgroup_uniform_control_flow.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR {
        self.inner
    }
}
// FFI mirror of VkPhysicalDeviceRobustness2FeaturesEXT.
// (The builder's setter impl continues past this span.)
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceRobustness2FeaturesEXT.html>"]
pub struct PhysicalDeviceRobustness2FeaturesEXT {
    pub s_type: StructureType, // structure tag; pre-filled by `Default`
    pub p_next: *mut c_void,   // extension chain pointer; null by default
    pub robust_buffer_access2: Bool32,
    pub robust_image_access2: Bool32,
    pub null_descriptor: Bool32,
}
impl ::std::default::Default for PhysicalDeviceRobustness2FeaturesEXT {
    #[inline]
    fn default() -> Self {
        // Pre-set `s_type` from the `TaggedStructure` impl; null the chain;
        // zero all three feature flags.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            robust_buffer_access2: Bool32::default(),
            robust_image_access2: Bool32::default(),
            null_descriptor: Bool32::default(),
        }
    }
}
// Binds the struct to its VkStructureType discriminant.
unsafe impl TaggedStructure for PhysicalDeviceRobustness2FeaturesEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT;
}
impl PhysicalDeviceRobustness2FeaturesEXT {
    // Entry point of the builder pattern; starts from `Self::default()`.
    pub fn builder<'a>() -> PhysicalDeviceRobustness2FeaturesEXTBuilder<'a> {
        PhysicalDeviceRobustness2FeaturesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Layout-identical wrapper; `Deref` below lets a `&Builder` stand in for the
// raw struct.
#[repr(transparent)]
pub struct PhysicalDeviceRobustness2FeaturesEXTBuilder<'a> {
    inner: PhysicalDeviceRobustness2FeaturesEXT,
    marker: ::std::marker::PhantomData<&'a ()>, // carries the otherwise-unused lifetime
}
// Marker impls: valid `p_next` extension of VkPhysicalDeviceFeatures2 and of
// VkDeviceCreateInfo (the usual pattern for feature structs).
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRobustness2FeaturesEXTBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRobustness2FeaturesEXT {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRobustness2FeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRobustness2FeaturesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceRobustness2FeaturesEXTBuilder<'a> {
    type Target = PhysicalDeviceRobustness2FeaturesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceRobustness2FeaturesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
+impl<'a> PhysicalDeviceRobustness2FeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn robust_buffer_access2(mut self, robust_buffer_access2: bool) -> Self {
+ self.inner.robust_buffer_access2 = robust_buffer_access2.into();
+ self
+ }
+ #[inline]
+ pub fn robust_image_access2(mut self, robust_image_access2: bool) -> Self {
+ self.inner.robust_image_access2 = robust_image_access2.into();
+ self
+ }
+ #[inline]
+ pub fn null_descriptor(mut self, null_descriptor: bool) -> Self {
+ self.inner.null_descriptor = null_descriptor.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceRobustness2FeaturesEXT {
+ self.inner
+ }
+}
+// C-layout mirror of VkPhysicalDeviceRobustness2PropertiesEXT. A *Properties*
+// struct: queried from the driver (note it only extends PhysicalDeviceProperties2
+// below, not DeviceCreateInfo).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceRobustness2PropertiesEXT.html>"]
+pub struct PhysicalDeviceRobustness2PropertiesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub robust_storage_buffer_access_size_alignment: DeviceSize,
+    pub robust_uniform_buffer_access_size_alignment: DeviceSize,
+}
+// Default pre-fills s_type / null p_next so the value is chain-ready for queries.
+impl ::std::default::Default for PhysicalDeviceRobustness2PropertiesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            robust_storage_buffer_access_size_alignment: DeviceSize::default(),
+            robust_uniform_buffer_access_size_alignment: DeviceSize::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceRobustness2PropertiesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT;
+}
+impl PhysicalDeviceRobustness2PropertiesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceRobustness2PropertiesEXTBuilder<'a> {
+        PhysicalDeviceRobustness2PropertiesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent): layout-identical to the inner struct (see Deref below).
+#[repr(transparent)]
+pub struct PhysicalDeviceRobustness2PropertiesEXTBuilder<'a> {
+    inner: PhysicalDeviceRobustness2PropertiesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of PhysicalDeviceProperties2 only.
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceRobustness2PropertiesEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceRobustness2PropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceRobustness2PropertiesEXTBuilder<'a> {
+    type Target = PhysicalDeviceRobustness2PropertiesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceRobustness2PropertiesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters for the two alignment properties.
+impl<'a> PhysicalDeviceRobustness2PropertiesEXTBuilder<'a> {
+    #[inline]
+    pub fn robust_storage_buffer_access_size_alignment(
+        mut self,
+        robust_storage_buffer_access_size_alignment: DeviceSize,
+    ) -> Self {
+        self.inner.robust_storage_buffer_access_size_alignment =
+            robust_storage_buffer_access_size_alignment;
+        self
+    }
+    #[inline]
+    pub fn robust_uniform_buffer_access_size_alignment(
+        mut self,
+        robust_uniform_buffer_access_size_alignment: DeviceSize,
+    ) -> Self {
+        self.inner.robust_uniform_buffer_access_size_alignment =
+            robust_uniform_buffer_access_size_alignment;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceRobustness2PropertiesEXT {
+        self.inner
+    }
+}
+// C-layout mirror of VkPhysicalDeviceImageRobustnessFeatures (single
+// robust_image_access feature flag).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceImageRobustnessFeatures.html>"]
+pub struct PhysicalDeviceImageRobustnessFeatures {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub robust_image_access: Bool32,
+}
+// Default: correct s_type, null p_next, flag at Bool32::default() (presumably VK_FALSE).
+impl ::std::default::Default for PhysicalDeviceImageRobustnessFeatures {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            robust_image_access: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceImageRobustnessFeatures {
+    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES;
+}
+impl PhysicalDeviceImageRobustnessFeatures {
+    pub fn builder<'a>() -> PhysicalDeviceImageRobustnessFeaturesBuilder<'a> {
+        PhysicalDeviceImageRobustnessFeaturesBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder wrapper; layout-identical to the inner struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceImageRobustnessFeaturesBuilder<'a> {
+    inner: PhysicalDeviceImageRobustnessFeatures,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImageRobustnessFeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImageRobustnessFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImageRobustnessFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImageRobustnessFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceImageRobustnessFeaturesBuilder<'a> {
+    type Target = PhysicalDeviceImageRobustnessFeatures;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceImageRobustnessFeaturesBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceImageRobustnessFeaturesBuilder<'a> {
+    // Setter converts bool -> Bool32 via Into.
+    #[inline]
+    pub fn robust_image_access(mut self, robust_image_access: bool) -> Self {
+        self.inner.robust_image_access = robust_image_access.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceImageRobustnessFeatures {
+        self.inner
+    }
+}
+// C-layout mirror of VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR
+// (VK_KHR_workgroup_memory_explicit_layout feature flags). Field order matches
+// the C struct and must not change.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR.html>"]
+pub struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub workgroup_memory_explicit_layout: Bool32,
+    pub workgroup_memory_explicit_layout_scalar_block_layout: Bool32,
+    pub workgroup_memory_explicit_layout8_bit_access: Bool32,
+    pub workgroup_memory_explicit_layout16_bit_access: Bool32,
+}
+// Default: correct s_type, null p_next, all flags at Bool32::default().
+impl ::std::default::Default for PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            workgroup_memory_explicit_layout: Bool32::default(),
+            workgroup_memory_explicit_layout_scalar_block_layout: Bool32::default(),
+            workgroup_memory_explicit_layout8_bit_access: Bool32::default(),
+            workgroup_memory_explicit_layout16_bit_access: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR;
+}
+impl PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR {
+    pub fn builder<'a>() -> PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHRBuilder<'a> {
+        PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder wrapper; layout-identical to the inner struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHRBuilder<'a> {
+    inner: PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR
+{
+}
+unsafe impl ExtendsDeviceCreateInfo
+    for PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHRBuilder<'a> {
+    type Target = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut
+    for PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHRBuilder<'a>
+{
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters; each converts bool -> Bool32 via Into.
+impl<'a> PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHRBuilder<'a> {
+    #[inline]
+    pub fn workgroup_memory_explicit_layout(
+        mut self,
+        workgroup_memory_explicit_layout: bool,
+    ) -> Self {
+        self.inner.workgroup_memory_explicit_layout = workgroup_memory_explicit_layout.into();
+        self
+    }
+    #[inline]
+    pub fn workgroup_memory_explicit_layout_scalar_block_layout(
+        mut self,
+        workgroup_memory_explicit_layout_scalar_block_layout: bool,
+    ) -> Self {
+        self.inner
+            .workgroup_memory_explicit_layout_scalar_block_layout =
+            workgroup_memory_explicit_layout_scalar_block_layout.into();
+        self
+    }
+    #[inline]
+    pub fn workgroup_memory_explicit_layout8_bit_access(
+        mut self,
+        workgroup_memory_explicit_layout8_bit_access: bool,
+    ) -> Self {
+        self.inner.workgroup_memory_explicit_layout8_bit_access =
+            workgroup_memory_explicit_layout8_bit_access.into();
+        self
+    }
+    #[inline]
+    pub fn workgroup_memory_explicit_layout16_bit_access(
+        mut self,
+        workgroup_memory_explicit_layout16_bit_access: bool,
+    ) -> Self {
+        self.inner.workgroup_memory_explicit_layout16_bit_access =
+            workgroup_memory_explicit_layout16_bit_access.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR {
+        self.inner
+    }
+}
+// C-layout mirror of VkPhysicalDevicePortabilitySubsetFeaturesKHR
+// (VK_KHR_portability_subset — per-feature availability flags for
+// non-fully-conformant implementations). Field order matches the C struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePortabilitySubsetFeaturesKHR.html>"]
+pub struct PhysicalDevicePortabilitySubsetFeaturesKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub constant_alpha_color_blend_factors: Bool32,
+    pub events: Bool32,
+    pub image_view_format_reinterpretation: Bool32,
+    pub image_view_format_swizzle: Bool32,
+    pub image_view2_d_on3_d_image: Bool32,
+    pub multisample_array_image: Bool32,
+    pub mutable_comparison_samplers: Bool32,
+    pub point_polygons: Bool32,
+    pub sampler_mip_lod_bias: Bool32,
+    pub separate_stencil_mask_ref: Bool32,
+    pub shader_sample_rate_interpolation_functions: Bool32,
+    pub tessellation_isolines: Bool32,
+    pub tessellation_point_mode: Bool32,
+    pub triangle_fans: Bool32,
+    pub vertex_attribute_access_beyond_stride: Bool32,
+}
+// Default: correct s_type, null p_next, every flag at Bool32::default().
+impl ::std::default::Default for PhysicalDevicePortabilitySubsetFeaturesKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            constant_alpha_color_blend_factors: Bool32::default(),
+            events: Bool32::default(),
+            image_view_format_reinterpretation: Bool32::default(),
+            image_view_format_swizzle: Bool32::default(),
+            image_view2_d_on3_d_image: Bool32::default(),
+            multisample_array_image: Bool32::default(),
+            mutable_comparison_samplers: Bool32::default(),
+            point_polygons: Bool32::default(),
+            sampler_mip_lod_bias: Bool32::default(),
+            separate_stencil_mask_ref: Bool32::default(),
+            shader_sample_rate_interpolation_functions: Bool32::default(),
+            tessellation_isolines: Bool32::default(),
+            tessellation_point_mode: Bool32::default(),
+            triangle_fans: Bool32::default(),
+            vertex_attribute_access_beyond_stride: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDevicePortabilitySubsetFeaturesKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR;
+}
+impl PhysicalDevicePortabilitySubsetFeaturesKHR {
+    pub fn builder<'a>() -> PhysicalDevicePortabilitySubsetFeaturesKHRBuilder<'a> {
+        PhysicalDevicePortabilitySubsetFeaturesKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder wrapper; layout-identical to the inner struct.
+#[repr(transparent)]
+pub struct PhysicalDevicePortabilitySubsetFeaturesKHRBuilder<'a> {
+    inner: PhysicalDevicePortabilitySubsetFeaturesKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDevicePortabilitySubsetFeaturesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePortabilitySubsetFeaturesKHR {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePortabilitySubsetFeaturesKHRBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePortabilitySubsetFeaturesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePortabilitySubsetFeaturesKHRBuilder<'a> {
+    type Target = PhysicalDevicePortabilitySubsetFeaturesKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePortabilitySubsetFeaturesKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters, one per feature flag; each converts bool -> Bool32 via Into.
+impl<'a> PhysicalDevicePortabilitySubsetFeaturesKHRBuilder<'a> {
+    #[inline]
+    pub fn constant_alpha_color_blend_factors(
+        mut self,
+        constant_alpha_color_blend_factors: bool,
+    ) -> Self {
+        self.inner.constant_alpha_color_blend_factors = constant_alpha_color_blend_factors.into();
+        self
+    }
+    #[inline]
+    pub fn events(mut self, events: bool) -> Self {
+        self.inner.events = events.into();
+        self
+    }
+    #[inline]
+    pub fn image_view_format_reinterpretation(
+        mut self,
+        image_view_format_reinterpretation: bool,
+    ) -> Self {
+        self.inner.image_view_format_reinterpretation = image_view_format_reinterpretation.into();
+        self
+    }
+    #[inline]
+    pub fn image_view_format_swizzle(mut self, image_view_format_swizzle: bool) -> Self {
+        self.inner.image_view_format_swizzle = image_view_format_swizzle.into();
+        self
+    }
+    #[inline]
+    pub fn image_view2_d_on3_d_image(mut self, image_view2_d_on3_d_image: bool) -> Self {
+        self.inner.image_view2_d_on3_d_image = image_view2_d_on3_d_image.into();
+        self
+    }
+    #[inline]
+    pub fn multisample_array_image(mut self, multisample_array_image: bool) -> Self {
+        self.inner.multisample_array_image = multisample_array_image.into();
+        self
+    }
+    #[inline]
+    pub fn mutable_comparison_samplers(mut self, mutable_comparison_samplers: bool) -> Self {
+        self.inner.mutable_comparison_samplers = mutable_comparison_samplers.into();
+        self
+    }
+    #[inline]
+    pub fn point_polygons(mut self, point_polygons: bool) -> Self {
+        self.inner.point_polygons = point_polygons.into();
+        self
+    }
+    #[inline]
+    pub fn sampler_mip_lod_bias(mut self, sampler_mip_lod_bias: bool) -> Self {
+        self.inner.sampler_mip_lod_bias = sampler_mip_lod_bias.into();
+        self
+    }
+    #[inline]
+    pub fn separate_stencil_mask_ref(mut self, separate_stencil_mask_ref: bool) -> Self {
+        self.inner.separate_stencil_mask_ref = separate_stencil_mask_ref.into();
+        self
+    }
+    #[inline]
+    pub fn shader_sample_rate_interpolation_functions(
+        mut self,
+        shader_sample_rate_interpolation_functions: bool,
+    ) -> Self {
+        self.inner.shader_sample_rate_interpolation_functions =
+            shader_sample_rate_interpolation_functions.into();
+        self
+    }
+    #[inline]
+    pub fn tessellation_isolines(mut self, tessellation_isolines: bool) -> Self {
+        self.inner.tessellation_isolines = tessellation_isolines.into();
+        self
+    }
+    #[inline]
+    pub fn tessellation_point_mode(mut self, tessellation_point_mode: bool) -> Self {
+        self.inner.tessellation_point_mode = tessellation_point_mode.into();
+        self
+    }
+    #[inline]
+    pub fn triangle_fans(mut self, triangle_fans: bool) -> Self {
+        self.inner.triangle_fans = triangle_fans.into();
+        self
+    }
+    #[inline]
+    pub fn vertex_attribute_access_beyond_stride(
+        mut self,
+        vertex_attribute_access_beyond_stride: bool,
+    ) -> Self {
+        self.inner.vertex_attribute_access_beyond_stride =
+            vertex_attribute_access_beyond_stride.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDevicePortabilitySubsetFeaturesKHR {
+        self.inner
+    }
+}
+// C-layout mirror of VkPhysicalDevicePortabilitySubsetPropertiesKHR. A
+// *Properties* struct: only extends PhysicalDeviceProperties2 (queried from the
+// driver), not DeviceCreateInfo.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePortabilitySubsetPropertiesKHR.html>"]
+pub struct PhysicalDevicePortabilitySubsetPropertiesKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub min_vertex_input_binding_stride_alignment: u32,
+}
+// Default: correct s_type and null p_next so the value is chain-ready.
+impl ::std::default::Default for PhysicalDevicePortabilitySubsetPropertiesKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            min_vertex_input_binding_stride_alignment: u32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDevicePortabilitySubsetPropertiesKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR;
+}
+impl PhysicalDevicePortabilitySubsetPropertiesKHR {
+    pub fn builder<'a>() -> PhysicalDevicePortabilitySubsetPropertiesKHRBuilder<'a> {
+        PhysicalDevicePortabilitySubsetPropertiesKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder wrapper; layout-identical to the inner struct.
+#[repr(transparent)]
+pub struct PhysicalDevicePortabilitySubsetPropertiesKHRBuilder<'a> {
+    inner: PhysicalDevicePortabilitySubsetPropertiesKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+    for PhysicalDevicePortabilitySubsetPropertiesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePortabilitySubsetPropertiesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePortabilitySubsetPropertiesKHRBuilder<'a> {
+    type Target = PhysicalDevicePortabilitySubsetPropertiesKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePortabilitySubsetPropertiesKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDevicePortabilitySubsetPropertiesKHRBuilder<'a> {
+    #[inline]
+    pub fn min_vertex_input_binding_stride_alignment(
+        mut self,
+        min_vertex_input_binding_stride_alignment: u32,
+    ) -> Self {
+        self.inner.min_vertex_input_binding_stride_alignment =
+            min_vertex_input_binding_stride_alignment;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDevicePortabilitySubsetPropertiesKHR {
+        self.inner
+    }
+}
+// C-layout mirror of VkPhysicalDevice4444FormatsFeaturesEXT
+// (VK_EXT_4444_formats: A4R4G4B4 / A4B4G4R4 format support flags).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevice4444FormatsFeaturesEXT.html>"]
+pub struct PhysicalDevice4444FormatsFeaturesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub format_a4r4g4b4: Bool32,
+    pub format_a4b4g4r4: Bool32,
+}
+// Default: correct s_type, null p_next, flags at Bool32::default().
+impl ::std::default::Default for PhysicalDevice4444FormatsFeaturesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            format_a4r4g4b4: Bool32::default(),
+            format_a4b4g4r4: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDevice4444FormatsFeaturesEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT;
+}
+impl PhysicalDevice4444FormatsFeaturesEXT {
+    pub fn builder<'a>() -> PhysicalDevice4444FormatsFeaturesEXTBuilder<'a> {
+        PhysicalDevice4444FormatsFeaturesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder wrapper; layout-identical to the inner struct.
+#[repr(transparent)]
+pub struct PhysicalDevice4444FormatsFeaturesEXTBuilder<'a> {
+    inner: PhysicalDevice4444FormatsFeaturesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevice4444FormatsFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevice4444FormatsFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevice4444FormatsFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevice4444FormatsFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDevice4444FormatsFeaturesEXTBuilder<'a> {
+    type Target = PhysicalDevice4444FormatsFeaturesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevice4444FormatsFeaturesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters; each converts bool -> Bool32 via Into.
+impl<'a> PhysicalDevice4444FormatsFeaturesEXTBuilder<'a> {
+    #[inline]
+    pub fn format_a4r4g4b4(mut self, format_a4r4g4b4: bool) -> Self {
+        self.inner.format_a4r4g4b4 = format_a4r4g4b4.into();
+        self
+    }
+    #[inline]
+    pub fn format_a4b4g4r4(mut self, format_a4b4g4r4: bool) -> Self {
+        self.inner.format_a4b4g4r4 = format_a4b4g4r4.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDevice4444FormatsFeaturesEXT {
+        self.inner
+    }
+}
+// C-layout mirror of VkPhysicalDeviceSubpassShadingFeaturesHUAWEI
+// (VK_HUAWEI_subpass_shading feature flag).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceSubpassShadingFeaturesHUAWEI.html>"]
+pub struct PhysicalDeviceSubpassShadingFeaturesHUAWEI {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub subpass_shading: Bool32,
+}
+// Default: correct s_type, null p_next, flag at Bool32::default().
+impl ::std::default::Default for PhysicalDeviceSubpassShadingFeaturesHUAWEI {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            subpass_shading: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceSubpassShadingFeaturesHUAWEI {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI;
+}
+impl PhysicalDeviceSubpassShadingFeaturesHUAWEI {
+    pub fn builder<'a>() -> PhysicalDeviceSubpassShadingFeaturesHUAWEIBuilder<'a> {
+        PhysicalDeviceSubpassShadingFeaturesHUAWEIBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder wrapper; layout-identical to the inner struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceSubpassShadingFeaturesHUAWEIBuilder<'a> {
+    inner: PhysicalDeviceSubpassShadingFeaturesHUAWEI,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid p_next extension of PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceSubpassShadingFeaturesHUAWEIBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSubpassShadingFeaturesHUAWEI {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSubpassShadingFeaturesHUAWEIBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSubpassShadingFeaturesHUAWEI {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceSubpassShadingFeaturesHUAWEIBuilder<'a> {
+    type Target = PhysicalDeviceSubpassShadingFeaturesHUAWEI;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceSubpassShadingFeaturesHUAWEIBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceSubpassShadingFeaturesHUAWEIBuilder<'a> {
+    // Setter converts bool -> Bool32 via Into.
+    #[inline]
+    pub fn subpass_shading(mut self, subpass_shading: bool) -> Self {
+        self.inner.subpass_shading = subpass_shading.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceSubpassShadingFeaturesHUAWEI {
+        self.inner
+    }
+}
+// C-layout mirror of VkBufferCopy2 — a single src/dst-offset + size region for
+// vkCmdCopyBuffer2. Note p_next is *const here (input-only struct), unlike the
+// *mut p_next of the queryable feature structs.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferCopy2.html>"]
+pub struct BufferCopy2 {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub src_offset: DeviceSize,
+    pub dst_offset: DeviceSize,
+    pub size: DeviceSize,
+}
+// Default: correct s_type, null p_next, zero offsets/size.
+impl ::std::default::Default for BufferCopy2 {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            src_offset: DeviceSize::default(),
+            dst_offset: DeviceSize::default(),
+            size: DeviceSize::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for BufferCopy2 {
+    const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_COPY_2;
+}
+impl BufferCopy2 {
+    pub fn builder<'a>() -> BufferCopy2Builder<'a> {
+        BufferCopy2Builder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder wrapper; layout-identical to the inner struct.
+#[repr(transparent)]
+pub struct BufferCopy2Builder<'a> {
+    inner: BufferCopy2,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BufferCopy2Builder<'a> {
+    type Target = BufferCopy2;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for BufferCopy2Builder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters for the copy region parameters.
+impl<'a> BufferCopy2Builder<'a> {
+    #[inline]
+    pub fn src_offset(mut self, src_offset: DeviceSize) -> Self {
+        self.inner.src_offset = src_offset;
+        self
+    }
+    #[inline]
+    pub fn dst_offset(mut self, dst_offset: DeviceSize) -> Self {
+        self.inner.dst_offset = dst_offset;
+        self
+    }
+    #[inline]
+    pub fn size(mut self, size: DeviceSize) -> Self {
+        self.inner.size = size;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> BufferCopy2 {
+        self.inner
+    }
+}
+// C-layout mirror of VkImageCopy2 — one image-to-image copy region
+// (subresource + offset on each side, plus the copied extent) for vkCmdCopyImage2.
+// p_next is *const (input-only struct).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageCopy2.html>"]
+pub struct ImageCopy2 {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub src_subresource: ImageSubresourceLayers,
+    pub src_offset: Offset3D,
+    pub dst_subresource: ImageSubresourceLayers,
+    pub dst_offset: Offset3D,
+    pub extent: Extent3D,
+}
+// Default: correct s_type, null p_next, all region fields defaulted.
+impl ::std::default::Default for ImageCopy2 {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            src_subresource: ImageSubresourceLayers::default(),
+            src_offset: Offset3D::default(),
+            dst_subresource: ImageSubresourceLayers::default(),
+            dst_offset: Offset3D::default(),
+            extent: Extent3D::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for ImageCopy2 {
+    const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_COPY_2;
+}
+impl ImageCopy2 {
+    pub fn builder<'a>() -> ImageCopy2Builder<'a> {
+        ImageCopy2Builder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder wrapper; layout-identical to the inner struct.
+#[repr(transparent)]
+pub struct ImageCopy2Builder<'a> {
+    inner: ImageCopy2,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImageCopy2Builder<'a> {
+    type Target = ImageCopy2;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for ImageCopy2Builder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Chainable setters for the copy region parameters.
+impl<'a> ImageCopy2Builder<'a> {
+    #[inline]
+    pub fn src_subresource(mut self, src_subresource: ImageSubresourceLayers) -> Self {
+        self.inner.src_subresource = src_subresource;
+        self
+    }
+    #[inline]
+    pub fn src_offset(mut self, src_offset: Offset3D) -> Self {
+        self.inner.src_offset = src_offset;
+        self
+    }
+    #[inline]
+    pub fn dst_subresource(mut self, dst_subresource: ImageSubresourceLayers) -> Self {
+        self.inner.dst_subresource = dst_subresource;
+        self
+    }
+    #[inline]
+    pub fn dst_offset(mut self, dst_offset: Offset3D) -> Self {
+        self.inner.dst_offset = dst_offset;
+        self
+    }
+    #[inline]
+    pub fn extent(mut self, extent: Extent3D) -> Self {
+        self.inner.extent = extent;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> ImageCopy2 {
+        self.inner
+    }
+}
+// C-layout mirror of VkImageBlit2 — one blit region for vkCmdBlitImage2; the
+// two-element offset arrays are the opposite corners of the src/dst boxes.
+// p_next is *const (input-only struct).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageBlit2.html>"]
+pub struct ImageBlit2 {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub src_subresource: ImageSubresourceLayers,
+    pub src_offsets: [Offset3D; 2],
+    pub dst_subresource: ImageSubresourceLayers,
+    pub dst_offsets: [Offset3D; 2],
+}
+impl ::std::default::Default for ImageBlit2 {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            src_subresource: ImageSubresourceLayers::default(),
+            // SAFETY-relevant: zeroed arrays of Offset3D — presumably plain-old-data
+            // (all-zero bit pattern valid), so mem::zeroed is sound here; confirm
+            // against the Offset3D definition.
+            src_offsets: unsafe { ::std::mem::zeroed() },
+            dst_subresource: ImageSubresourceLayers::default(),
+            dst_offsets: unsafe { ::std::mem::zeroed() },
+        }
+    }
+}
+unsafe impl TaggedStructure for ImageBlit2 {
+    const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_BLIT_2;
+}
+impl ImageBlit2 {
+    pub fn builder<'a>() -> ImageBlit2Builder<'a> {
+        ImageBlit2Builder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) builder wrapper; layout-identical to the inner struct.
+#[repr(transparent)]
+pub struct ImageBlit2Builder<'a> {
+    inner: ImageBlit2,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait for structs that may be chained onto ImageBlit2's p_next
+// (consumed by the builder's push_next method below).
+pub unsafe trait ExtendsImageBlit2 {}
+impl<'a> ::std::ops::Deref for ImageBlit2Builder<'a> {
+    type Target = ImageBlit2;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for ImageBlit2Builder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> ImageBlit2Builder<'a> {
+ #[inline]
+ pub fn src_subresource(mut self, src_subresource: ImageSubresourceLayers) -> Self {
+ self.inner.src_subresource = src_subresource;
+ self
+ }
+ #[inline]
+ pub fn src_offsets(mut self, src_offsets: [Offset3D; 2]) -> Self {
+ self.inner.src_offsets = src_offsets;
+ self
+ }
+ #[inline]
+ pub fn dst_subresource(mut self, dst_subresource: ImageSubresourceLayers) -> Self {
+ self.inner.dst_subresource = dst_subresource;
+ self
+ }
+ #[inline]
+ pub fn dst_offsets(mut self, dst_offsets: [Offset3D; 2]) -> Self {
+ self.inner.dst_offsets = dst_offsets;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsImageBlit2>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageBlit2 {
+ self.inner
+ }
+}
+// --- VkBufferImageCopy2 (machine-generated binding) ---
+// Describes one buffer<->image copy region; C-layout struct + builder.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferImageCopy2.html>"]
+pub struct BufferImageCopy2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub buffer_offset: DeviceSize,
+ pub buffer_row_length: u32,
+ pub buffer_image_height: u32,
+ pub image_subresource: ImageSubresourceLayers,
+ pub image_offset: Offset3D,
+ pub image_extent: Extent3D,
+}
+impl ::std::default::Default for BufferImageCopy2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ // Pre-tagged with the correct VkStructureType; remaining fields zeroed.
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ buffer_offset: DeviceSize::default(),
+ buffer_row_length: u32::default(),
+ buffer_image_height: u32::default(),
+ image_subresource: ImageSubresourceLayers::default(),
+ image_offset: Offset3D::default(),
+ image_extent: Extent3D::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for BufferImageCopy2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_IMAGE_COPY_2;
+}
+impl BufferImageCopy2 {
+ pub fn builder<'a>() -> BufferImageCopy2Builder<'a> {
+ BufferImageCopy2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): layout-identical to BufferImageCopy2, so &builder works
+// anywhere a &BufferImageCopy2 is expected.
+#[repr(transparent)]
+pub struct BufferImageCopy2Builder<'a> {
+ inner: BufferImageCopy2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait for structs allowed in a BufferImageCopy2 p_next chain.
+pub unsafe trait ExtendsBufferImageCopy2 {}
+impl<'a> ::std::ops::Deref for BufferImageCopy2Builder<'a> {
+ type Target = BufferImageCopy2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BufferImageCopy2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BufferImageCopy2Builder<'a> {
+ #[inline]
+ pub fn buffer_offset(mut self, buffer_offset: DeviceSize) -> Self {
+ self.inner.buffer_offset = buffer_offset;
+ self
+ }
+ #[inline]
+ pub fn buffer_row_length(mut self, buffer_row_length: u32) -> Self {
+ self.inner.buffer_row_length = buffer_row_length;
+ self
+ }
+ #[inline]
+ pub fn buffer_image_height(mut self, buffer_image_height: u32) -> Self {
+ self.inner.buffer_image_height = buffer_image_height;
+ self
+ }
+ #[inline]
+ pub fn image_subresource(mut self, image_subresource: ImageSubresourceLayers) -> Self {
+ self.inner.image_subresource = image_subresource;
+ self
+ }
+ #[inline]
+ pub fn image_offset(mut self, image_offset: Offset3D) -> Self {
+ self.inner.image_offset = image_offset;
+ self
+ }
+ #[inline]
+ pub fn image_extent(mut self, image_extent: Extent3D) -> Self {
+ self.inner.image_extent = image_extent;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsBufferImageCopy2>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ // Splice `next`'s entire chain between this struct and its current chain.
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BufferImageCopy2 {
+ self.inner
+ }
+}
+// --- VkImageResolve2 (machine-generated binding) ---
+// One multisample-resolve region; C-layout struct + builder. Note: no
+// ExtendsImageResolve2 trait / push_next is generated for this struct.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageResolve2.html>"]
+pub struct ImageResolve2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src_subresource: ImageSubresourceLayers,
+ pub src_offset: Offset3D,
+ pub dst_subresource: ImageSubresourceLayers,
+ pub dst_offset: Offset3D,
+ pub extent: Extent3D,
+}
+impl ::std::default::Default for ImageResolve2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src_subresource: ImageSubresourceLayers::default(),
+ src_offset: Offset3D::default(),
+ dst_subresource: ImageSubresourceLayers::default(),
+ dst_offset: Offset3D::default(),
+ extent: Extent3D::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImageResolve2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_RESOLVE_2;
+}
+impl ImageResolve2 {
+ pub fn builder<'a>() -> ImageResolve2Builder<'a> {
+ ImageResolve2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): &builder may be passed where &ImageResolve2 is expected.
+#[repr(transparent)]
+pub struct ImageResolve2Builder<'a> {
+ inner: ImageResolve2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImageResolve2Builder<'a> {
+ type Target = ImageResolve2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageResolve2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImageResolve2Builder<'a> {
+ #[inline]
+ pub fn src_subresource(mut self, src_subresource: ImageSubresourceLayers) -> Self {
+ self.inner.src_subresource = src_subresource;
+ self
+ }
+ #[inline]
+ pub fn src_offset(mut self, src_offset: Offset3D) -> Self {
+ self.inner.src_offset = src_offset;
+ self
+ }
+ #[inline]
+ pub fn dst_subresource(mut self, dst_subresource: ImageSubresourceLayers) -> Self {
+ self.inner.dst_subresource = dst_subresource;
+ self
+ }
+ #[inline]
+ pub fn dst_offset(mut self, dst_offset: Offset3D) -> Self {
+ self.inner.dst_offset = dst_offset;
+ self
+ }
+ #[inline]
+ pub fn extent(mut self, extent: Extent3D) -> Self {
+ self.inner.extent = extent;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageResolve2 {
+ self.inner
+ }
+}
+// --- VkCopyBufferInfo2 (machine-generated binding) ---
+// Parameters for vkCmdCopyBuffer2; holds a count + raw pointer pair into a
+// caller-owned region slice, kept alive by the builder lifetime 'a.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyBufferInfo2.html>"]
+pub struct CopyBufferInfo2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src_buffer: Buffer,
+ pub dst_buffer: Buffer,
+ pub region_count: u32,
+ pub p_regions: *const BufferCopy2,
+}
+impl ::std::default::Default for CopyBufferInfo2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src_buffer: Buffer::default(),
+ dst_buffer: Buffer::default(),
+ region_count: u32::default(),
+ p_regions: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for CopyBufferInfo2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::COPY_BUFFER_INFO_2;
+}
+impl CopyBufferInfo2 {
+ pub fn builder<'a>() -> CopyBufferInfo2Builder<'a> {
+ CopyBufferInfo2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): &builder may be passed where &CopyBufferInfo2 is expected.
+#[repr(transparent)]
+pub struct CopyBufferInfo2Builder<'a> {
+ inner: CopyBufferInfo2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CopyBufferInfo2Builder<'a> {
+ type Target = CopyBufferInfo2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for CopyBufferInfo2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> CopyBufferInfo2Builder<'a> {
+ #[inline]
+ pub fn src_buffer(mut self, src_buffer: Buffer) -> Self {
+ self.inner.src_buffer = src_buffer;
+ self
+ }
+ #[inline]
+ pub fn dst_buffer(mut self, dst_buffer: Buffer) -> Self {
+ self.inner.dst_buffer = dst_buffer;
+ self
+ }
+ #[inline]
+ // Sets both region_count and p_regions from one slice; the 'a borrow keeps
+ // the slice valid for as long as the builder (but not past build()).
+ pub fn regions(mut self, regions: &'a [BufferCopy2]) -> Self {
+ self.inner.region_count = regions.len() as _;
+ self.inner.p_regions = regions.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> CopyBufferInfo2 {
+ self.inner
+ }
+}
+// --- VkCopyImageInfo2 (machine-generated binding) ---
+// Parameters for vkCmdCopyImage2; region slice is borrowed for 'a and stored
+// as a count + raw pointer pair.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyImageInfo2.html>"]
+pub struct CopyImageInfo2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src_image: Image,
+ pub src_image_layout: ImageLayout,
+ pub dst_image: Image,
+ pub dst_image_layout: ImageLayout,
+ pub region_count: u32,
+ pub p_regions: *const ImageCopy2,
+}
+impl ::std::default::Default for CopyImageInfo2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src_image: Image::default(),
+ src_image_layout: ImageLayout::default(),
+ dst_image: Image::default(),
+ dst_image_layout: ImageLayout::default(),
+ region_count: u32::default(),
+ p_regions: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for CopyImageInfo2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::COPY_IMAGE_INFO_2;
+}
+impl CopyImageInfo2 {
+ pub fn builder<'a>() -> CopyImageInfo2Builder<'a> {
+ CopyImageInfo2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): &builder may be passed where &CopyImageInfo2 is expected.
+#[repr(transparent)]
+pub struct CopyImageInfo2Builder<'a> {
+ inner: CopyImageInfo2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CopyImageInfo2Builder<'a> {
+ type Target = CopyImageInfo2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for CopyImageInfo2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> CopyImageInfo2Builder<'a> {
+ #[inline]
+ pub fn src_image(mut self, src_image: Image) -> Self {
+ self.inner.src_image = src_image;
+ self
+ }
+ #[inline]
+ pub fn src_image_layout(mut self, src_image_layout: ImageLayout) -> Self {
+ self.inner.src_image_layout = src_image_layout;
+ self
+ }
+ #[inline]
+ pub fn dst_image(mut self, dst_image: Image) -> Self {
+ self.inner.dst_image = dst_image;
+ self
+ }
+ #[inline]
+ pub fn dst_image_layout(mut self, dst_image_layout: ImageLayout) -> Self {
+ self.inner.dst_image_layout = dst_image_layout;
+ self
+ }
+ #[inline]
+ // Stores the slice length and data pointer together, keeping them in sync.
+ pub fn regions(mut self, regions: &'a [ImageCopy2]) -> Self {
+ self.inner.region_count = regions.len() as _;
+ self.inner.p_regions = regions.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> CopyImageInfo2 {
+ self.inner
+ }
+}
+// --- VkBlitImageInfo2 (machine-generated binding) ---
+// Parameters for vkCmdBlitImage2: source/destination images + layouts, the
+// blit regions, and the scaling filter.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBlitImageInfo2.html>"]
+pub struct BlitImageInfo2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src_image: Image,
+ pub src_image_layout: ImageLayout,
+ pub dst_image: Image,
+ pub dst_image_layout: ImageLayout,
+ pub region_count: u32,
+ pub p_regions: *const ImageBlit2,
+ pub filter: Filter,
+}
+impl ::std::default::Default for BlitImageInfo2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src_image: Image::default(),
+ src_image_layout: ImageLayout::default(),
+ dst_image: Image::default(),
+ dst_image_layout: ImageLayout::default(),
+ region_count: u32::default(),
+ p_regions: ::std::ptr::null(),
+ filter: Filter::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for BlitImageInfo2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::BLIT_IMAGE_INFO_2;
+}
+impl BlitImageInfo2 {
+ pub fn builder<'a>() -> BlitImageInfo2Builder<'a> {
+ BlitImageInfo2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): &builder may be passed where &BlitImageInfo2 is expected.
+#[repr(transparent)]
+pub struct BlitImageInfo2Builder<'a> {
+ inner: BlitImageInfo2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BlitImageInfo2Builder<'a> {
+ type Target = BlitImageInfo2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BlitImageInfo2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BlitImageInfo2Builder<'a> {
+ #[inline]
+ pub fn src_image(mut self, src_image: Image) -> Self {
+ self.inner.src_image = src_image;
+ self
+ }
+ #[inline]
+ pub fn src_image_layout(mut self, src_image_layout: ImageLayout) -> Self {
+ self.inner.src_image_layout = src_image_layout;
+ self
+ }
+ #[inline]
+ pub fn dst_image(mut self, dst_image: Image) -> Self {
+ self.inner.dst_image = dst_image;
+ self
+ }
+ #[inline]
+ pub fn dst_image_layout(mut self, dst_image_layout: ImageLayout) -> Self {
+ self.inner.dst_image_layout = dst_image_layout;
+ self
+ }
+ #[inline]
+ // Stores the slice length and data pointer together, keeping them in sync.
+ pub fn regions(mut self, regions: &'a [ImageBlit2]) -> Self {
+ self.inner.region_count = regions.len() as _;
+ self.inner.p_regions = regions.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn filter(mut self, filter: Filter) -> Self {
+ self.inner.filter = filter;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BlitImageInfo2 {
+ self.inner
+ }
+}
+// --- VkCopyBufferToImageInfo2 (machine-generated binding) ---
+// Parameters for vkCmdCopyBufferToImage2.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyBufferToImageInfo2.html>"]
+pub struct CopyBufferToImageInfo2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src_buffer: Buffer,
+ pub dst_image: Image,
+ pub dst_image_layout: ImageLayout,
+ pub region_count: u32,
+ pub p_regions: *const BufferImageCopy2,
+}
+impl ::std::default::Default for CopyBufferToImageInfo2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src_buffer: Buffer::default(),
+ dst_image: Image::default(),
+ dst_image_layout: ImageLayout::default(),
+ region_count: u32::default(),
+ p_regions: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for CopyBufferToImageInfo2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::COPY_BUFFER_TO_IMAGE_INFO_2;
+}
+impl CopyBufferToImageInfo2 {
+ pub fn builder<'a>() -> CopyBufferToImageInfo2Builder<'a> {
+ CopyBufferToImageInfo2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): &builder usable where &CopyBufferToImageInfo2 is expected.
+#[repr(transparent)]
+pub struct CopyBufferToImageInfo2Builder<'a> {
+ inner: CopyBufferToImageInfo2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CopyBufferToImageInfo2Builder<'a> {
+ type Target = CopyBufferToImageInfo2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for CopyBufferToImageInfo2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> CopyBufferToImageInfo2Builder<'a> {
+ #[inline]
+ pub fn src_buffer(mut self, src_buffer: Buffer) -> Self {
+ self.inner.src_buffer = src_buffer;
+ self
+ }
+ #[inline]
+ pub fn dst_image(mut self, dst_image: Image) -> Self {
+ self.inner.dst_image = dst_image;
+ self
+ }
+ #[inline]
+ pub fn dst_image_layout(mut self, dst_image_layout: ImageLayout) -> Self {
+ self.inner.dst_image_layout = dst_image_layout;
+ self
+ }
+ #[inline]
+ // Stores the slice length and data pointer together, keeping them in sync.
+ pub fn regions(mut self, regions: &'a [BufferImageCopy2]) -> Self {
+ self.inner.region_count = regions.len() as _;
+ self.inner.p_regions = regions.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> CopyBufferToImageInfo2 {
+ self.inner
+ }
+}
+// --- VkCopyImageToBufferInfo2 (machine-generated binding) ---
+// Parameters for vkCmdCopyImageToBuffer2 (mirror of CopyBufferToImageInfo2).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyImageToBufferInfo2.html>"]
+pub struct CopyImageToBufferInfo2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src_image: Image,
+ pub src_image_layout: ImageLayout,
+ pub dst_buffer: Buffer,
+ pub region_count: u32,
+ pub p_regions: *const BufferImageCopy2,
+}
+impl ::std::default::Default for CopyImageToBufferInfo2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src_image: Image::default(),
+ src_image_layout: ImageLayout::default(),
+ dst_buffer: Buffer::default(),
+ region_count: u32::default(),
+ p_regions: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for CopyImageToBufferInfo2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::COPY_IMAGE_TO_BUFFER_INFO_2;
+}
+impl CopyImageToBufferInfo2 {
+ pub fn builder<'a>() -> CopyImageToBufferInfo2Builder<'a> {
+ CopyImageToBufferInfo2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): &builder usable where &CopyImageToBufferInfo2 is expected.
+#[repr(transparent)]
+pub struct CopyImageToBufferInfo2Builder<'a> {
+ inner: CopyImageToBufferInfo2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CopyImageToBufferInfo2Builder<'a> {
+ type Target = CopyImageToBufferInfo2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for CopyImageToBufferInfo2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> CopyImageToBufferInfo2Builder<'a> {
+ #[inline]
+ pub fn src_image(mut self, src_image: Image) -> Self {
+ self.inner.src_image = src_image;
+ self
+ }
+ #[inline]
+ pub fn src_image_layout(mut self, src_image_layout: ImageLayout) -> Self {
+ self.inner.src_image_layout = src_image_layout;
+ self
+ }
+ #[inline]
+ pub fn dst_buffer(mut self, dst_buffer: Buffer) -> Self {
+ self.inner.dst_buffer = dst_buffer;
+ self
+ }
+ #[inline]
+ // Stores the slice length and data pointer together, keeping them in sync.
+ pub fn regions(mut self, regions: &'a [BufferImageCopy2]) -> Self {
+ self.inner.region_count = regions.len() as _;
+ self.inner.p_regions = regions.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> CopyImageToBufferInfo2 {
+ self.inner
+ }
+}
+// --- VkResolveImageInfo2 (machine-generated binding) ---
+// Parameters for vkCmdResolveImage2 (multisample resolve).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkResolveImageInfo2.html>"]
+pub struct ResolveImageInfo2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src_image: Image,
+ pub src_image_layout: ImageLayout,
+ pub dst_image: Image,
+ pub dst_image_layout: ImageLayout,
+ pub region_count: u32,
+ pub p_regions: *const ImageResolve2,
+}
+impl ::std::default::Default for ResolveImageInfo2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src_image: Image::default(),
+ src_image_layout: ImageLayout::default(),
+ dst_image: Image::default(),
+ dst_image_layout: ImageLayout::default(),
+ region_count: u32::default(),
+ p_regions: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ResolveImageInfo2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::RESOLVE_IMAGE_INFO_2;
+}
+impl ResolveImageInfo2 {
+ pub fn builder<'a>() -> ResolveImageInfo2Builder<'a> {
+ ResolveImageInfo2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// repr(transparent): &builder usable where &ResolveImageInfo2 is expected.
+#[repr(transparent)]
+pub struct ResolveImageInfo2Builder<'a> {
+ inner: ResolveImageInfo2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ResolveImageInfo2Builder<'a> {
+ type Target = ResolveImageInfo2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ResolveImageInfo2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ResolveImageInfo2Builder<'a> {
+ #[inline]
+ pub fn src_image(mut self, src_image: Image) -> Self {
+ self.inner.src_image = src_image;
+ self
+ }
+ #[inline]
+ pub fn src_image_layout(mut self, src_image_layout: ImageLayout) -> Self {
+ self.inner.src_image_layout = src_image_layout;
+ self
+ }
+ #[inline]
+ pub fn dst_image(mut self, dst_image: Image) -> Self {
+ self.inner.dst_image = dst_image;
+ self
+ }
+ #[inline]
+ pub fn dst_image_layout(mut self, dst_image_layout: ImageLayout) -> Self {
+ self.inner.dst_image_layout = dst_image_layout;
+ self
+ }
+ #[inline]
+ // Stores the slice length and data pointer together, keeping them in sync.
+ pub fn regions(mut self, regions: &'a [ImageResolve2]) -> Self {
+ self.inner.region_count = regions.len() as _;
+ self.inner.p_regions = regions.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ResolveImageInfo2 {
+ self.inner
+ }
+}
+// --- VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT (machine-generated) ---
+// Feature-query/enable struct. p_next is *mut (not *const) because the driver
+// writes into this struct during vkGetPhysicalDeviceFeatures2.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT.html>"]
+pub struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_image_int64_atomics: Bool32,
+ pub sparse_image_int64_atomics: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceShaderImageAtomicInt64FeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shader_image_int64_atomics: Bool32::default(),
+ sparse_image_int64_atomics: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceShaderImageAtomicInt64FeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT;
+}
+impl PhysicalDeviceShaderImageAtomicInt64FeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceShaderImageAtomicInt64FeaturesEXTBuilder<'a> {
+ PhysicalDeviceShaderImageAtomicInt64FeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceShaderImageAtomicInt64FeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// May be chained into PhysicalDeviceFeatures2 (query) and DeviceCreateInfo
+// (enable); impls are provided for both the builder and the raw struct.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceShaderImageAtomicInt64FeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderImageAtomicInt64FeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderImageAtomicInt64FeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderImageAtomicInt64FeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderImageAtomicInt64FeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderImageAtomicInt64FeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceShaderImageAtomicInt64FeaturesEXTBuilder<'a> {
+ #[inline]
+ // Setters take Rust bool and convert to the FFI Bool32 via Into.
+ pub fn shader_image_int64_atomics(mut self, shader_image_int64_atomics: bool) -> Self {
+ self.inner.shader_image_int64_atomics = shader_image_int64_atomics.into();
+ self
+ }
+ #[inline]
+ pub fn sparse_image_int64_atomics(mut self, sparse_image_int64_atomics: bool) -> Self {
+ self.inner.sparse_image_int64_atomics = sparse_image_int64_atomics.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceShaderImageAtomicInt64FeaturesEXT {
+ self.inner
+ }
+}
+// --- VkFragmentShadingRateAttachmentInfoKHR (machine-generated binding) ---
+// Chained into VkSubpassDescription2 to attach a shading-rate attachment.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFragmentShadingRateAttachmentInfoKHR.html>"]
+pub struct FragmentShadingRateAttachmentInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub p_fragment_shading_rate_attachment: *const AttachmentReference2,
+ pub shading_rate_attachment_texel_size: Extent2D,
+}
+impl ::std::default::Default for FragmentShadingRateAttachmentInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ p_fragment_shading_rate_attachment: ::std::ptr::null(),
+ shading_rate_attachment_texel_size: Extent2D::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for FragmentShadingRateAttachmentInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR;
+}
+impl FragmentShadingRateAttachmentInfoKHR {
+ pub fn builder<'a>() -> FragmentShadingRateAttachmentInfoKHRBuilder<'a> {
+ FragmentShadingRateAttachmentInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct FragmentShadingRateAttachmentInfoKHRBuilder<'a> {
+ inner: FragmentShadingRateAttachmentInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid extension of VkSubpassDescription2's p_next chain.
+unsafe impl ExtendsSubpassDescription2 for FragmentShadingRateAttachmentInfoKHRBuilder<'_> {}
+unsafe impl ExtendsSubpassDescription2 for FragmentShadingRateAttachmentInfoKHR {}
+impl<'a> ::std::ops::Deref for FragmentShadingRateAttachmentInfoKHRBuilder<'a> {
+ type Target = FragmentShadingRateAttachmentInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for FragmentShadingRateAttachmentInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> FragmentShadingRateAttachmentInfoKHRBuilder<'a> {
+ #[inline]
+ // Stores the borrowed reference as a raw pointer; the 'a lifetime keeps the
+ // referent alive while the builder exists (but not past build()).
+ pub fn fragment_shading_rate_attachment(
+ mut self,
+ fragment_shading_rate_attachment: &'a AttachmentReference2,
+ ) -> Self {
+ self.inner.p_fragment_shading_rate_attachment = fragment_shading_rate_attachment;
+ self
+ }
+ #[inline]
+ pub fn shading_rate_attachment_texel_size(
+ mut self,
+ shading_rate_attachment_texel_size: Extent2D,
+ ) -> Self {
+ self.inner.shading_rate_attachment_texel_size = shading_rate_attachment_texel_size;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> FragmentShadingRateAttachmentInfoKHR {
+ self.inner
+ }
+}
+// --- VkPipelineFragmentShadingRateStateCreateInfoKHR (machine-generated) ---
+// Chained into VkGraphicsPipelineCreateInfo to set the pipeline fragment
+// shading rate and its two combiner operations.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineFragmentShadingRateStateCreateInfoKHR.html>"]
+pub struct PipelineFragmentShadingRateStateCreateInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub fragment_size: Extent2D,
+ pub combiner_ops: [FragmentShadingRateCombinerOpKHR; 2],
+}
+impl ::std::default::Default for PipelineFragmentShadingRateStateCreateInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ fragment_size: Extent2D::default(),
+ // SAFETY(review): zeroed() assumed valid here -- the combiner-op enum
+ // is presumably a repr(transparent) i32 newtype where 0 is the KEEP op.
+ combiner_ops: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineFragmentShadingRateStateCreateInfoKHR {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR;
+}
+impl PipelineFragmentShadingRateStateCreateInfoKHR {
+ pub fn builder<'a>() -> PipelineFragmentShadingRateStateCreateInfoKHRBuilder<'a> {
+ PipelineFragmentShadingRateStateCreateInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineFragmentShadingRateStateCreateInfoKHRBuilder<'a> {
+ inner: PipelineFragmentShadingRateStateCreateInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Valid extension of VkGraphicsPipelineCreateInfo's p_next chain.
+unsafe impl ExtendsGraphicsPipelineCreateInfo
+ for PipelineFragmentShadingRateStateCreateInfoKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineFragmentShadingRateStateCreateInfoKHR {}
+impl<'a> ::std::ops::Deref for PipelineFragmentShadingRateStateCreateInfoKHRBuilder<'a> {
+ type Target = PipelineFragmentShadingRateStateCreateInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineFragmentShadingRateStateCreateInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineFragmentShadingRateStateCreateInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn fragment_size(mut self, fragment_size: Extent2D) -> Self {
+ self.inner.fragment_size = fragment_size;
+ self
+ }
+ #[inline]
+ pub fn combiner_ops(mut self, combiner_ops: [FragmentShadingRateCombinerOpKHR; 2]) -> Self {
+ self.inner.combiner_ops = combiner_ops;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineFragmentShadingRateStateCreateInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFragmentShadingRateFeaturesKHR.html>"]
+// C-ABI mirror of VkPhysicalDeviceFragmentShadingRateFeaturesKHR (generated
+// code; field order/layout must match the Vulkan header exactly).
+pub struct PhysicalDeviceFragmentShadingRateFeaturesKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub pipeline_fragment_shading_rate: Bool32,
+    pub primitive_fragment_shading_rate: Bool32,
+    pub attachment_fragment_shading_rate: Bool32,
+}
+// Default pre-tags s_type and leaves the p_next chain empty.
+impl ::std::default::Default for PhysicalDeviceFragmentShadingRateFeaturesKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            pipeline_fragment_shading_rate: Bool32::default(),
+            primitive_fragment_shading_rate: Bool32::default(),
+            attachment_fragment_shading_rate: Bool32::default(),
+        }
+    }
+}
+// Associates the struct with its VkStructureType tag.
+unsafe impl TaggedStructure for PhysicalDeviceFragmentShadingRateFeaturesKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR;
+}
+impl PhysicalDeviceFragmentShadingRateFeaturesKHR {
+    /// Starts a fluent builder over a default-initialised struct.
+    pub fn builder<'a>() -> PhysicalDeviceFragmentShadingRateFeaturesKHRBuilder<'a> {
+        PhysicalDeviceFragmentShadingRateFeaturesKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Same layout as the wrapped struct (see Deref impls below).
+#[repr(transparent)]
+pub struct PhysicalDeviceFragmentShadingRateFeaturesKHRBuilder<'a> {
+    inner: PhysicalDeviceFragmentShadingRateFeaturesKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: legal in the p_next chains of PhysicalDeviceFeatures2 and
+// DeviceCreateInfo (for both builder and raw struct).
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceFragmentShadingRateFeaturesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFragmentShadingRateFeaturesKHR {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentShadingRateFeaturesKHRBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentShadingRateFeaturesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentShadingRateFeaturesKHRBuilder<'a> {
+    type Target = PhysicalDeviceFragmentShadingRateFeaturesKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentShadingRateFeaturesKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Fluent setters: accept Rust bool and store the Vulkan Bool32 via .into().
+impl<'a> PhysicalDeviceFragmentShadingRateFeaturesKHRBuilder<'a> {
+    #[inline]
+    pub fn pipeline_fragment_shading_rate(mut self, pipeline_fragment_shading_rate: bool) -> Self {
+        self.inner.pipeline_fragment_shading_rate = pipeline_fragment_shading_rate.into();
+        self
+    }
+    #[inline]
+    pub fn primitive_fragment_shading_rate(
+        mut self,
+        primitive_fragment_shading_rate: bool,
+    ) -> Self {
+        self.inner.primitive_fragment_shading_rate = primitive_fragment_shading_rate.into();
+        self
+    }
+    #[inline]
+    pub fn attachment_fragment_shading_rate(
+        mut self,
+        attachment_fragment_shading_rate: bool,
+    ) -> Self {
+        self.inner.attachment_fragment_shading_rate = attachment_fragment_shading_rate.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceFragmentShadingRateFeaturesKHR {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFragmentShadingRatePropertiesKHR.html>"]
+// C-ABI mirror of VkPhysicalDeviceFragmentShadingRatePropertiesKHR (generated
+// code; field order/layout must match the Vulkan header exactly). This is an
+// output struct: the driver fills it in via the PhysicalDeviceProperties2 chain.
+pub struct PhysicalDeviceFragmentShadingRatePropertiesKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub min_fragment_shading_rate_attachment_texel_size: Extent2D,
+    pub max_fragment_shading_rate_attachment_texel_size: Extent2D,
+    pub max_fragment_shading_rate_attachment_texel_size_aspect_ratio: u32,
+    pub primitive_fragment_shading_rate_with_multiple_viewports: Bool32,
+    pub layered_shading_rate_attachments: Bool32,
+    pub fragment_shading_rate_non_trivial_combiner_ops: Bool32,
+    pub max_fragment_size: Extent2D,
+    pub max_fragment_size_aspect_ratio: u32,
+    pub max_fragment_shading_rate_coverage_samples: u32,
+    pub max_fragment_shading_rate_rasterization_samples: SampleCountFlags,
+    pub fragment_shading_rate_with_shader_depth_stencil_writes: Bool32,
+    pub fragment_shading_rate_with_sample_mask: Bool32,
+    pub fragment_shading_rate_with_shader_sample_mask: Bool32,
+    pub fragment_shading_rate_with_conservative_rasterization: Bool32,
+    pub fragment_shading_rate_with_fragment_shader_interlock: Bool32,
+    pub fragment_shading_rate_with_custom_sample_locations: Bool32,
+    pub fragment_shading_rate_strict_multiply_combiner: Bool32,
+}
+// Default pre-tags s_type and leaves the p_next chain empty.
+impl ::std::default::Default for PhysicalDeviceFragmentShadingRatePropertiesKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            min_fragment_shading_rate_attachment_texel_size: Extent2D::default(),
+            max_fragment_shading_rate_attachment_texel_size: Extent2D::default(),
+            max_fragment_shading_rate_attachment_texel_size_aspect_ratio: u32::default(),
+            primitive_fragment_shading_rate_with_multiple_viewports: Bool32::default(),
+            layered_shading_rate_attachments: Bool32::default(),
+            fragment_shading_rate_non_trivial_combiner_ops: Bool32::default(),
+            max_fragment_size: Extent2D::default(),
+            max_fragment_size_aspect_ratio: u32::default(),
+            max_fragment_shading_rate_coverage_samples: u32::default(),
+            max_fragment_shading_rate_rasterization_samples: SampleCountFlags::default(),
+            fragment_shading_rate_with_shader_depth_stencil_writes: Bool32::default(),
+            fragment_shading_rate_with_sample_mask: Bool32::default(),
+            fragment_shading_rate_with_shader_sample_mask: Bool32::default(),
+            fragment_shading_rate_with_conservative_rasterization: Bool32::default(),
+            fragment_shading_rate_with_fragment_shader_interlock: Bool32::default(),
+            fragment_shading_rate_with_custom_sample_locations: Bool32::default(),
+            fragment_shading_rate_strict_multiply_combiner: Bool32::default(),
+        }
+    }
+}
+// Associates the struct with its VkStructureType tag.
+unsafe impl TaggedStructure for PhysicalDeviceFragmentShadingRatePropertiesKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR;
+}
+impl PhysicalDeviceFragmentShadingRatePropertiesKHR {
+    /// Starts a fluent builder over a default-initialised struct.
+    pub fn builder<'a>() -> PhysicalDeviceFragmentShadingRatePropertiesKHRBuilder<'a> {
+        PhysicalDeviceFragmentShadingRatePropertiesKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Same layout as the wrapped struct (see Deref impls below).
+#[repr(transparent)]
+pub struct PhysicalDeviceFragmentShadingRatePropertiesKHRBuilder<'a> {
+    inner: PhysicalDeviceFragmentShadingRatePropertiesKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: legal in the p_next chain of PhysicalDeviceProperties2.
+unsafe impl ExtendsPhysicalDeviceProperties2
+    for PhysicalDeviceFragmentShadingRatePropertiesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceFragmentShadingRatePropertiesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentShadingRatePropertiesKHRBuilder<'a> {
+    type Target = PhysicalDeviceFragmentShadingRatePropertiesKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentShadingRatePropertiesKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Fluent setters: one per field; bool parameters are converted to Bool32.
+impl<'a> PhysicalDeviceFragmentShadingRatePropertiesKHRBuilder<'a> {
+    #[inline]
+    pub fn min_fragment_shading_rate_attachment_texel_size(
+        mut self,
+        min_fragment_shading_rate_attachment_texel_size: Extent2D,
+    ) -> Self {
+        self.inner.min_fragment_shading_rate_attachment_texel_size =
+            min_fragment_shading_rate_attachment_texel_size;
+        self
+    }
+    #[inline]
+    pub fn max_fragment_shading_rate_attachment_texel_size(
+        mut self,
+        max_fragment_shading_rate_attachment_texel_size: Extent2D,
+    ) -> Self {
+        self.inner.max_fragment_shading_rate_attachment_texel_size =
+            max_fragment_shading_rate_attachment_texel_size;
+        self
+    }
+    #[inline]
+    pub fn max_fragment_shading_rate_attachment_texel_size_aspect_ratio(
+        mut self,
+        max_fragment_shading_rate_attachment_texel_size_aspect_ratio: u32,
+    ) -> Self {
+        self.inner
+            .max_fragment_shading_rate_attachment_texel_size_aspect_ratio =
+            max_fragment_shading_rate_attachment_texel_size_aspect_ratio;
+        self
+    }
+    #[inline]
+    pub fn primitive_fragment_shading_rate_with_multiple_viewports(
+        mut self,
+        primitive_fragment_shading_rate_with_multiple_viewports: bool,
+    ) -> Self {
+        self.inner
+            .primitive_fragment_shading_rate_with_multiple_viewports =
+            primitive_fragment_shading_rate_with_multiple_viewports.into();
+        self
+    }
+    #[inline]
+    pub fn layered_shading_rate_attachments(
+        mut self,
+        layered_shading_rate_attachments: bool,
+    ) -> Self {
+        self.inner.layered_shading_rate_attachments = layered_shading_rate_attachments.into();
+        self
+    }
+    #[inline]
+    pub fn fragment_shading_rate_non_trivial_combiner_ops(
+        mut self,
+        fragment_shading_rate_non_trivial_combiner_ops: bool,
+    ) -> Self {
+        self.inner.fragment_shading_rate_non_trivial_combiner_ops =
+            fragment_shading_rate_non_trivial_combiner_ops.into();
+        self
+    }
+    #[inline]
+    pub fn max_fragment_size(mut self, max_fragment_size: Extent2D) -> Self {
+        self.inner.max_fragment_size = max_fragment_size;
+        self
+    }
+    #[inline]
+    pub fn max_fragment_size_aspect_ratio(mut self, max_fragment_size_aspect_ratio: u32) -> Self {
+        self.inner.max_fragment_size_aspect_ratio = max_fragment_size_aspect_ratio;
+        self
+    }
+    #[inline]
+    pub fn max_fragment_shading_rate_coverage_samples(
+        mut self,
+        max_fragment_shading_rate_coverage_samples: u32,
+    ) -> Self {
+        self.inner.max_fragment_shading_rate_coverage_samples =
+            max_fragment_shading_rate_coverage_samples;
+        self
+    }
+    #[inline]
+    pub fn max_fragment_shading_rate_rasterization_samples(
+        mut self,
+        max_fragment_shading_rate_rasterization_samples: SampleCountFlags,
+    ) -> Self {
+        self.inner.max_fragment_shading_rate_rasterization_samples =
+            max_fragment_shading_rate_rasterization_samples;
+        self
+    }
+    #[inline]
+    pub fn fragment_shading_rate_with_shader_depth_stencil_writes(
+        mut self,
+        fragment_shading_rate_with_shader_depth_stencil_writes: bool,
+    ) -> Self {
+        self.inner
+            .fragment_shading_rate_with_shader_depth_stencil_writes =
+            fragment_shading_rate_with_shader_depth_stencil_writes.into();
+        self
+    }
+    #[inline]
+    pub fn fragment_shading_rate_with_sample_mask(
+        mut self,
+        fragment_shading_rate_with_sample_mask: bool,
+    ) -> Self {
+        self.inner.fragment_shading_rate_with_sample_mask =
+            fragment_shading_rate_with_sample_mask.into();
+        self
+    }
+    #[inline]
+    pub fn fragment_shading_rate_with_shader_sample_mask(
+        mut self,
+        fragment_shading_rate_with_shader_sample_mask: bool,
+    ) -> Self {
+        self.inner.fragment_shading_rate_with_shader_sample_mask =
+            fragment_shading_rate_with_shader_sample_mask.into();
+        self
+    }
+    #[inline]
+    pub fn fragment_shading_rate_with_conservative_rasterization(
+        mut self,
+        fragment_shading_rate_with_conservative_rasterization: bool,
+    ) -> Self {
+        self.inner
+            .fragment_shading_rate_with_conservative_rasterization =
+            fragment_shading_rate_with_conservative_rasterization.into();
+        self
+    }
+    #[inline]
+    pub fn fragment_shading_rate_with_fragment_shader_interlock(
+        mut self,
+        fragment_shading_rate_with_fragment_shader_interlock: bool,
+    ) -> Self {
+        self.inner
+            .fragment_shading_rate_with_fragment_shader_interlock =
+            fragment_shading_rate_with_fragment_shader_interlock.into();
+        self
+    }
+    #[inline]
+    pub fn fragment_shading_rate_with_custom_sample_locations(
+        mut self,
+        fragment_shading_rate_with_custom_sample_locations: bool,
+    ) -> Self {
+        self.inner
+            .fragment_shading_rate_with_custom_sample_locations =
+            fragment_shading_rate_with_custom_sample_locations.into();
+        self
+    }
+    #[inline]
+    pub fn fragment_shading_rate_strict_multiply_combiner(
+        mut self,
+        fragment_shading_rate_strict_multiply_combiner: bool,
+    ) -> Self {
+        self.inner.fragment_shading_rate_strict_multiply_combiner =
+            fragment_shading_rate_strict_multiply_combiner.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceFragmentShadingRatePropertiesKHR {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFragmentShadingRateKHR.html>"]
+// C-ABI mirror of VkPhysicalDeviceFragmentShadingRateKHR (generated code;
+// field order/layout must match the Vulkan header exactly).
+pub struct PhysicalDeviceFragmentShadingRateKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub sample_counts: SampleCountFlags,
+    pub fragment_size: Extent2D,
+}
+// Default pre-tags s_type and leaves the p_next chain empty.
+impl ::std::default::Default for PhysicalDeviceFragmentShadingRateKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            sample_counts: SampleCountFlags::default(),
+            fragment_size: Extent2D::default(),
+        }
+    }
+}
+// Associates the struct with its VkStructureType tag.
+unsafe impl TaggedStructure for PhysicalDeviceFragmentShadingRateKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR;
+}
+impl PhysicalDeviceFragmentShadingRateKHR {
+    /// Starts a fluent builder over a default-initialised struct.
+    pub fn builder<'a>() -> PhysicalDeviceFragmentShadingRateKHRBuilder<'a> {
+        PhysicalDeviceFragmentShadingRateKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Same layout as the wrapped struct (see Deref impls below). No Extends*
+// marker impls: this struct is not a p_next extension of any other struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceFragmentShadingRateKHRBuilder<'a> {
+    inner: PhysicalDeviceFragmentShadingRateKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentShadingRateKHRBuilder<'a> {
+    type Target = PhysicalDeviceFragmentShadingRateKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentShadingRateKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Fluent setters: each overwrites one field and returns the builder by value.
+impl<'a> PhysicalDeviceFragmentShadingRateKHRBuilder<'a> {
+    #[inline]
+    pub fn sample_counts(mut self, sample_counts: SampleCountFlags) -> Self {
+        self.inner.sample_counts = sample_counts;
+        self
+    }
+    #[inline]
+    pub fn fragment_size(mut self, fragment_size: Extent2D) -> Self {
+        self.inner.fragment_size = fragment_size;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceFragmentShadingRateKHR {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderTerminateInvocationFeatures.html>"]
+// C-ABI mirror of VkPhysicalDeviceShaderTerminateInvocationFeatures
+// (generated code; field order/layout must match the Vulkan header exactly).
+pub struct PhysicalDeviceShaderTerminateInvocationFeatures {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub shader_terminate_invocation: Bool32,
+}
+// Default pre-tags s_type and leaves the p_next chain empty.
+impl ::std::default::Default for PhysicalDeviceShaderTerminateInvocationFeatures {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            shader_terminate_invocation: Bool32::default(),
+        }
+    }
+}
+// Associates the struct with its VkStructureType tag.
+unsafe impl TaggedStructure for PhysicalDeviceShaderTerminateInvocationFeatures {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES;
+}
+impl PhysicalDeviceShaderTerminateInvocationFeatures {
+    /// Starts a fluent builder over a default-initialised struct.
+    pub fn builder<'a>() -> PhysicalDeviceShaderTerminateInvocationFeaturesBuilder<'a> {
+        PhysicalDeviceShaderTerminateInvocationFeaturesBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Same layout as the wrapped struct (see Deref impls below).
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderTerminateInvocationFeaturesBuilder<'a> {
+    inner: PhysicalDeviceShaderTerminateInvocationFeatures,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: legal in the p_next chains of PhysicalDeviceFeatures2 and
+// DeviceCreateInfo (for both builder and raw struct).
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceShaderTerminateInvocationFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderTerminateInvocationFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderTerminateInvocationFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderTerminateInvocationFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderTerminateInvocationFeaturesBuilder<'a> {
+    type Target = PhysicalDeviceShaderTerminateInvocationFeatures;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderTerminateInvocationFeaturesBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Fluent setter: accepts Rust bool and stores the Vulkan Bool32 via .into().
+impl<'a> PhysicalDeviceShaderTerminateInvocationFeaturesBuilder<'a> {
+    #[inline]
+    pub fn shader_terminate_invocation(mut self, shader_terminate_invocation: bool) -> Self {
+        self.inner.shader_terminate_invocation = shader_terminate_invocation.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceShaderTerminateInvocationFeatures {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV.html>"]
+// C-ABI mirror of VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV
+// (generated code; field order/layout must match the Vulkan header exactly).
+pub struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub fragment_shading_rate_enums: Bool32,
+    pub supersample_fragment_shading_rates: Bool32,
+    pub no_invocation_fragment_shading_rates: Bool32,
+}
+// Default pre-tags s_type and leaves the p_next chain empty.
+impl ::std::default::Default for PhysicalDeviceFragmentShadingRateEnumsFeaturesNV {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            fragment_shading_rate_enums: Bool32::default(),
+            supersample_fragment_shading_rates: Bool32::default(),
+            no_invocation_fragment_shading_rates: Bool32::default(),
+        }
+    }
+}
+// Associates the struct with its VkStructureType tag.
+unsafe impl TaggedStructure for PhysicalDeviceFragmentShadingRateEnumsFeaturesNV {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV;
+}
+impl PhysicalDeviceFragmentShadingRateEnumsFeaturesNV {
+    /// Starts a fluent builder over a default-initialised struct.
+    pub fn builder<'a>() -> PhysicalDeviceFragmentShadingRateEnumsFeaturesNVBuilder<'a> {
+        PhysicalDeviceFragmentShadingRateEnumsFeaturesNVBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Same layout as the wrapped struct (see Deref impls below).
+#[repr(transparent)]
+pub struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNVBuilder<'a> {
+    inner: PhysicalDeviceFragmentShadingRateEnumsFeaturesNV,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: legal in the p_next chains of PhysicalDeviceFeatures2 and
+// DeviceCreateInfo (for both builder and raw struct).
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceFragmentShadingRateEnumsFeaturesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFragmentShadingRateEnumsFeaturesNV {}
+unsafe impl ExtendsDeviceCreateInfo
+    for PhysicalDeviceFragmentShadingRateEnumsFeaturesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentShadingRateEnumsFeaturesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentShadingRateEnumsFeaturesNVBuilder<'a> {
+    type Target = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentShadingRateEnumsFeaturesNVBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Fluent setters: accept Rust bool and store the Vulkan Bool32 via .into().
+impl<'a> PhysicalDeviceFragmentShadingRateEnumsFeaturesNVBuilder<'a> {
+    #[inline]
+    pub fn fragment_shading_rate_enums(mut self, fragment_shading_rate_enums: bool) -> Self {
+        self.inner.fragment_shading_rate_enums = fragment_shading_rate_enums.into();
+        self
+    }
+    #[inline]
+    pub fn supersample_fragment_shading_rates(
+        mut self,
+        supersample_fragment_shading_rates: bool,
+    ) -> Self {
+        self.inner.supersample_fragment_shading_rates = supersample_fragment_shading_rates.into();
+        self
+    }
+    #[inline]
+    pub fn no_invocation_fragment_shading_rates(
+        mut self,
+        no_invocation_fragment_shading_rates: bool,
+    ) -> Self {
+        self.inner.no_invocation_fragment_shading_rates =
+            no_invocation_fragment_shading_rates.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceFragmentShadingRateEnumsFeaturesNV {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV.html>"]
+// C-ABI mirror of VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV
+// (generated code; field order/layout must match the Vulkan header exactly).
+pub struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub max_fragment_shading_rate_invocation_count: SampleCountFlags,
+}
+// Default pre-tags s_type and leaves the p_next chain empty.
+impl ::std::default::Default for PhysicalDeviceFragmentShadingRateEnumsPropertiesNV {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            max_fragment_shading_rate_invocation_count: SampleCountFlags::default(),
+        }
+    }
+}
+// Associates the struct with its VkStructureType tag.
+unsafe impl TaggedStructure for PhysicalDeviceFragmentShadingRateEnumsPropertiesNV {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV;
+}
+impl PhysicalDeviceFragmentShadingRateEnumsPropertiesNV {
+    /// Starts a fluent builder over a default-initialised struct.
+    pub fn builder<'a>() -> PhysicalDeviceFragmentShadingRateEnumsPropertiesNVBuilder<'a> {
+        PhysicalDeviceFragmentShadingRateEnumsPropertiesNVBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Same layout as the wrapped struct (see Deref impls below).
+#[repr(transparent)]
+pub struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNVBuilder<'a> {
+    inner: PhysicalDeviceFragmentShadingRateEnumsPropertiesNV,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: legal in the p_next chain of PhysicalDeviceProperties2.
+unsafe impl ExtendsPhysicalDeviceProperties2
+    for PhysicalDeviceFragmentShadingRateEnumsPropertiesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+    for PhysicalDeviceFragmentShadingRateEnumsPropertiesNV
+{
+}
+impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentShadingRateEnumsPropertiesNVBuilder<'a> {
+    type Target = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentShadingRateEnumsPropertiesNVBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Fluent setter: overwrites the field and returns the builder by value.
+impl<'a> PhysicalDeviceFragmentShadingRateEnumsPropertiesNVBuilder<'a> {
+    #[inline]
+    pub fn max_fragment_shading_rate_invocation_count(
+        mut self,
+        max_fragment_shading_rate_invocation_count: SampleCountFlags,
+    ) -> Self {
+        self.inner.max_fragment_shading_rate_invocation_count =
+            max_fragment_shading_rate_invocation_count;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceFragmentShadingRateEnumsPropertiesNV {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineFragmentShadingRateEnumStateCreateInfoNV.html>"]
+// C-ABI mirror of VkPipelineFragmentShadingRateEnumStateCreateInfoNV
+// (generated code; field order/layout must match the Vulkan header exactly).
+pub struct PipelineFragmentShadingRateEnumStateCreateInfoNV {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub shading_rate_type: FragmentShadingRateTypeNV,
+    pub shading_rate: FragmentShadingRateNV,
+    pub combiner_ops: [FragmentShadingRateCombinerOpKHR; 2],
+}
+// Default pre-tags s_type and leaves the p_next chain empty.
+impl ::std::default::Default for PipelineFragmentShadingRateEnumStateCreateInfoNV {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            shading_rate_type: FragmentShadingRateTypeNV::default(),
+            shading_rate: FragmentShadingRateNV::default(),
+            // Zero-initialised array; assumes FragmentShadingRateCombinerOpKHR
+            // is an integer newtype for which 0 is a valid value -- NOTE(review):
+            // confirm against the enum's definition elsewhere in this file.
+            combiner_ops: unsafe { ::std::mem::zeroed() },
+        }
+    }
+}
+// Associates the struct with its VkStructureType tag.
+unsafe impl TaggedStructure for PipelineFragmentShadingRateEnumStateCreateInfoNV {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV;
+}
+impl PipelineFragmentShadingRateEnumStateCreateInfoNV {
+    /// Starts a fluent builder over a default-initialised struct.
+    pub fn builder<'a>() -> PipelineFragmentShadingRateEnumStateCreateInfoNVBuilder<'a> {
+        PipelineFragmentShadingRateEnumStateCreateInfoNVBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Same layout as the wrapped struct (see Deref impls below).
+#[repr(transparent)]
+pub struct PipelineFragmentShadingRateEnumStateCreateInfoNVBuilder<'a> {
+    inner: PipelineFragmentShadingRateEnumStateCreateInfoNV,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: legal in the p_next chain of GraphicsPipelineCreateInfo.
+unsafe impl ExtendsGraphicsPipelineCreateInfo
+    for PipelineFragmentShadingRateEnumStateCreateInfoNVBuilder<'_>
+{
+}
+unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineFragmentShadingRateEnumStateCreateInfoNV {}
+impl<'a> ::std::ops::Deref for PipelineFragmentShadingRateEnumStateCreateInfoNVBuilder<'a> {
+    type Target = PipelineFragmentShadingRateEnumStateCreateInfoNV;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PipelineFragmentShadingRateEnumStateCreateInfoNVBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Fluent setters: each overwrites one field and returns the builder by value.
+impl<'a> PipelineFragmentShadingRateEnumStateCreateInfoNVBuilder<'a> {
+    #[inline]
+    pub fn shading_rate_type(mut self, shading_rate_type: FragmentShadingRateTypeNV) -> Self {
+        self.inner.shading_rate_type = shading_rate_type;
+        self
+    }
+    #[inline]
+    pub fn shading_rate(mut self, shading_rate: FragmentShadingRateNV) -> Self {
+        self.inner.shading_rate = shading_rate;
+        self
+    }
+    #[inline]
+    pub fn combiner_ops(mut self, combiner_ops: [FragmentShadingRateCombinerOpKHR; 2]) -> Self {
+        self.inner.combiner_ops = combiner_ops;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineFragmentShadingRateEnumStateCreateInfoNV {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureBuildSizesInfoKHR.html>"]
+// C-ABI mirror of VkAccelerationStructureBuildSizesInfoKHR (generated code;
+// field order/layout must match the Vulkan header exactly).
+pub struct AccelerationStructureBuildSizesInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub acceleration_structure_size: DeviceSize,
+    pub update_scratch_size: DeviceSize,
+    pub build_scratch_size: DeviceSize,
+}
+// Default pre-tags s_type and leaves the p_next chain empty.
+impl ::std::default::Default for AccelerationStructureBuildSizesInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            acceleration_structure_size: DeviceSize::default(),
+            update_scratch_size: DeviceSize::default(),
+            build_scratch_size: DeviceSize::default(),
+        }
+    }
+}
+// Associates the struct with its VkStructureType tag.
+unsafe impl TaggedStructure for AccelerationStructureBuildSizesInfoKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR;
+}
+impl AccelerationStructureBuildSizesInfoKHR {
+    /// Starts a fluent builder over a default-initialised struct.
+    pub fn builder<'a>() -> AccelerationStructureBuildSizesInfoKHRBuilder<'a> {
+        AccelerationStructureBuildSizesInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Same layout as the wrapped struct (see Deref impls below). No Extends*
+// marker impls: this struct is not a p_next extension of any other struct.
+#[repr(transparent)]
+pub struct AccelerationStructureBuildSizesInfoKHRBuilder<'a> {
+    inner: AccelerationStructureBuildSizesInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AccelerationStructureBuildSizesInfoKHRBuilder<'a> {
+    type Target = AccelerationStructureBuildSizesInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureBuildSizesInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Fluent setters: each overwrites one field and returns the builder by value.
+impl<'a> AccelerationStructureBuildSizesInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn acceleration_structure_size(mut self, acceleration_structure_size: DeviceSize) -> Self {
+        self.inner.acceleration_structure_size = acceleration_structure_size;
+        self
+    }
+    #[inline]
+    pub fn update_scratch_size(mut self, update_scratch_size: DeviceSize) -> Self {
+        self.inner.update_scratch_size = update_scratch_size;
+        self
+    }
+    #[inline]
+    pub fn build_scratch_size(mut self, build_scratch_size: DeviceSize) -> Self {
+        self.inner.build_scratch_size = build_scratch_size;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> AccelerationStructureBuildSizesInfoKHR {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceImage2DViewOf3DFeaturesEXT.html>"]
+// C-ABI mirror of VkPhysicalDeviceImage2DViewOf3DFeaturesEXT (generated code;
+// field order/layout must match the Vulkan header exactly). The unusual
+// snake_case names come from the generator's mechanical conversion of
+// "image2DViewOf3D" / "sampler2DViewOf3D".
+pub struct PhysicalDeviceImage2DViewOf3DFeaturesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub image2_d_view_of3_d: Bool32,
+    pub sampler2_d_view_of3_d: Bool32,
+}
+// Default pre-tags s_type and leaves the p_next chain empty.
+impl ::std::default::Default for PhysicalDeviceImage2DViewOf3DFeaturesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            image2_d_view_of3_d: Bool32::default(),
+            sampler2_d_view_of3_d: Bool32::default(),
+        }
+    }
+}
+// Associates the struct with its VkStructureType tag.
+unsafe impl TaggedStructure for PhysicalDeviceImage2DViewOf3DFeaturesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT;
+}
+impl PhysicalDeviceImage2DViewOf3DFeaturesEXT {
+    /// Starts a fluent builder over a default-initialised struct.
+    pub fn builder<'a>() -> PhysicalDeviceImage2DViewOf3DFeaturesEXTBuilder<'a> {
+        PhysicalDeviceImage2DViewOf3DFeaturesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Same layout as the wrapped struct (see Deref impls below).
+#[repr(transparent)]
+pub struct PhysicalDeviceImage2DViewOf3DFeaturesEXTBuilder<'a> {
+    inner: PhysicalDeviceImage2DViewOf3DFeaturesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: legal in the p_next chains of PhysicalDeviceFeatures2 and
+// DeviceCreateInfo (for both builder and raw struct).
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImage2DViewOf3DFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImage2DViewOf3DFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImage2DViewOf3DFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImage2DViewOf3DFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceImage2DViewOf3DFeaturesEXTBuilder<'a> {
+    type Target = PhysicalDeviceImage2DViewOf3DFeaturesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceImage2DViewOf3DFeaturesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// Fluent setters: accept Rust bool and store the Vulkan Bool32 via .into().
+impl<'a> PhysicalDeviceImage2DViewOf3DFeaturesEXTBuilder<'a> {
+    #[inline]
+    pub fn image2_d_view_of3_d(mut self, image2_d_view_of3_d: bool) -> Self {
+        self.inner.image2_d_view_of3_d = image2_d_view_of3_d.into();
+        self
+    }
+    #[inline]
+    pub fn sampler2_d_view_of3_d(mut self, sampler2_d_view_of3_d: bool) -> Self {
+        self.inner.sampler2_d_view_of3_d = sampler2_d_view_of3_d.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceImage2DViewOf3DFeaturesEXT {
+        self.inner
+    }
+}
+// Feature struct for mutable descriptor types. Per the Extends* impls below,
+// it chains into PhysicalDeviceFeatures2 (query) or DeviceCreateInfo (enable).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT.html>"]
+pub struct PhysicalDeviceMutableDescriptorTypeFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub mutable_descriptor_type: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceMutableDescriptorTypeFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ mutable_descriptor_type: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceMutableDescriptorTypeFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT;
+}
+impl PhysicalDeviceMutableDescriptorTypeFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceMutableDescriptorTypeFeaturesEXTBuilder<'a> {
+ PhysicalDeviceMutableDescriptorTypeFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceMutableDescriptorTypeFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceMutableDescriptorTypeFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceMutableDescriptorTypeFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMutableDescriptorTypeFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMutableDescriptorTypeFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMutableDescriptorTypeFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceMutableDescriptorTypeFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceMutableDescriptorTypeFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceMutableDescriptorTypeFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn mutable_descriptor_type(mut self, mutable_descriptor_type: bool) -> Self {
+ self.inner.mutable_descriptor_type = mutable_descriptor_type.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceMutableDescriptorTypeFeaturesEXT {
+ self.inner
+ }
+}
+// Count + raw pointer pair over a caller-owned DescriptorType slice; no s_type,
+// so this struct is embedded by pointer (see MutableDescriptorTypeCreateInfoEXT).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMutableDescriptorTypeListEXT.html>"]
+pub struct MutableDescriptorTypeListEXT {
+ pub descriptor_type_count: u32,
+ pub p_descriptor_types: *const DescriptorType,
+}
+impl ::std::default::Default for MutableDescriptorTypeListEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ descriptor_type_count: u32::default(),
+ p_descriptor_types: ::std::ptr::null(),
+ }
+ }
+}
+impl MutableDescriptorTypeListEXT {
+ pub fn builder<'a>() -> MutableDescriptorTypeListEXTBuilder<'a> {
+ MutableDescriptorTypeListEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MutableDescriptorTypeListEXTBuilder<'a> {
+ inner: MutableDescriptorTypeListEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MutableDescriptorTypeListEXTBuilder<'a> {
+ type Target = MutableDescriptorTypeListEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MutableDescriptorTypeListEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MutableDescriptorTypeListEXTBuilder<'a> {
+ #[inline]
+ // Captures both the length and the pointer of the borrowed slice; the 'a
+ // lifetime on the builder keeps the slice alive while the builder is used.
+ pub fn descriptor_types(mut self, descriptor_types: &'a [DescriptorType]) -> Self {
+ self.inner.descriptor_type_count = descriptor_types.len() as _;
+ self.inner.p_descriptor_types = descriptor_types.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MutableDescriptorTypeListEXT {
+ self.inner
+ }
+}
+// Create-info carrying per-binding mutable-descriptor type lists. Per the
+// Extends* impls below it chains into DescriptorSetLayoutCreateInfo or
+// DescriptorPoolCreateInfo via p_next.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMutableDescriptorTypeCreateInfoEXT.html>"]
+pub struct MutableDescriptorTypeCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub mutable_descriptor_type_list_count: u32,
+ pub p_mutable_descriptor_type_lists: *const MutableDescriptorTypeListEXT,
+}
+impl ::std::default::Default for MutableDescriptorTypeCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ mutable_descriptor_type_list_count: u32::default(),
+ p_mutable_descriptor_type_lists: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MutableDescriptorTypeCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT;
+}
+impl MutableDescriptorTypeCreateInfoEXT {
+ pub fn builder<'a>() -> MutableDescriptorTypeCreateInfoEXTBuilder<'a> {
+ MutableDescriptorTypeCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MutableDescriptorTypeCreateInfoEXTBuilder<'a> {
+ inner: MutableDescriptorTypeCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsDescriptorSetLayoutCreateInfo for MutableDescriptorTypeCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsDescriptorSetLayoutCreateInfo for MutableDescriptorTypeCreateInfoEXT {}
+unsafe impl ExtendsDescriptorPoolCreateInfo for MutableDescriptorTypeCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsDescriptorPoolCreateInfo for MutableDescriptorTypeCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for MutableDescriptorTypeCreateInfoEXTBuilder<'a> {
+ type Target = MutableDescriptorTypeCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MutableDescriptorTypeCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MutableDescriptorTypeCreateInfoEXTBuilder<'a> {
+ #[inline]
+ // Captures length + pointer of the borrowed slice of type lists.
+ pub fn mutable_descriptor_type_lists(
+ mut self,
+ mutable_descriptor_type_lists: &'a [MutableDescriptorTypeListEXT],
+ ) -> Self {
+ self.inner.mutable_descriptor_type_list_count = mutable_descriptor_type_lists.len() as _;
+ self.inner.p_mutable_descriptor_type_lists = mutable_descriptor_type_lists.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MutableDescriptorTypeCreateInfoEXT {
+ self.inner
+ }
+}
+// Feature struct for depth clip control. Per the Extends* impls below, it
+// chains into PhysicalDeviceFeatures2 (query) or DeviceCreateInfo (enable).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDepthClipControlFeaturesEXT.html>"]
+pub struct PhysicalDeviceDepthClipControlFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub depth_clip_control: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceDepthClipControlFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ depth_clip_control: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceDepthClipControlFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT;
+}
+impl PhysicalDeviceDepthClipControlFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceDepthClipControlFeaturesEXTBuilder<'a> {
+ PhysicalDeviceDepthClipControlFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceDepthClipControlFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceDepthClipControlFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceDepthClipControlFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDepthClipControlFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDepthClipControlFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDepthClipControlFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceDepthClipControlFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceDepthClipControlFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceDepthClipControlFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceDepthClipControlFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn depth_clip_control(mut self, depth_clip_control: bool) -> Self {
+ self.inner.depth_clip_control = depth_clip_control.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceDepthClipControlFeaturesEXT {
+ self.inner
+ }
+}
+// Pipeline chain struct selecting the negative-one-to-one depth range. Per the
+// Extends impl below, it chains into PipelineViewportStateCreateInfo via p_next.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineViewportDepthClipControlCreateInfoEXT.html>"]
+pub struct PipelineViewportDepthClipControlCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub negative_one_to_one: Bool32,
+}
+impl ::std::default::Default for PipelineViewportDepthClipControlCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ negative_one_to_one: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineViewportDepthClipControlCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT;
+}
+impl PipelineViewportDepthClipControlCreateInfoEXT {
+ pub fn builder<'a>() -> PipelineViewportDepthClipControlCreateInfoEXTBuilder<'a> {
+ PipelineViewportDepthClipControlCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineViewportDepthClipControlCreateInfoEXTBuilder<'a> {
+ inner: PipelineViewportDepthClipControlCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPipelineViewportStateCreateInfo
+ for PipelineViewportDepthClipControlCreateInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPipelineViewportStateCreateInfo
+ for PipelineViewportDepthClipControlCreateInfoEXT
+{
+}
+impl<'a> ::std::ops::Deref for PipelineViewportDepthClipControlCreateInfoEXTBuilder<'a> {
+ type Target = PipelineViewportDepthClipControlCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineViewportDepthClipControlCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineViewportDepthClipControlCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn negative_one_to_one(mut self, negative_one_to_one: bool) -> Self {
+ self.inner.negative_one_to_one = negative_one_to_one.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineViewportDepthClipControlCreateInfoEXT {
+ self.inner
+ }
+}
+// Feature struct for dynamic vertex input state. Per the Extends* impls below,
+// it chains into PhysicalDeviceFeatures2 (query) or DeviceCreateInfo (enable).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT.html>"]
+pub struct PhysicalDeviceVertexInputDynamicStateFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub vertex_input_dynamic_state: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceVertexInputDynamicStateFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ vertex_input_dynamic_state: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceVertexInputDynamicStateFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT;
+}
+impl PhysicalDeviceVertexInputDynamicStateFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceVertexInputDynamicStateFeaturesEXTBuilder<'a> {
+ PhysicalDeviceVertexInputDynamicStateFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceVertexInputDynamicStateFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceVertexInputDynamicStateFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceVertexInputDynamicStateFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceVertexInputDynamicStateFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceVertexInputDynamicStateFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceVertexInputDynamicStateFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceVertexInputDynamicStateFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceVertexInputDynamicStateFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceVertexInputDynamicStateFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn vertex_input_dynamic_state(mut self, vertex_input_dynamic_state: bool) -> Self {
+ self.inner.vertex_input_dynamic_state = vertex_input_dynamic_state.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceVertexInputDynamicStateFeaturesEXT {
+ self.inner
+ }
+}
+// Feature struct for NV external-memory RDMA. Per the Extends* impls below, it
+// chains into PhysicalDeviceFeatures2 (query) or DeviceCreateInfo (enable).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceExternalMemoryRDMAFeaturesNV.html>"]
+pub struct PhysicalDeviceExternalMemoryRDMAFeaturesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub external_memory_rdma: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceExternalMemoryRDMAFeaturesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ external_memory_rdma: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceExternalMemoryRDMAFeaturesNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV;
+}
+impl PhysicalDeviceExternalMemoryRDMAFeaturesNV {
+ pub fn builder<'a>() -> PhysicalDeviceExternalMemoryRDMAFeaturesNVBuilder<'a> {
+ PhysicalDeviceExternalMemoryRDMAFeaturesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceExternalMemoryRDMAFeaturesNVBuilder<'a> {
+ inner: PhysicalDeviceExternalMemoryRDMAFeaturesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceExternalMemoryRDMAFeaturesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceExternalMemoryRDMAFeaturesNV {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExternalMemoryRDMAFeaturesNVBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceExternalMemoryRDMAFeaturesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceExternalMemoryRDMAFeaturesNVBuilder<'a> {
+ type Target = PhysicalDeviceExternalMemoryRDMAFeaturesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceExternalMemoryRDMAFeaturesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceExternalMemoryRDMAFeaturesNVBuilder<'a> {
+ #[inline]
+ pub fn external_memory_rdma(mut self, external_memory_rdma: bool) -> Self {
+ self.inner.external_memory_rdma = external_memory_rdma.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceExternalMemoryRDMAFeaturesNV {
+ self.inner
+ }
+}
+// Extended vertex-input binding description (tagged variant with s_type/p_next);
+// not a p_next extension itself -- no Extends* impls are generated for it.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVertexInputBindingDescription2EXT.html>"]
+pub struct VertexInputBindingDescription2EXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub binding: u32,
+ pub stride: u32,
+ pub input_rate: VertexInputRate,
+ pub divisor: u32,
+}
+impl ::std::default::Default for VertexInputBindingDescription2EXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ binding: u32::default(),
+ stride: u32::default(),
+ input_rate: VertexInputRate::default(),
+ divisor: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VertexInputBindingDescription2EXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT;
+}
+impl VertexInputBindingDescription2EXT {
+ pub fn builder<'a>() -> VertexInputBindingDescription2EXTBuilder<'a> {
+ VertexInputBindingDescription2EXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VertexInputBindingDescription2EXTBuilder<'a> {
+ inner: VertexInputBindingDescription2EXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for VertexInputBindingDescription2EXTBuilder<'a> {
+ type Target = VertexInputBindingDescription2EXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VertexInputBindingDescription2EXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VertexInputBindingDescription2EXTBuilder<'a> {
+ #[inline]
+ pub fn binding(mut self, binding: u32) -> Self {
+ self.inner.binding = binding;
+ self
+ }
+ #[inline]
+ pub fn stride(mut self, stride: u32) -> Self {
+ self.inner.stride = stride;
+ self
+ }
+ #[inline]
+ pub fn input_rate(mut self, input_rate: VertexInputRate) -> Self {
+ self.inner.input_rate = input_rate;
+ self
+ }
+ #[inline]
+ pub fn divisor(mut self, divisor: u32) -> Self {
+ self.inner.divisor = divisor;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VertexInputBindingDescription2EXT {
+ self.inner
+ }
+}
+// Extended vertex-input attribute description (tagged variant with
+// s_type/p_next); plain builder, no Extends* chain impls generated.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVertexInputAttributeDescription2EXT.html>"]
+pub struct VertexInputAttributeDescription2EXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub location: u32,
+ pub binding: u32,
+ pub format: Format,
+ pub offset: u32,
+}
+impl ::std::default::Default for VertexInputAttributeDescription2EXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ location: u32::default(),
+ binding: u32::default(),
+ format: Format::default(),
+ offset: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VertexInputAttributeDescription2EXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT;
+}
+impl VertexInputAttributeDescription2EXT {
+ pub fn builder<'a>() -> VertexInputAttributeDescription2EXTBuilder<'a> {
+ VertexInputAttributeDescription2EXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VertexInputAttributeDescription2EXTBuilder<'a> {
+ inner: VertexInputAttributeDescription2EXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for VertexInputAttributeDescription2EXTBuilder<'a> {
+ type Target = VertexInputAttributeDescription2EXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VertexInputAttributeDescription2EXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VertexInputAttributeDescription2EXTBuilder<'a> {
+ #[inline]
+ pub fn location(mut self, location: u32) -> Self {
+ self.inner.location = location;
+ self
+ }
+ #[inline]
+ pub fn binding(mut self, binding: u32) -> Self {
+ self.inner.binding = binding;
+ self
+ }
+ #[inline]
+ pub fn format(mut self, format: Format) -> Self {
+ self.inner.format = format;
+ self
+ }
+ #[inline]
+ pub fn offset(mut self, offset: u32) -> Self {
+ self.inner.offset = offset;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VertexInputAttributeDescription2EXT {
+ self.inner
+ }
+}
+// Feature struct for color write enable. Per the Extends* impls below, it
+// chains into PhysicalDeviceFeatures2 (query) or DeviceCreateInfo (enable).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceColorWriteEnableFeaturesEXT.html>"]
+pub struct PhysicalDeviceColorWriteEnableFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub color_write_enable: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceColorWriteEnableFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ color_write_enable: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceColorWriteEnableFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT;
+}
+impl PhysicalDeviceColorWriteEnableFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceColorWriteEnableFeaturesEXTBuilder<'a> {
+ PhysicalDeviceColorWriteEnableFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceColorWriteEnableFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceColorWriteEnableFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceColorWriteEnableFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceColorWriteEnableFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceColorWriteEnableFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceColorWriteEnableFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceColorWriteEnableFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceColorWriteEnableFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceColorWriteEnableFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceColorWriteEnableFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn color_write_enable(mut self, color_write_enable: bool) -> Self {
+ self.inner.color_write_enable = color_write_enable.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceColorWriteEnableFeaturesEXT {
+ self.inner
+ }
+}
+// Per-attachment color-write-enable array for pipeline creation. Per the
+// Extends impl below, it chains into PipelineColorBlendStateCreateInfo.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineColorWriteCreateInfoEXT.html>"]
+pub struct PipelineColorWriteCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub attachment_count: u32,
+ pub p_color_write_enables: *const Bool32,
+}
+impl ::std::default::Default for PipelineColorWriteCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ attachment_count: u32::default(),
+ p_color_write_enables: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineColorWriteCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_COLOR_WRITE_CREATE_INFO_EXT;
+}
+impl PipelineColorWriteCreateInfoEXT {
+ pub fn builder<'a>() -> PipelineColorWriteCreateInfoEXTBuilder<'a> {
+ PipelineColorWriteCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineColorWriteCreateInfoEXTBuilder<'a> {
+ inner: PipelineColorWriteCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPipelineColorBlendStateCreateInfo
+ for PipelineColorWriteCreateInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPipelineColorBlendStateCreateInfo for PipelineColorWriteCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for PipelineColorWriteCreateInfoEXTBuilder<'a> {
+ type Target = PipelineColorWriteCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineColorWriteCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineColorWriteCreateInfoEXTBuilder<'a> {
+ #[inline]
+ // Captures length + pointer of the borrowed slice; the 'a lifetime keeps
+ // the slice alive while the builder is used.
+ pub fn color_write_enables(mut self, color_write_enables: &'a [Bool32]) -> Self {
+ self.inner.attachment_count = color_write_enables.len() as _;
+ self.inner.p_color_write_enables = color_write_enables.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineColorWriteCreateInfoEXT {
+ self.inner
+ }
+}
+// Synchronization2 global memory barrier (stage + access masks for src/dst).
+// Per the Extends impl below it can also be chained into SubpassDependency2.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryBarrier2.html>"]
+pub struct MemoryBarrier2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src_stage_mask: PipelineStageFlags2,
+ pub src_access_mask: AccessFlags2,
+ pub dst_stage_mask: PipelineStageFlags2,
+ pub dst_access_mask: AccessFlags2,
+}
+impl ::std::default::Default for MemoryBarrier2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src_stage_mask: PipelineStageFlags2::default(),
+ src_access_mask: AccessFlags2::default(),
+ dst_stage_mask: PipelineStageFlags2::default(),
+ dst_access_mask: AccessFlags2::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MemoryBarrier2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_BARRIER_2;
+}
+impl MemoryBarrier2 {
+ pub fn builder<'a>() -> MemoryBarrier2Builder<'a> {
+ MemoryBarrier2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MemoryBarrier2Builder<'a> {
+ inner: MemoryBarrier2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsSubpassDependency2 for MemoryBarrier2Builder<'_> {}
+unsafe impl ExtendsSubpassDependency2 for MemoryBarrier2 {}
+impl<'a> ::std::ops::Deref for MemoryBarrier2Builder<'a> {
+ type Target = MemoryBarrier2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MemoryBarrier2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MemoryBarrier2Builder<'a> {
+ #[inline]
+ pub fn src_stage_mask(mut self, src_stage_mask: PipelineStageFlags2) -> Self {
+ self.inner.src_stage_mask = src_stage_mask;
+ self
+ }
+ #[inline]
+ pub fn src_access_mask(mut self, src_access_mask: AccessFlags2) -> Self {
+ self.inner.src_access_mask = src_access_mask;
+ self
+ }
+ #[inline]
+ pub fn dst_stage_mask(mut self, dst_stage_mask: PipelineStageFlags2) -> Self {
+ self.inner.dst_stage_mask = dst_stage_mask;
+ self
+ }
+ #[inline]
+ pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags2) -> Self {
+ self.inner.dst_access_mask = dst_access_mask;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MemoryBarrier2 {
+ self.inner
+ }
+}
+// Synchronization2 image memory barrier: stage/access masks plus layout
+// transition, queue-family transfer, target image and subresource range.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageMemoryBarrier2.html>"]
+pub struct ImageMemoryBarrier2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src_stage_mask: PipelineStageFlags2,
+ pub src_access_mask: AccessFlags2,
+ pub dst_stage_mask: PipelineStageFlags2,
+ pub dst_access_mask: AccessFlags2,
+ pub old_layout: ImageLayout,
+ pub new_layout: ImageLayout,
+ pub src_queue_family_index: u32,
+ pub dst_queue_family_index: u32,
+ pub image: Image,
+ pub subresource_range: ImageSubresourceRange,
+}
+impl ::std::default::Default for ImageMemoryBarrier2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src_stage_mask: PipelineStageFlags2::default(),
+ src_access_mask: AccessFlags2::default(),
+ dst_stage_mask: PipelineStageFlags2::default(),
+ dst_access_mask: AccessFlags2::default(),
+ old_layout: ImageLayout::default(),
+ new_layout: ImageLayout::default(),
+ src_queue_family_index: u32::default(),
+ dst_queue_family_index: u32::default(),
+ image: Image::default(),
+ subresource_range: ImageSubresourceRange::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImageMemoryBarrier2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_MEMORY_BARRIER_2;
+}
+impl ImageMemoryBarrier2 {
+ pub fn builder<'a>() -> ImageMemoryBarrier2Builder<'a> {
+ ImageMemoryBarrier2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImageMemoryBarrier2Builder<'a> {
+ inner: ImageMemoryBarrier2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait for structs that may be chained onto ImageMemoryBarrier2's p_next.
+pub unsafe trait ExtendsImageMemoryBarrier2 {}
+impl<'a> ::std::ops::Deref for ImageMemoryBarrier2Builder<'a> {
+ type Target = ImageMemoryBarrier2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageMemoryBarrier2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImageMemoryBarrier2Builder<'a> {
+ #[inline]
+ pub fn src_stage_mask(mut self, src_stage_mask: PipelineStageFlags2) -> Self {
+ self.inner.src_stage_mask = src_stage_mask;
+ self
+ }
+ #[inline]
+ pub fn src_access_mask(mut self, src_access_mask: AccessFlags2) -> Self {
+ self.inner.src_access_mask = src_access_mask;
+ self
+ }
+ #[inline]
+ pub fn dst_stage_mask(mut self, dst_stage_mask: PipelineStageFlags2) -> Self {
+ self.inner.dst_stage_mask = dst_stage_mask;
+ self
+ }
+ #[inline]
+ pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags2) -> Self {
+ self.inner.dst_access_mask = dst_access_mask;
+ self
+ }
+ #[inline]
+ pub fn old_layout(mut self, old_layout: ImageLayout) -> Self {
+ self.inner.old_layout = old_layout;
+ self
+ }
+ #[inline]
+ pub fn new_layout(mut self, new_layout: ImageLayout) -> Self {
+ self.inner.new_layout = new_layout;
+ self
+ }
+ #[inline]
+ pub fn src_queue_family_index(mut self, src_queue_family_index: u32) -> Self {
+ self.inner.src_queue_family_index = src_queue_family_index;
+ self
+ }
+ #[inline]
+ pub fn dst_queue_family_index(mut self, dst_queue_family_index: u32) -> Self {
+ self.inner.dst_queue_family_index = dst_queue_family_index;
+ self
+ }
+ #[inline]
+ pub fn image(mut self, image: Image) -> Self {
+ self.inner.image = image;
+ self
+ }
+ #[inline]
+ pub fn subresource_range(mut self, subresource_range: ImageSubresourceRange) -> Self {
+ self.inner.subresource_range = subresource_range;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsImageMemoryBarrier2>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageMemoryBarrier2 {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferMemoryBarrier2.html>"]
+pub struct BufferMemoryBarrier2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src_stage_mask: PipelineStageFlags2,
+ pub src_access_mask: AccessFlags2,
+ pub dst_stage_mask: PipelineStageFlags2,
+ pub dst_access_mask: AccessFlags2,
+ pub src_queue_family_index: u32,
+ pub dst_queue_family_index: u32,
+ pub buffer: Buffer,
+ pub offset: DeviceSize,
+ pub size: DeviceSize,
+}
+impl ::std::default::Default for BufferMemoryBarrier2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src_stage_mask: PipelineStageFlags2::default(),
+ src_access_mask: AccessFlags2::default(),
+ dst_stage_mask: PipelineStageFlags2::default(),
+ dst_access_mask: AccessFlags2::default(),
+ src_queue_family_index: u32::default(),
+ dst_queue_family_index: u32::default(),
+ buffer: Buffer::default(),
+ offset: DeviceSize::default(),
+ size: DeviceSize::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for BufferMemoryBarrier2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_MEMORY_BARRIER_2;
+}
+impl BufferMemoryBarrier2 {
+ pub fn builder<'a>() -> BufferMemoryBarrier2Builder<'a> {
+ BufferMemoryBarrier2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct BufferMemoryBarrier2Builder<'a> {
+ inner: BufferMemoryBarrier2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BufferMemoryBarrier2Builder<'a> {
+ type Target = BufferMemoryBarrier2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BufferMemoryBarrier2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BufferMemoryBarrier2Builder<'a> {
+ #[inline]
+ pub fn src_stage_mask(mut self, src_stage_mask: PipelineStageFlags2) -> Self {
+ self.inner.src_stage_mask = src_stage_mask;
+ self
+ }
+ #[inline]
+ pub fn src_access_mask(mut self, src_access_mask: AccessFlags2) -> Self {
+ self.inner.src_access_mask = src_access_mask;
+ self
+ }
+ #[inline]
+ pub fn dst_stage_mask(mut self, dst_stage_mask: PipelineStageFlags2) -> Self {
+ self.inner.dst_stage_mask = dst_stage_mask;
+ self
+ }
+ #[inline]
+ pub fn dst_access_mask(mut self, dst_access_mask: AccessFlags2) -> Self {
+ self.inner.dst_access_mask = dst_access_mask;
+ self
+ }
+ #[inline]
+ pub fn src_queue_family_index(mut self, src_queue_family_index: u32) -> Self {
+ self.inner.src_queue_family_index = src_queue_family_index;
+ self
+ }
+ #[inline]
+ pub fn dst_queue_family_index(mut self, dst_queue_family_index: u32) -> Self {
+ self.inner.dst_queue_family_index = dst_queue_family_index;
+ self
+ }
+ #[inline]
+ pub fn buffer(mut self, buffer: Buffer) -> Self {
+ self.inner.buffer = buffer;
+ self
+ }
+ #[inline]
+ pub fn offset(mut self, offset: DeviceSize) -> Self {
+ self.inner.offset = offset;
+ self
+ }
+ #[inline]
+ pub fn size(mut self, size: DeviceSize) -> Self {
+ self.inner.size = size;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BufferMemoryBarrier2 {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDependencyInfo.html>"]
+pub struct DependencyInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub dependency_flags: DependencyFlags,
+ pub memory_barrier_count: u32,
+ pub p_memory_barriers: *const MemoryBarrier2,
+ pub buffer_memory_barrier_count: u32,
+ pub p_buffer_memory_barriers: *const BufferMemoryBarrier2,
+ pub image_memory_barrier_count: u32,
+ pub p_image_memory_barriers: *const ImageMemoryBarrier2,
+}
+impl ::std::default::Default for DependencyInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ dependency_flags: DependencyFlags::default(),
+ memory_barrier_count: u32::default(),
+ p_memory_barriers: ::std::ptr::null(),
+ buffer_memory_barrier_count: u32::default(),
+ p_buffer_memory_barriers: ::std::ptr::null(),
+ image_memory_barrier_count: u32::default(),
+ p_image_memory_barriers: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DependencyInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEPENDENCY_INFO;
+}
+impl DependencyInfo {
+ pub fn builder<'a>() -> DependencyInfoBuilder<'a> {
+ DependencyInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DependencyInfoBuilder<'a> {
+ inner: DependencyInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DependencyInfoBuilder<'a> {
+ type Target = DependencyInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DependencyInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DependencyInfoBuilder<'a> {
+ #[inline]
+ pub fn dependency_flags(mut self, dependency_flags: DependencyFlags) -> Self {
+ self.inner.dependency_flags = dependency_flags;
+ self
+ }
+ #[inline]
+ pub fn memory_barriers(mut self, memory_barriers: &'a [MemoryBarrier2]) -> Self {
+ self.inner.memory_barrier_count = memory_barriers.len() as _;
+ self.inner.p_memory_barriers = memory_barriers.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn buffer_memory_barriers(
+ mut self,
+ buffer_memory_barriers: &'a [BufferMemoryBarrier2],
+ ) -> Self {
+ self.inner.buffer_memory_barrier_count = buffer_memory_barriers.len() as _;
+ self.inner.p_buffer_memory_barriers = buffer_memory_barriers.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn image_memory_barriers(
+ mut self,
+ image_memory_barriers: &'a [ImageMemoryBarrier2],
+ ) -> Self {
+ self.inner.image_memory_barrier_count = image_memory_barriers.len() as _;
+ self.inner.p_image_memory_barriers = image_memory_barriers.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DependencyInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSemaphoreSubmitInfo.html>"]
+pub struct SemaphoreSubmitInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub semaphore: Semaphore,
+ pub value: u64,
+ pub stage_mask: PipelineStageFlags2,
+ pub device_index: u32,
+}
+impl ::std::default::Default for SemaphoreSubmitInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ semaphore: Semaphore::default(),
+ value: u64::default(),
+ stage_mask: PipelineStageFlags2::default(),
+ device_index: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SemaphoreSubmitInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::SEMAPHORE_SUBMIT_INFO;
+}
+impl SemaphoreSubmitInfo {
+ pub fn builder<'a>() -> SemaphoreSubmitInfoBuilder<'a> {
+ SemaphoreSubmitInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SemaphoreSubmitInfoBuilder<'a> {
+ inner: SemaphoreSubmitInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SemaphoreSubmitInfoBuilder<'a> {
+ type Target = SemaphoreSubmitInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SemaphoreSubmitInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SemaphoreSubmitInfoBuilder<'a> {
+ #[inline]
+ pub fn semaphore(mut self, semaphore: Semaphore) -> Self {
+ self.inner.semaphore = semaphore;
+ self
+ }
+ #[inline]
+ pub fn value(mut self, value: u64) -> Self {
+ self.inner.value = value;
+ self
+ }
+ #[inline]
+ pub fn stage_mask(mut self, stage_mask: PipelineStageFlags2) -> Self {
+ self.inner.stage_mask = stage_mask;
+ self
+ }
+ #[inline]
+ pub fn device_index(mut self, device_index: u32) -> Self {
+ self.inner.device_index = device_index;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SemaphoreSubmitInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandBufferSubmitInfo.html>"]
+pub struct CommandBufferSubmitInfo {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub command_buffer: CommandBuffer,
+ pub device_mask: u32,
+}
+impl ::std::default::Default for CommandBufferSubmitInfo {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ command_buffer: CommandBuffer::default(),
+ device_mask: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for CommandBufferSubmitInfo {
+ const STRUCTURE_TYPE: StructureType = StructureType::COMMAND_BUFFER_SUBMIT_INFO;
+}
+impl CommandBufferSubmitInfo {
+ pub fn builder<'a>() -> CommandBufferSubmitInfoBuilder<'a> {
+ CommandBufferSubmitInfoBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct CommandBufferSubmitInfoBuilder<'a> {
+ inner: CommandBufferSubmitInfo,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CommandBufferSubmitInfoBuilder<'a> {
+ type Target = CommandBufferSubmitInfo;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for CommandBufferSubmitInfoBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> CommandBufferSubmitInfoBuilder<'a> {
+ #[inline]
+ pub fn command_buffer(mut self, command_buffer: CommandBuffer) -> Self {
+ self.inner.command_buffer = command_buffer;
+ self
+ }
+ #[inline]
+ pub fn device_mask(mut self, device_mask: u32) -> Self {
+ self.inner.device_mask = device_mask;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> CommandBufferSubmitInfo {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubmitInfo2.html>"]
+pub struct SubmitInfo2 {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: SubmitFlags,
+ pub wait_semaphore_info_count: u32,
+ pub p_wait_semaphore_infos: *const SemaphoreSubmitInfo,
+ pub command_buffer_info_count: u32,
+ pub p_command_buffer_infos: *const CommandBufferSubmitInfo,
+ pub signal_semaphore_info_count: u32,
+ pub p_signal_semaphore_infos: *const SemaphoreSubmitInfo,
+}
+impl ::std::default::Default for SubmitInfo2 {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: SubmitFlags::default(),
+ wait_semaphore_info_count: u32::default(),
+ p_wait_semaphore_infos: ::std::ptr::null(),
+ command_buffer_info_count: u32::default(),
+ p_command_buffer_infos: ::std::ptr::null(),
+ signal_semaphore_info_count: u32::default(),
+ p_signal_semaphore_infos: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SubmitInfo2 {
+ const STRUCTURE_TYPE: StructureType = StructureType::SUBMIT_INFO_2;
+}
+impl SubmitInfo2 {
+ pub fn builder<'a>() -> SubmitInfo2Builder<'a> {
+ SubmitInfo2Builder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SubmitInfo2Builder<'a> {
+ inner: SubmitInfo2,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsSubmitInfo2 {}
+impl<'a> ::std::ops::Deref for SubmitInfo2Builder<'a> {
+ type Target = SubmitInfo2;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SubmitInfo2Builder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SubmitInfo2Builder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: SubmitFlags) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn wait_semaphore_infos(mut self, wait_semaphore_infos: &'a [SemaphoreSubmitInfo]) -> Self {
+ self.inner.wait_semaphore_info_count = wait_semaphore_infos.len() as _;
+ self.inner.p_wait_semaphore_infos = wait_semaphore_infos.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn command_buffer_infos(
+ mut self,
+ command_buffer_infos: &'a [CommandBufferSubmitInfo],
+ ) -> Self {
+ self.inner.command_buffer_info_count = command_buffer_infos.len() as _;
+ self.inner.p_command_buffer_infos = command_buffer_infos.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn signal_semaphore_infos(
+ mut self,
+ signal_semaphore_infos: &'a [SemaphoreSubmitInfo],
+ ) -> Self {
+ self.inner.signal_semaphore_info_count = signal_semaphore_infos.len() as _;
+ self.inner.p_signal_semaphore_infos = signal_semaphore_infos.as_ptr();
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsSubmitInfo2>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SubmitInfo2 {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueueFamilyCheckpointProperties2NV.html>"]
+pub struct QueueFamilyCheckpointProperties2NV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub checkpoint_execution_stage_mask: PipelineStageFlags2,
+}
+impl ::std::default::Default for QueueFamilyCheckpointProperties2NV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ checkpoint_execution_stage_mask: PipelineStageFlags2::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for QueueFamilyCheckpointProperties2NV {
+ const STRUCTURE_TYPE: StructureType = StructureType::QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV;
+}
+impl QueueFamilyCheckpointProperties2NV {
+ pub fn builder<'a>() -> QueueFamilyCheckpointProperties2NVBuilder<'a> {
+ QueueFamilyCheckpointProperties2NVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct QueueFamilyCheckpointProperties2NVBuilder<'a> {
+ inner: QueueFamilyCheckpointProperties2NV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyCheckpointProperties2NVBuilder<'_> {}
+unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyCheckpointProperties2NV {}
+impl<'a> ::std::ops::Deref for QueueFamilyCheckpointProperties2NVBuilder<'a> {
+ type Target = QueueFamilyCheckpointProperties2NV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for QueueFamilyCheckpointProperties2NVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> QueueFamilyCheckpointProperties2NVBuilder<'a> {
+ #[inline]
+ pub fn checkpoint_execution_stage_mask(
+ mut self,
+ checkpoint_execution_stage_mask: PipelineStageFlags2,
+ ) -> Self {
+ self.inner.checkpoint_execution_stage_mask = checkpoint_execution_stage_mask;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> QueueFamilyCheckpointProperties2NV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCheckpointData2NV.html>"]
+pub struct CheckpointData2NV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub stage: PipelineStageFlags2,
+ pub p_checkpoint_marker: *mut c_void,
+}
+impl ::std::default::Default for CheckpointData2NV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ stage: PipelineStageFlags2::default(),
+ p_checkpoint_marker: ::std::ptr::null_mut(),
+ }
+ }
+}
+unsafe impl TaggedStructure for CheckpointData2NV {
+ const STRUCTURE_TYPE: StructureType = StructureType::CHECKPOINT_DATA_2_NV;
+}
+impl CheckpointData2NV {
+ pub fn builder<'a>() -> CheckpointData2NVBuilder<'a> {
+ CheckpointData2NVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct CheckpointData2NVBuilder<'a> {
+ inner: CheckpointData2NV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CheckpointData2NVBuilder<'a> {
+ type Target = CheckpointData2NV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for CheckpointData2NVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> CheckpointData2NVBuilder<'a> {
+ #[inline]
+ pub fn stage(mut self, stage: PipelineStageFlags2) -> Self {
+ self.inner.stage = stage;
+ self
+ }
+ #[inline]
+ pub fn checkpoint_marker(mut self, checkpoint_marker: *mut c_void) -> Self {
+ self.inner.p_checkpoint_marker = checkpoint_marker;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> CheckpointData2NV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceSynchronization2Features.html>"]
+pub struct PhysicalDeviceSynchronization2Features {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub synchronization2: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceSynchronization2Features {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ synchronization2: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceSynchronization2Features {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES;
+}
+impl PhysicalDeviceSynchronization2Features {
+ pub fn builder<'a>() -> PhysicalDeviceSynchronization2FeaturesBuilder<'a> {
+ PhysicalDeviceSynchronization2FeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceSynchronization2FeaturesBuilder<'a> {
+ inner: PhysicalDeviceSynchronization2Features,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSynchronization2FeaturesBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSynchronization2Features {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSynchronization2FeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSynchronization2Features {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceSynchronization2FeaturesBuilder<'a> {
+ type Target = PhysicalDeviceSynchronization2Features;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceSynchronization2FeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceSynchronization2FeaturesBuilder<'a> {
+ #[inline]
+ pub fn synchronization2(mut self, synchronization2: bool) -> Self {
+ self.inner.synchronization2 = synchronization2.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceSynchronization2Features {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT.html>"]
+pub struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub primitives_generated_query: Bool32,
+ pub primitives_generated_query_with_rasterizer_discard: Bool32,
+ pub primitives_generated_query_with_non_zero_streams: Bool32,
+}
+impl ::std::default::Default for PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ primitives_generated_query: Bool32::default(),
+ primitives_generated_query_with_rasterizer_discard: Bool32::default(),
+ primitives_generated_query_with_non_zero_streams: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT;
+}
+impl PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDevicePrimitivesGeneratedQueryFeaturesEXTBuilder<'a> {
+ PhysicalDevicePrimitivesGeneratedQueryFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDevicePrimitivesGeneratedQueryFeaturesEXTBuilder<'a> {
+ inner: PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDevicePrimitivesGeneratedQueryFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDevicePrimitivesGeneratedQueryFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePrimitivesGeneratedQueryFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePrimitivesGeneratedQueryFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDevicePrimitivesGeneratedQueryFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn primitives_generated_query(mut self, primitives_generated_query: bool) -> Self {
+ self.inner.primitives_generated_query = primitives_generated_query.into();
+ self
+ }
+ #[inline]
+ pub fn primitives_generated_query_with_rasterizer_discard(
+ mut self,
+ primitives_generated_query_with_rasterizer_discard: bool,
+ ) -> Self {
+ self.inner
+ .primitives_generated_query_with_rasterizer_discard =
+ primitives_generated_query_with_rasterizer_discard.into();
+ self
+ }
+ #[inline]
+ pub fn primitives_generated_query_with_non_zero_streams(
+ mut self,
+ primitives_generated_query_with_non_zero_streams: bool,
+ ) -> Self {
+ self.inner.primitives_generated_query_with_non_zero_streams =
+ primitives_generated_query_with_non_zero_streams.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceLegacyDitheringFeaturesEXT.html>"]
+pub struct PhysicalDeviceLegacyDitheringFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub legacy_dithering: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceLegacyDitheringFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ legacy_dithering: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceLegacyDitheringFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT;
+}
+impl PhysicalDeviceLegacyDitheringFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceLegacyDitheringFeaturesEXTBuilder<'a> {
+ PhysicalDeviceLegacyDitheringFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceLegacyDitheringFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceLegacyDitheringFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceLegacyDitheringFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceLegacyDitheringFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceLegacyDitheringFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceLegacyDitheringFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceLegacyDitheringFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceLegacyDitheringFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceLegacyDitheringFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceLegacyDitheringFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn legacy_dithering(mut self, legacy_dithering: bool) -> Self {
+ self.inner.legacy_dithering = legacy_dithering.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceLegacyDitheringFeaturesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT.html>"]
+pub struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub multisampled_render_to_single_sampled: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ multisampled_render_to_single_sampled: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT;
+}
+impl PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXTBuilder<'a> {
+ PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT
+{
+}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT {}
+impl<'a> ::std::ops::Deref
+ for PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXTBuilder<'a>
+{
+ type Target = PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut
+ for PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXTBuilder<'a>
+{
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn multisampled_render_to_single_sampled(
+ mut self,
+ multisampled_render_to_single_sampled: bool,
+ ) -> Self {
+ self.inner.multisampled_render_to_single_sampled =
+ multisampled_render_to_single_sampled.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubpassResolvePerformanceQueryEXT.html>"]
+pub struct SubpassResolvePerformanceQueryEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub optimal: Bool32,
+}
+impl ::std::default::Default for SubpassResolvePerformanceQueryEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ optimal: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SubpassResolvePerformanceQueryEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT;
+}
+impl SubpassResolvePerformanceQueryEXT {
+ pub fn builder<'a>() -> SubpassResolvePerformanceQueryEXTBuilder<'a> {
+ SubpassResolvePerformanceQueryEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SubpassResolvePerformanceQueryEXTBuilder<'a> {
+ inner: SubpassResolvePerformanceQueryEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsFormatProperties2 for SubpassResolvePerformanceQueryEXTBuilder<'_> {}
+unsafe impl ExtendsFormatProperties2 for SubpassResolvePerformanceQueryEXT {}
+impl<'a> ::std::ops::Deref for SubpassResolvePerformanceQueryEXTBuilder<'a> {
+ type Target = SubpassResolvePerformanceQueryEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SubpassResolvePerformanceQueryEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SubpassResolvePerformanceQueryEXTBuilder<'a> {
+ #[inline]
+ pub fn optimal(mut self, optimal: bool) -> Self {
+ self.inner.optimal = optimal.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SubpassResolvePerformanceQueryEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMultisampledRenderToSingleSampledInfoEXT.html>"]
+pub struct MultisampledRenderToSingleSampledInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub multisampled_render_to_single_sampled_enable: Bool32,
+ pub rasterization_samples: SampleCountFlags,
+}
+impl ::std::default::Default for MultisampledRenderToSingleSampledInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ multisampled_render_to_single_sampled_enable: Bool32::default(),
+ rasterization_samples: SampleCountFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MultisampledRenderToSingleSampledInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT;
+}
+impl MultisampledRenderToSingleSampledInfoEXT {
+ pub fn builder<'a>() -> MultisampledRenderToSingleSampledInfoEXTBuilder<'a> {
+ MultisampledRenderToSingleSampledInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MultisampledRenderToSingleSampledInfoEXTBuilder<'a> {
+ inner: MultisampledRenderToSingleSampledInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsSubpassDescription2 for MultisampledRenderToSingleSampledInfoEXTBuilder<'_> {}
+unsafe impl ExtendsSubpassDescription2 for MultisampledRenderToSingleSampledInfoEXT {}
+unsafe impl ExtendsRenderingInfo for MultisampledRenderToSingleSampledInfoEXTBuilder<'_> {}
+unsafe impl ExtendsRenderingInfo for MultisampledRenderToSingleSampledInfoEXT {}
+impl<'a> ::std::ops::Deref for MultisampledRenderToSingleSampledInfoEXTBuilder<'a> {
+ type Target = MultisampledRenderToSingleSampledInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MultisampledRenderToSingleSampledInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MultisampledRenderToSingleSampledInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn multisampled_render_to_single_sampled_enable(
+ mut self,
+ multisampled_render_to_single_sampled_enable: bool,
+ ) -> Self {
+ self.inner.multisampled_render_to_single_sampled_enable =
+ multisampled_render_to_single_sampled_enable.into();
+ self
+ }
+ #[inline]
+ pub fn rasterization_samples(mut self, rasterization_samples: SampleCountFlags) -> Self {
+ self.inner.rasterization_samples = rasterization_samples;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MultisampledRenderToSingleSampledInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePipelineProtectedAccessFeaturesEXT.html>"]
+pub struct PhysicalDevicePipelineProtectedAccessFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub pipeline_protected_access: Bool32,
+}
+impl ::std::default::Default for PhysicalDevicePipelineProtectedAccessFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ pipeline_protected_access: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDevicePipelineProtectedAccessFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT;
+}
+impl PhysicalDevicePipelineProtectedAccessFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDevicePipelineProtectedAccessFeaturesEXTBuilder<'a> {
+ PhysicalDevicePipelineProtectedAccessFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDevicePipelineProtectedAccessFeaturesEXTBuilder<'a> {
+ inner: PhysicalDevicePipelineProtectedAccessFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDevicePipelineProtectedAccessFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePipelineProtectedAccessFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDevicePipelineProtectedAccessFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePipelineProtectedAccessFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePipelineProtectedAccessFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDevicePipelineProtectedAccessFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePipelineProtectedAccessFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDevicePipelineProtectedAccessFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn pipeline_protected_access(mut self, pipeline_protected_access: bool) -> Self {
+ self.inner.pipeline_protected_access = pipeline_protected_access.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDevicePipelineProtectedAccessFeaturesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueueFamilyVideoPropertiesKHR.html>"]
+pub struct QueueFamilyVideoPropertiesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub video_codec_operations: VideoCodecOperationFlagsKHR,
+}
+impl ::std::default::Default for QueueFamilyVideoPropertiesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ video_codec_operations: VideoCodecOperationFlagsKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for QueueFamilyVideoPropertiesKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::QUEUE_FAMILY_VIDEO_PROPERTIES_KHR;
+}
+impl QueueFamilyVideoPropertiesKHR {
+ pub fn builder<'a>() -> QueueFamilyVideoPropertiesKHRBuilder<'a> {
+ QueueFamilyVideoPropertiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct QueueFamilyVideoPropertiesKHRBuilder<'a> {
+ inner: QueueFamilyVideoPropertiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyVideoPropertiesKHRBuilder<'_> {}
+unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyVideoPropertiesKHR {}
+impl<'a> ::std::ops::Deref for QueueFamilyVideoPropertiesKHRBuilder<'a> {
+ type Target = QueueFamilyVideoPropertiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for QueueFamilyVideoPropertiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> QueueFamilyVideoPropertiesKHRBuilder<'a> {
+ #[inline]
+ pub fn video_codec_operations(
+ mut self,
+ video_codec_operations: VideoCodecOperationFlagsKHR,
+ ) -> Self {
+ self.inner.video_codec_operations = video_codec_operations;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> QueueFamilyVideoPropertiesKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueueFamilyQueryResultStatusPropertiesKHR.html>"]
+pub struct QueueFamilyQueryResultStatusPropertiesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub query_result_status_support: Bool32,
+}
+impl ::std::default::Default for QueueFamilyQueryResultStatusPropertiesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ query_result_status_support: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for QueueFamilyQueryResultStatusPropertiesKHR {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR;
+}
+impl QueueFamilyQueryResultStatusPropertiesKHR {
+ pub fn builder<'a>() -> QueueFamilyQueryResultStatusPropertiesKHRBuilder<'a> {
+ QueueFamilyQueryResultStatusPropertiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct QueueFamilyQueryResultStatusPropertiesKHRBuilder<'a> {
+ inner: QueueFamilyQueryResultStatusPropertiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyQueryResultStatusPropertiesKHRBuilder<'_> {}
+unsafe impl ExtendsQueueFamilyProperties2 for QueueFamilyQueryResultStatusPropertiesKHR {}
+impl<'a> ::std::ops::Deref for QueueFamilyQueryResultStatusPropertiesKHRBuilder<'a> {
+ type Target = QueueFamilyQueryResultStatusPropertiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for QueueFamilyQueryResultStatusPropertiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> QueueFamilyQueryResultStatusPropertiesKHRBuilder<'a> {
+ #[inline]
+ pub fn query_result_status_support(mut self, query_result_status_support: bool) -> Self {
+ self.inner.query_result_status_support = query_result_status_support.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> QueueFamilyQueryResultStatusPropertiesKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoProfileListInfoKHR.html>"]
+pub struct VideoProfileListInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub profile_count: u32,
+ pub p_profiles: *const VideoProfileInfoKHR,
+}
+impl ::std::default::Default for VideoProfileListInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ profile_count: u32::default(),
+ p_profiles: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoProfileListInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_PROFILE_LIST_INFO_KHR;
+}
+impl VideoProfileListInfoKHR {
+ pub fn builder<'a>() -> VideoProfileListInfoKHRBuilder<'a> {
+ VideoProfileListInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoProfileListInfoKHRBuilder<'a> {
+ inner: VideoProfileListInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for VideoProfileListInfoKHRBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for VideoProfileListInfoKHR {}
+unsafe impl ExtendsPhysicalDeviceVideoFormatInfoKHR for VideoProfileListInfoKHRBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceVideoFormatInfoKHR for VideoProfileListInfoKHR {}
+unsafe impl ExtendsImageCreateInfo for VideoProfileListInfoKHRBuilder<'_> {}
+unsafe impl ExtendsImageCreateInfo for VideoProfileListInfoKHR {}
+unsafe impl ExtendsBufferCreateInfo for VideoProfileListInfoKHRBuilder<'_> {}
+unsafe impl ExtendsBufferCreateInfo for VideoProfileListInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoProfileListInfoKHRBuilder<'a> {
+ type Target = VideoProfileListInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoProfileListInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoProfileListInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn profiles(mut self, profiles: &'a [VideoProfileInfoKHR]) -> Self {
+ self.inner.profile_count = profiles.len() as _;
+ self.inner.p_profiles = profiles.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoProfileListInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceVideoFormatInfoKHR.html>"]
+pub struct PhysicalDeviceVideoFormatInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub image_usage: ImageUsageFlags,
+}
+impl ::std::default::Default for PhysicalDeviceVideoFormatInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ image_usage: ImageUsageFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceVideoFormatInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR;
+}
+impl PhysicalDeviceVideoFormatInfoKHR {
+ pub fn builder<'a>() -> PhysicalDeviceVideoFormatInfoKHRBuilder<'a> {
+ PhysicalDeviceVideoFormatInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceVideoFormatInfoKHRBuilder<'a> {
+ inner: PhysicalDeviceVideoFormatInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsPhysicalDeviceVideoFormatInfoKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceVideoFormatInfoKHRBuilder<'a> {
+ type Target = PhysicalDeviceVideoFormatInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceVideoFormatInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceVideoFormatInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn image_usage(mut self, image_usage: ImageUsageFlags) -> Self {
+ self.inner.image_usage = image_usage;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsPhysicalDeviceVideoFormatInfoKHR>(
+ mut self,
+ next: &'a mut T,
+ ) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceVideoFormatInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoFormatPropertiesKHR.html>"]
+pub struct VideoFormatPropertiesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub format: Format,
+ pub component_mapping: ComponentMapping,
+ pub image_create_flags: ImageCreateFlags,
+ pub image_type: ImageType,
+ pub image_tiling: ImageTiling,
+ pub image_usage_flags: ImageUsageFlags,
+}
+impl ::std::default::Default for VideoFormatPropertiesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ format: Format::default(),
+ component_mapping: ComponentMapping::default(),
+ image_create_flags: ImageCreateFlags::default(),
+ image_type: ImageType::default(),
+ image_tiling: ImageTiling::default(),
+ image_usage_flags: ImageUsageFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoFormatPropertiesKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_FORMAT_PROPERTIES_KHR;
+}
+impl VideoFormatPropertiesKHR {
+ pub fn builder<'a>() -> VideoFormatPropertiesKHRBuilder<'a> {
+ VideoFormatPropertiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoFormatPropertiesKHRBuilder<'a> {
+ inner: VideoFormatPropertiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for VideoFormatPropertiesKHRBuilder<'a> {
+ type Target = VideoFormatPropertiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoFormatPropertiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoFormatPropertiesKHRBuilder<'a> {
+ #[inline]
+ pub fn format(mut self, format: Format) -> Self {
+ self.inner.format = format;
+ self
+ }
+ #[inline]
+ pub fn component_mapping(mut self, component_mapping: ComponentMapping) -> Self {
+ self.inner.component_mapping = component_mapping;
+ self
+ }
+ #[inline]
+ pub fn image_create_flags(mut self, image_create_flags: ImageCreateFlags) -> Self {
+ self.inner.image_create_flags = image_create_flags;
+ self
+ }
+ #[inline]
+ pub fn image_type(mut self, image_type: ImageType) -> Self {
+ self.inner.image_type = image_type;
+ self
+ }
+ #[inline]
+ pub fn image_tiling(mut self, image_tiling: ImageTiling) -> Self {
+ self.inner.image_tiling = image_tiling;
+ self
+ }
+ #[inline]
+ pub fn image_usage_flags(mut self, image_usage_flags: ImageUsageFlags) -> Self {
+ self.inner.image_usage_flags = image_usage_flags;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoFormatPropertiesKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoProfileInfoKHR.html>"]
+pub struct VideoProfileInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub video_codec_operation: VideoCodecOperationFlagsKHR,
+ pub chroma_subsampling: VideoChromaSubsamplingFlagsKHR,
+ pub luma_bit_depth: VideoComponentBitDepthFlagsKHR,
+ pub chroma_bit_depth: VideoComponentBitDepthFlagsKHR,
+}
+impl ::std::default::Default for VideoProfileInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ video_codec_operation: VideoCodecOperationFlagsKHR::default(),
+ chroma_subsampling: VideoChromaSubsamplingFlagsKHR::default(),
+ luma_bit_depth: VideoComponentBitDepthFlagsKHR::default(),
+ chroma_bit_depth: VideoComponentBitDepthFlagsKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoProfileInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_PROFILE_INFO_KHR;
+}
+impl VideoProfileInfoKHR {
+ pub fn builder<'a>() -> VideoProfileInfoKHRBuilder<'a> {
+ VideoProfileInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoProfileInfoKHRBuilder<'a> {
+ inner: VideoProfileInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsQueryPoolCreateInfo for VideoProfileInfoKHRBuilder<'_> {}
+unsafe impl ExtendsQueryPoolCreateInfo for VideoProfileInfoKHR {}
+pub unsafe trait ExtendsVideoProfileInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoProfileInfoKHRBuilder<'a> {
+ type Target = VideoProfileInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoProfileInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoProfileInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn video_codec_operation(
+ mut self,
+ video_codec_operation: VideoCodecOperationFlagsKHR,
+ ) -> Self {
+ self.inner.video_codec_operation = video_codec_operation;
+ self
+ }
+ #[inline]
+ pub fn chroma_subsampling(
+ mut self,
+ chroma_subsampling: VideoChromaSubsamplingFlagsKHR,
+ ) -> Self {
+ self.inner.chroma_subsampling = chroma_subsampling;
+ self
+ }
+ #[inline]
+ pub fn luma_bit_depth(mut self, luma_bit_depth: VideoComponentBitDepthFlagsKHR) -> Self {
+ self.inner.luma_bit_depth = luma_bit_depth;
+ self
+ }
+ #[inline]
+ pub fn chroma_bit_depth(mut self, chroma_bit_depth: VideoComponentBitDepthFlagsKHR) -> Self {
+ self.inner.chroma_bit_depth = chroma_bit_depth;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsVideoProfileInfoKHR>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoProfileInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoCapabilitiesKHR.html>"]
+pub struct VideoCapabilitiesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub flags: VideoCapabilityFlagsKHR,
+ pub min_bitstream_buffer_offset_alignment: DeviceSize,
+ pub min_bitstream_buffer_size_alignment: DeviceSize,
+ pub picture_access_granularity: Extent2D,
+ pub min_coded_extent: Extent2D,
+ pub max_coded_extent: Extent2D,
+ pub max_dpb_slots: u32,
+ pub max_active_reference_pictures: u32,
+ pub std_header_version: ExtensionProperties,
+}
+impl ::std::default::Default for VideoCapabilitiesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ flags: VideoCapabilityFlagsKHR::default(),
+ min_bitstream_buffer_offset_alignment: DeviceSize::default(),
+ min_bitstream_buffer_size_alignment: DeviceSize::default(),
+ picture_access_granularity: Extent2D::default(),
+ min_coded_extent: Extent2D::default(),
+ max_coded_extent: Extent2D::default(),
+ max_dpb_slots: u32::default(),
+ max_active_reference_pictures: u32::default(),
+ std_header_version: ExtensionProperties::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoCapabilitiesKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_CAPABILITIES_KHR;
+}
+impl VideoCapabilitiesKHR {
+ pub fn builder<'a>() -> VideoCapabilitiesKHRBuilder<'a> {
+ VideoCapabilitiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoCapabilitiesKHRBuilder<'a> {
+ inner: VideoCapabilitiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsVideoCapabilitiesKHR {}
+impl<'a> ::std::ops::Deref for VideoCapabilitiesKHRBuilder<'a> {
+ type Target = VideoCapabilitiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoCapabilitiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoCapabilitiesKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: VideoCapabilityFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn min_bitstream_buffer_offset_alignment(
+ mut self,
+ min_bitstream_buffer_offset_alignment: DeviceSize,
+ ) -> Self {
+ self.inner.min_bitstream_buffer_offset_alignment = min_bitstream_buffer_offset_alignment;
+ self
+ }
+ #[inline]
+ pub fn min_bitstream_buffer_size_alignment(
+ mut self,
+ min_bitstream_buffer_size_alignment: DeviceSize,
+ ) -> Self {
+ self.inner.min_bitstream_buffer_size_alignment = min_bitstream_buffer_size_alignment;
+ self
+ }
+ #[inline]
+ pub fn picture_access_granularity(mut self, picture_access_granularity: Extent2D) -> Self {
+ self.inner.picture_access_granularity = picture_access_granularity;
+ self
+ }
+ #[inline]
+ pub fn min_coded_extent(mut self, min_coded_extent: Extent2D) -> Self {
+ self.inner.min_coded_extent = min_coded_extent;
+ self
+ }
+ #[inline]
+ pub fn max_coded_extent(mut self, max_coded_extent: Extent2D) -> Self {
+ self.inner.max_coded_extent = max_coded_extent;
+ self
+ }
+ #[inline]
+ pub fn max_dpb_slots(mut self, max_dpb_slots: u32) -> Self {
+ self.inner.max_dpb_slots = max_dpb_slots;
+ self
+ }
+ #[inline]
+ pub fn max_active_reference_pictures(mut self, max_active_reference_pictures: u32) -> Self {
+ self.inner.max_active_reference_pictures = max_active_reference_pictures;
+ self
+ }
+ #[inline]
+ pub fn std_header_version(mut self, std_header_version: ExtensionProperties) -> Self {
+ self.inner.std_header_version = std_header_version;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsVideoCapabilitiesKHR>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*mut T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoCapabilitiesKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoSessionMemoryRequirementsKHR.html>"]
+pub struct VideoSessionMemoryRequirementsKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub memory_bind_index: u32,
+ pub memory_requirements: MemoryRequirements,
+}
+impl ::std::default::Default for VideoSessionMemoryRequirementsKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ memory_bind_index: u32::default(),
+ memory_requirements: MemoryRequirements::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoSessionMemoryRequirementsKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR;
+}
+impl VideoSessionMemoryRequirementsKHR {
+ pub fn builder<'a>() -> VideoSessionMemoryRequirementsKHRBuilder<'a> {
+ VideoSessionMemoryRequirementsKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoSessionMemoryRequirementsKHRBuilder<'a> {
+ inner: VideoSessionMemoryRequirementsKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for VideoSessionMemoryRequirementsKHRBuilder<'a> {
+ type Target = VideoSessionMemoryRequirementsKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoSessionMemoryRequirementsKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoSessionMemoryRequirementsKHRBuilder<'a> {
+ #[inline]
+ pub fn memory_bind_index(mut self, memory_bind_index: u32) -> Self {
+ self.inner.memory_bind_index = memory_bind_index;
+ self
+ }
+ #[inline]
+ pub fn memory_requirements(mut self, memory_requirements: MemoryRequirements) -> Self {
+ self.inner.memory_requirements = memory_requirements;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoSessionMemoryRequirementsKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBindVideoSessionMemoryInfoKHR.html>"]
+pub struct BindVideoSessionMemoryInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub memory_bind_index: u32,
+ pub memory: DeviceMemory,
+ pub memory_offset: DeviceSize,
+ pub memory_size: DeviceSize,
+}
+impl ::std::default::Default for BindVideoSessionMemoryInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ memory_bind_index: u32::default(),
+ memory: DeviceMemory::default(),
+ memory_offset: DeviceSize::default(),
+ memory_size: DeviceSize::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for BindVideoSessionMemoryInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::BIND_VIDEO_SESSION_MEMORY_INFO_KHR;
+}
+impl BindVideoSessionMemoryInfoKHR {
+ pub fn builder<'a>() -> BindVideoSessionMemoryInfoKHRBuilder<'a> {
+ BindVideoSessionMemoryInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct BindVideoSessionMemoryInfoKHRBuilder<'a> {
+ inner: BindVideoSessionMemoryInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BindVideoSessionMemoryInfoKHRBuilder<'a> {
+ type Target = BindVideoSessionMemoryInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for BindVideoSessionMemoryInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> BindVideoSessionMemoryInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn memory_bind_index(mut self, memory_bind_index: u32) -> Self {
+ self.inner.memory_bind_index = memory_bind_index;
+ self
+ }
+ #[inline]
+ pub fn memory(mut self, memory: DeviceMemory) -> Self {
+ self.inner.memory = memory;
+ self
+ }
+ #[inline]
+ pub fn memory_offset(mut self, memory_offset: DeviceSize) -> Self {
+ self.inner.memory_offset = memory_offset;
+ self
+ }
+ #[inline]
+ pub fn memory_size(mut self, memory_size: DeviceSize) -> Self {
+ self.inner.memory_size = memory_size;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> BindVideoSessionMemoryInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoPictureResourceInfoKHR.html>"]
+pub struct VideoPictureResourceInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub coded_offset: Offset2D,
+ pub coded_extent: Extent2D,
+ pub base_array_layer: u32,
+ pub image_view_binding: ImageView,
+}
+impl ::std::default::Default for VideoPictureResourceInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ coded_offset: Offset2D::default(),
+ coded_extent: Extent2D::default(),
+ base_array_layer: u32::default(),
+ image_view_binding: ImageView::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoPictureResourceInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_PICTURE_RESOURCE_INFO_KHR;
+}
+impl VideoPictureResourceInfoKHR {
+ pub fn builder<'a>() -> VideoPictureResourceInfoKHRBuilder<'a> {
+ VideoPictureResourceInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoPictureResourceInfoKHRBuilder<'a> {
+ inner: VideoPictureResourceInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for VideoPictureResourceInfoKHRBuilder<'a> {
+ type Target = VideoPictureResourceInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoPictureResourceInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoPictureResourceInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn coded_offset(mut self, coded_offset: Offset2D) -> Self {
+ self.inner.coded_offset = coded_offset;
+ self
+ }
+ #[inline]
+ pub fn coded_extent(mut self, coded_extent: Extent2D) -> Self {
+ self.inner.coded_extent = coded_extent;
+ self
+ }
+ #[inline]
+ pub fn base_array_layer(mut self, base_array_layer: u32) -> Self {
+ self.inner.base_array_layer = base_array_layer;
+ self
+ }
+ #[inline]
+ pub fn image_view_binding(mut self, image_view_binding: ImageView) -> Self {
+ self.inner.image_view_binding = image_view_binding;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoPictureResourceInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoReferenceSlotInfoKHR.html>"]
+pub struct VideoReferenceSlotInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub slot_index: i32,
+ pub p_picture_resource: *const VideoPictureResourceInfoKHR,
+}
+impl ::std::default::Default for VideoReferenceSlotInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ slot_index: i32::default(),
+ p_picture_resource: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoReferenceSlotInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_REFERENCE_SLOT_INFO_KHR;
+}
+impl VideoReferenceSlotInfoKHR {
+ pub fn builder<'a>() -> VideoReferenceSlotInfoKHRBuilder<'a> {
+ VideoReferenceSlotInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoReferenceSlotInfoKHRBuilder<'a> {
+ inner: VideoReferenceSlotInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsVideoReferenceSlotInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoReferenceSlotInfoKHRBuilder<'a> {
+ type Target = VideoReferenceSlotInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoReferenceSlotInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoReferenceSlotInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn slot_index(mut self, slot_index: i32) -> Self {
+ self.inner.slot_index = slot_index;
+ self
+ }
+ #[inline]
+ pub fn picture_resource(mut self, picture_resource: &'a VideoPictureResourceInfoKHR) -> Self {
+ self.inner.p_picture_resource = picture_resource;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsVideoReferenceSlotInfoKHR>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoReferenceSlotInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeCapabilitiesKHR.html>"]
+pub struct VideoDecodeCapabilitiesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub flags: VideoDecodeCapabilityFlagsKHR,
+}
+impl ::std::default::Default for VideoDecodeCapabilitiesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ flags: VideoDecodeCapabilityFlagsKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoDecodeCapabilitiesKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_CAPABILITIES_KHR;
+}
+impl VideoDecodeCapabilitiesKHR {
+ pub fn builder<'a>() -> VideoDecodeCapabilitiesKHRBuilder<'a> {
+ VideoDecodeCapabilitiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoDecodeCapabilitiesKHRBuilder<'a> {
+ inner: VideoDecodeCapabilitiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsVideoCapabilitiesKHR for VideoDecodeCapabilitiesKHRBuilder<'_> {}
+unsafe impl ExtendsVideoCapabilitiesKHR for VideoDecodeCapabilitiesKHR {}
+impl<'a> ::std::ops::Deref for VideoDecodeCapabilitiesKHRBuilder<'a> {
+ type Target = VideoDecodeCapabilitiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoDecodeCapabilitiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoDecodeCapabilitiesKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: VideoDecodeCapabilityFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoDecodeCapabilitiesKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeUsageInfoKHR.html>"]
+pub struct VideoDecodeUsageInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub video_usage_hints: VideoDecodeUsageFlagsKHR,
+}
+impl ::std::default::Default for VideoDecodeUsageInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ video_usage_hints: VideoDecodeUsageFlagsKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoDecodeUsageInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_USAGE_INFO_KHR;
+}
+impl VideoDecodeUsageInfoKHR {
+ pub fn builder<'a>() -> VideoDecodeUsageInfoKHRBuilder<'a> {
+ VideoDecodeUsageInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoDecodeUsageInfoKHRBuilder<'a> {
+ inner: VideoDecodeUsageInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsVideoProfileInfoKHR for VideoDecodeUsageInfoKHRBuilder<'_> {}
+unsafe impl ExtendsVideoProfileInfoKHR for VideoDecodeUsageInfoKHR {}
+unsafe impl ExtendsQueryPoolCreateInfo for VideoDecodeUsageInfoKHRBuilder<'_> {}
+unsafe impl ExtendsQueryPoolCreateInfo for VideoDecodeUsageInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoDecodeUsageInfoKHRBuilder<'a> {
+ type Target = VideoDecodeUsageInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoDecodeUsageInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoDecodeUsageInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn video_usage_hints(mut self, video_usage_hints: VideoDecodeUsageFlagsKHR) -> Self {
+ self.inner.video_usage_hints = video_usage_hints;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoDecodeUsageInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeInfoKHR.html>"]
+pub struct VideoDecodeInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: VideoDecodeFlagsKHR,
+ pub src_buffer: Buffer,
+ pub src_buffer_offset: DeviceSize,
+ pub src_buffer_range: DeviceSize,
+ pub dst_picture_resource: VideoPictureResourceInfoKHR,
+ pub p_setup_reference_slot: *const VideoReferenceSlotInfoKHR,
+ pub reference_slot_count: u32,
+ pub p_reference_slots: *const VideoReferenceSlotInfoKHR,
+}
+impl ::std::default::Default for VideoDecodeInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: VideoDecodeFlagsKHR::default(),
+ src_buffer: Buffer::default(),
+ src_buffer_offset: DeviceSize::default(),
+ src_buffer_range: DeviceSize::default(),
+ dst_picture_resource: VideoPictureResourceInfoKHR::default(),
+ p_setup_reference_slot: ::std::ptr::null(),
+ reference_slot_count: u32::default(),
+ p_reference_slots: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoDecodeInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_INFO_KHR;
+}
+impl VideoDecodeInfoKHR {
+ pub fn builder<'a>() -> VideoDecodeInfoKHRBuilder<'a> {
+ VideoDecodeInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoDecodeInfoKHRBuilder<'a> {
+ inner: VideoDecodeInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsVideoDecodeInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoDecodeInfoKHRBuilder<'a> {
+ type Target = VideoDecodeInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoDecodeInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoDecodeInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: VideoDecodeFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn src_buffer(mut self, src_buffer: Buffer) -> Self {
+ self.inner.src_buffer = src_buffer;
+ self
+ }
+ #[inline]
+ pub fn src_buffer_offset(mut self, src_buffer_offset: DeviceSize) -> Self {
+ self.inner.src_buffer_offset = src_buffer_offset;
+ self
+ }
+ #[inline]
+ pub fn src_buffer_range(mut self, src_buffer_range: DeviceSize) -> Self {
+ self.inner.src_buffer_range = src_buffer_range;
+ self
+ }
+ #[inline]
+ pub fn dst_picture_resource(
+ mut self,
+ dst_picture_resource: VideoPictureResourceInfoKHR,
+ ) -> Self {
+ self.inner.dst_picture_resource = dst_picture_resource;
+ self
+ }
+ #[inline]
+ pub fn setup_reference_slot(
+ mut self,
+ setup_reference_slot: &'a VideoReferenceSlotInfoKHR,
+ ) -> Self {
+ self.inner.p_setup_reference_slot = setup_reference_slot;
+ self
+ }
+ #[inline]
+ pub fn reference_slots(mut self, reference_slots: &'a [VideoReferenceSlotInfoKHR]) -> Self {
+ self.inner.reference_slot_count = reference_slots.len() as _;
+ self.inner.p_reference_slots = reference_slots.as_ptr();
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsVideoDecodeInfoKHR>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoDecodeInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeH264ProfileInfoKHR.html>"]
+pub struct VideoDecodeH264ProfileInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub std_profile_idc: StdVideoH264ProfileIdc,
+ pub picture_layout: VideoDecodeH264PictureLayoutFlagsKHR,
+}
+impl ::std::default::Default for VideoDecodeH264ProfileInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ std_profile_idc: StdVideoH264ProfileIdc::default(),
+ picture_layout: VideoDecodeH264PictureLayoutFlagsKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoDecodeH264ProfileInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H264_PROFILE_INFO_KHR;
+}
+impl VideoDecodeH264ProfileInfoKHR {
+ pub fn builder<'a>() -> VideoDecodeH264ProfileInfoKHRBuilder<'a> {
+ VideoDecodeH264ProfileInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoDecodeH264ProfileInfoKHRBuilder<'a> {
+ inner: VideoDecodeH264ProfileInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsVideoProfileInfoKHR for VideoDecodeH264ProfileInfoKHRBuilder<'_> {}
+unsafe impl ExtendsVideoProfileInfoKHR for VideoDecodeH264ProfileInfoKHR {}
+unsafe impl ExtendsQueryPoolCreateInfo for VideoDecodeH264ProfileInfoKHRBuilder<'_> {}
+unsafe impl ExtendsQueryPoolCreateInfo for VideoDecodeH264ProfileInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoDecodeH264ProfileInfoKHRBuilder<'a> {
+ type Target = VideoDecodeH264ProfileInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoDecodeH264ProfileInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoDecodeH264ProfileInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn std_profile_idc(mut self, std_profile_idc: StdVideoH264ProfileIdc) -> Self {
+ self.inner.std_profile_idc = std_profile_idc;
+ self
+ }
+ #[inline]
+ pub fn picture_layout(mut self, picture_layout: VideoDecodeH264PictureLayoutFlagsKHR) -> Self {
+ self.inner.picture_layout = picture_layout;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoDecodeH264ProfileInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeH264CapabilitiesKHR.html>"]
+pub struct VideoDecodeH264CapabilitiesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub max_level_idc: StdVideoH264LevelIdc,
+ pub field_offset_granularity: Offset2D,
+}
+impl ::std::default::Default for VideoDecodeH264CapabilitiesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ max_level_idc: StdVideoH264LevelIdc::default(),
+ field_offset_granularity: Offset2D::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoDecodeH264CapabilitiesKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H264_CAPABILITIES_KHR;
+}
+impl VideoDecodeH264CapabilitiesKHR {
+ pub fn builder<'a>() -> VideoDecodeH264CapabilitiesKHRBuilder<'a> {
+ VideoDecodeH264CapabilitiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoDecodeH264CapabilitiesKHRBuilder<'a> {
+ inner: VideoDecodeH264CapabilitiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsVideoCapabilitiesKHR for VideoDecodeH264CapabilitiesKHRBuilder<'_> {}
+unsafe impl ExtendsVideoCapabilitiesKHR for VideoDecodeH264CapabilitiesKHR {}
+impl<'a> ::std::ops::Deref for VideoDecodeH264CapabilitiesKHRBuilder<'a> {
+ type Target = VideoDecodeH264CapabilitiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoDecodeH264CapabilitiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoDecodeH264CapabilitiesKHRBuilder<'a> {
+ #[inline]
+ pub fn max_level_idc(mut self, max_level_idc: StdVideoH264LevelIdc) -> Self {
+ self.inner.max_level_idc = max_level_idc;
+ self
+ }
+ #[inline]
+ pub fn field_offset_granularity(mut self, field_offset_granularity: Offset2D) -> Self {
+ self.inner.field_offset_granularity = field_offset_granularity;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoDecodeH264CapabilitiesKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeH264SessionParametersAddInfoKHR.html>"]
+pub struct VideoDecodeH264SessionParametersAddInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub std_sps_count: u32,
+ pub p_std_sp_ss: *const StdVideoH264SequenceParameterSet,
+ pub std_pps_count: u32,
+ pub p_std_pp_ss: *const StdVideoH264PictureParameterSet,
+}
+impl ::std::default::Default for VideoDecodeH264SessionParametersAddInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ std_sps_count: u32::default(),
+ p_std_sp_ss: ::std::ptr::null(),
+ std_pps_count: u32::default(),
+ p_std_pp_ss: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoDecodeH264SessionParametersAddInfoKHR {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR;
+}
+impl VideoDecodeH264SessionParametersAddInfoKHR {
+ pub fn builder<'a>() -> VideoDecodeH264SessionParametersAddInfoKHRBuilder<'a> {
+ VideoDecodeH264SessionParametersAddInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoDecodeH264SessionParametersAddInfoKHRBuilder<'a> {
+ inner: VideoDecodeH264SessionParametersAddInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsVideoSessionParametersUpdateInfoKHR
+ for VideoDecodeH264SessionParametersAddInfoKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsVideoSessionParametersUpdateInfoKHR
+ for VideoDecodeH264SessionParametersAddInfoKHR
+{
+}
+impl<'a> ::std::ops::Deref for VideoDecodeH264SessionParametersAddInfoKHRBuilder<'a> {
+ type Target = VideoDecodeH264SessionParametersAddInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoDecodeH264SessionParametersAddInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoDecodeH264SessionParametersAddInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn std_sp_ss(mut self, std_sp_ss: &'a [StdVideoH264SequenceParameterSet]) -> Self {
+ self.inner.std_sps_count = std_sp_ss.len() as _;
+ self.inner.p_std_sp_ss = std_sp_ss.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn std_pp_ss(mut self, std_pp_ss: &'a [StdVideoH264PictureParameterSet]) -> Self {
+ self.inner.std_pps_count = std_pp_ss.len() as _;
+ self.inner.p_std_pp_ss = std_pp_ss.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoDecodeH264SessionParametersAddInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeH264SessionParametersCreateInfoKHR.html>"]
+pub struct VideoDecodeH264SessionParametersCreateInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub max_std_sps_count: u32,
+ pub max_std_pps_count: u32,
+ pub p_parameters_add_info: *const VideoDecodeH264SessionParametersAddInfoKHR,
+}
+impl ::std::default::Default for VideoDecodeH264SessionParametersCreateInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ max_std_sps_count: u32::default(),
+ max_std_pps_count: u32::default(),
+ p_parameters_add_info: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoDecodeH264SessionParametersCreateInfoKHR {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR;
+}
+impl VideoDecodeH264SessionParametersCreateInfoKHR {
+ pub fn builder<'a>() -> VideoDecodeH264SessionParametersCreateInfoKHRBuilder<'a> {
+ VideoDecodeH264SessionParametersCreateInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoDecodeH264SessionParametersCreateInfoKHRBuilder<'a> {
+ inner: VideoDecodeH264SessionParametersCreateInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsVideoSessionParametersCreateInfoKHR
+ for VideoDecodeH264SessionParametersCreateInfoKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsVideoSessionParametersCreateInfoKHR
+ for VideoDecodeH264SessionParametersCreateInfoKHR
+{
+}
+impl<'a> ::std::ops::Deref for VideoDecodeH264SessionParametersCreateInfoKHRBuilder<'a> {
+ type Target = VideoDecodeH264SessionParametersCreateInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoDecodeH264SessionParametersCreateInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoDecodeH264SessionParametersCreateInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn max_std_sps_count(mut self, max_std_sps_count: u32) -> Self {
+ self.inner.max_std_sps_count = max_std_sps_count;
+ self
+ }
+ #[inline]
+ pub fn max_std_pps_count(mut self, max_std_pps_count: u32) -> Self {
+ self.inner.max_std_pps_count = max_std_pps_count;
+ self
+ }
+ #[inline]
+ pub fn parameters_add_info(
+ mut self,
+ parameters_add_info: &'a VideoDecodeH264SessionParametersAddInfoKHR,
+ ) -> Self {
+ self.inner.p_parameters_add_info = parameters_add_info;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoDecodeH264SessionParametersCreateInfoKHR {
+ self.inner
+ }
+}
+// FFI mirror of VkVideoDecodeH264PictureInfoKHR. #[repr(C)] layout: field order
+// and types must match the C header exactly — do not reorder or resize fields.
+// Pointer fields reference caller-owned Vulkan video-std data.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeH264PictureInfoKHR.html>"]
+pub struct VideoDecodeH264PictureInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub p_std_picture_info: *const StdVideoDecodeH264PictureInfo,
+    pub slice_count: u32,
+    pub p_slice_offsets: *const u32,
+}
+// Default pre-fills s_type with this struct's VkStructureType tag and nulls all
+// pointers, so a default value is immediately usable in a p_next chain.
+impl ::std::default::Default for VideoDecodeH264PictureInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            p_std_picture_info: ::std::ptr::null(),
+            slice_count: u32::default(),
+            p_slice_offsets: ::std::ptr::null(),
+        }
+    }
+}
+// Binds the struct to its unique VkStructureType discriminant (used by Default).
+unsafe impl TaggedStructure for VideoDecodeH264PictureInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H264_PICTURE_INFO_KHR;
+}
+impl VideoDecodeH264PictureInfoKHR {
+    pub fn builder<'a>() -> VideoDecodeH264PictureInfoKHRBuilder<'a> {
+        VideoDecodeH264PictureInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent): the builder has the same layout as the wrapped struct, so a
+// &builder can stand in for a &struct (via Deref below). The 'a lifetime ties
+// the borrowed pointers stored in `inner` to the data they reference.
+#[repr(transparent)]
+pub struct VideoDecodeH264PictureInfoKHRBuilder<'a> {
+    inner: VideoDecodeH264PictureInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: this struct may be pushed onto a VideoDecodeInfoKHR p_next chain.
+unsafe impl ExtendsVideoDecodeInfoKHR for VideoDecodeH264PictureInfoKHRBuilder<'_> {}
+unsafe impl ExtendsVideoDecodeInfoKHR for VideoDecodeH264PictureInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoDecodeH264PictureInfoKHRBuilder<'a> {
+    type Target = VideoDecodeH264PictureInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for VideoDecodeH264PictureInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> VideoDecodeH264PictureInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn std_picture_info(mut self, std_picture_info: &'a StdVideoDecodeH264PictureInfo) -> Self {
+        self.inner.p_std_picture_info = std_picture_info;
+        self
+    }
+    // Records both the count and the pointer from one slice; `len() as _`
+    // narrows usize to the u32 count the C API expects.
+    #[inline]
+    pub fn slice_offsets(mut self, slice_offsets: &'a [u32]) -> Self {
+        self.inner.slice_count = slice_offsets.len() as _;
+        self.inner.p_slice_offsets = slice_offsets.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> VideoDecodeH264PictureInfoKHR {
+        self.inner
+    }
+}
+// FFI mirror of VkVideoDecodeH264DpbSlotInfoKHR (decoded-picture-buffer slot
+// reference info). #[repr(C)] layout must match the C header exactly.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeH264DpbSlotInfoKHR.html>"]
+pub struct VideoDecodeH264DpbSlotInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub p_std_reference_info: *const StdVideoDecodeH264ReferenceInfo,
+}
+// Default: s_type pre-set to this struct's tag, all pointers null.
+impl ::std::default::Default for VideoDecodeH264DpbSlotInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            p_std_reference_info: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for VideoDecodeH264DpbSlotInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR;
+}
+impl VideoDecodeH264DpbSlotInfoKHR {
+    pub fn builder<'a>() -> VideoDecodeH264DpbSlotInfoKHRBuilder<'a> {
+        VideoDecodeH264DpbSlotInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Transparent wrapper; 'a ties the stored reference-info pointer to its owner.
+#[repr(transparent)]
+pub struct VideoDecodeH264DpbSlotInfoKHRBuilder<'a> {
+    inner: VideoDecodeH264DpbSlotInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may extend a VideoReferenceSlotInfoKHR p_next chain.
+unsafe impl ExtendsVideoReferenceSlotInfoKHR for VideoDecodeH264DpbSlotInfoKHRBuilder<'_> {}
+unsafe impl ExtendsVideoReferenceSlotInfoKHR for VideoDecodeH264DpbSlotInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoDecodeH264DpbSlotInfoKHRBuilder<'a> {
+    type Target = VideoDecodeH264DpbSlotInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for VideoDecodeH264DpbSlotInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> VideoDecodeH264DpbSlotInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn std_reference_info(
+        mut self,
+        std_reference_info: &'a StdVideoDecodeH264ReferenceInfo,
+    ) -> Self {
+        self.inner.p_std_reference_info = std_reference_info;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> VideoDecodeH264DpbSlotInfoKHR {
+        self.inner
+    }
+}
+// FFI mirror of VkVideoDecodeH265ProfileInfoKHR; selects the H.265 profile for
+// a video session. #[repr(C)] layout must match the C header exactly.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeH265ProfileInfoKHR.html>"]
+pub struct VideoDecodeH265ProfileInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub std_profile_idc: StdVideoH265ProfileIdc,
+}
+// Default: s_type pre-set to this struct's tag, remaining fields zeroed.
+impl ::std::default::Default for VideoDecodeH265ProfileInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            std_profile_idc: StdVideoH265ProfileIdc::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for VideoDecodeH265ProfileInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H265_PROFILE_INFO_KHR;
+}
+impl VideoDecodeH265ProfileInfoKHR {
+    pub fn builder<'a>() -> VideoDecodeH265ProfileInfoKHRBuilder<'a> {
+        VideoDecodeH265ProfileInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct VideoDecodeH265ProfileInfoKHRBuilder<'a> {
+    inner: VideoDecodeH265ProfileInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may extend both VideoProfileInfoKHR and QueryPoolCreateInfo
+// p_next chains.
+unsafe impl ExtendsVideoProfileInfoKHR for VideoDecodeH265ProfileInfoKHRBuilder<'_> {}
+unsafe impl ExtendsVideoProfileInfoKHR for VideoDecodeH265ProfileInfoKHR {}
+unsafe impl ExtendsQueryPoolCreateInfo for VideoDecodeH265ProfileInfoKHRBuilder<'_> {}
+unsafe impl ExtendsQueryPoolCreateInfo for VideoDecodeH265ProfileInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoDecodeH265ProfileInfoKHRBuilder<'a> {
+    type Target = VideoDecodeH265ProfileInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for VideoDecodeH265ProfileInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> VideoDecodeH265ProfileInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn std_profile_idc(mut self, std_profile_idc: StdVideoH265ProfileIdc) -> Self {
+        self.inner.std_profile_idc = std_profile_idc;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> VideoDecodeH265ProfileInfoKHR {
+        self.inner
+    }
+}
+// FFI mirror of VkVideoDecodeH265CapabilitiesKHR. NOTE: p_next is *mut here
+// (not *const, as on the input structs) — matching the C header; capabilities
+// structs are presumably written by the implementation during queries — see the
+// linked spec page to confirm.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeH265CapabilitiesKHR.html>"]
+pub struct VideoDecodeH265CapabilitiesKHR {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub max_level_idc: StdVideoH265LevelIdc,
+}
+// Default: s_type pre-set to this struct's tag, remaining fields zeroed/null.
+impl ::std::default::Default for VideoDecodeH265CapabilitiesKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            max_level_idc: StdVideoH265LevelIdc::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for VideoDecodeH265CapabilitiesKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H265_CAPABILITIES_KHR;
+}
+impl VideoDecodeH265CapabilitiesKHR {
+    pub fn builder<'a>() -> VideoDecodeH265CapabilitiesKHRBuilder<'a> {
+        VideoDecodeH265CapabilitiesKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct VideoDecodeH265CapabilitiesKHRBuilder<'a> {
+    inner: VideoDecodeH265CapabilitiesKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may extend a VideoCapabilitiesKHR p_next chain.
+unsafe impl ExtendsVideoCapabilitiesKHR for VideoDecodeH265CapabilitiesKHRBuilder<'_> {}
+unsafe impl ExtendsVideoCapabilitiesKHR for VideoDecodeH265CapabilitiesKHR {}
+impl<'a> ::std::ops::Deref for VideoDecodeH265CapabilitiesKHRBuilder<'a> {
+    type Target = VideoDecodeH265CapabilitiesKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for VideoDecodeH265CapabilitiesKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> VideoDecodeH265CapabilitiesKHRBuilder<'a> {
+    #[inline]
+    pub fn max_level_idc(mut self, max_level_idc: StdVideoH265LevelIdc) -> Self {
+        self.inner.max_level_idc = max_level_idc;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> VideoDecodeH265CapabilitiesKHR {
+        self.inner
+    }
+}
+// FFI mirror of VkVideoDecodeH265SessionParametersAddInfoKHR: carries arrays of
+// H.265 VPS/SPS/PPS parameter sets, each as a (count, pointer) pair. #[repr(C)]
+// layout must match the C header exactly.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeH265SessionParametersAddInfoKHR.html>"]
+pub struct VideoDecodeH265SessionParametersAddInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub std_vps_count: u32,
+    pub p_std_vp_ss: *const StdVideoH265VideoParameterSet,
+    pub std_sps_count: u32,
+    pub p_std_sp_ss: *const StdVideoH265SequenceParameterSet,
+    pub std_pps_count: u32,
+    pub p_std_pp_ss: *const StdVideoH265PictureParameterSet,
+}
+// Default: s_type pre-set to this struct's tag, counts zero, pointers null.
+impl ::std::default::Default for VideoDecodeH265SessionParametersAddInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            std_vps_count: u32::default(),
+            p_std_vp_ss: ::std::ptr::null(),
+            std_sps_count: u32::default(),
+            p_std_sp_ss: ::std::ptr::null(),
+            std_pps_count: u32::default(),
+            p_std_pp_ss: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for VideoDecodeH265SessionParametersAddInfoKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR;
+}
+impl VideoDecodeH265SessionParametersAddInfoKHR {
+    pub fn builder<'a>() -> VideoDecodeH265SessionParametersAddInfoKHRBuilder<'a> {
+        VideoDecodeH265SessionParametersAddInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Transparent wrapper; 'a ties the stored slice pointers to their owners.
+#[repr(transparent)]
+pub struct VideoDecodeH265SessionParametersAddInfoKHRBuilder<'a> {
+    inner: VideoDecodeH265SessionParametersAddInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may extend a VideoSessionParametersUpdateInfoKHR p_next chain.
+unsafe impl ExtendsVideoSessionParametersUpdateInfoKHR
+    for VideoDecodeH265SessionParametersAddInfoKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsVideoSessionParametersUpdateInfoKHR
+    for VideoDecodeH265SessionParametersAddInfoKHR
+{
+}
+impl<'a> ::std::ops::Deref for VideoDecodeH265SessionParametersAddInfoKHRBuilder<'a> {
+    type Target = VideoDecodeH265SessionParametersAddInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for VideoDecodeH265SessionParametersAddInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> VideoDecodeH265SessionParametersAddInfoKHRBuilder<'a> {
+    // Each setter records both count and pointer from one slice; `len() as _`
+    // narrows usize to the u32 count the C API expects.
+    #[inline]
+    pub fn std_vp_ss(mut self, std_vp_ss: &'a [StdVideoH265VideoParameterSet]) -> Self {
+        self.inner.std_vps_count = std_vp_ss.len() as _;
+        self.inner.p_std_vp_ss = std_vp_ss.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn std_sp_ss(mut self, std_sp_ss: &'a [StdVideoH265SequenceParameterSet]) -> Self {
+        self.inner.std_sps_count = std_sp_ss.len() as _;
+        self.inner.p_std_sp_ss = std_sp_ss.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn std_pp_ss(mut self, std_pp_ss: &'a [StdVideoH265PictureParameterSet]) -> Self {
+        self.inner.std_pps_count = std_pp_ss.len() as _;
+        self.inner.p_std_pp_ss = std_pp_ss.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> VideoDecodeH265SessionParametersAddInfoKHR {
+        self.inner
+    }
+}
+// FFI mirror of VkVideoDecodeH265SessionParametersCreateInfoKHR: capacity hints
+// (max VPS/SPS/PPS counts) plus an optional initial AddInfo payload.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeH265SessionParametersCreateInfoKHR.html>"]
+pub struct VideoDecodeH265SessionParametersCreateInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub max_std_vps_count: u32,
+    pub max_std_sps_count: u32,
+    pub max_std_pps_count: u32,
+    pub p_parameters_add_info: *const VideoDecodeH265SessionParametersAddInfoKHR,
+}
+// Default: s_type pre-set to this struct's tag, counts zero, pointer null.
+impl ::std::default::Default for VideoDecodeH265SessionParametersCreateInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            max_std_vps_count: u32::default(),
+            max_std_sps_count: u32::default(),
+            max_std_pps_count: u32::default(),
+            p_parameters_add_info: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for VideoDecodeH265SessionParametersCreateInfoKHR {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR;
+}
+impl VideoDecodeH265SessionParametersCreateInfoKHR {
+    pub fn builder<'a>() -> VideoDecodeH265SessionParametersCreateInfoKHRBuilder<'a> {
+        VideoDecodeH265SessionParametersCreateInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct VideoDecodeH265SessionParametersCreateInfoKHRBuilder<'a> {
+    inner: VideoDecodeH265SessionParametersCreateInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may extend a VideoSessionParametersCreateInfoKHR p_next chain.
+unsafe impl ExtendsVideoSessionParametersCreateInfoKHR
+    for VideoDecodeH265SessionParametersCreateInfoKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsVideoSessionParametersCreateInfoKHR
+    for VideoDecodeH265SessionParametersCreateInfoKHR
+{
+}
+impl<'a> ::std::ops::Deref for VideoDecodeH265SessionParametersCreateInfoKHRBuilder<'a> {
+    type Target = VideoDecodeH265SessionParametersCreateInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for VideoDecodeH265SessionParametersCreateInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> VideoDecodeH265SessionParametersCreateInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn max_std_vps_count(mut self, max_std_vps_count: u32) -> Self {
+        self.inner.max_std_vps_count = max_std_vps_count;
+        self
+    }
+    #[inline]
+    pub fn max_std_sps_count(mut self, max_std_sps_count: u32) -> Self {
+        self.inner.max_std_sps_count = max_std_sps_count;
+        self
+    }
+    #[inline]
+    pub fn max_std_pps_count(mut self, max_std_pps_count: u32) -> Self {
+        self.inner.max_std_pps_count = max_std_pps_count;
+        self
+    }
+    // Stores a borrowed pointer; the referenced AddInfo must outlive 'a.
+    #[inline]
+    pub fn parameters_add_info(
+        mut self,
+        parameters_add_info: &'a VideoDecodeH265SessionParametersAddInfoKHR,
+    ) -> Self {
+        self.inner.p_parameters_add_info = parameters_add_info;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> VideoDecodeH265SessionParametersCreateInfoKHR {
+        self.inner
+    }
+}
+// FFI mirror of VkVideoDecodeH265PictureInfoKHR. NOTE: unlike the H264 variant,
+// p_std_picture_info is *mut here and the builder takes &'a mut — this matches
+// the Vulkan XML registry this file is generated from; confirm against the
+// linked spec page before relying on mutation semantics.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeH265PictureInfoKHR.html>"]
+pub struct VideoDecodeH265PictureInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub p_std_picture_info: *mut StdVideoDecodeH265PictureInfo,
+    pub slice_segment_count: u32,
+    pub p_slice_segment_offsets: *const u32,
+}
+// Default: s_type pre-set to this struct's tag, counts zero, pointers null.
+impl ::std::default::Default for VideoDecodeH265PictureInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            p_std_picture_info: ::std::ptr::null_mut(),
+            slice_segment_count: u32::default(),
+            p_slice_segment_offsets: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for VideoDecodeH265PictureInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H265_PICTURE_INFO_KHR;
+}
+impl VideoDecodeH265PictureInfoKHR {
+    pub fn builder<'a>() -> VideoDecodeH265PictureInfoKHRBuilder<'a> {
+        VideoDecodeH265PictureInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct VideoDecodeH265PictureInfoKHRBuilder<'a> {
+    inner: VideoDecodeH265PictureInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may extend a VideoDecodeInfoKHR p_next chain.
+unsafe impl ExtendsVideoDecodeInfoKHR for VideoDecodeH265PictureInfoKHRBuilder<'_> {}
+unsafe impl ExtendsVideoDecodeInfoKHR for VideoDecodeH265PictureInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoDecodeH265PictureInfoKHRBuilder<'a> {
+    type Target = VideoDecodeH265PictureInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for VideoDecodeH265PictureInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> VideoDecodeH265PictureInfoKHRBuilder<'a> {
+    // Takes a mutable borrow because the underlying C field is a mutable pointer.
+    #[inline]
+    pub fn std_picture_info(
+        mut self,
+        std_picture_info: &'a mut StdVideoDecodeH265PictureInfo,
+    ) -> Self {
+        self.inner.p_std_picture_info = std_picture_info;
+        self
+    }
+    // Records both count and pointer from one slice; `len() as _` narrows
+    // usize to the u32 count the C API expects.
+    #[inline]
+    pub fn slice_segment_offsets(mut self, slice_segment_offsets: &'a [u32]) -> Self {
+        self.inner.slice_segment_count = slice_segment_offsets.len() as _;
+        self.inner.p_slice_segment_offsets = slice_segment_offsets.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> VideoDecodeH265PictureInfoKHR {
+        self.inner
+    }
+}
+// FFI mirror of VkVideoDecodeH265DpbSlotInfoKHR (decoded-picture-buffer slot
+// reference info). #[repr(C)] layout must match the C header exactly.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoDecodeH265DpbSlotInfoKHR.html>"]
+pub struct VideoDecodeH265DpbSlotInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub p_std_reference_info: *const StdVideoDecodeH265ReferenceInfo,
+}
+// Default: s_type pre-set to this struct's tag, all pointers null.
+impl ::std::default::Default for VideoDecodeH265DpbSlotInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            p_std_reference_info: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for VideoDecodeH265DpbSlotInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR;
+}
+impl VideoDecodeH265DpbSlotInfoKHR {
+    pub fn builder<'a>() -> VideoDecodeH265DpbSlotInfoKHRBuilder<'a> {
+        VideoDecodeH265DpbSlotInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Transparent wrapper; 'a ties the stored reference-info pointer to its owner.
+#[repr(transparent)]
+pub struct VideoDecodeH265DpbSlotInfoKHRBuilder<'a> {
+    inner: VideoDecodeH265DpbSlotInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: may extend a VideoReferenceSlotInfoKHR p_next chain.
+unsafe impl ExtendsVideoReferenceSlotInfoKHR for VideoDecodeH265DpbSlotInfoKHRBuilder<'_> {}
+unsafe impl ExtendsVideoReferenceSlotInfoKHR for VideoDecodeH265DpbSlotInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoDecodeH265DpbSlotInfoKHRBuilder<'a> {
+    type Target = VideoDecodeH265DpbSlotInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for VideoDecodeH265DpbSlotInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> VideoDecodeH265DpbSlotInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn std_reference_info(
+        mut self,
+        std_reference_info: &'a StdVideoDecodeH265ReferenceInfo,
+    ) -> Self {
+        self.inner.p_std_reference_info = std_reference_info;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> VideoDecodeH265DpbSlotInfoKHR {
+        self.inner
+    }
+}
+// FFI mirror of VkVideoSessionCreateInfoKHR: all parameters needed to create a
+// video session (queue family, profile, formats, DPB sizing, codec header
+// version). Root create-info struct — no Extends* marker impls here.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoSessionCreateInfoKHR.html>"]
+pub struct VideoSessionCreateInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub queue_family_index: u32,
+    pub flags: VideoSessionCreateFlagsKHR,
+    pub p_video_profile: *const VideoProfileInfoKHR,
+    pub picture_format: Format,
+    pub max_coded_extent: Extent2D,
+    pub reference_picture_format: Format,
+    pub max_dpb_slots: u32,
+    pub max_active_reference_pictures: u32,
+    pub p_std_header_version: *const ExtensionProperties,
+}
+// Default: s_type pre-set to this struct's tag, all other fields zeroed/null.
+impl ::std::default::Default for VideoSessionCreateInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            queue_family_index: u32::default(),
+            flags: VideoSessionCreateFlagsKHR::default(),
+            p_video_profile: ::std::ptr::null(),
+            picture_format: Format::default(),
+            max_coded_extent: Extent2D::default(),
+            reference_picture_format: Format::default(),
+            max_dpb_slots: u32::default(),
+            max_active_reference_pictures: u32::default(),
+            p_std_header_version: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for VideoSessionCreateInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_SESSION_CREATE_INFO_KHR;
+}
+impl VideoSessionCreateInfoKHR {
+    pub fn builder<'a>() -> VideoSessionCreateInfoKHRBuilder<'a> {
+        VideoSessionCreateInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Transparent wrapper; 'a ties the stored profile/header pointers to their owners.
+#[repr(transparent)]
+pub struct VideoSessionCreateInfoKHRBuilder<'a> {
+    inner: VideoSessionCreateInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for VideoSessionCreateInfoKHRBuilder<'a> {
+    type Target = VideoSessionCreateInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for VideoSessionCreateInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+// One by-value setter per plain field; reference setters store borrowed pointers
+// that must outlive 'a.
+impl<'a> VideoSessionCreateInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn queue_family_index(mut self, queue_family_index: u32) -> Self {
+        self.inner.queue_family_index = queue_family_index;
+        self
+    }
+    #[inline]
+    pub fn flags(mut self, flags: VideoSessionCreateFlagsKHR) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn video_profile(mut self, video_profile: &'a VideoProfileInfoKHR) -> Self {
+        self.inner.p_video_profile = video_profile;
+        self
+    }
+    #[inline]
+    pub fn picture_format(mut self, picture_format: Format) -> Self {
+        self.inner.picture_format = picture_format;
+        self
+    }
+    #[inline]
+    pub fn max_coded_extent(mut self, max_coded_extent: Extent2D) -> Self {
+        self.inner.max_coded_extent = max_coded_extent;
+        self
+    }
+    #[inline]
+    pub fn reference_picture_format(mut self, reference_picture_format: Format) -> Self {
+        self.inner.reference_picture_format = reference_picture_format;
+        self
+    }
+    #[inline]
+    pub fn max_dpb_slots(mut self, max_dpb_slots: u32) -> Self {
+        self.inner.max_dpb_slots = max_dpb_slots;
+        self
+    }
+    #[inline]
+    pub fn max_active_reference_pictures(mut self, max_active_reference_pictures: u32) -> Self {
+        self.inner.max_active_reference_pictures = max_active_reference_pictures;
+        self
+    }
+    #[inline]
+    pub fn std_header_version(mut self, std_header_version: &'a ExtensionProperties) -> Self {
+        self.inner.p_std_header_version = std_header_version;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> VideoSessionCreateInfoKHR {
+        self.inner
+    }
+}
+// FFI mirror of VkVideoSessionParametersCreateInfoKHR. This is a chain-root
+// struct: it declares its own Extends* trait below and exposes push_next.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoSessionParametersCreateInfoKHR.html>"]
+pub struct VideoSessionParametersCreateInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub flags: VideoSessionParametersCreateFlagsKHR,
+    pub video_session_parameters_template: VideoSessionParametersKHR,
+    pub video_session: VideoSessionKHR,
+}
+// Default: s_type pre-set to this struct's tag, handles/flags zeroed, p_next null.
+impl ::std::default::Default for VideoSessionParametersCreateInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            flags: VideoSessionParametersCreateFlagsKHR::default(),
+            video_session_parameters_template: VideoSessionParametersKHR::default(),
+            video_session: VideoSessionKHR::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for VideoSessionParametersCreateInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR;
+}
+impl VideoSessionParametersCreateInfoKHR {
+    pub fn builder<'a>() -> VideoSessionParametersCreateInfoKHRBuilder<'a> {
+        VideoSessionParametersCreateInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct VideoSessionParametersCreateInfoKHRBuilder<'a> {
+    inner: VideoSessionParametersCreateInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait implemented by structs allowed to extend this struct's p_next
+// chain; `unsafe` because implementors vouch for a chain-compatible layout
+// (s_type + p_next header).
+pub unsafe trait ExtendsVideoSessionParametersCreateInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoSessionParametersCreateInfoKHRBuilder<'a> {
+    type Target = VideoSessionParametersCreateInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for VideoSessionParametersCreateInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> VideoSessionParametersCreateInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: VideoSessionParametersCreateFlagsKHR) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn video_session_parameters_template(
+        mut self,
+        video_session_parameters_template: VideoSessionParametersKHR,
+    ) -> Self {
+        self.inner.video_session_parameters_template = video_session_parameters_template;
+        self
+    }
+    #[inline]
+    pub fn video_session(mut self, video_session: VideoSessionKHR) -> Self {
+        self.inner.video_session = video_session;
+        self
+    }
+    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+    #[doc = r" valid extension structs can be pushed into the chain."]
+    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+    #[doc = r" chain will look like `A -> D -> B -> C`."]
+    pub fn push_next<T: ExtendsVideoSessionParametersCreateInfoKHR>(
+        mut self,
+        next: &'a mut T,
+    ) -> Self {
+        unsafe {
+            let next_ptr = <*const T>::cast(next);
+            // Walk to the tail of `next`'s own p_next chain so an already-chained
+            // extension is spliced in whole, then prepend it to our chain.
+            let last_next = ptr_chain_iter(next).last().unwrap();
+            (*last_next).p_next = self.inner.p_next as _;
+            self.inner.p_next = next_ptr;
+        }
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> VideoSessionParametersCreateInfoKHR {
+        self.inner
+    }
+}
+// FFI mirror of VkVideoSessionParametersUpdateInfoKHR. Chain-root struct:
+// declares its own Extends* trait below and exposes push_next.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoSessionParametersUpdateInfoKHR.html>"]
+pub struct VideoSessionParametersUpdateInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub update_sequence_count: u32,
+}
+// Default: s_type pre-set to this struct's tag, count zero, p_next null.
+impl ::std::default::Default for VideoSessionParametersUpdateInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            update_sequence_count: u32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for VideoSessionParametersUpdateInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR;
+}
+impl VideoSessionParametersUpdateInfoKHR {
+    pub fn builder<'a>() -> VideoSessionParametersUpdateInfoKHRBuilder<'a> {
+        VideoSessionParametersUpdateInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct VideoSessionParametersUpdateInfoKHRBuilder<'a> {
+    inner: VideoSessionParametersUpdateInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait for structs allowed to extend this struct's p_next chain;
+// `unsafe` because implementors vouch for a chain-compatible layout.
+pub unsafe trait ExtendsVideoSessionParametersUpdateInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoSessionParametersUpdateInfoKHRBuilder<'a> {
+    type Target = VideoSessionParametersUpdateInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for VideoSessionParametersUpdateInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> VideoSessionParametersUpdateInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn update_sequence_count(mut self, update_sequence_count: u32) -> Self {
+        self.inner.update_sequence_count = update_sequence_count;
+        self
+    }
+    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+    #[doc = r" valid extension structs can be pushed into the chain."]
+    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+    #[doc = r" chain will look like `A -> D -> B -> C`."]
+    pub fn push_next<T: ExtendsVideoSessionParametersUpdateInfoKHR>(
+        mut self,
+        next: &'a mut T,
+    ) -> Self {
+        unsafe {
+            let next_ptr = <*const T>::cast(next);
+            // Walk to the tail of `next`'s own p_next chain so an already-chained
+            // extension is spliced in whole, then prepend it to our chain.
+            let last_next = ptr_chain_iter(next).last().unwrap();
+            (*last_next).p_next = self.inner.p_next as _;
+            self.inner.p_next = next_ptr;
+        }
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> VideoSessionParametersUpdateInfoKHR {
+        self.inner
+    }
+}
+// FFI mirror of VkVideoBeginCodingInfoKHR: session handles plus a
+// (count, pointer) array of reference slots for vkCmdBeginVideoCodingKHR.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoBeginCodingInfoKHR.html>"]
+pub struct VideoBeginCodingInfoKHR {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub flags: VideoBeginCodingFlagsKHR,
+    pub video_session: VideoSessionKHR,
+    pub video_session_parameters: VideoSessionParametersKHR,
+    pub reference_slot_count: u32,
+    pub p_reference_slots: *const VideoReferenceSlotInfoKHR,
+}
+// Default: s_type pre-set to this struct's tag, all other fields zeroed/null.
+impl ::std::default::Default for VideoBeginCodingInfoKHR {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            flags: VideoBeginCodingFlagsKHR::default(),
+            video_session: VideoSessionKHR::default(),
+            video_session_parameters: VideoSessionParametersKHR::default(),
+            reference_slot_count: u32::default(),
+            p_reference_slots: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for VideoBeginCodingInfoKHR {
+    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_BEGIN_CODING_INFO_KHR;
+}
+impl VideoBeginCodingInfoKHR {
+    pub fn builder<'a>() -> VideoBeginCodingInfoKHRBuilder<'a> {
+        VideoBeginCodingInfoKHRBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Transparent wrapper; 'a ties the stored reference-slot pointer to its owner.
+#[repr(transparent)]
+pub struct VideoBeginCodingInfoKHRBuilder<'a> {
+    inner: VideoBeginCodingInfoKHR,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for VideoBeginCodingInfoKHRBuilder<'a> {
+    type Target = VideoBeginCodingInfoKHR;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for VideoBeginCodingInfoKHRBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> VideoBeginCodingInfoKHRBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: VideoBeginCodingFlagsKHR) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn video_session(mut self, video_session: VideoSessionKHR) -> Self {
+        self.inner.video_session = video_session;
+        self
+    }
+    #[inline]
+    pub fn video_session_parameters(
+        mut self,
+        video_session_parameters: VideoSessionParametersKHR,
+    ) -> Self {
+        self.inner.video_session_parameters = video_session_parameters;
+        self
+    }
+    // Records both count and pointer from one slice; `len() as _` narrows
+    // usize to the u32 count the C API expects.
+    #[inline]
+    pub fn reference_slots(mut self, reference_slots: &'a [VideoReferenceSlotInfoKHR]) -> Self {
+        self.inner.reference_slot_count = reference_slots.len() as _;
+        self.inner.p_reference_slots = reference_slots.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> VideoBeginCodingInfoKHR {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEndCodingInfoKHR.html>"]
+pub struct VideoEndCodingInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: VideoEndCodingFlagsKHR,
+}
+impl ::std::default::Default for VideoEndCodingInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: VideoEndCodingFlagsKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoEndCodingInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_END_CODING_INFO_KHR;
+}
+impl VideoEndCodingInfoKHR {
+ pub fn builder<'a>() -> VideoEndCodingInfoKHRBuilder<'a> {
+ VideoEndCodingInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoEndCodingInfoKHRBuilder<'a> {
+ inner: VideoEndCodingInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for VideoEndCodingInfoKHRBuilder<'a> {
+ type Target = VideoEndCodingInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoEndCodingInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoEndCodingInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: VideoEndCodingFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoEndCodingInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoCodingControlInfoKHR.html>"]
+pub struct VideoCodingControlInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: VideoCodingControlFlagsKHR,
+}
+impl ::std::default::Default for VideoCodingControlInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: VideoCodingControlFlagsKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoCodingControlInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_CODING_CONTROL_INFO_KHR;
+}
+impl VideoCodingControlInfoKHR {
+ pub fn builder<'a>() -> VideoCodingControlInfoKHRBuilder<'a> {
+ VideoCodingControlInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoCodingControlInfoKHRBuilder<'a> {
+ inner: VideoCodingControlInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsVideoCodingControlInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoCodingControlInfoKHRBuilder<'a> {
+ type Target = VideoCodingControlInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoCodingControlInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoCodingControlInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: VideoCodingControlFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsVideoCodingControlInfoKHR>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoCodingControlInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeUsageInfoKHR.html>"]
+pub struct VideoEncodeUsageInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub video_usage_hints: VideoEncodeUsageFlagsKHR,
+ pub video_content_hints: VideoEncodeContentFlagsKHR,
+ pub tuning_mode: VideoEncodeTuningModeKHR,
+}
+impl ::std::default::Default for VideoEncodeUsageInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ video_usage_hints: VideoEncodeUsageFlagsKHR::default(),
+ video_content_hints: VideoEncodeContentFlagsKHR::default(),
+ tuning_mode: VideoEncodeTuningModeKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoEncodeUsageInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_USAGE_INFO_KHR;
+}
+impl VideoEncodeUsageInfoKHR {
+ pub fn builder<'a>() -> VideoEncodeUsageInfoKHRBuilder<'a> {
+ VideoEncodeUsageInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoEncodeUsageInfoKHRBuilder<'a> {
+ inner: VideoEncodeUsageInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsVideoProfileInfoKHR for VideoEncodeUsageInfoKHRBuilder<'_> {}
+unsafe impl ExtendsVideoProfileInfoKHR for VideoEncodeUsageInfoKHR {}
+unsafe impl ExtendsQueryPoolCreateInfo for VideoEncodeUsageInfoKHRBuilder<'_> {}
+unsafe impl ExtendsQueryPoolCreateInfo for VideoEncodeUsageInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoEncodeUsageInfoKHRBuilder<'a> {
+ type Target = VideoEncodeUsageInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoEncodeUsageInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoEncodeUsageInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn video_usage_hints(mut self, video_usage_hints: VideoEncodeUsageFlagsKHR) -> Self {
+ self.inner.video_usage_hints = video_usage_hints;
+ self
+ }
+ #[inline]
+ pub fn video_content_hints(mut self, video_content_hints: VideoEncodeContentFlagsKHR) -> Self {
+ self.inner.video_content_hints = video_content_hints;
+ self
+ }
+ #[inline]
+ pub fn tuning_mode(mut self, tuning_mode: VideoEncodeTuningModeKHR) -> Self {
+ self.inner.tuning_mode = tuning_mode;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoEncodeUsageInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeInfoKHR.html>"]
+pub struct VideoEncodeInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: VideoEncodeFlagsKHR,
+ pub quality_level: u32,
+ pub dst_bitstream_buffer: Buffer,
+ pub dst_bitstream_buffer_offset: DeviceSize,
+ pub dst_bitstream_buffer_max_range: DeviceSize,
+ pub src_picture_resource: VideoPictureResourceInfoKHR,
+ pub p_setup_reference_slot: *const VideoReferenceSlotInfoKHR,
+ pub reference_slot_count: u32,
+ pub p_reference_slots: *const VideoReferenceSlotInfoKHR,
+ pub preceding_externally_encoded_bytes: u32,
+}
+impl ::std::default::Default for VideoEncodeInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: VideoEncodeFlagsKHR::default(),
+ quality_level: u32::default(),
+ dst_bitstream_buffer: Buffer::default(),
+ dst_bitstream_buffer_offset: DeviceSize::default(),
+ dst_bitstream_buffer_max_range: DeviceSize::default(),
+ src_picture_resource: VideoPictureResourceInfoKHR::default(),
+ p_setup_reference_slot: ::std::ptr::null(),
+ reference_slot_count: u32::default(),
+ p_reference_slots: ::std::ptr::null(),
+ preceding_externally_encoded_bytes: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoEncodeInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_INFO_KHR;
+}
+impl VideoEncodeInfoKHR {
+ pub fn builder<'a>() -> VideoEncodeInfoKHRBuilder<'a> {
+ VideoEncodeInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoEncodeInfoKHRBuilder<'a> {
+ inner: VideoEncodeInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsVideoEncodeInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoEncodeInfoKHRBuilder<'a> {
+ type Target = VideoEncodeInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoEncodeInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoEncodeInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: VideoEncodeFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn quality_level(mut self, quality_level: u32) -> Self {
+ self.inner.quality_level = quality_level;
+ self
+ }
+ #[inline]
+ pub fn dst_bitstream_buffer(mut self, dst_bitstream_buffer: Buffer) -> Self {
+ self.inner.dst_bitstream_buffer = dst_bitstream_buffer;
+ self
+ }
+ #[inline]
+ pub fn dst_bitstream_buffer_offset(mut self, dst_bitstream_buffer_offset: DeviceSize) -> Self {
+ self.inner.dst_bitstream_buffer_offset = dst_bitstream_buffer_offset;
+ self
+ }
+ #[inline]
+ pub fn dst_bitstream_buffer_max_range(
+ mut self,
+ dst_bitstream_buffer_max_range: DeviceSize,
+ ) -> Self {
+ self.inner.dst_bitstream_buffer_max_range = dst_bitstream_buffer_max_range;
+ self
+ }
+ #[inline]
+ pub fn src_picture_resource(
+ mut self,
+ src_picture_resource: VideoPictureResourceInfoKHR,
+ ) -> Self {
+ self.inner.src_picture_resource = src_picture_resource;
+ self
+ }
+ #[inline]
+ pub fn setup_reference_slot(
+ mut self,
+ setup_reference_slot: &'a VideoReferenceSlotInfoKHR,
+ ) -> Self {
+ self.inner.p_setup_reference_slot = setup_reference_slot;
+ self
+ }
+ #[inline]
+ pub fn reference_slots(mut self, reference_slots: &'a [VideoReferenceSlotInfoKHR]) -> Self {
+ self.inner.reference_slot_count = reference_slots.len() as _;
+ self.inner.p_reference_slots = reference_slots.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn preceding_externally_encoded_bytes(
+ mut self,
+ preceding_externally_encoded_bytes: u32,
+ ) -> Self {
+ self.inner.preceding_externally_encoded_bytes = preceding_externally_encoded_bytes;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsVideoEncodeInfoKHR>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoEncodeInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeRateControlInfoKHR.html>"]
+pub struct VideoEncodeRateControlInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: VideoEncodeRateControlFlagsKHR,
+ pub rate_control_mode: VideoEncodeRateControlModeFlagsKHR,
+ pub layer_count: u8,
+ pub p_layer_configs: *const VideoEncodeRateControlLayerInfoKHR,
+}
+impl ::std::default::Default for VideoEncodeRateControlInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: VideoEncodeRateControlFlagsKHR::default(),
+ rate_control_mode: VideoEncodeRateControlModeFlagsKHR::default(),
+ layer_count: u8::default(),
+ p_layer_configs: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoEncodeRateControlInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_RATE_CONTROL_INFO_KHR;
+}
+impl VideoEncodeRateControlInfoKHR {
+ pub fn builder<'a>() -> VideoEncodeRateControlInfoKHRBuilder<'a> {
+ VideoEncodeRateControlInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoEncodeRateControlInfoKHRBuilder<'a> {
+ inner: VideoEncodeRateControlInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeRateControlInfoKHRBuilder<'_> {}
+unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeRateControlInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoEncodeRateControlInfoKHRBuilder<'a> {
+ type Target = VideoEncodeRateControlInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoEncodeRateControlInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoEncodeRateControlInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: VideoEncodeRateControlFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn rate_control_mode(
+ mut self,
+ rate_control_mode: VideoEncodeRateControlModeFlagsKHR,
+ ) -> Self {
+ self.inner.rate_control_mode = rate_control_mode;
+ self
+ }
+ #[inline]
+ pub fn layer_configs(
+ mut self,
+ layer_configs: &'a [VideoEncodeRateControlLayerInfoKHR],
+ ) -> Self {
+ self.inner.layer_count = layer_configs.len() as _;
+ self.inner.p_layer_configs = layer_configs.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoEncodeRateControlInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeRateControlLayerInfoKHR.html>"]
+pub struct VideoEncodeRateControlLayerInfoKHR {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub average_bitrate: u32,
+ pub max_bitrate: u32,
+ pub frame_rate_numerator: u32,
+ pub frame_rate_denominator: u32,
+ pub virtual_buffer_size_in_ms: u32,
+ pub initial_virtual_buffer_size_in_ms: u32,
+}
+impl ::std::default::Default for VideoEncodeRateControlLayerInfoKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ average_bitrate: u32::default(),
+ max_bitrate: u32::default(),
+ frame_rate_numerator: u32::default(),
+ frame_rate_denominator: u32::default(),
+ virtual_buffer_size_in_ms: u32::default(),
+ initial_virtual_buffer_size_in_ms: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoEncodeRateControlLayerInfoKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR;
+}
+impl VideoEncodeRateControlLayerInfoKHR {
+ pub fn builder<'a>() -> VideoEncodeRateControlLayerInfoKHRBuilder<'a> {
+ VideoEncodeRateControlLayerInfoKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoEncodeRateControlLayerInfoKHRBuilder<'a> {
+ inner: VideoEncodeRateControlLayerInfoKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeRateControlLayerInfoKHRBuilder<'_> {}
+unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeRateControlLayerInfoKHR {}
+pub unsafe trait ExtendsVideoEncodeRateControlLayerInfoKHR {}
+impl<'a> ::std::ops::Deref for VideoEncodeRateControlLayerInfoKHRBuilder<'a> {
+ type Target = VideoEncodeRateControlLayerInfoKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoEncodeRateControlLayerInfoKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoEncodeRateControlLayerInfoKHRBuilder<'a> {
+ #[inline]
+ pub fn average_bitrate(mut self, average_bitrate: u32) -> Self {
+ self.inner.average_bitrate = average_bitrate;
+ self
+ }
+ #[inline]
+ pub fn max_bitrate(mut self, max_bitrate: u32) -> Self {
+ self.inner.max_bitrate = max_bitrate;
+ self
+ }
+ #[inline]
+ pub fn frame_rate_numerator(mut self, frame_rate_numerator: u32) -> Self {
+ self.inner.frame_rate_numerator = frame_rate_numerator;
+ self
+ }
+ #[inline]
+ pub fn frame_rate_denominator(mut self, frame_rate_denominator: u32) -> Self {
+ self.inner.frame_rate_denominator = frame_rate_denominator;
+ self
+ }
+ #[inline]
+ pub fn virtual_buffer_size_in_ms(mut self, virtual_buffer_size_in_ms: u32) -> Self {
+ self.inner.virtual_buffer_size_in_ms = virtual_buffer_size_in_ms;
+ self
+ }
+ #[inline]
+ pub fn initial_virtual_buffer_size_in_ms(
+ mut self,
+ initial_virtual_buffer_size_in_ms: u32,
+ ) -> Self {
+ self.inner.initial_virtual_buffer_size_in_ms = initial_virtual_buffer_size_in_ms;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsVideoEncodeRateControlLayerInfoKHR>(
+ mut self,
+ next: &'a mut T,
+ ) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoEncodeRateControlLayerInfoKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeCapabilitiesKHR.html>"]
+pub struct VideoEncodeCapabilitiesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub flags: VideoEncodeCapabilityFlagsKHR,
+ pub rate_control_modes: VideoEncodeRateControlModeFlagsKHR,
+ pub rate_control_layer_count: u8,
+ pub quality_level_count: u8,
+ pub input_image_data_fill_alignment: Extent2D,
+}
+impl ::std::default::Default for VideoEncodeCapabilitiesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ flags: VideoEncodeCapabilityFlagsKHR::default(),
+ rate_control_modes: VideoEncodeRateControlModeFlagsKHR::default(),
+ rate_control_layer_count: u8::default(),
+ quality_level_count: u8::default(),
+ input_image_data_fill_alignment: Extent2D::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoEncodeCapabilitiesKHR {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_CAPABILITIES_KHR;
+}
+impl VideoEncodeCapabilitiesKHR {
+ pub fn builder<'a>() -> VideoEncodeCapabilitiesKHRBuilder<'a> {
+ VideoEncodeCapabilitiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoEncodeCapabilitiesKHRBuilder<'a> {
+ inner: VideoEncodeCapabilitiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsVideoCapabilitiesKHR for VideoEncodeCapabilitiesKHRBuilder<'_> {}
+unsafe impl ExtendsVideoCapabilitiesKHR for VideoEncodeCapabilitiesKHR {}
+impl<'a> ::std::ops::Deref for VideoEncodeCapabilitiesKHRBuilder<'a> {
+ type Target = VideoEncodeCapabilitiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoEncodeCapabilitiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoEncodeCapabilitiesKHRBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: VideoEncodeCapabilityFlagsKHR) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn rate_control_modes(
+ mut self,
+ rate_control_modes: VideoEncodeRateControlModeFlagsKHR,
+ ) -> Self {
+ self.inner.rate_control_modes = rate_control_modes;
+ self
+ }
+ #[inline]
+ pub fn rate_control_layer_count(mut self, rate_control_layer_count: u8) -> Self {
+ self.inner.rate_control_layer_count = rate_control_layer_count;
+ self
+ }
+ #[inline]
+ pub fn quality_level_count(mut self, quality_level_count: u8) -> Self {
+ self.inner.quality_level_count = quality_level_count;
+ self
+ }
+ #[inline]
+ pub fn input_image_data_fill_alignment(
+ mut self,
+ input_image_data_fill_alignment: Extent2D,
+ ) -> Self {
+ self.inner.input_image_data_fill_alignment = input_image_data_fill_alignment;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoEncodeCapabilitiesKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264CapabilitiesEXT.html>"]
+pub struct VideoEncodeH264CapabilitiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub flags: VideoEncodeH264CapabilityFlagsEXT,
+ pub input_mode_flags: VideoEncodeH264InputModeFlagsEXT,
+ pub output_mode_flags: VideoEncodeH264OutputModeFlagsEXT,
+ pub max_p_picture_l0_reference_count: u8,
+ pub max_b_picture_l0_reference_count: u8,
+ pub max_l1_reference_count: u8,
+ pub motion_vectors_over_pic_boundaries_flag: Bool32,
+ pub max_bytes_per_pic_denom: u32,
+ pub max_bits_per_mb_denom: u32,
+ pub log2_max_mv_length_horizontal: u32,
+ pub log2_max_mv_length_vertical: u32,
+}
+impl ::std::default::Default for VideoEncodeH264CapabilitiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ flags: VideoEncodeH264CapabilityFlagsEXT::default(),
+ input_mode_flags: VideoEncodeH264InputModeFlagsEXT::default(),
+ output_mode_flags: VideoEncodeH264OutputModeFlagsEXT::default(),
+ max_p_picture_l0_reference_count: u8::default(),
+ max_b_picture_l0_reference_count: u8::default(),
+ max_l1_reference_count: u8::default(),
+ motion_vectors_over_pic_boundaries_flag: Bool32::default(),
+ max_bytes_per_pic_denom: u32::default(),
+ max_bits_per_mb_denom: u32::default(),
+ log2_max_mv_length_horizontal: u32::default(),
+ log2_max_mv_length_vertical: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoEncodeH264CapabilitiesEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_CAPABILITIES_EXT;
+}
+impl VideoEncodeH264CapabilitiesEXT {
+ pub fn builder<'a>() -> VideoEncodeH264CapabilitiesEXTBuilder<'a> {
+ VideoEncodeH264CapabilitiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoEncodeH264CapabilitiesEXTBuilder<'a> {
+ inner: VideoEncodeH264CapabilitiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsVideoCapabilitiesKHR for VideoEncodeH264CapabilitiesEXTBuilder<'_> {}
+unsafe impl ExtendsVideoCapabilitiesKHR for VideoEncodeH264CapabilitiesEXT {}
+impl<'a> ::std::ops::Deref for VideoEncodeH264CapabilitiesEXTBuilder<'a> {
+ type Target = VideoEncodeH264CapabilitiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoEncodeH264CapabilitiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoEncodeH264CapabilitiesEXTBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: VideoEncodeH264CapabilityFlagsEXT) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn input_mode_flags(mut self, input_mode_flags: VideoEncodeH264InputModeFlagsEXT) -> Self {
+ self.inner.input_mode_flags = input_mode_flags;
+ self
+ }
+ #[inline]
+ pub fn output_mode_flags(
+ mut self,
+ output_mode_flags: VideoEncodeH264OutputModeFlagsEXT,
+ ) -> Self {
+ self.inner.output_mode_flags = output_mode_flags;
+ self
+ }
+ #[inline]
+ pub fn max_p_picture_l0_reference_count(
+ mut self,
+ max_p_picture_l0_reference_count: u8,
+ ) -> Self {
+ self.inner.max_p_picture_l0_reference_count = max_p_picture_l0_reference_count;
+ self
+ }
+ #[inline]
+ pub fn max_b_picture_l0_reference_count(
+ mut self,
+ max_b_picture_l0_reference_count: u8,
+ ) -> Self {
+ self.inner.max_b_picture_l0_reference_count = max_b_picture_l0_reference_count;
+ self
+ }
+ #[inline]
+ pub fn max_l1_reference_count(mut self, max_l1_reference_count: u8) -> Self {
+ self.inner.max_l1_reference_count = max_l1_reference_count;
+ self
+ }
+ #[inline]
+ pub fn motion_vectors_over_pic_boundaries_flag(
+ mut self,
+ motion_vectors_over_pic_boundaries_flag: bool,
+ ) -> Self {
+ self.inner.motion_vectors_over_pic_boundaries_flag =
+ motion_vectors_over_pic_boundaries_flag.into();
+ self
+ }
+ #[inline]
+ pub fn max_bytes_per_pic_denom(mut self, max_bytes_per_pic_denom: u32) -> Self {
+ self.inner.max_bytes_per_pic_denom = max_bytes_per_pic_denom;
+ self
+ }
+ #[inline]
+ pub fn max_bits_per_mb_denom(mut self, max_bits_per_mb_denom: u32) -> Self {
+ self.inner.max_bits_per_mb_denom = max_bits_per_mb_denom;
+ self
+ }
+ #[inline]
+ pub fn log2_max_mv_length_horizontal(mut self, log2_max_mv_length_horizontal: u32) -> Self {
+ self.inner.log2_max_mv_length_horizontal = log2_max_mv_length_horizontal;
+ self
+ }
+ #[inline]
+ pub fn log2_max_mv_length_vertical(mut self, log2_max_mv_length_vertical: u32) -> Self {
+ self.inner.log2_max_mv_length_vertical = log2_max_mv_length_vertical;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoEncodeH264CapabilitiesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264SessionParametersAddInfoEXT.html>"]
+pub struct VideoEncodeH264SessionParametersAddInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub std_sps_count: u32,
+ pub p_std_sp_ss: *const StdVideoH264SequenceParameterSet,
+ pub std_pps_count: u32,
+ pub p_std_pp_ss: *const StdVideoH264PictureParameterSet,
+}
+impl ::std::default::Default for VideoEncodeH264SessionParametersAddInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ std_sps_count: u32::default(),
+ p_std_sp_ss: ::std::ptr::null(),
+ std_pps_count: u32::default(),
+ p_std_pp_ss: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for VideoEncodeH264SessionParametersAddInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT;
+}
+impl VideoEncodeH264SessionParametersAddInfoEXT {
+ pub fn builder<'a>() -> VideoEncodeH264SessionParametersAddInfoEXTBuilder<'a> {
+ VideoEncodeH264SessionParametersAddInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct VideoEncodeH264SessionParametersAddInfoEXTBuilder<'a> {
+ inner: VideoEncodeH264SessionParametersAddInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsVideoSessionParametersUpdateInfoKHR
+ for VideoEncodeH264SessionParametersAddInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsVideoSessionParametersUpdateInfoKHR
+ for VideoEncodeH264SessionParametersAddInfoEXT
+{
+}
+impl<'a> ::std::ops::Deref for VideoEncodeH264SessionParametersAddInfoEXTBuilder<'a> {
+ type Target = VideoEncodeH264SessionParametersAddInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for VideoEncodeH264SessionParametersAddInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> VideoEncodeH264SessionParametersAddInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn std_sp_ss(mut self, std_sp_ss: &'a [StdVideoH264SequenceParameterSet]) -> Self {
+ self.inner.std_sps_count = std_sp_ss.len() as _;
+ self.inner.p_std_sp_ss = std_sp_ss.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn std_pp_ss(mut self, std_pp_ss: &'a [StdVideoH264PictureParameterSet]) -> Self {
+ self.inner.std_pps_count = std_pp_ss.len() as _;
+ self.inner.p_std_pp_ss = std_pp_ss.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoEncodeH264SessionParametersAddInfoEXT {
+ self.inner
+ }
+}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264SessionParametersCreateInfoEXT.html>"]
// Generated FFI binding; `#[repr(C)]` field order must match the C struct.
pub struct VideoEncodeH264SessionParametersCreateInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub max_std_sps_count: u32,
    pub max_std_pps_count: u32,
    pub p_parameters_add_info: *const VideoEncodeH264SessionParametersAddInfoEXT,
}
impl ::std::default::Default for VideoEncodeH264SessionParametersCreateInfoEXT {
    #[inline]
    fn default() -> Self {
        // `s_type` is pre-set to this struct's tag; pointers start out null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            max_std_sps_count: u32::default(),
            max_std_pps_count: u32::default(),
            p_parameters_add_info: ::std::ptr::null(),
        }
    }
}
// Ties the struct to the `StructureType` constant consumed by `Default` above.
unsafe impl TaggedStructure for VideoEncodeH264SessionParametersCreateInfoEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT;
}
impl VideoEncodeH264SessionParametersCreateInfoEXT {
    /// Returns a builder over a `Default`-initialized struct.
    pub fn builder<'a>() -> VideoEncodeH264SessionParametersCreateInfoEXTBuilder<'a> {
        VideoEncodeH264SessionParametersCreateInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
// `'a` ties the builder to any borrowed data stored behind raw pointers.
pub struct VideoEncodeH264SessionParametersCreateInfoEXTBuilder<'a> {
    inner: VideoEncodeH264SessionParametersCreateInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct may be chained into the `p_next` chain of
// `VideoSessionParametersCreateInfoKHR`.
unsafe impl ExtendsVideoSessionParametersCreateInfoKHR
    for VideoEncodeH264SessionParametersCreateInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsVideoSessionParametersCreateInfoKHR
    for VideoEncodeH264SessionParametersCreateInfoEXT
{
}
impl<'a> ::std::ops::Deref for VideoEncodeH264SessionParametersCreateInfoEXTBuilder<'a> {
    type Target = VideoEncodeH264SessionParametersCreateInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH264SessionParametersCreateInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> VideoEncodeH264SessionParametersCreateInfoEXTBuilder<'a> {
    #[inline]
    pub fn max_std_sps_count(mut self, max_std_sps_count: u32) -> Self {
        self.inner.max_std_sps_count = max_std_sps_count;
        self
    }
    #[inline]
    pub fn max_std_pps_count(mut self, max_std_pps_count: u32) -> Self {
        self.inner.max_std_pps_count = max_std_pps_count;
        self
    }
    // Stores a raw pointer to the borrowed value; the `'a` borrow keeps the
    // data alive for the builder's lifetime.
    #[inline]
    pub fn parameters_add_info(
        mut self,
        parameters_add_info: &'a VideoEncodeH264SessionParametersAddInfoEXT,
    ) -> Self {
        self.inner.p_parameters_add_info = parameters_add_info;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH264SessionParametersCreateInfoEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264DpbSlotInfoEXT.html>"]
// Generated FFI binding; `#[repr(C)]` field order must match the C struct.
pub struct VideoEncodeH264DpbSlotInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    // Signed on purpose (matches the C `int8_t` in the extension header).
    pub slot_index: i8,
    pub p_std_reference_info: *const StdVideoEncodeH264ReferenceInfo,
}
impl ::std::default::Default for VideoEncodeH264DpbSlotInfoEXT {
    #[inline]
    fn default() -> Self {
        // `s_type` is pre-set to this struct's tag; pointers start out null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            slot_index: i8::default(),
            p_std_reference_info: ::std::ptr::null(),
        }
    }
}
unsafe impl TaggedStructure for VideoEncodeH264DpbSlotInfoEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT;
}
impl VideoEncodeH264DpbSlotInfoEXT {
    /// Returns a builder over a `Default`-initialized struct.
    pub fn builder<'a>() -> VideoEncodeH264DpbSlotInfoEXTBuilder<'a> {
        VideoEncodeH264DpbSlotInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct VideoEncodeH264DpbSlotInfoEXTBuilder<'a> {
    inner: VideoEncodeH264DpbSlotInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for VideoEncodeH264DpbSlotInfoEXTBuilder<'a> {
    type Target = VideoEncodeH264DpbSlotInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH264DpbSlotInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> VideoEncodeH264DpbSlotInfoEXTBuilder<'a> {
    #[inline]
    pub fn slot_index(mut self, slot_index: i8) -> Self {
        self.inner.slot_index = slot_index;
        self
    }
    // Stores a raw pointer to the borrowed value; the `'a` borrow keeps the
    // data alive for the builder's lifetime.
    #[inline]
    pub fn std_reference_info(
        mut self,
        std_reference_info: &'a StdVideoEncodeH264ReferenceInfo,
    ) -> Self {
        self.inner.p_std_reference_info = std_reference_info;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH264DpbSlotInfoEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264VclFrameInfoEXT.html>"]
// Generated FFI binding; `#[repr(C)]` field order must match the C struct.
pub struct VideoEncodeH264VclFrameInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub p_reference_final_lists: *const VideoEncodeH264ReferenceListsInfoEXT,
    pub nalu_slice_entry_count: u32,
    pub p_nalu_slice_entries: *const VideoEncodeH264NaluSliceInfoEXT,
    pub p_current_picture_info: *const StdVideoEncodeH264PictureInfo,
}
impl ::std::default::Default for VideoEncodeH264VclFrameInfoEXT {
    #[inline]
    fn default() -> Self {
        // `s_type` is pre-set to this struct's tag; pointers start out null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            p_reference_final_lists: ::std::ptr::null(),
            nalu_slice_entry_count: u32::default(),
            p_nalu_slice_entries: ::std::ptr::null(),
            p_current_picture_info: ::std::ptr::null(),
        }
    }
}
unsafe impl TaggedStructure for VideoEncodeH264VclFrameInfoEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT;
}
impl VideoEncodeH264VclFrameInfoEXT {
    /// Returns a builder over a `Default`-initialized struct.
    pub fn builder<'a>() -> VideoEncodeH264VclFrameInfoEXTBuilder<'a> {
        VideoEncodeH264VclFrameInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct VideoEncodeH264VclFrameInfoEXTBuilder<'a> {
    inner: VideoEncodeH264VclFrameInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct may be chained into `VideoEncodeInfoKHR::p_next`.
unsafe impl ExtendsVideoEncodeInfoKHR for VideoEncodeH264VclFrameInfoEXTBuilder<'_> {}
unsafe impl ExtendsVideoEncodeInfoKHR for VideoEncodeH264VclFrameInfoEXT {}
impl<'a> ::std::ops::Deref for VideoEncodeH264VclFrameInfoEXTBuilder<'a> {
    type Target = VideoEncodeH264VclFrameInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH264VclFrameInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> VideoEncodeH264VclFrameInfoEXTBuilder<'a> {
    // Stores a raw pointer to the borrowed value; the `'a` borrow keeps the
    // data alive for the builder's lifetime.
    #[inline]
    pub fn reference_final_lists(
        mut self,
        reference_final_lists: &'a VideoEncodeH264ReferenceListsInfoEXT,
    ) -> Self {
        self.inner.p_reference_final_lists = reference_final_lists;
        self
    }
    // Records both the slice's length and its raw pointer.
    #[inline]
    pub fn nalu_slice_entries(
        mut self,
        nalu_slice_entries: &'a [VideoEncodeH264NaluSliceInfoEXT],
    ) -> Self {
        self.inner.nalu_slice_entry_count = nalu_slice_entries.len() as _;
        self.inner.p_nalu_slice_entries = nalu_slice_entries.as_ptr();
        self
    }
    #[inline]
    pub fn current_picture_info(
        mut self,
        current_picture_info: &'a StdVideoEncodeH264PictureInfo,
    ) -> Self {
        self.inner.p_current_picture_info = current_picture_info;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH264VclFrameInfoEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264ReferenceListsInfoEXT.html>"]
// Generated FFI binding; `#[repr(C)]` field order must match the C struct.
// Note the entry counts are `u8` (C `uint8_t`), not `u32`.
pub struct VideoEncodeH264ReferenceListsInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub reference_list0_entry_count: u8,
    pub p_reference_list0_entries: *const VideoEncodeH264DpbSlotInfoEXT,
    pub reference_list1_entry_count: u8,
    pub p_reference_list1_entries: *const VideoEncodeH264DpbSlotInfoEXT,
    pub p_mem_mgmt_ctrl_operations: *const StdVideoEncodeH264RefMemMgmtCtrlOperations,
}
impl ::std::default::Default for VideoEncodeH264ReferenceListsInfoEXT {
    #[inline]
    fn default() -> Self {
        // `s_type` is pre-set to this struct's tag; pointers start out null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            reference_list0_entry_count: u8::default(),
            p_reference_list0_entries: ::std::ptr::null(),
            reference_list1_entry_count: u8::default(),
            p_reference_list1_entries: ::std::ptr::null(),
            p_mem_mgmt_ctrl_operations: ::std::ptr::null(),
        }
    }
}
unsafe impl TaggedStructure for VideoEncodeH264ReferenceListsInfoEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_REFERENCE_LISTS_INFO_EXT;
}
impl VideoEncodeH264ReferenceListsInfoEXT {
    /// Returns a builder over a `Default`-initialized struct.
    pub fn builder<'a>() -> VideoEncodeH264ReferenceListsInfoEXTBuilder<'a> {
        VideoEncodeH264ReferenceListsInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct VideoEncodeH264ReferenceListsInfoEXTBuilder<'a> {
    inner: VideoEncodeH264ReferenceListsInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for VideoEncodeH264ReferenceListsInfoEXTBuilder<'a> {
    type Target = VideoEncodeH264ReferenceListsInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH264ReferenceListsInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> VideoEncodeH264ReferenceListsInfoEXTBuilder<'a> {
    // NOTE(review): `len() as _` narrows `usize` to the `u8` count field and
    // silently truncates slices longer than 255 entries; generator-wide
    // behavior, kept as-is.
    #[inline]
    pub fn reference_list0_entries(
        mut self,
        reference_list0_entries: &'a [VideoEncodeH264DpbSlotInfoEXT],
    ) -> Self {
        self.inner.reference_list0_entry_count = reference_list0_entries.len() as _;
        self.inner.p_reference_list0_entries = reference_list0_entries.as_ptr();
        self
    }
    // Same `u8` truncation caveat as `reference_list0_entries`.
    #[inline]
    pub fn reference_list1_entries(
        mut self,
        reference_list1_entries: &'a [VideoEncodeH264DpbSlotInfoEXT],
    ) -> Self {
        self.inner.reference_list1_entry_count = reference_list1_entries.len() as _;
        self.inner.p_reference_list1_entries = reference_list1_entries.as_ptr();
        self
    }
    // Stores a raw pointer to the borrowed value; the `'a` borrow keeps the
    // data alive for the builder's lifetime.
    #[inline]
    pub fn mem_mgmt_ctrl_operations(
        mut self,
        mem_mgmt_ctrl_operations: &'a StdVideoEncodeH264RefMemMgmtCtrlOperations,
    ) -> Self {
        self.inner.p_mem_mgmt_ctrl_operations = mem_mgmt_ctrl_operations;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH264ReferenceListsInfoEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264EmitPictureParametersInfoEXT.html>"]
// Generated FFI binding; `#[repr(C)]` field order must match the C struct.
pub struct VideoEncodeH264EmitPictureParametersInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub sps_id: u8,
    pub emit_sps_enable: Bool32,
    pub pps_id_entry_count: u32,
    // NOTE(review): pointer field without the usual `p_` prefix; the name comes
    // straight from the generator/vk.xml — confirm upstream before renaming.
    pub pps_id_entries: *const u8,
}
impl ::std::default::Default for VideoEncodeH264EmitPictureParametersInfoEXT {
    #[inline]
    fn default() -> Self {
        // `s_type` is pre-set to this struct's tag; pointers start out null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            sps_id: u8::default(),
            emit_sps_enable: Bool32::default(),
            pps_id_entry_count: u32::default(),
            pps_id_entries: ::std::ptr::null(),
        }
    }
}
unsafe impl TaggedStructure for VideoEncodeH264EmitPictureParametersInfoEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_INFO_EXT;
}
impl VideoEncodeH264EmitPictureParametersInfoEXT {
    /// Returns a builder over a `Default`-initialized struct.
    pub fn builder<'a>() -> VideoEncodeH264EmitPictureParametersInfoEXTBuilder<'a> {
        VideoEncodeH264EmitPictureParametersInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct VideoEncodeH264EmitPictureParametersInfoEXTBuilder<'a> {
    inner: VideoEncodeH264EmitPictureParametersInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct may be chained into `VideoEncodeInfoKHR::p_next`.
unsafe impl ExtendsVideoEncodeInfoKHR for VideoEncodeH264EmitPictureParametersInfoEXTBuilder<'_> {}
unsafe impl ExtendsVideoEncodeInfoKHR for VideoEncodeH264EmitPictureParametersInfoEXT {}
impl<'a> ::std::ops::Deref for VideoEncodeH264EmitPictureParametersInfoEXTBuilder<'a> {
    type Target = VideoEncodeH264EmitPictureParametersInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH264EmitPictureParametersInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> VideoEncodeH264EmitPictureParametersInfoEXTBuilder<'a> {
    #[inline]
    pub fn sps_id(mut self, sps_id: u8) -> Self {
        self.inner.sps_id = sps_id;
        self
    }
    // Converts the Rust `bool` into the FFI `Bool32` representation.
    #[inline]
    pub fn emit_sps_enable(mut self, emit_sps_enable: bool) -> Self {
        self.inner.emit_sps_enable = emit_sps_enable.into();
        self
    }
    // Records both the slice's length and its raw pointer; the `'a` borrow
    // keeps the data alive for the builder's lifetime.
    #[inline]
    pub fn pps_id_entries(mut self, pps_id_entries: &'a [u8]) -> Self {
        self.inner.pps_id_entry_count = pps_id_entries.len() as _;
        self.inner.pps_id_entries = pps_id_entries.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH264EmitPictureParametersInfoEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264ProfileInfoEXT.html>"]
// Generated FFI binding; `#[repr(C)]` field order must match the C struct.
pub struct VideoEncodeH264ProfileInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub std_profile_idc: StdVideoH264ProfileIdc,
}
impl ::std::default::Default for VideoEncodeH264ProfileInfoEXT {
    #[inline]
    fn default() -> Self {
        // `s_type` is pre-set to this struct's tag; pointers start out null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            std_profile_idc: StdVideoH264ProfileIdc::default(),
        }
    }
}
unsafe impl TaggedStructure for VideoEncodeH264ProfileInfoEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_PROFILE_INFO_EXT;
}
impl VideoEncodeH264ProfileInfoEXT {
    /// Returns a builder over a `Default`-initialized struct.
    pub fn builder<'a>() -> VideoEncodeH264ProfileInfoEXTBuilder<'a> {
        VideoEncodeH264ProfileInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct VideoEncodeH264ProfileInfoEXTBuilder<'a> {
    inner: VideoEncodeH264ProfileInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct may be chained into the `p_next` chain of both
// `VideoProfileInfoKHR` and `QueryPoolCreateInfo`.
unsafe impl ExtendsVideoProfileInfoKHR for VideoEncodeH264ProfileInfoEXTBuilder<'_> {}
unsafe impl ExtendsVideoProfileInfoKHR for VideoEncodeH264ProfileInfoEXT {}
unsafe impl ExtendsQueryPoolCreateInfo for VideoEncodeH264ProfileInfoEXTBuilder<'_> {}
unsafe impl ExtendsQueryPoolCreateInfo for VideoEncodeH264ProfileInfoEXT {}
impl<'a> ::std::ops::Deref for VideoEncodeH264ProfileInfoEXTBuilder<'a> {
    type Target = VideoEncodeH264ProfileInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH264ProfileInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> VideoEncodeH264ProfileInfoEXTBuilder<'a> {
    #[inline]
    pub fn std_profile_idc(mut self, std_profile_idc: StdVideoH264ProfileIdc) -> Self {
        self.inner.std_profile_idc = std_profile_idc;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH264ProfileInfoEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264NaluSliceInfoEXT.html>"]
// Generated FFI binding; `#[repr(C)]` field order must match the C struct.
pub struct VideoEncodeH264NaluSliceInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub mb_count: u32,
    pub p_reference_final_lists: *const VideoEncodeH264ReferenceListsInfoEXT,
    pub p_slice_header_std: *const StdVideoEncodeH264SliceHeader,
}
impl ::std::default::Default for VideoEncodeH264NaluSliceInfoEXT {
    #[inline]
    fn default() -> Self {
        // `s_type` is pre-set to this struct's tag; pointers start out null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            mb_count: u32::default(),
            p_reference_final_lists: ::std::ptr::null(),
            p_slice_header_std: ::std::ptr::null(),
        }
    }
}
unsafe impl TaggedStructure for VideoEncodeH264NaluSliceInfoEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_NALU_SLICE_INFO_EXT;
}
impl VideoEncodeH264NaluSliceInfoEXT {
    /// Returns a builder over a `Default`-initialized struct.
    pub fn builder<'a>() -> VideoEncodeH264NaluSliceInfoEXTBuilder<'a> {
        VideoEncodeH264NaluSliceInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct VideoEncodeH264NaluSliceInfoEXTBuilder<'a> {
    inner: VideoEncodeH264NaluSliceInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for VideoEncodeH264NaluSliceInfoEXTBuilder<'a> {
    type Target = VideoEncodeH264NaluSliceInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH264NaluSliceInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> VideoEncodeH264NaluSliceInfoEXTBuilder<'a> {
    #[inline]
    pub fn mb_count(mut self, mb_count: u32) -> Self {
        self.inner.mb_count = mb_count;
        self
    }
    // Stores a raw pointer to the borrowed value; the `'a` borrow keeps the
    // data alive for the builder's lifetime.
    #[inline]
    pub fn reference_final_lists(
        mut self,
        reference_final_lists: &'a VideoEncodeH264ReferenceListsInfoEXT,
    ) -> Self {
        self.inner.p_reference_final_lists = reference_final_lists;
        self
    }
    #[inline]
    pub fn slice_header_std(mut self, slice_header_std: &'a StdVideoEncodeH264SliceHeader) -> Self {
        self.inner.p_slice_header_std = slice_header_std;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH264NaluSliceInfoEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264RateControlInfoEXT.html>"]
// Generated FFI binding; `#[repr(C)]` field order must match the C struct.
pub struct VideoEncodeH264RateControlInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub gop_frame_count: u32,
    pub idr_period: u32,
    pub consecutive_b_frame_count: u32,
    pub rate_control_structure: VideoEncodeH264RateControlStructureEXT,
    pub temporal_layer_count: u8,
}
impl ::std::default::Default for VideoEncodeH264RateControlInfoEXT {
    #[inline]
    fn default() -> Self {
        // `s_type` is pre-set to this struct's tag; pointers start out null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            gop_frame_count: u32::default(),
            idr_period: u32::default(),
            consecutive_b_frame_count: u32::default(),
            rate_control_structure: VideoEncodeH264RateControlStructureEXT::default(),
            temporal_layer_count: u8::default(),
        }
    }
}
unsafe impl TaggedStructure for VideoEncodeH264RateControlInfoEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT;
}
impl VideoEncodeH264RateControlInfoEXT {
    /// Returns a builder over a `Default`-initialized struct.
    pub fn builder<'a>() -> VideoEncodeH264RateControlInfoEXTBuilder<'a> {
        VideoEncodeH264RateControlInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct VideoEncodeH264RateControlInfoEXTBuilder<'a> {
    inner: VideoEncodeH264RateControlInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct may be chained into
// `VideoCodingControlInfoKHR::p_next`.
unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeH264RateControlInfoEXTBuilder<'_> {}
unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeH264RateControlInfoEXT {}
impl<'a> ::std::ops::Deref for VideoEncodeH264RateControlInfoEXTBuilder<'a> {
    type Target = VideoEncodeH264RateControlInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH264RateControlInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> VideoEncodeH264RateControlInfoEXTBuilder<'a> {
    #[inline]
    pub fn gop_frame_count(mut self, gop_frame_count: u32) -> Self {
        self.inner.gop_frame_count = gop_frame_count;
        self
    }
    #[inline]
    pub fn idr_period(mut self, idr_period: u32) -> Self {
        self.inner.idr_period = idr_period;
        self
    }
    #[inline]
    pub fn consecutive_b_frame_count(mut self, consecutive_b_frame_count: u32) -> Self {
        self.inner.consecutive_b_frame_count = consecutive_b_frame_count;
        self
    }
    #[inline]
    pub fn rate_control_structure(
        mut self,
        rate_control_structure: VideoEncodeH264RateControlStructureEXT,
    ) -> Self {
        self.inner.rate_control_structure = rate_control_structure;
        self
    }
    #[inline]
    pub fn temporal_layer_count(mut self, temporal_layer_count: u8) -> Self {
        self.inner.temporal_layer_count = temporal_layer_count;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH264RateControlInfoEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264QpEXT.html>"]
pub struct VideoEncodeH264QpEXT {
    pub qp_i: i32,
    pub qp_p: i32,
    pub qp_b: i32,
}
impl VideoEncodeH264QpEXT {
    /// Starts a builder over a `Default`-initialized value.
    pub fn builder<'a>() -> VideoEncodeH264QpEXTBuilder<'a> {
        VideoEncodeH264QpEXTBuilder {
            inner: Default::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct VideoEncodeH264QpEXTBuilder<'a> {
    inner: VideoEncodeH264QpEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Deref/DerefMut expose the wrapped struct, so a `&builder` can stand in for
// `&VideoEncodeH264QpEXT` without calling `build()`.
impl<'a> ::std::ops::Deref for VideoEncodeH264QpEXTBuilder<'a> {
    type Target = VideoEncodeH264QpEXT;
    #[inline]
    fn deref(&self) -> &VideoEncodeH264QpEXT {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH264QpEXTBuilder<'a> {
    #[inline]
    fn deref_mut(&mut self) -> &mut VideoEncodeH264QpEXT {
        &mut self.inner
    }
}
impl<'a> VideoEncodeH264QpEXTBuilder<'a> {
    /// Sets the QP value applied to I-frames.
    #[inline]
    pub fn qp_i(self, qp_i: i32) -> Self {
        Self {
            inner: VideoEncodeH264QpEXT { qp_i, ..self.inner },
            ..self
        }
    }
    /// Sets the QP value applied to P-frames.
    #[inline]
    pub fn qp_p(self, qp_p: i32) -> Self {
        Self {
            inner: VideoEncodeH264QpEXT { qp_p, ..self.inner },
            ..self
        }
    }
    /// Sets the QP value applied to B-frames.
    #[inline]
    pub fn qp_b(self, qp_b: i32) -> Self {
        Self {
            inner: VideoEncodeH264QpEXT { qp_b, ..self.inner },
            ..self
        }
    }
    /// Consumes the builder and returns the plain Vulkan struct, discarding all
    /// lifetime information. Prefer passing `&builder` (via `Deref`) when the
    /// struct is only needed for the duration of a call.
    pub fn build(self) -> VideoEncodeH264QpEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264FrameSizeEXT.html>"]
pub struct VideoEncodeH264FrameSizeEXT {
    pub frame_i_size: u32,
    pub frame_p_size: u32,
    pub frame_b_size: u32,
}
impl VideoEncodeH264FrameSizeEXT {
    /// Starts a builder over a `Default`-initialized value.
    pub fn builder<'a>() -> VideoEncodeH264FrameSizeEXTBuilder<'a> {
        VideoEncodeH264FrameSizeEXTBuilder {
            inner: Default::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct VideoEncodeH264FrameSizeEXTBuilder<'a> {
    inner: VideoEncodeH264FrameSizeEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Deref/DerefMut expose the wrapped struct, so a `&builder` can stand in for
// `&VideoEncodeH264FrameSizeEXT` without calling `build()`.
impl<'a> ::std::ops::Deref for VideoEncodeH264FrameSizeEXTBuilder<'a> {
    type Target = VideoEncodeH264FrameSizeEXT;
    #[inline]
    fn deref(&self) -> &VideoEncodeH264FrameSizeEXT {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH264FrameSizeEXTBuilder<'a> {
    #[inline]
    fn deref_mut(&mut self) -> &mut VideoEncodeH264FrameSizeEXT {
        &mut self.inner
    }
}
impl<'a> VideoEncodeH264FrameSizeEXTBuilder<'a> {
    /// Sets the target size for I-frames.
    #[inline]
    pub fn frame_i_size(self, frame_i_size: u32) -> Self {
        Self {
            inner: VideoEncodeH264FrameSizeEXT {
                frame_i_size,
                ..self.inner
            },
            ..self
        }
    }
    /// Sets the target size for P-frames.
    #[inline]
    pub fn frame_p_size(self, frame_p_size: u32) -> Self {
        Self {
            inner: VideoEncodeH264FrameSizeEXT {
                frame_p_size,
                ..self.inner
            },
            ..self
        }
    }
    /// Sets the target size for B-frames.
    #[inline]
    pub fn frame_b_size(self, frame_b_size: u32) -> Self {
        Self {
            inner: VideoEncodeH264FrameSizeEXT {
                frame_b_size,
                ..self.inner
            },
            ..self
        }
    }
    /// Consumes the builder and returns the plain Vulkan struct, discarding all
    /// lifetime information. Prefer passing `&builder` (via `Deref`) when the
    /// struct is only needed for the duration of a call.
    pub fn build(self) -> VideoEncodeH264FrameSizeEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264RateControlLayerInfoEXT.html>"]
// Generated FFI binding; `#[repr(C)]` field order must match the C struct.
// Each `use_*: Bool32` flag gates whether the driver reads the value field
// that follows it (per the `use_*`/value pairing visible in the layout).
pub struct VideoEncodeH264RateControlLayerInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub temporal_layer_id: u8,
    pub use_initial_rc_qp: Bool32,
    pub initial_rc_qp: VideoEncodeH264QpEXT,
    pub use_min_qp: Bool32,
    pub min_qp: VideoEncodeH264QpEXT,
    pub use_max_qp: Bool32,
    pub max_qp: VideoEncodeH264QpEXT,
    pub use_max_frame_size: Bool32,
    pub max_frame_size: VideoEncodeH264FrameSizeEXT,
}
impl ::std::default::Default for VideoEncodeH264RateControlLayerInfoEXT {
    #[inline]
    fn default() -> Self {
        // `s_type` is pre-set to this struct's tag; pointers start out null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            temporal_layer_id: u8::default(),
            use_initial_rc_qp: Bool32::default(),
            initial_rc_qp: VideoEncodeH264QpEXT::default(),
            use_min_qp: Bool32::default(),
            min_qp: VideoEncodeH264QpEXT::default(),
            use_max_qp: Bool32::default(),
            max_qp: VideoEncodeH264QpEXT::default(),
            use_max_frame_size: Bool32::default(),
            max_frame_size: VideoEncodeH264FrameSizeEXT::default(),
        }
    }
}
unsafe impl TaggedStructure for VideoEncodeH264RateControlLayerInfoEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT;
}
impl VideoEncodeH264RateControlLayerInfoEXT {
    /// Returns a builder over a `Default`-initialized struct.
    pub fn builder<'a>() -> VideoEncodeH264RateControlLayerInfoEXTBuilder<'a> {
        VideoEncodeH264RateControlLayerInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct VideoEncodeH264RateControlLayerInfoEXTBuilder<'a> {
    inner: VideoEncodeH264RateControlLayerInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct may be chained into the `p_next` chain of both
// `VideoCodingControlInfoKHR` and `VideoEncodeRateControlLayerInfoKHR`.
unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeH264RateControlLayerInfoEXTBuilder<'_> {}
unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeH264RateControlLayerInfoEXT {}
unsafe impl ExtendsVideoEncodeRateControlLayerInfoKHR
    for VideoEncodeH264RateControlLayerInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsVideoEncodeRateControlLayerInfoKHR for VideoEncodeH264RateControlLayerInfoEXT {}
impl<'a> ::std::ops::Deref for VideoEncodeH264RateControlLayerInfoEXTBuilder<'a> {
    type Target = VideoEncodeH264RateControlLayerInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH264RateControlLayerInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> VideoEncodeH264RateControlLayerInfoEXTBuilder<'a> {
    #[inline]
    pub fn temporal_layer_id(mut self, temporal_layer_id: u8) -> Self {
        self.inner.temporal_layer_id = temporal_layer_id;
        self
    }
    // The `use_*` setters convert `bool` into the FFI `Bool32` representation.
    #[inline]
    pub fn use_initial_rc_qp(mut self, use_initial_rc_qp: bool) -> Self {
        self.inner.use_initial_rc_qp = use_initial_rc_qp.into();
        self
    }
    #[inline]
    pub fn initial_rc_qp(mut self, initial_rc_qp: VideoEncodeH264QpEXT) -> Self {
        self.inner.initial_rc_qp = initial_rc_qp;
        self
    }
    #[inline]
    pub fn use_min_qp(mut self, use_min_qp: bool) -> Self {
        self.inner.use_min_qp = use_min_qp.into();
        self
    }
    #[inline]
    pub fn min_qp(mut self, min_qp: VideoEncodeH264QpEXT) -> Self {
        self.inner.min_qp = min_qp;
        self
    }
    #[inline]
    pub fn use_max_qp(mut self, use_max_qp: bool) -> Self {
        self.inner.use_max_qp = use_max_qp.into();
        self
    }
    #[inline]
    pub fn max_qp(mut self, max_qp: VideoEncodeH264QpEXT) -> Self {
        self.inner.max_qp = max_qp;
        self
    }
    #[inline]
    pub fn use_max_frame_size(mut self, use_max_frame_size: bool) -> Self {
        self.inner.use_max_frame_size = use_max_frame_size.into();
        self
    }
    #[inline]
    pub fn max_frame_size(mut self, max_frame_size: VideoEncodeH264FrameSizeEXT) -> Self {
        self.inner.max_frame_size = max_frame_size;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH264RateControlLayerInfoEXT {
        self.inner
    }
}
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265CapabilitiesEXT.html>"]
// Generated FFI binding; `#[repr(C)]` field order must match the C struct.
// NOTE(review): `p_next` is `*mut` here (unlike the `*const` in the encode
// info structs) — presumably because this struct is written by the driver
// during capability queries; confirm against the spec before relying on it.
pub struct VideoEncodeH265CapabilitiesEXT {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub flags: VideoEncodeH265CapabilityFlagsEXT,
    pub input_mode_flags: VideoEncodeH265InputModeFlagsEXT,
    pub output_mode_flags: VideoEncodeH265OutputModeFlagsEXT,
    pub ctb_sizes: VideoEncodeH265CtbSizeFlagsEXT,
    pub transform_block_sizes: VideoEncodeH265TransformBlockSizeFlagsEXT,
    pub max_p_picture_l0_reference_count: u8,
    pub max_b_picture_l0_reference_count: u8,
    pub max_l1_reference_count: u8,
    pub max_sub_layers_count: u8,
    pub min_log2_min_luma_coding_block_size_minus3: u8,
    pub max_log2_min_luma_coding_block_size_minus3: u8,
    pub min_log2_min_luma_transform_block_size_minus2: u8,
    pub max_log2_min_luma_transform_block_size_minus2: u8,
    pub min_max_transform_hierarchy_depth_inter: u8,
    pub max_max_transform_hierarchy_depth_inter: u8,
    pub min_max_transform_hierarchy_depth_intra: u8,
    pub max_max_transform_hierarchy_depth_intra: u8,
    pub max_diff_cu_qp_delta_depth: u8,
    pub min_max_num_merge_cand: u8,
    pub max_max_num_merge_cand: u8,
}
impl ::std::default::Default for VideoEncodeH265CapabilitiesEXT {
    #[inline]
    fn default() -> Self {
        // `s_type` is pre-set to this struct's tag; `p_next` starts out null.
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            flags: VideoEncodeH265CapabilityFlagsEXT::default(),
            input_mode_flags: VideoEncodeH265InputModeFlagsEXT::default(),
            output_mode_flags: VideoEncodeH265OutputModeFlagsEXT::default(),
            ctb_sizes: VideoEncodeH265CtbSizeFlagsEXT::default(),
            transform_block_sizes: VideoEncodeH265TransformBlockSizeFlagsEXT::default(),
            max_p_picture_l0_reference_count: u8::default(),
            max_b_picture_l0_reference_count: u8::default(),
            max_l1_reference_count: u8::default(),
            max_sub_layers_count: u8::default(),
            min_log2_min_luma_coding_block_size_minus3: u8::default(),
            max_log2_min_luma_coding_block_size_minus3: u8::default(),
            min_log2_min_luma_transform_block_size_minus2: u8::default(),
            max_log2_min_luma_transform_block_size_minus2: u8::default(),
            min_max_transform_hierarchy_depth_inter: u8::default(),
            max_max_transform_hierarchy_depth_inter: u8::default(),
            min_max_transform_hierarchy_depth_intra: u8::default(),
            max_max_transform_hierarchy_depth_intra: u8::default(),
            max_diff_cu_qp_delta_depth: u8::default(),
            min_max_num_merge_cand: u8::default(),
            max_max_num_merge_cand: u8::default(),
        }
    }
}
unsafe impl TaggedStructure for VideoEncodeH265CapabilitiesEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_CAPABILITIES_EXT;
}
impl VideoEncodeH265CapabilitiesEXT {
    /// Returns a builder over a `Default`-initialized struct.
    pub fn builder<'a>() -> VideoEncodeH265CapabilitiesEXTBuilder<'a> {
        VideoEncodeH265CapabilitiesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct VideoEncodeH265CapabilitiesEXTBuilder<'a> {
    inner: VideoEncodeH265CapabilitiesEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct may be chained into `VideoCapabilitiesKHR::p_next`.
unsafe impl ExtendsVideoCapabilitiesKHR for VideoEncodeH265CapabilitiesEXTBuilder<'_> {}
unsafe impl ExtendsVideoCapabilitiesKHR for VideoEncodeH265CapabilitiesEXT {}
impl<'a> ::std::ops::Deref for VideoEncodeH265CapabilitiesEXTBuilder<'a> {
    type Target = VideoEncodeH265CapabilitiesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH265CapabilitiesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
+impl<'a> VideoEncodeH265CapabilitiesEXTBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: VideoEncodeH265CapabilityFlagsEXT) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn input_mode_flags(mut self, input_mode_flags: VideoEncodeH265InputModeFlagsEXT) -> Self {
+ self.inner.input_mode_flags = input_mode_flags;
+ self
+ }
+ #[inline]
+ pub fn output_mode_flags(
+ mut self,
+ output_mode_flags: VideoEncodeH265OutputModeFlagsEXT,
+ ) -> Self {
+ self.inner.output_mode_flags = output_mode_flags;
+ self
+ }
+ #[inline]
+ pub fn ctb_sizes(mut self, ctb_sizes: VideoEncodeH265CtbSizeFlagsEXT) -> Self {
+ self.inner.ctb_sizes = ctb_sizes;
+ self
+ }
+ #[inline]
+ pub fn transform_block_sizes(
+ mut self,
+ transform_block_sizes: VideoEncodeH265TransformBlockSizeFlagsEXT,
+ ) -> Self {
+ self.inner.transform_block_sizes = transform_block_sizes;
+ self
+ }
+ #[inline]
+ pub fn max_p_picture_l0_reference_count(
+ mut self,
+ max_p_picture_l0_reference_count: u8,
+ ) -> Self {
+ self.inner.max_p_picture_l0_reference_count = max_p_picture_l0_reference_count;
+ self
+ }
+ #[inline]
+ pub fn max_b_picture_l0_reference_count(
+ mut self,
+ max_b_picture_l0_reference_count: u8,
+ ) -> Self {
+ self.inner.max_b_picture_l0_reference_count = max_b_picture_l0_reference_count;
+ self
+ }
+ #[inline]
+ pub fn max_l1_reference_count(mut self, max_l1_reference_count: u8) -> Self {
+ self.inner.max_l1_reference_count = max_l1_reference_count;
+ self
+ }
+ #[inline]
+ pub fn max_sub_layers_count(mut self, max_sub_layers_count: u8) -> Self {
+ self.inner.max_sub_layers_count = max_sub_layers_count;
+ self
+ }
+ #[inline]
+ pub fn min_log2_min_luma_coding_block_size_minus3(
+ mut self,
+ min_log2_min_luma_coding_block_size_minus3: u8,
+ ) -> Self {
+ self.inner.min_log2_min_luma_coding_block_size_minus3 =
+ min_log2_min_luma_coding_block_size_minus3;
+ self
+ }
+ #[inline]
+ pub fn max_log2_min_luma_coding_block_size_minus3(
+ mut self,
+ max_log2_min_luma_coding_block_size_minus3: u8,
+ ) -> Self {
+ self.inner.max_log2_min_luma_coding_block_size_minus3 =
+ max_log2_min_luma_coding_block_size_minus3;
+ self
+ }
+ #[inline]
+ pub fn min_log2_min_luma_transform_block_size_minus2(
+ mut self,
+ min_log2_min_luma_transform_block_size_minus2: u8,
+ ) -> Self {
+ self.inner.min_log2_min_luma_transform_block_size_minus2 =
+ min_log2_min_luma_transform_block_size_minus2;
+ self
+ }
+ #[inline]
+ pub fn max_log2_min_luma_transform_block_size_minus2(
+ mut self,
+ max_log2_min_luma_transform_block_size_minus2: u8,
+ ) -> Self {
+ self.inner.max_log2_min_luma_transform_block_size_minus2 =
+ max_log2_min_luma_transform_block_size_minus2;
+ self
+ }
+ #[inline]
+ pub fn min_max_transform_hierarchy_depth_inter(
+ mut self,
+ min_max_transform_hierarchy_depth_inter: u8,
+ ) -> Self {
+ self.inner.min_max_transform_hierarchy_depth_inter =
+ min_max_transform_hierarchy_depth_inter;
+ self
+ }
+ #[inline]
+ pub fn max_max_transform_hierarchy_depth_inter(
+ mut self,
+ max_max_transform_hierarchy_depth_inter: u8,
+ ) -> Self {
+ self.inner.max_max_transform_hierarchy_depth_inter =
+ max_max_transform_hierarchy_depth_inter;
+ self
+ }
+ #[inline]
+ pub fn min_max_transform_hierarchy_depth_intra(
+ mut self,
+ min_max_transform_hierarchy_depth_intra: u8,
+ ) -> Self {
+ self.inner.min_max_transform_hierarchy_depth_intra =
+ min_max_transform_hierarchy_depth_intra;
+ self
+ }
+ #[inline]
+ pub fn max_max_transform_hierarchy_depth_intra(
+ mut self,
+ max_max_transform_hierarchy_depth_intra: u8,
+ ) -> Self {
+ self.inner.max_max_transform_hierarchy_depth_intra =
+ max_max_transform_hierarchy_depth_intra;
+ self
+ }
+ #[inline]
+ pub fn max_diff_cu_qp_delta_depth(mut self, max_diff_cu_qp_delta_depth: u8) -> Self {
+ self.inner.max_diff_cu_qp_delta_depth = max_diff_cu_qp_delta_depth;
+ self
+ }
+ #[inline]
+ pub fn min_max_num_merge_cand(mut self, min_max_num_merge_cand: u8) -> Self {
+ self.inner.min_max_num_merge_cand = min_max_num_merge_cand;
+ self
+ }
+ #[inline]
+ pub fn max_max_num_merge_cand(mut self, max_max_num_merge_cand: u8) -> Self {
+ self.inner.max_max_num_merge_cand = max_max_num_merge_cand;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> VideoEncodeH265CapabilitiesEXT {
+ self.inner
+ }
+}
// FFI struct carrying VPS/SPS/PPS parameter sets to add to an H.265 encode
// session; each pointer field is paired with a preceding element count.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265SessionParametersAddInfoEXT.html>"]
pub struct VideoEncodeH265SessionParametersAddInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub std_vps_count: u32,
    pub p_std_vp_ss: *const StdVideoH265VideoParameterSet,
    pub std_sps_count: u32,
    pub p_std_sp_ss: *const StdVideoH265SequenceParameterSet,
    pub std_pps_count: u32,
    pub p_std_pp_ss: *const StdVideoH265PictureParameterSet,
}
// Zero counts and null pointers, with `s_type` pre-tagged.
impl ::std::default::Default for VideoEncodeH265SessionParametersAddInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            std_vps_count: u32::default(),
            p_std_vp_ss: ::std::ptr::null(),
            std_sps_count: u32::default(),
            p_std_sp_ss: ::std::ptr::null(),
            std_pps_count: u32::default(),
            p_std_pp_ss: ::std::ptr::null(),
        }
    }
}
// SAFETY: pairs this struct with its unique VkStructureType tag (from vk.xml).
unsafe impl TaggedStructure for VideoEncodeH265SessionParametersAddInfoEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT;
}
impl VideoEncodeH265SessionParametersAddInfoEXT {
    // Entry point for the lifetime-tracked builder defined below.
    pub fn builder<'a>() -> VideoEncodeH265SessionParametersAddInfoEXTBuilder<'a> {
        VideoEncodeH265SessionParametersAddInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper whose lifetime keeps the borrowed parameter-set slices alive.
#[repr(transparent)]
pub struct VideoEncodeH265SessionParametersAddInfoEXTBuilder<'a> {
    inner: VideoEncodeH265SessionParametersAddInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY: registry-defined p_next extension of VkVideoSessionParametersUpdateInfoKHR.
unsafe impl ExtendsVideoSessionParametersUpdateInfoKHR
    for VideoEncodeH265SessionParametersAddInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsVideoSessionParametersUpdateInfoKHR
    for VideoEncodeH265SessionParametersAddInfoEXT
{
}
// Deref so a &builder can be handed straight to Vulkan calls.
impl<'a> ::std::ops::Deref for VideoEncodeH265SessionParametersAddInfoEXTBuilder<'a> {
    type Target = VideoEncodeH265SessionParametersAddInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH265SessionParametersAddInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Slice setters: each records the slice length into its *_count field and
// stores the data pointer, keeping the count/pointer pair consistent.
impl<'a> VideoEncodeH265SessionParametersAddInfoEXTBuilder<'a> {
    #[inline]
    pub fn std_vp_ss(mut self, std_vp_ss: &'a [StdVideoH265VideoParameterSet]) -> Self {
        self.inner.std_vps_count = std_vp_ss.len() as _;
        self.inner.p_std_vp_ss = std_vp_ss.as_ptr();
        self
    }
    #[inline]
    pub fn std_sp_ss(mut self, std_sp_ss: &'a [StdVideoH265SequenceParameterSet]) -> Self {
        self.inner.std_sps_count = std_sp_ss.len() as _;
        self.inner.p_std_sp_ss = std_sp_ss.as_ptr();
        self
    }
    #[inline]
    pub fn std_pp_ss(mut self, std_pp_ss: &'a [StdVideoH265PictureParameterSet]) -> Self {
        self.inner.std_pps_count = std_pp_ss.len() as _;
        self.inner.p_std_pp_ss = std_pp_ss.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH265SessionParametersAddInfoEXT {
        self.inner
    }
}
// FFI struct sizing the parameter-set capacity of an H.265 encode session,
// optionally seeding it with an initial add-info struct.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265SessionParametersCreateInfoEXT.html>"]
pub struct VideoEncodeH265SessionParametersCreateInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub max_std_vps_count: u32,
    pub max_std_sps_count: u32,
    pub max_std_pps_count: u32,
    // Optional: null means no initial parameter sets.
    pub p_parameters_add_info: *const VideoEncodeH265SessionParametersAddInfoEXT,
}
// Zero capacities, null add-info, `s_type` pre-tagged.
impl ::std::default::Default for VideoEncodeH265SessionParametersCreateInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            max_std_vps_count: u32::default(),
            max_std_sps_count: u32::default(),
            max_std_pps_count: u32::default(),
            p_parameters_add_info: ::std::ptr::null(),
        }
    }
}
// SAFETY: pairs this struct with its unique VkStructureType tag (from vk.xml).
unsafe impl TaggedStructure for VideoEncodeH265SessionParametersCreateInfoEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT;
}
impl VideoEncodeH265SessionParametersCreateInfoEXT {
    // Entry point for the lifetime-tracked builder defined below.
    pub fn builder<'a>() -> VideoEncodeH265SessionParametersCreateInfoEXTBuilder<'a> {
        VideoEncodeH265SessionParametersCreateInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper whose lifetime keeps the borrowed add-info alive.
#[repr(transparent)]
pub struct VideoEncodeH265SessionParametersCreateInfoEXTBuilder<'a> {
    inner: VideoEncodeH265SessionParametersCreateInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY: registry-defined p_next extension of VkVideoSessionParametersCreateInfoKHR.
unsafe impl ExtendsVideoSessionParametersCreateInfoKHR
    for VideoEncodeH265SessionParametersCreateInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsVideoSessionParametersCreateInfoKHR
    for VideoEncodeH265SessionParametersCreateInfoEXT
{
}
// Deref so a &builder can be handed straight to Vulkan calls.
impl<'a> ::std::ops::Deref for VideoEncodeH265SessionParametersCreateInfoEXTBuilder<'a> {
    type Target = VideoEncodeH265SessionParametersCreateInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH265SessionParametersCreateInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Fluent setters; `parameters_add_info` stores a borrowed reference as a raw
// pointer, with the builder lifetime guarding its validity until `build`.
impl<'a> VideoEncodeH265SessionParametersCreateInfoEXTBuilder<'a> {
    #[inline]
    pub fn max_std_vps_count(mut self, max_std_vps_count: u32) -> Self {
        self.inner.max_std_vps_count = max_std_vps_count;
        self
    }
    #[inline]
    pub fn max_std_sps_count(mut self, max_std_sps_count: u32) -> Self {
        self.inner.max_std_sps_count = max_std_sps_count;
        self
    }
    #[inline]
    pub fn max_std_pps_count(mut self, max_std_pps_count: u32) -> Self {
        self.inner.max_std_pps_count = max_std_pps_count;
        self
    }
    #[inline]
    pub fn parameters_add_info(
        mut self,
        parameters_add_info: &'a VideoEncodeH265SessionParametersAddInfoEXT,
    ) -> Self {
        self.inner.p_parameters_add_info = parameters_add_info;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH265SessionParametersCreateInfoEXT {
        self.inner
    }
}
// Per-frame H.265 encode info: reference lists, NALU slice-segment entries
// (count + pointer pair), and the std picture info for the current frame.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265VclFrameInfoEXT.html>"]
pub struct VideoEncodeH265VclFrameInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub p_reference_final_lists: *const VideoEncodeH265ReferenceListsInfoEXT,
    pub nalu_slice_segment_entry_count: u32,
    pub p_nalu_slice_segment_entries: *const VideoEncodeH265NaluSliceSegmentInfoEXT,
    pub p_current_picture_info: *const StdVideoEncodeH265PictureInfo,
}
// Null pointers and zero count, `s_type` pre-tagged.
impl ::std::default::Default for VideoEncodeH265VclFrameInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            p_reference_final_lists: ::std::ptr::null(),
            nalu_slice_segment_entry_count: u32::default(),
            p_nalu_slice_segment_entries: ::std::ptr::null(),
            p_current_picture_info: ::std::ptr::null(),
        }
    }
}
// SAFETY: pairs this struct with its unique VkStructureType tag (from vk.xml).
unsafe impl TaggedStructure for VideoEncodeH265VclFrameInfoEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT;
}
impl VideoEncodeH265VclFrameInfoEXT {
    // Entry point for the lifetime-tracked builder defined below.
    pub fn builder<'a>() -> VideoEncodeH265VclFrameInfoEXTBuilder<'a> {
        VideoEncodeH265VclFrameInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper whose lifetime keeps the borrowed frame data alive.
#[repr(transparent)]
pub struct VideoEncodeH265VclFrameInfoEXTBuilder<'a> {
    inner: VideoEncodeH265VclFrameInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY: registry-defined p_next extension of VkVideoEncodeInfoKHR.
unsafe impl ExtendsVideoEncodeInfoKHR for VideoEncodeH265VclFrameInfoEXTBuilder<'_> {}
unsafe impl ExtendsVideoEncodeInfoKHR for VideoEncodeH265VclFrameInfoEXT {}
// Deref so a &builder can be handed straight to Vulkan calls.
impl<'a> ::std::ops::Deref for VideoEncodeH265VclFrameInfoEXTBuilder<'a> {
    type Target = VideoEncodeH265VclFrameInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH265VclFrameInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Setters converting borrowed references/slices into the raw pointer fields;
// the slice setter also records the element count.
impl<'a> VideoEncodeH265VclFrameInfoEXTBuilder<'a> {
    #[inline]
    pub fn reference_final_lists(
        mut self,
        reference_final_lists: &'a VideoEncodeH265ReferenceListsInfoEXT,
    ) -> Self {
        self.inner.p_reference_final_lists = reference_final_lists;
        self
    }
    #[inline]
    pub fn nalu_slice_segment_entries(
        mut self,
        nalu_slice_segment_entries: &'a [VideoEncodeH265NaluSliceSegmentInfoEXT],
    ) -> Self {
        self.inner.nalu_slice_segment_entry_count = nalu_slice_segment_entries.len() as _;
        self.inner.p_nalu_slice_segment_entries = nalu_slice_segment_entries.as_ptr();
        self
    }
    #[inline]
    pub fn current_picture_info(
        mut self,
        current_picture_info: &'a StdVideoEncodeH265PictureInfo,
    ) -> Self {
        self.inner.p_current_picture_info = current_picture_info;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH265VclFrameInfoEXT {
        self.inner
    }
}
// Requests emission of VPS/SPS/PPS NAL units into the encoded bitstream.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265EmitPictureParametersInfoEXT.html>"]
pub struct VideoEncodeH265EmitPictureParametersInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub vps_id: u8,
    pub sps_id: u8,
    pub emit_vps_enable: Bool32,
    pub emit_sps_enable: Bool32,
    pub pps_id_entry_count: u32,
    // NOTE(review): pointer field without the usual `p_` prefix used by every
    // other pointer in these bindings — upstream/generator naming; verify
    // against the ash generator before relying on the convention.
    pub pps_id_entries: *const u8,
}
// Zeroed IDs/flags, null entry pointer, `s_type` pre-tagged.
impl ::std::default::Default for VideoEncodeH265EmitPictureParametersInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            vps_id: u8::default(),
            sps_id: u8::default(),
            emit_vps_enable: Bool32::default(),
            emit_sps_enable: Bool32::default(),
            pps_id_entry_count: u32::default(),
            pps_id_entries: ::std::ptr::null(),
        }
    }
}
// SAFETY: pairs this struct with its unique VkStructureType tag (from vk.xml).
unsafe impl TaggedStructure for VideoEncodeH265EmitPictureParametersInfoEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_INFO_EXT;
}
impl VideoEncodeH265EmitPictureParametersInfoEXT {
    // Entry point for the lifetime-tracked builder defined below.
    pub fn builder<'a>() -> VideoEncodeH265EmitPictureParametersInfoEXTBuilder<'a> {
        VideoEncodeH265EmitPictureParametersInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper whose lifetime keeps the borrowed PPS-id slice alive.
#[repr(transparent)]
pub struct VideoEncodeH265EmitPictureParametersInfoEXTBuilder<'a> {
    inner: VideoEncodeH265EmitPictureParametersInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY: registry-defined p_next extension of VkVideoEncodeInfoKHR.
unsafe impl ExtendsVideoEncodeInfoKHR for VideoEncodeH265EmitPictureParametersInfoEXTBuilder<'_> {}
unsafe impl ExtendsVideoEncodeInfoKHR for VideoEncodeH265EmitPictureParametersInfoEXT {}
// Deref so a &builder can be handed straight to Vulkan calls.
impl<'a> ::std::ops::Deref for VideoEncodeH265EmitPictureParametersInfoEXTBuilder<'a> {
    type Target = VideoEncodeH265EmitPictureParametersInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH265EmitPictureParametersInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Fluent setters; bool setters convert to Vulkan's 32-bit Bool32 via Into,
// and the slice setter records the element count alongside the pointer.
impl<'a> VideoEncodeH265EmitPictureParametersInfoEXTBuilder<'a> {
    #[inline]
    pub fn vps_id(mut self, vps_id: u8) -> Self {
        self.inner.vps_id = vps_id;
        self
    }
    #[inline]
    pub fn sps_id(mut self, sps_id: u8) -> Self {
        self.inner.sps_id = sps_id;
        self
    }
    #[inline]
    pub fn emit_vps_enable(mut self, emit_vps_enable: bool) -> Self {
        self.inner.emit_vps_enable = emit_vps_enable.into();
        self
    }
    #[inline]
    pub fn emit_sps_enable(mut self, emit_sps_enable: bool) -> Self {
        self.inner.emit_sps_enable = emit_sps_enable.into();
        self
    }
    #[inline]
    pub fn pps_id_entries(mut self, pps_id_entries: &'a [u8]) -> Self {
        self.inner.pps_id_entry_count = pps_id_entries.len() as _;
        self.inner.pps_id_entries = pps_id_entries.as_ptr();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH265EmitPictureParametersInfoEXT {
        self.inner
    }
}
// Per-slice-segment info for H.265 encoding: CTB count plus borrowed
// reference lists and the std slice-segment header.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265NaluSliceSegmentInfoEXT.html>"]
pub struct VideoEncodeH265NaluSliceSegmentInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub ctb_count: u32,
    pub p_reference_final_lists: *const VideoEncodeH265ReferenceListsInfoEXT,
    pub p_slice_segment_header_std: *const StdVideoEncodeH265SliceSegmentHeader,
}
// Zero count, null pointers, `s_type` pre-tagged.
impl ::std::default::Default for VideoEncodeH265NaluSliceSegmentInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            ctb_count: u32::default(),
            p_reference_final_lists: ::std::ptr::null(),
            p_slice_segment_header_std: ::std::ptr::null(),
        }
    }
}
// SAFETY: pairs this struct with its unique VkStructureType tag (from vk.xml).
unsafe impl TaggedStructure for VideoEncodeH265NaluSliceSegmentInfoEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_EXT;
}
impl VideoEncodeH265NaluSliceSegmentInfoEXT {
    // Entry point for the lifetime-tracked builder defined below.
    pub fn builder<'a>() -> VideoEncodeH265NaluSliceSegmentInfoEXTBuilder<'a> {
        VideoEncodeH265NaluSliceSegmentInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper whose lifetime keeps the borrowed pointers alive.
// (No Extends* impls: this struct is referenced by pointer from
// VideoEncodeH265VclFrameInfoEXT rather than chained via p_next.)
#[repr(transparent)]
pub struct VideoEncodeH265NaluSliceSegmentInfoEXTBuilder<'a> {
    inner: VideoEncodeH265NaluSliceSegmentInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Deref so a &builder can be handed straight to Vulkan calls.
impl<'a> ::std::ops::Deref for VideoEncodeH265NaluSliceSegmentInfoEXTBuilder<'a> {
    type Target = VideoEncodeH265NaluSliceSegmentInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH265NaluSliceSegmentInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Fluent setters converting borrowed references into the raw pointer fields.
impl<'a> VideoEncodeH265NaluSliceSegmentInfoEXTBuilder<'a> {
    #[inline]
    pub fn ctb_count(mut self, ctb_count: u32) -> Self {
        self.inner.ctb_count = ctb_count;
        self
    }
    #[inline]
    pub fn reference_final_lists(
        mut self,
        reference_final_lists: &'a VideoEncodeH265ReferenceListsInfoEXT,
    ) -> Self {
        self.inner.p_reference_final_lists = reference_final_lists;
        self
    }
    #[inline]
    pub fn slice_segment_header_std(
        mut self,
        slice_segment_header_std: &'a StdVideoEncodeH265SliceSegmentHeader,
    ) -> Self {
        self.inner.p_slice_segment_header_std = slice_segment_header_std;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH265NaluSliceSegmentInfoEXT {
        self.inner
    }
}
// H.265 rate-control configuration: GOP shape, IDR period, B-frame run
// length, rate-control structure, and sub-layer count.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265RateControlInfoEXT.html>"]
pub struct VideoEncodeH265RateControlInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub gop_frame_count: u32,
    pub idr_period: u32,
    pub consecutive_b_frame_count: u32,
    pub rate_control_structure: VideoEncodeH265RateControlStructureEXT,
    pub sub_layer_count: u8,
}
// All-zero configuration with `s_type` pre-tagged.
impl ::std::default::Default for VideoEncodeH265RateControlInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            gop_frame_count: u32::default(),
            idr_period: u32::default(),
            consecutive_b_frame_count: u32::default(),
            rate_control_structure: VideoEncodeH265RateControlStructureEXT::default(),
            sub_layer_count: u8::default(),
        }
    }
}
// SAFETY: pairs this struct with its unique VkStructureType tag (from vk.xml).
unsafe impl TaggedStructure for VideoEncodeH265RateControlInfoEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT;
}
impl VideoEncodeH265RateControlInfoEXT {
    // Entry point for the lifetime-tracked builder defined below.
    pub fn builder<'a>() -> VideoEncodeH265RateControlInfoEXTBuilder<'a> {
        VideoEncodeH265RateControlInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper; the lifetime is unused by value fields but kept for
// uniformity with the other generated builders.
#[repr(transparent)]
pub struct VideoEncodeH265RateControlInfoEXTBuilder<'a> {
    inner: VideoEncodeH265RateControlInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY: registry-defined p_next extension of VkVideoCodingControlInfoKHR.
unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeH265RateControlInfoEXTBuilder<'_> {}
unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeH265RateControlInfoEXT {}
// Deref so a &builder can be handed straight to Vulkan calls.
impl<'a> ::std::ops::Deref for VideoEncodeH265RateControlInfoEXTBuilder<'a> {
    type Target = VideoEncodeH265RateControlInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH265RateControlInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Fluent by-value setters; each consumes the builder and returns it.
impl<'a> VideoEncodeH265RateControlInfoEXTBuilder<'a> {
    #[inline]
    pub fn gop_frame_count(mut self, gop_frame_count: u32) -> Self {
        self.inner.gop_frame_count = gop_frame_count;
        self
    }
    #[inline]
    pub fn idr_period(mut self, idr_period: u32) -> Self {
        self.inner.idr_period = idr_period;
        self
    }
    #[inline]
    pub fn consecutive_b_frame_count(mut self, consecutive_b_frame_count: u32) -> Self {
        self.inner.consecutive_b_frame_count = consecutive_b_frame_count;
        self
    }
    #[inline]
    pub fn rate_control_structure(
        mut self,
        rate_control_structure: VideoEncodeH265RateControlStructureEXT,
    ) -> Self {
        self.inner.rate_control_structure = rate_control_structure;
        self
    }
    #[inline]
    pub fn sub_layer_count(mut self, sub_layer_count: u8) -> Self {
        self.inner.sub_layer_count = sub_layer_count;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH265RateControlInfoEXT {
        self.inner
    }
}
// Plain-data triple of quantization parameters for I/P/B frames. No s_type:
// this struct is embedded by value, never chained via p_next, so Default can
// be derived.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265QpEXT.html>"]
pub struct VideoEncodeH265QpEXT {
    pub qp_i: i32,
    pub qp_p: i32,
    pub qp_b: i32,
}
impl VideoEncodeH265QpEXT {
    // Entry point for the lifetime-tracked builder defined below.
    pub fn builder<'a>() -> VideoEncodeH265QpEXTBuilder<'a> {
        VideoEncodeH265QpEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper; lifetime kept for uniformity with other builders.
#[repr(transparent)]
pub struct VideoEncodeH265QpEXTBuilder<'a> {
    inner: VideoEncodeH265QpEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for VideoEncodeH265QpEXTBuilder<'a> {
    type Target = VideoEncodeH265QpEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH265QpEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Fluent by-value setters; each consumes the builder and returns it.
impl<'a> VideoEncodeH265QpEXTBuilder<'a> {
    #[inline]
    pub fn qp_i(mut self, qp_i: i32) -> Self {
        self.inner.qp_i = qp_i;
        self
    }
    #[inline]
    pub fn qp_p(mut self, qp_p: i32) -> Self {
        self.inner.qp_p = qp_p;
        self
    }
    #[inline]
    pub fn qp_b(mut self, qp_b: i32) -> Self {
        self.inner.qp_b = qp_b;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH265QpEXT {
        self.inner
    }
}
// Plain-data triple of encoded-frame size budgets for I/P/B frames.
// Embedded by value (no s_type / p_next), so Default is derivable.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265FrameSizeEXT.html>"]
pub struct VideoEncodeH265FrameSizeEXT {
    pub frame_i_size: u32,
    pub frame_p_size: u32,
    pub frame_b_size: u32,
}
impl VideoEncodeH265FrameSizeEXT {
    // Entry point for the lifetime-tracked builder defined below.
    pub fn builder<'a>() -> VideoEncodeH265FrameSizeEXTBuilder<'a> {
        VideoEncodeH265FrameSizeEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper; lifetime kept for uniformity with other builders.
#[repr(transparent)]
pub struct VideoEncodeH265FrameSizeEXTBuilder<'a> {
    inner: VideoEncodeH265FrameSizeEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for VideoEncodeH265FrameSizeEXTBuilder<'a> {
    type Target = VideoEncodeH265FrameSizeEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH265FrameSizeEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Fluent by-value setters; each consumes the builder and returns it.
impl<'a> VideoEncodeH265FrameSizeEXTBuilder<'a> {
    #[inline]
    pub fn frame_i_size(mut self, frame_i_size: u32) -> Self {
        self.inner.frame_i_size = frame_i_size;
        self
    }
    #[inline]
    pub fn frame_p_size(mut self, frame_p_size: u32) -> Self {
        self.inner.frame_p_size = frame_p_size;
        self
    }
    #[inline]
    pub fn frame_b_size(mut self, frame_b_size: u32) -> Self {
        self.inner.frame_b_size = frame_b_size;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH265FrameSizeEXT {
        self.inner
    }
}
// Per-temporal-layer rate-control settings. Each optional QP / frame-size
// limit is gated by a matching `use_*` Bool32 flag.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265RateControlLayerInfoEXT.html>"]
pub struct VideoEncodeH265RateControlLayerInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub temporal_id: u8,
    pub use_initial_rc_qp: Bool32,
    pub initial_rc_qp: VideoEncodeH265QpEXT,
    pub use_min_qp: Bool32,
    pub min_qp: VideoEncodeH265QpEXT,
    pub use_max_qp: Bool32,
    pub max_qp: VideoEncodeH265QpEXT,
    pub use_max_frame_size: Bool32,
    pub max_frame_size: VideoEncodeH265FrameSizeEXT,
}
// All-zero settings (every `use_*` flag false) with `s_type` pre-tagged.
impl ::std::default::Default for VideoEncodeH265RateControlLayerInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            temporal_id: u8::default(),
            use_initial_rc_qp: Bool32::default(),
            initial_rc_qp: VideoEncodeH265QpEXT::default(),
            use_min_qp: Bool32::default(),
            min_qp: VideoEncodeH265QpEXT::default(),
            use_max_qp: Bool32::default(),
            max_qp: VideoEncodeH265QpEXT::default(),
            use_max_frame_size: Bool32::default(),
            max_frame_size: VideoEncodeH265FrameSizeEXT::default(),
        }
    }
}
// SAFETY: pairs this struct with its unique VkStructureType tag (from vk.xml).
unsafe impl TaggedStructure for VideoEncodeH265RateControlLayerInfoEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT;
}
impl VideoEncodeH265RateControlLayerInfoEXT {
    // Entry point for the lifetime-tracked builder defined below.
    pub fn builder<'a>() -> VideoEncodeH265RateControlLayerInfoEXTBuilder<'a> {
        VideoEncodeH265RateControlLayerInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Transparent wrapper; lifetime kept for uniformity with other builders.
#[repr(transparent)]
pub struct VideoEncodeH265RateControlLayerInfoEXTBuilder<'a> {
    inner: VideoEncodeH265RateControlLayerInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// SAFETY: registry-defined p_next extension of both VkVideoCodingControlInfoKHR
// and VkVideoEncodeRateControlLayerInfoKHR.
unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeH265RateControlLayerInfoEXTBuilder<'_> {}
unsafe impl ExtendsVideoCodingControlInfoKHR for VideoEncodeH265RateControlLayerInfoEXT {}
unsafe impl ExtendsVideoEncodeRateControlLayerInfoKHR
    for VideoEncodeH265RateControlLayerInfoEXTBuilder<'_>
{
}
unsafe impl ExtendsVideoEncodeRateControlLayerInfoKHR for VideoEncodeH265RateControlLayerInfoEXT {}
// Deref so a &builder can be handed straight to Vulkan calls.
impl<'a> ::std::ops::Deref for VideoEncodeH265RateControlLayerInfoEXTBuilder<'a> {
    type Target = VideoEncodeH265RateControlLayerInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for VideoEncodeH265RateControlLayerInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Fluent setters; bool setters convert to Vulkan's 32-bit Bool32 via Into.
// Note the setters do not couple a value with its `use_*` flag — callers must
// set both.
impl<'a> VideoEncodeH265RateControlLayerInfoEXTBuilder<'a> {
    #[inline]
    pub fn temporal_id(mut self, temporal_id: u8) -> Self {
        self.inner.temporal_id = temporal_id;
        self
    }
    #[inline]
    pub fn use_initial_rc_qp(mut self, use_initial_rc_qp: bool) -> Self {
        self.inner.use_initial_rc_qp = use_initial_rc_qp.into();
        self
    }
    #[inline]
    pub fn initial_rc_qp(mut self, initial_rc_qp: VideoEncodeH265QpEXT) -> Self {
        self.inner.initial_rc_qp = initial_rc_qp;
        self
    }
    #[inline]
    pub fn use_min_qp(mut self, use_min_qp: bool) -> Self {
        self.inner.use_min_qp = use_min_qp.into();
        self
    }
    #[inline]
    pub fn min_qp(mut self, min_qp: VideoEncodeH265QpEXT) -> Self {
        self.inner.min_qp = min_qp;
        self
    }
    #[inline]
    pub fn use_max_qp(mut self, use_max_qp: bool) -> Self {
        self.inner.use_max_qp = use_max_qp.into();
        self
    }
    #[inline]
    pub fn max_qp(mut self, max_qp: VideoEncodeH265QpEXT) -> Self {
        self.inner.max_qp = max_qp;
        self
    }
    #[inline]
    pub fn use_max_frame_size(mut self, use_max_frame_size: bool) -> Self {
        self.inner.use_max_frame_size = use_max_frame_size.into();
        self
    }
    #[inline]
    pub fn max_frame_size(mut self, max_frame_size: VideoEncodeH265FrameSizeEXT) -> Self {
        self.inner.max_frame_size = max_frame_size;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> VideoEncodeH265RateControlLayerInfoEXT {
        self.inner
    }
}
+// FFI mirror of VkVideoEncodeH265ProfileInfoEXT. `#[repr(C)]` fixes the field layout
+// to match the C struct; `Default` pre-fills s_type with the correct StructureType tag
+// and nulls the p_next pointer.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265ProfileInfoEXT.html>"]
+pub struct VideoEncodeH265ProfileInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub std_profile_idc: StdVideoH265ProfileIdc,
+}
+impl ::std::default::Default for VideoEncodeH265ProfileInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            std_profile_idc: StdVideoH265ProfileIdc::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for VideoEncodeH265ProfileInfoEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_PROFILE_INFO_EXT;
+}
+impl VideoEncodeH265ProfileInfoEXT {
+    pub fn builder<'a>() -> VideoEncodeH265ProfileInfoEXTBuilder<'a> {
+        VideoEncodeH265ProfileInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// `#[repr(transparent)]` keeps the builder layout-identical to the inner struct; the
+// PhantomData lifetime 'a ties any borrowed pointers stored via setters to the builder.
+#[repr(transparent)]
+pub struct VideoEncodeH265ProfileInfoEXTBuilder<'a> {
+    inner: VideoEncodeH265ProfileInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: this struct may extend VkVideoProfileInfoKHR and VkQueryPoolCreateInfo.
+unsafe impl ExtendsVideoProfileInfoKHR for VideoEncodeH265ProfileInfoEXTBuilder<'_> {}
+unsafe impl ExtendsVideoProfileInfoKHR for VideoEncodeH265ProfileInfoEXT {}
+unsafe impl ExtendsQueryPoolCreateInfo for VideoEncodeH265ProfileInfoEXTBuilder<'_> {}
+unsafe impl ExtendsQueryPoolCreateInfo for VideoEncodeH265ProfileInfoEXT {}
+impl<'a> ::std::ops::Deref for VideoEncodeH265ProfileInfoEXTBuilder<'a> {
+    type Target = VideoEncodeH265ProfileInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for VideoEncodeH265ProfileInfoEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> VideoEncodeH265ProfileInfoEXTBuilder<'a> {
+    #[inline]
+    pub fn std_profile_idc(mut self, std_profile_idc: StdVideoH265ProfileIdc) -> Self {
+        self.inner.std_profile_idc = std_profile_idc;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> VideoEncodeH265ProfileInfoEXT {
+        self.inner
+    }
+}
+// FFI mirror of VkVideoEncodeH265DpbSlotInfoEXT: a DPB slot index plus a raw pointer
+// to std reference info. The pointer defaults to null and is only populated through
+// the lifetime-tracked builder setter below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265DpbSlotInfoEXT.html>"]
+pub struct VideoEncodeH265DpbSlotInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub slot_index: i8,
+    pub p_std_reference_info: *const StdVideoEncodeH265ReferenceInfo,
+}
+impl ::std::default::Default for VideoEncodeH265DpbSlotInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            slot_index: i8::default(),
+            p_std_reference_info: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for VideoEncodeH265DpbSlotInfoEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT;
+}
+impl VideoEncodeH265DpbSlotInfoEXT {
+    pub fn builder<'a>() -> VideoEncodeH265DpbSlotInfoEXTBuilder<'a> {
+        VideoEncodeH265DpbSlotInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct VideoEncodeH265DpbSlotInfoEXTBuilder<'a> {
+    inner: VideoEncodeH265DpbSlotInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for VideoEncodeH265DpbSlotInfoEXTBuilder<'a> {
+    type Target = VideoEncodeH265DpbSlotInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for VideoEncodeH265DpbSlotInfoEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> VideoEncodeH265DpbSlotInfoEXTBuilder<'a> {
+    #[inline]
+    pub fn slot_index(mut self, slot_index: i8) -> Self {
+        self.inner.slot_index = slot_index;
+        self
+    }
+    #[inline]
+    // Stores the borrowed reference as a raw pointer; the builder's 'a lifetime keeps
+    // the borrow alive, but `build()` discards that guarantee (see its doc below).
+    pub fn std_reference_info(
+        mut self,
+        std_reference_info: &'a StdVideoEncodeH265ReferenceInfo,
+    ) -> Self {
+        self.inner.p_std_reference_info = std_reference_info;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> VideoEncodeH265DpbSlotInfoEXT {
+        self.inner
+    }
+}
+// FFI mirror of VkVideoEncodeH265ReferenceListsInfoEXT: two (count, pointer) array
+// pairs for the L0/L1 reference lists plus a pointer to std reference modifications.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265ReferenceListsInfoEXT.html>"]
+pub struct VideoEncodeH265ReferenceListsInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub reference_list0_entry_count: u8,
+    pub p_reference_list0_entries: *const VideoEncodeH265DpbSlotInfoEXT,
+    pub reference_list1_entry_count: u8,
+    pub p_reference_list1_entries: *const VideoEncodeH265DpbSlotInfoEXT,
+    pub p_reference_modifications: *const StdVideoEncodeH265ReferenceModifications,
+}
+impl ::std::default::Default for VideoEncodeH265ReferenceListsInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            reference_list0_entry_count: u8::default(),
+            p_reference_list0_entries: ::std::ptr::null(),
+            reference_list1_entry_count: u8::default(),
+            p_reference_list1_entries: ::std::ptr::null(),
+            p_reference_modifications: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for VideoEncodeH265ReferenceListsInfoEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::VIDEO_ENCODE_H265_REFERENCE_LISTS_INFO_EXT;
+}
+impl VideoEncodeH265ReferenceListsInfoEXT {
+    pub fn builder<'a>() -> VideoEncodeH265ReferenceListsInfoEXTBuilder<'a> {
+        VideoEncodeH265ReferenceListsInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct VideoEncodeH265ReferenceListsInfoEXTBuilder<'a> {
+    inner: VideoEncodeH265ReferenceListsInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for VideoEncodeH265ReferenceListsInfoEXTBuilder<'a> {
+    type Target = VideoEncodeH265ReferenceListsInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for VideoEncodeH265ReferenceListsInfoEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> VideoEncodeH265ReferenceListsInfoEXTBuilder<'a> {
+    // Slice setters fill the count and pointer fields together from one borrowed
+    // slice, so they cannot drift apart. Note `as _` truncates the slice length to
+    // the u8 count field.
+    #[inline]
+    pub fn reference_list0_entries(
+        mut self,
+        reference_list0_entries: &'a [VideoEncodeH265DpbSlotInfoEXT],
+    ) -> Self {
+        self.inner.reference_list0_entry_count = reference_list0_entries.len() as _;
+        self.inner.p_reference_list0_entries = reference_list0_entries.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn reference_list1_entries(
+        mut self,
+        reference_list1_entries: &'a [VideoEncodeH265DpbSlotInfoEXT],
+    ) -> Self {
+        self.inner.reference_list1_entry_count = reference_list1_entries.len() as _;
+        self.inner.p_reference_list1_entries = reference_list1_entries.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn reference_modifications(
+        mut self,
+        reference_modifications: &'a StdVideoEncodeH265ReferenceModifications,
+    ) -> Self {
+        self.inner.p_reference_modifications = reference_modifications;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> VideoEncodeH265ReferenceListsInfoEXT {
+        self.inner
+    }
+}
+// FFI mirror of VkPhysicalDeviceInheritedViewportScissorFeaturesNV: a single-feature
+// query/enable struct. p_next is *mut (not *const) because feature structs are written
+// to by vkGetPhysicalDeviceFeatures2-style queries.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceInheritedViewportScissorFeaturesNV.html>"]
+pub struct PhysicalDeviceInheritedViewportScissorFeaturesNV {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub inherited_viewport_scissor2_d: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceInheritedViewportScissorFeaturesNV {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            inherited_viewport_scissor2_d: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceInheritedViewportScissorFeaturesNV {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV;
+}
+impl PhysicalDeviceInheritedViewportScissorFeaturesNV {
+    pub fn builder<'a>() -> PhysicalDeviceInheritedViewportScissorFeaturesNVBuilder<'a> {
+        PhysicalDeviceInheritedViewportScissorFeaturesNVBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceInheritedViewportScissorFeaturesNVBuilder<'a> {
+    inner: PhysicalDeviceInheritedViewportScissorFeaturesNV,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// May extend both VkPhysicalDeviceFeatures2 (query path) and VkDeviceCreateInfo
+// (enable path), per the usual feature-struct pattern.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceInheritedViewportScissorFeaturesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceInheritedViewportScissorFeaturesNV {}
+unsafe impl ExtendsDeviceCreateInfo
+    for PhysicalDeviceInheritedViewportScissorFeaturesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceInheritedViewportScissorFeaturesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceInheritedViewportScissorFeaturesNVBuilder<'a> {
+    type Target = PhysicalDeviceInheritedViewportScissorFeaturesNV;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceInheritedViewportScissorFeaturesNVBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceInheritedViewportScissorFeaturesNVBuilder<'a> {
+    #[inline]
+    pub fn inherited_viewport_scissor2_d(mut self, inherited_viewport_scissor2_d: bool) -> Self {
+        self.inner.inherited_viewport_scissor2_d = inherited_viewport_scissor2_d.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceInheritedViewportScissorFeaturesNV {
+        self.inner
+    }
+}
+// FFI mirror of VkCommandBufferInheritanceViewportScissorInfoNV.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandBufferInheritanceViewportScissorInfoNV.html>"]
+pub struct CommandBufferInheritanceViewportScissorInfoNV {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub viewport_scissor2_d: Bool32,
+    pub viewport_depth_count: u32,
+    pub p_viewport_depths: *const Viewport,
+}
+impl ::std::default::Default for CommandBufferInheritanceViewportScissorInfoNV {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            viewport_scissor2_d: Bool32::default(),
+            viewport_depth_count: u32::default(),
+            p_viewport_depths: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for CommandBufferInheritanceViewportScissorInfoNV {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV;
+}
+impl CommandBufferInheritanceViewportScissorInfoNV {
+    pub fn builder<'a>() -> CommandBufferInheritanceViewportScissorInfoNVBuilder<'a> {
+        CommandBufferInheritanceViewportScissorInfoNVBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct CommandBufferInheritanceViewportScissorInfoNVBuilder<'a> {
+    inner: CommandBufferInheritanceViewportScissorInfoNV,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsCommandBufferInheritanceInfo
+    for CommandBufferInheritanceViewportScissorInfoNVBuilder<'_>
+{
+}
+unsafe impl ExtendsCommandBufferInheritanceInfo for CommandBufferInheritanceViewportScissorInfoNV {}
+impl<'a> ::std::ops::Deref for CommandBufferInheritanceViewportScissorInfoNVBuilder<'a> {
+    type Target = CommandBufferInheritanceViewportScissorInfoNV;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for CommandBufferInheritanceViewportScissorInfoNVBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> CommandBufferInheritanceViewportScissorInfoNVBuilder<'a> {
+    #[inline]
+    pub fn viewport_scissor2_d(mut self, viewport_scissor2_d: bool) -> Self {
+        self.inner.viewport_scissor2_d = viewport_scissor2_d.into();
+        self
+    }
+    #[inline]
+    pub fn viewport_depth_count(mut self, viewport_depth_count: u32) -> Self {
+        self.inner.viewport_depth_count = viewport_depth_count;
+        self
+    }
+    #[inline]
+    // NOTE(review): unlike the slice setters elsewhere in this file, this takes a
+    // single &Viewport and does NOT update viewport_depth_count — callers must set
+    // the count via `viewport_depth_count()` themselves. Per the Vulkan spec,
+    // pViewportDepths is an array of viewportDepthCount elements; confirm against
+    // upstream ash whether this single-element setter is the intended generator
+    // output before relying on it for counts > 1.
+    pub fn viewport_depths(mut self, viewport_depths: &'a Viewport) -> Self {
+        self.inner.p_viewport_depths = viewport_depths;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> CommandBufferInheritanceViewportScissorInfoNV {
+        self.inner
+    }
+}
+// FFI mirror of VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT: single-feature
+// struct, chainable into VkPhysicalDeviceFeatures2 and VkDeviceCreateInfo.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT.html>"]
+pub struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub ycbcr2plane444_formats: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            ycbcr2plane444_formats: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT;
+}
+impl PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXTBuilder<'a> {
+        PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXTBuilder<'a> {
+    inner: PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXTBuilder<'a> {
+    type Target = PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXTBuilder<'a> {
+    #[inline]
+    pub fn ycbcr2plane444_formats(mut self, ycbcr2plane444_formats: bool) -> Self {
+        self.inner.ycbcr2plane444_formats = ycbcr2plane444_formats.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT {
+        self.inner
+    }
+}
+// FFI mirror of VkPhysicalDeviceProvokingVertexFeaturesEXT: two feature booleans,
+// chainable into VkPhysicalDeviceFeatures2 (query) and VkDeviceCreateInfo (enable).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceProvokingVertexFeaturesEXT.html>"]
+pub struct PhysicalDeviceProvokingVertexFeaturesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub provoking_vertex_last: Bool32,
+    pub transform_feedback_preserves_provoking_vertex: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceProvokingVertexFeaturesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            provoking_vertex_last: Bool32::default(),
+            transform_feedback_preserves_provoking_vertex: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceProvokingVertexFeaturesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT;
+}
+impl PhysicalDeviceProvokingVertexFeaturesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceProvokingVertexFeaturesEXTBuilder<'a> {
+        PhysicalDeviceProvokingVertexFeaturesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceProvokingVertexFeaturesEXTBuilder<'a> {
+    inner: PhysicalDeviceProvokingVertexFeaturesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceProvokingVertexFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceProvokingVertexFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceProvokingVertexFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceProvokingVertexFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceProvokingVertexFeaturesEXTBuilder<'a> {
+    type Target = PhysicalDeviceProvokingVertexFeaturesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceProvokingVertexFeaturesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceProvokingVertexFeaturesEXTBuilder<'a> {
+    #[inline]
+    pub fn provoking_vertex_last(mut self, provoking_vertex_last: bool) -> Self {
+        self.inner.provoking_vertex_last = provoking_vertex_last.into();
+        self
+    }
+    #[inline]
+    pub fn transform_feedback_preserves_provoking_vertex(
+        mut self,
+        transform_feedback_preserves_provoking_vertex: bool,
+    ) -> Self {
+        self.inner.transform_feedback_preserves_provoking_vertex =
+            transform_feedback_preserves_provoking_vertex.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceProvokingVertexFeaturesEXT {
+        self.inner
+    }
+}
+// FFI mirror of VkPhysicalDeviceProvokingVertexPropertiesEXT: read-only device
+// properties, chainable into VkPhysicalDeviceProperties2 (query path only — no
+// ExtendsDeviceCreateInfo impl, unlike the feature structs above).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceProvokingVertexPropertiesEXT.html>"]
+pub struct PhysicalDeviceProvokingVertexPropertiesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub provoking_vertex_mode_per_pipeline: Bool32,
+    pub transform_feedback_preserves_triangle_fan_provoking_vertex: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceProvokingVertexPropertiesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            provoking_vertex_mode_per_pipeline: Bool32::default(),
+            transform_feedback_preserves_triangle_fan_provoking_vertex: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceProvokingVertexPropertiesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT;
+}
+impl PhysicalDeviceProvokingVertexPropertiesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceProvokingVertexPropertiesEXTBuilder<'a> {
+        PhysicalDeviceProvokingVertexPropertiesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceProvokingVertexPropertiesEXTBuilder<'a> {
+    inner: PhysicalDeviceProvokingVertexPropertiesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+    for PhysicalDeviceProvokingVertexPropertiesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceProvokingVertexPropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceProvokingVertexPropertiesEXTBuilder<'a> {
+    type Target = PhysicalDeviceProvokingVertexPropertiesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceProvokingVertexPropertiesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceProvokingVertexPropertiesEXTBuilder<'a> {
+    #[inline]
+    pub fn provoking_vertex_mode_per_pipeline(
+        mut self,
+        provoking_vertex_mode_per_pipeline: bool,
+    ) -> Self {
+        self.inner.provoking_vertex_mode_per_pipeline = provoking_vertex_mode_per_pipeline.into();
+        self
+    }
+    #[inline]
+    pub fn transform_feedback_preserves_triangle_fan_provoking_vertex(
+        mut self,
+        transform_feedback_preserves_triangle_fan_provoking_vertex: bool,
+    ) -> Self {
+        self.inner
+            .transform_feedback_preserves_triangle_fan_provoking_vertex =
+            transform_feedback_preserves_triangle_fan_provoking_vertex.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceProvokingVertexPropertiesEXT {
+        self.inner
+    }
+}
+// FFI mirror of VkPipelineRasterizationProvokingVertexStateCreateInfoEXT: selects the
+// provoking-vertex mode by extending VkPipelineRasterizationStateCreateInfo.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRasterizationProvokingVertexStateCreateInfoEXT.html>"]
+pub struct PipelineRasterizationProvokingVertexStateCreateInfoEXT {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub provoking_vertex_mode: ProvokingVertexModeEXT,
+}
+impl ::std::default::Default for PipelineRasterizationProvokingVertexStateCreateInfoEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            provoking_vertex_mode: ProvokingVertexModeEXT::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PipelineRasterizationProvokingVertexStateCreateInfoEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT;
+}
+impl PipelineRasterizationProvokingVertexStateCreateInfoEXT {
+    pub fn builder<'a>() -> PipelineRasterizationProvokingVertexStateCreateInfoEXTBuilder<'a> {
+        PipelineRasterizationProvokingVertexStateCreateInfoEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct PipelineRasterizationProvokingVertexStateCreateInfoEXTBuilder<'a> {
+    inner: PipelineRasterizationProvokingVertexStateCreateInfoEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPipelineRasterizationStateCreateInfo
+    for PipelineRasterizationProvokingVertexStateCreateInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPipelineRasterizationStateCreateInfo
+    for PipelineRasterizationProvokingVertexStateCreateInfoEXT
+{
+}
+impl<'a> ::std::ops::Deref for PipelineRasterizationProvokingVertexStateCreateInfoEXTBuilder<'a> {
+    type Target = PipelineRasterizationProvokingVertexStateCreateInfoEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut
+    for PipelineRasterizationProvokingVertexStateCreateInfoEXTBuilder<'a>
+{
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PipelineRasterizationProvokingVertexStateCreateInfoEXTBuilder<'a> {
+    #[inline]
+    pub fn provoking_vertex_mode(mut self, provoking_vertex_mode: ProvokingVertexModeEXT) -> Self {
+        self.inner.provoking_vertex_mode = provoking_vertex_mode;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PipelineRasterizationProvokingVertexStateCreateInfoEXT {
+        self.inner
+    }
+}
+// FFI mirror of VkCuModuleCreateInfoNVX: an opaque byte blob (data_size + p_data)
+// holding a CUDA module image for the NVX_binary_import extension.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCuModuleCreateInfoNVX.html>"]
+pub struct CuModuleCreateInfoNVX {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub data_size: usize,
+    pub p_data: *const c_void,
+}
+impl ::std::default::Default for CuModuleCreateInfoNVX {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            data_size: usize::default(),
+            p_data: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for CuModuleCreateInfoNVX {
+    const STRUCTURE_TYPE: StructureType = StructureType::CU_MODULE_CREATE_INFO_NVX;
+}
+impl CuModuleCreateInfoNVX {
+    pub fn builder<'a>() -> CuModuleCreateInfoNVXBuilder<'a> {
+        CuModuleCreateInfoNVXBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct CuModuleCreateInfoNVXBuilder<'a> {
+    inner: CuModuleCreateInfoNVX,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CuModuleCreateInfoNVXBuilder<'a> {
+    type Target = CuModuleCreateInfoNVX;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for CuModuleCreateInfoNVXBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> CuModuleCreateInfoNVXBuilder<'a> {
+    #[inline]
+    // Sets size and pointer together from one borrowed byte slice; `.cast()` erases
+    // the element type to the *const c_void the C API expects.
+    pub fn data(mut self, data: &'a [u8]) -> Self {
+        self.inner.data_size = data.len();
+        self.inner.p_data = data.as_ptr().cast();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> CuModuleCreateInfoNVX {
+        self.inner
+    }
+}
+// FFI mirror of VkCuFunctionCreateInfoNVX: names a kernel entry point (p_name, a
+// NUL-terminated C string) inside a previously created CuModuleNVX.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCuFunctionCreateInfoNVX.html>"]
+pub struct CuFunctionCreateInfoNVX {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub module: CuModuleNVX,
+    pub p_name: *const c_char,
+}
+impl ::std::default::Default for CuFunctionCreateInfoNVX {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            module: CuModuleNVX::default(),
+            p_name: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for CuFunctionCreateInfoNVX {
+    const STRUCTURE_TYPE: StructureType = StructureType::CU_FUNCTION_CREATE_INFO_NVX;
+}
+impl CuFunctionCreateInfoNVX {
+    pub fn builder<'a>() -> CuFunctionCreateInfoNVXBuilder<'a> {
+        CuFunctionCreateInfoNVXBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct CuFunctionCreateInfoNVXBuilder<'a> {
+    inner: CuFunctionCreateInfoNVX,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CuFunctionCreateInfoNVXBuilder<'a> {
+    type Target = CuFunctionCreateInfoNVX;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for CuFunctionCreateInfoNVXBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> CuFunctionCreateInfoNVXBuilder<'a> {
+    #[inline]
+    pub fn module(mut self, module: CuModuleNVX) -> Self {
+        self.inner.module = module;
+        self
+    }
+    #[inline]
+    // Taking &CStr (not &str) guarantees NUL termination for the C side.
+    pub fn name(mut self, name: &'a ::std::ffi::CStr) -> Self {
+        self.inner.p_name = name.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> CuFunctionCreateInfoNVX {
+        self.inner
+    }
+}
+// FFI mirror of VkCuLaunchInfoNVX: CUDA-style kernel launch parameters (grid/block
+// dimensions, shared memory size) plus two (count, pointer-to-pointers) argument
+// arrays, p_params and p_extras.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCuLaunchInfoNVX.html>"]
+pub struct CuLaunchInfoNVX {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub function: CuFunctionNVX,
+    pub grid_dim_x: u32,
+    pub grid_dim_y: u32,
+    pub grid_dim_z: u32,
+    pub block_dim_x: u32,
+    pub block_dim_y: u32,
+    pub block_dim_z: u32,
+    pub shared_mem_bytes: u32,
+    pub param_count: usize,
+    pub p_params: *const *const c_void,
+    pub extra_count: usize,
+    pub p_extras: *const *const c_void,
+}
+impl ::std::default::Default for CuLaunchInfoNVX {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            function: CuFunctionNVX::default(),
+            grid_dim_x: u32::default(),
+            grid_dim_y: u32::default(),
+            grid_dim_z: u32::default(),
+            block_dim_x: u32::default(),
+            block_dim_y: u32::default(),
+            block_dim_z: u32::default(),
+            shared_mem_bytes: u32::default(),
+            param_count: usize::default(),
+            p_params: ::std::ptr::null(),
+            extra_count: usize::default(),
+            p_extras: ::std::ptr::null(),
+        }
+    }
+}
+unsafe impl TaggedStructure for CuLaunchInfoNVX {
+    const STRUCTURE_TYPE: StructureType = StructureType::CU_LAUNCH_INFO_NVX;
+}
+impl CuLaunchInfoNVX {
+    pub fn builder<'a>() -> CuLaunchInfoNVXBuilder<'a> {
+        CuLaunchInfoNVXBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+#[repr(transparent)]
+pub struct CuLaunchInfoNVXBuilder<'a> {
+    inner: CuLaunchInfoNVX,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CuLaunchInfoNVXBuilder<'a> {
+    type Target = CuLaunchInfoNVX;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for CuLaunchInfoNVXBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> CuLaunchInfoNVXBuilder<'a> {
+    #[inline]
+    pub fn function(mut self, function: CuFunctionNVX) -> Self {
+        self.inner.function = function;
+        self
+    }
+    #[inline]
+    pub fn grid_dim_x(mut self, grid_dim_x: u32) -> Self {
+        self.inner.grid_dim_x = grid_dim_x;
+        self
+    }
+    #[inline]
+    pub fn grid_dim_y(mut self, grid_dim_y: u32) -> Self {
+        self.inner.grid_dim_y = grid_dim_y;
+        self
+    }
+    #[inline]
+    pub fn grid_dim_z(mut self, grid_dim_z: u32) -> Self {
+        self.inner.grid_dim_z = grid_dim_z;
+        self
+    }
+    #[inline]
+    pub fn block_dim_x(mut self, block_dim_x: u32) -> Self {
+        self.inner.block_dim_x = block_dim_x;
+        self
+    }
+    #[inline]
+    pub fn block_dim_y(mut self, block_dim_y: u32) -> Self {
+        self.inner.block_dim_y = block_dim_y;
+        self
+    }
+    #[inline]
+    pub fn block_dim_z(mut self, block_dim_z: u32) -> Self {
+        self.inner.block_dim_z = block_dim_z;
+        self
+    }
+    #[inline]
+    pub fn shared_mem_bytes(mut self, shared_mem_bytes: u32) -> Self {
+        self.inner.shared_mem_bytes = shared_mem_bytes;
+        self
+    }
+    // Slice setters keep count and pointer in sync; the slice elements are themselves
+    // raw pointers whose pointees the caller must keep alive for the launch.
+    #[inline]
+    pub fn params(mut self, params: &'a [*const c_void]) -> Self {
+        self.inner.param_count = params.len();
+        self.inner.p_params = params.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn extras(mut self, extras: &'a [*const c_void]) -> Self {
+        self.inner.extra_count = extras.len();
+        self.inner.p_extras = extras.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> CuLaunchInfoNVX {
+        self.inner
+    }
+}
// Generated binding for VkPhysicalDeviceDescriptorBufferFeaturesEXT
// (VK_EXT_descriptor_buffer feature-query / enable struct). C-compatible
// layout; Bool32 fields are set from Rust `bool`s via the builder.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDescriptorBufferFeaturesEXT.html>"]
pub struct PhysicalDeviceDescriptorBufferFeaturesEXT {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub descriptor_buffer: Bool32,
    pub descriptor_buffer_capture_replay: Bool32,
    pub descriptor_buffer_image_layout_ignored: Bool32,
    pub descriptor_buffer_push_descriptors: Bool32,
}
// Default is chain-ready: s_type pre-set, p_next null, features all zero (false).
impl ::std::default::Default for PhysicalDeviceDescriptorBufferFeaturesEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            descriptor_buffer: Bool32::default(),
            descriptor_buffer_capture_replay: Bool32::default(),
            descriptor_buffer_image_layout_ignored: Bool32::default(),
            descriptor_buffer_push_descriptors: Bool32::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceDescriptorBufferFeaturesEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT;
}
impl PhysicalDeviceDescriptorBufferFeaturesEXT {
    pub fn builder<'a>() -> PhysicalDeviceDescriptorBufferFeaturesEXTBuilder<'a> {
        PhysicalDeviceDescriptorBufferFeaturesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDeviceDescriptorBufferFeaturesEXTBuilder<'a> {
    inner: PhysicalDeviceDescriptorBufferFeaturesEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct (and its builder) may be pushed onto the p_next
// chains of PhysicalDeviceFeatures2 (query) and DeviceCreateInfo (enable).
unsafe impl ExtendsPhysicalDeviceFeatures2
    for PhysicalDeviceDescriptorBufferFeaturesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDescriptorBufferFeaturesEXT {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDescriptorBufferFeaturesEXTBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDescriptorBufferFeaturesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceDescriptorBufferFeaturesEXTBuilder<'a> {
    type Target = PhysicalDeviceDescriptorBufferFeaturesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceDescriptorBufferFeaturesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Setters convert Rust `bool` into Vulkan Bool32 via `.into()`.
impl<'a> PhysicalDeviceDescriptorBufferFeaturesEXTBuilder<'a> {
    #[inline]
    pub fn descriptor_buffer(mut self, descriptor_buffer: bool) -> Self {
        self.inner.descriptor_buffer = descriptor_buffer.into();
        self
    }
    #[inline]
    pub fn descriptor_buffer_capture_replay(
        mut self,
        descriptor_buffer_capture_replay: bool,
    ) -> Self {
        self.inner.descriptor_buffer_capture_replay = descriptor_buffer_capture_replay.into();
        self
    }
    #[inline]
    pub fn descriptor_buffer_image_layout_ignored(
        mut self,
        descriptor_buffer_image_layout_ignored: bool,
    ) -> Self {
        self.inner.descriptor_buffer_image_layout_ignored =
            descriptor_buffer_image_layout_ignored.into();
        self
    }
    #[inline]
    pub fn descriptor_buffer_push_descriptors(
        mut self,
        descriptor_buffer_push_descriptors: bool,
    ) -> Self {
        self.inner.descriptor_buffer_push_descriptors = descriptor_buffer_push_descriptors.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceDescriptorBufferFeaturesEXT {
        self.inner
    }
}
// Generated binding for VkPhysicalDeviceDescriptorBufferPropertiesEXT: the
// implementation-limit struct for VK_EXT_descriptor_buffer (descriptor sizes,
// alignments, binding counts, address-space sizes). Read-only from the
// application's point of view; the setters below exist for generator symmetry.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDescriptorBufferPropertiesEXT.html>"]
pub struct PhysicalDeviceDescriptorBufferPropertiesEXT {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub combined_image_sampler_descriptor_single_array: Bool32,
    pub bufferless_push_descriptors: Bool32,
    pub allow_sampler_image_view_post_submit_creation: Bool32,
    pub descriptor_buffer_offset_alignment: DeviceSize,
    pub max_descriptor_buffer_bindings: u32,
    pub max_resource_descriptor_buffer_bindings: u32,
    pub max_sampler_descriptor_buffer_bindings: u32,
    pub max_embedded_immutable_sampler_bindings: u32,
    pub max_embedded_immutable_samplers: u32,
    pub buffer_capture_replay_descriptor_data_size: usize,
    pub image_capture_replay_descriptor_data_size: usize,
    pub image_view_capture_replay_descriptor_data_size: usize,
    pub sampler_capture_replay_descriptor_data_size: usize,
    pub acceleration_structure_capture_replay_descriptor_data_size: usize,
    pub sampler_descriptor_size: usize,
    pub combined_image_sampler_descriptor_size: usize,
    pub sampled_image_descriptor_size: usize,
    pub storage_image_descriptor_size: usize,
    pub uniform_texel_buffer_descriptor_size: usize,
    pub robust_uniform_texel_buffer_descriptor_size: usize,
    pub storage_texel_buffer_descriptor_size: usize,
    pub robust_storage_texel_buffer_descriptor_size: usize,
    pub uniform_buffer_descriptor_size: usize,
    pub robust_uniform_buffer_descriptor_size: usize,
    pub storage_buffer_descriptor_size: usize,
    pub robust_storage_buffer_descriptor_size: usize,
    pub input_attachment_descriptor_size: usize,
    pub acceleration_structure_descriptor_size: usize,
    pub max_sampler_descriptor_buffer_range: DeviceSize,
    pub max_resource_descriptor_buffer_range: DeviceSize,
    pub sampler_descriptor_buffer_address_space_size: DeviceSize,
    pub resource_descriptor_buffer_address_space_size: DeviceSize,
    pub descriptor_buffer_address_space_size: DeviceSize,
}
// Default is chain-ready: s_type pre-set, p_next null, every limit zeroed.
impl ::std::default::Default for PhysicalDeviceDescriptorBufferPropertiesEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            combined_image_sampler_descriptor_single_array: Bool32::default(),
            bufferless_push_descriptors: Bool32::default(),
            allow_sampler_image_view_post_submit_creation: Bool32::default(),
            descriptor_buffer_offset_alignment: DeviceSize::default(),
            max_descriptor_buffer_bindings: u32::default(),
            max_resource_descriptor_buffer_bindings: u32::default(),
            max_sampler_descriptor_buffer_bindings: u32::default(),
            max_embedded_immutable_sampler_bindings: u32::default(),
            max_embedded_immutable_samplers: u32::default(),
            buffer_capture_replay_descriptor_data_size: usize::default(),
            image_capture_replay_descriptor_data_size: usize::default(),
            image_view_capture_replay_descriptor_data_size: usize::default(),
            sampler_capture_replay_descriptor_data_size: usize::default(),
            acceleration_structure_capture_replay_descriptor_data_size: usize::default(),
            sampler_descriptor_size: usize::default(),
            combined_image_sampler_descriptor_size: usize::default(),
            sampled_image_descriptor_size: usize::default(),
            storage_image_descriptor_size: usize::default(),
            uniform_texel_buffer_descriptor_size: usize::default(),
            robust_uniform_texel_buffer_descriptor_size: usize::default(),
            storage_texel_buffer_descriptor_size: usize::default(),
            robust_storage_texel_buffer_descriptor_size: usize::default(),
            uniform_buffer_descriptor_size: usize::default(),
            robust_uniform_buffer_descriptor_size: usize::default(),
            storage_buffer_descriptor_size: usize::default(),
            robust_storage_buffer_descriptor_size: usize::default(),
            input_attachment_descriptor_size: usize::default(),
            acceleration_structure_descriptor_size: usize::default(),
            max_sampler_descriptor_buffer_range: DeviceSize::default(),
            max_resource_descriptor_buffer_range: DeviceSize::default(),
            sampler_descriptor_buffer_address_space_size: DeviceSize::default(),
            resource_descriptor_buffer_address_space_size: DeviceSize::default(),
            descriptor_buffer_address_space_size: DeviceSize::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceDescriptorBufferPropertiesEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT;
}
impl PhysicalDeviceDescriptorBufferPropertiesEXT {
    pub fn builder<'a>() -> PhysicalDeviceDescriptorBufferPropertiesEXTBuilder<'a> {
        PhysicalDeviceDescriptorBufferPropertiesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDeviceDescriptorBufferPropertiesEXTBuilder<'a> {
    inner: PhysicalDeviceDescriptorBufferPropertiesEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: may be chained onto PhysicalDeviceProperties2 when querying limits.
unsafe impl ExtendsPhysicalDeviceProperties2
    for PhysicalDeviceDescriptorBufferPropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDescriptorBufferPropertiesEXT {}
impl<'a> ::std::ops::Deref for PhysicalDeviceDescriptorBufferPropertiesEXTBuilder<'a> {
    type Target = PhysicalDeviceDescriptorBufferPropertiesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceDescriptorBufferPropertiesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// One chainable setter per field, generated mechanically from the registry.
impl<'a> PhysicalDeviceDescriptorBufferPropertiesEXTBuilder<'a> {
    #[inline]
    pub fn combined_image_sampler_descriptor_single_array(
        mut self,
        combined_image_sampler_descriptor_single_array: bool,
    ) -> Self {
        self.inner.combined_image_sampler_descriptor_single_array =
            combined_image_sampler_descriptor_single_array.into();
        self
    }
    #[inline]
    pub fn bufferless_push_descriptors(mut self, bufferless_push_descriptors: bool) -> Self {
        self.inner.bufferless_push_descriptors = bufferless_push_descriptors.into();
        self
    }
    #[inline]
    pub fn allow_sampler_image_view_post_submit_creation(
        mut self,
        allow_sampler_image_view_post_submit_creation: bool,
    ) -> Self {
        self.inner.allow_sampler_image_view_post_submit_creation =
            allow_sampler_image_view_post_submit_creation.into();
        self
    }
    #[inline]
    pub fn descriptor_buffer_offset_alignment(
        mut self,
        descriptor_buffer_offset_alignment: DeviceSize,
    ) -> Self {
        self.inner.descriptor_buffer_offset_alignment = descriptor_buffer_offset_alignment;
        self
    }
    #[inline]
    pub fn max_descriptor_buffer_bindings(mut self, max_descriptor_buffer_bindings: u32) -> Self {
        self.inner.max_descriptor_buffer_bindings = max_descriptor_buffer_bindings;
        self
    }
    #[inline]
    pub fn max_resource_descriptor_buffer_bindings(
        mut self,
        max_resource_descriptor_buffer_bindings: u32,
    ) -> Self {
        self.inner.max_resource_descriptor_buffer_bindings =
            max_resource_descriptor_buffer_bindings;
        self
    }
    #[inline]
    pub fn max_sampler_descriptor_buffer_bindings(
        mut self,
        max_sampler_descriptor_buffer_bindings: u32,
    ) -> Self {
        self.inner.max_sampler_descriptor_buffer_bindings = max_sampler_descriptor_buffer_bindings;
        self
    }
    #[inline]
    pub fn max_embedded_immutable_sampler_bindings(
        mut self,
        max_embedded_immutable_sampler_bindings: u32,
    ) -> Self {
        self.inner.max_embedded_immutable_sampler_bindings =
            max_embedded_immutable_sampler_bindings;
        self
    }
    #[inline]
    pub fn max_embedded_immutable_samplers(mut self, max_embedded_immutable_samplers: u32) -> Self {
        self.inner.max_embedded_immutable_samplers = max_embedded_immutable_samplers;
        self
    }
    #[inline]
    pub fn buffer_capture_replay_descriptor_data_size(
        mut self,
        buffer_capture_replay_descriptor_data_size: usize,
    ) -> Self {
        self.inner.buffer_capture_replay_descriptor_data_size =
            buffer_capture_replay_descriptor_data_size;
        self
    }
    #[inline]
    pub fn image_capture_replay_descriptor_data_size(
        mut self,
        image_capture_replay_descriptor_data_size: usize,
    ) -> Self {
        self.inner.image_capture_replay_descriptor_data_size =
            image_capture_replay_descriptor_data_size;
        self
    }
    #[inline]
    pub fn image_view_capture_replay_descriptor_data_size(
        mut self,
        image_view_capture_replay_descriptor_data_size: usize,
    ) -> Self {
        self.inner.image_view_capture_replay_descriptor_data_size =
            image_view_capture_replay_descriptor_data_size;
        self
    }
    #[inline]
    pub fn sampler_capture_replay_descriptor_data_size(
        mut self,
        sampler_capture_replay_descriptor_data_size: usize,
    ) -> Self {
        self.inner.sampler_capture_replay_descriptor_data_size =
            sampler_capture_replay_descriptor_data_size;
        self
    }
    #[inline]
    pub fn acceleration_structure_capture_replay_descriptor_data_size(
        mut self,
        acceleration_structure_capture_replay_descriptor_data_size: usize,
    ) -> Self {
        self.inner
            .acceleration_structure_capture_replay_descriptor_data_size =
            acceleration_structure_capture_replay_descriptor_data_size;
        self
    }
    #[inline]
    pub fn sampler_descriptor_size(mut self, sampler_descriptor_size: usize) -> Self {
        self.inner.sampler_descriptor_size = sampler_descriptor_size;
        self
    }
    #[inline]
    pub fn combined_image_sampler_descriptor_size(
        mut self,
        combined_image_sampler_descriptor_size: usize,
    ) -> Self {
        self.inner.combined_image_sampler_descriptor_size = combined_image_sampler_descriptor_size;
        self
    }
    #[inline]
    pub fn sampled_image_descriptor_size(mut self, sampled_image_descriptor_size: usize) -> Self {
        self.inner.sampled_image_descriptor_size = sampled_image_descriptor_size;
        self
    }
    #[inline]
    pub fn storage_image_descriptor_size(mut self, storage_image_descriptor_size: usize) -> Self {
        self.inner.storage_image_descriptor_size = storage_image_descriptor_size;
        self
    }
    #[inline]
    pub fn uniform_texel_buffer_descriptor_size(
        mut self,
        uniform_texel_buffer_descriptor_size: usize,
    ) -> Self {
        self.inner.uniform_texel_buffer_descriptor_size = uniform_texel_buffer_descriptor_size;
        self
    }
    #[inline]
    pub fn robust_uniform_texel_buffer_descriptor_size(
        mut self,
        robust_uniform_texel_buffer_descriptor_size: usize,
    ) -> Self {
        self.inner.robust_uniform_texel_buffer_descriptor_size =
            robust_uniform_texel_buffer_descriptor_size;
        self
    }
    #[inline]
    pub fn storage_texel_buffer_descriptor_size(
        mut self,
        storage_texel_buffer_descriptor_size: usize,
    ) -> Self {
        self.inner.storage_texel_buffer_descriptor_size = storage_texel_buffer_descriptor_size;
        self
    }
    #[inline]
    pub fn robust_storage_texel_buffer_descriptor_size(
        mut self,
        robust_storage_texel_buffer_descriptor_size: usize,
    ) -> Self {
        self.inner.robust_storage_texel_buffer_descriptor_size =
            robust_storage_texel_buffer_descriptor_size;
        self
    }
    #[inline]
    pub fn uniform_buffer_descriptor_size(mut self, uniform_buffer_descriptor_size: usize) -> Self {
        self.inner.uniform_buffer_descriptor_size = uniform_buffer_descriptor_size;
        self
    }
    #[inline]
    pub fn robust_uniform_buffer_descriptor_size(
        mut self,
        robust_uniform_buffer_descriptor_size: usize,
    ) -> Self {
        self.inner.robust_uniform_buffer_descriptor_size = robust_uniform_buffer_descriptor_size;
        self
    }
    #[inline]
    pub fn storage_buffer_descriptor_size(mut self, storage_buffer_descriptor_size: usize) -> Self {
        self.inner.storage_buffer_descriptor_size = storage_buffer_descriptor_size;
        self
    }
    #[inline]
    pub fn robust_storage_buffer_descriptor_size(
        mut self,
        robust_storage_buffer_descriptor_size: usize,
    ) -> Self {
        self.inner.robust_storage_buffer_descriptor_size = robust_storage_buffer_descriptor_size;
        self
    }
    #[inline]
    pub fn input_attachment_descriptor_size(
        mut self,
        input_attachment_descriptor_size: usize,
    ) -> Self {
        self.inner.input_attachment_descriptor_size = input_attachment_descriptor_size;
        self
    }
    #[inline]
    pub fn acceleration_structure_descriptor_size(
        mut self,
        acceleration_structure_descriptor_size: usize,
    ) -> Self {
        self.inner.acceleration_structure_descriptor_size = acceleration_structure_descriptor_size;
        self
    }
    #[inline]
    pub fn max_sampler_descriptor_buffer_range(
        mut self,
        max_sampler_descriptor_buffer_range: DeviceSize,
    ) -> Self {
        self.inner.max_sampler_descriptor_buffer_range = max_sampler_descriptor_buffer_range;
        self
    }
    #[inline]
    pub fn max_resource_descriptor_buffer_range(
        mut self,
        max_resource_descriptor_buffer_range: DeviceSize,
    ) -> Self {
        self.inner.max_resource_descriptor_buffer_range = max_resource_descriptor_buffer_range;
        self
    }
    #[inline]
    pub fn sampler_descriptor_buffer_address_space_size(
        mut self,
        sampler_descriptor_buffer_address_space_size: DeviceSize,
    ) -> Self {
        self.inner.sampler_descriptor_buffer_address_space_size =
            sampler_descriptor_buffer_address_space_size;
        self
    }
    #[inline]
    pub fn resource_descriptor_buffer_address_space_size(
        mut self,
        resource_descriptor_buffer_address_space_size: DeviceSize,
    ) -> Self {
        self.inner.resource_descriptor_buffer_address_space_size =
            resource_descriptor_buffer_address_space_size;
        self
    }
    #[inline]
    pub fn descriptor_buffer_address_space_size(
        mut self,
        descriptor_buffer_address_space_size: DeviceSize,
    ) -> Self {
        self.inner.descriptor_buffer_address_space_size = descriptor_buffer_address_space_size;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceDescriptorBufferPropertiesEXT {
        self.inner
    }
}
// Generated binding for VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT:
// a single extra limit (combined image sampler + density map descriptor size),
// chained onto PhysicalDeviceProperties2 via the marker impls below.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT.html>"]
pub struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub combined_image_sampler_density_map_descriptor_size: usize,
}
impl ::std::default::Default for PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            combined_image_sampler_density_map_descriptor_size: usize::default(),
        }
    }
}
unsafe impl TaggedStructure for PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT;
}
impl PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT {
    pub fn builder<'a>() -> PhysicalDeviceDescriptorBufferDensityMapPropertiesEXTBuilder<'a> {
        PhysicalDeviceDescriptorBufferDensityMapPropertiesEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct PhysicalDeviceDescriptorBufferDensityMapPropertiesEXTBuilder<'a> {
    inner: PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsPhysicalDeviceProperties2
    for PhysicalDeviceDescriptorBufferDensityMapPropertiesEXTBuilder<'_>
{
}
unsafe impl ExtendsPhysicalDeviceProperties2
    for PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT
{
}
impl<'a> ::std::ops::Deref for PhysicalDeviceDescriptorBufferDensityMapPropertiesEXTBuilder<'a> {
    type Target = PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceDescriptorBufferDensityMapPropertiesEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceDescriptorBufferDensityMapPropertiesEXTBuilder<'a> {
    #[inline]
    pub fn combined_image_sampler_density_map_descriptor_size(
        mut self,
        combined_image_sampler_density_map_descriptor_size: usize,
    ) -> Self {
        self.inner
            .combined_image_sampler_density_map_descriptor_size =
            combined_image_sampler_density_map_descriptor_size;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT {
        self.inner
    }
}
// Generated binding for VkDescriptorAddressInfoEXT: describes a buffer region
// (device address + range + format) referenced by the DescriptorDataEXT union
// below when fetching buffer-type descriptors.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorAddressInfoEXT.html>"]
pub struct DescriptorAddressInfoEXT {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub address: DeviceAddress,
    pub range: DeviceSize,
    pub format: Format,
}
impl ::std::default::Default for DescriptorAddressInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            address: DeviceAddress::default(),
            range: DeviceSize::default(),
            format: Format::default(),
        }
    }
}
unsafe impl TaggedStructure for DescriptorAddressInfoEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_ADDRESS_INFO_EXT;
}
impl DescriptorAddressInfoEXT {
    pub fn builder<'a>() -> DescriptorAddressInfoEXTBuilder<'a> {
        DescriptorAddressInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct DescriptorAddressInfoEXTBuilder<'a> {
    inner: DescriptorAddressInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DescriptorAddressInfoEXTBuilder<'a> {
    type Target = DescriptorAddressInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for DescriptorAddressInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> DescriptorAddressInfoEXTBuilder<'a> {
    #[inline]
    pub fn address(mut self, address: DeviceAddress) -> Self {
        self.inner.address = address;
        self
    }
    #[inline]
    pub fn range(mut self, range: DeviceSize) -> Self {
        self.inner.range = range;
        self
    }
    #[inline]
    pub fn format(mut self, format: Format) -> Self {
        self.inner.format = format;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> DescriptorAddressInfoEXT {
        self.inner
    }
}
// Generated binding for VkDescriptorBufferBindingInfoEXT: identifies a
// descriptor buffer (device address + usage flags) to bind. This struct is a
// chain root: it declares its own Extends* trait for valid p_next extensions.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorBufferBindingInfoEXT.html>"]
pub struct DescriptorBufferBindingInfoEXT {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub address: DeviceAddress,
    pub usage: BufferUsageFlags,
}
impl ::std::default::Default for DescriptorBufferBindingInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            address: DeviceAddress::default(),
            usage: BufferUsageFlags::default(),
        }
    }
}
unsafe impl TaggedStructure for DescriptorBufferBindingInfoEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_BUFFER_BINDING_INFO_EXT;
}
impl DescriptorBufferBindingInfoEXT {
    pub fn builder<'a>() -> DescriptorBufferBindingInfoEXTBuilder<'a> {
        DescriptorBufferBindingInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct DescriptorBufferBindingInfoEXTBuilder<'a> {
    inner: DescriptorBufferBindingInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait implemented by structs allowed on this struct's p_next chain
// (see DescriptorBufferBindingPushDescriptorBufferHandleEXT below).
pub unsafe trait ExtendsDescriptorBufferBindingInfoEXT {}
impl<'a> ::std::ops::Deref for DescriptorBufferBindingInfoEXTBuilder<'a> {
    type Target = DescriptorBufferBindingInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for DescriptorBufferBindingInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> DescriptorBufferBindingInfoEXTBuilder<'a> {
    #[inline]
    pub fn address(mut self, address: DeviceAddress) -> Self {
        self.inner.address = address;
        self
    }
    #[inline]
    pub fn usage(mut self, usage: BufferUsageFlags) -> Self {
        self.inner.usage = usage;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsDescriptorBufferBindingInfoEXT>(mut self, next: &'a mut T) -> Self {
        unsafe {
            // Splice `next` (which may itself be a chain) in front of the
            // existing chain: walk to the end of `next`'s chain, point its
            // tail at our current p_next, then make `next` the new head.
            let next_ptr = <*mut T>::cast(next);
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> DescriptorBufferBindingInfoEXT {
        self.inner
    }
}
// Generated binding for VkDescriptorBufferBindingPushDescriptorBufferHandleEXT:
// wraps a Buffer handle; extends DescriptorBufferBindingInfoEXT's p_next chain
// (marker impls below).
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorBufferBindingPushDescriptorBufferHandleEXT.html>"]
pub struct DescriptorBufferBindingPushDescriptorBufferHandleEXT {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub buffer: Buffer,
}
impl ::std::default::Default for DescriptorBufferBindingPushDescriptorBufferHandleEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            buffer: Buffer::default(),
        }
    }
}
unsafe impl TaggedStructure for DescriptorBufferBindingPushDescriptorBufferHandleEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT;
}
impl DescriptorBufferBindingPushDescriptorBufferHandleEXT {
    pub fn builder<'a>() -> DescriptorBufferBindingPushDescriptorBufferHandleEXTBuilder<'a> {
        DescriptorBufferBindingPushDescriptorBufferHandleEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct DescriptorBufferBindingPushDescriptorBufferHandleEXTBuilder<'a> {
    inner: DescriptorBufferBindingPushDescriptorBufferHandleEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
unsafe impl ExtendsDescriptorBufferBindingInfoEXT
    for DescriptorBufferBindingPushDescriptorBufferHandleEXTBuilder<'_>
{
}
unsafe impl ExtendsDescriptorBufferBindingInfoEXT
    for DescriptorBufferBindingPushDescriptorBufferHandleEXT
{
}
impl<'a> ::std::ops::Deref for DescriptorBufferBindingPushDescriptorBufferHandleEXTBuilder<'a> {
    type Target = DescriptorBufferBindingPushDescriptorBufferHandleEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for DescriptorBufferBindingPushDescriptorBufferHandleEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> DescriptorBufferBindingPushDescriptorBufferHandleEXTBuilder<'a> {
    #[inline]
    pub fn buffer(mut self, buffer: Buffer) -> Self {
        self.inner.buffer = buffer;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> DescriptorBufferBindingPushDescriptorBufferHandleEXT {
        self.inner
    }
}
// Generated binding for the VkDescriptorDataEXT union: exactly one variant is
// valid, selected by the `ty` field of the enclosing DescriptorGetInfoEXT.
// All variants are raw pointers or a DeviceAddress.
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorDataEXT.html>"]
pub union DescriptorDataEXT {
    pub p_sampler: *const Sampler,
    pub p_combined_image_sampler: *const DescriptorImageInfo,
    pub p_input_attachment_image: *const DescriptorImageInfo,
    pub p_sampled_image: *const DescriptorImageInfo,
    pub p_storage_image: *const DescriptorImageInfo,
    pub p_uniform_texel_buffer: *const DescriptorAddressInfoEXT,
    pub p_storage_texel_buffer: *const DescriptorAddressInfoEXT,
    pub p_uniform_buffer: *const DescriptorAddressInfoEXT,
    pub p_storage_buffer: *const DescriptorAddressInfoEXT,
    pub acceleration_structure: DeviceAddress,
}
impl ::std::default::Default for DescriptorDataEXT {
    #[inline]
    fn default() -> Self {
        // All-zero bytes (null pointer / zero address) is the generated
        // default for this union; a real variant must be set before use.
        unsafe { ::std::mem::zeroed() }
    }
}
// Generated binding for VkDescriptorGetInfoEXT: pairs a descriptor type tag
// with the matching DescriptorDataEXT union variant. Debug is hand-written
// (not derived) because the union's active variant is unknowable, so `data`
// is printed as the placeholder string "union".
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorGetInfoEXT.html>"]
pub struct DescriptorGetInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub ty: DescriptorType,
    pub data: DescriptorDataEXT,
}
#[cfg(feature = "debug")]
impl fmt::Debug for DescriptorGetInfoEXT {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("DescriptorGetInfoEXT")
            .field("s_type", &self.s_type)
            .field("p_next", &self.p_next)
            .field("ty", &self.ty)
            .field("data", &"union")
            .finish()
    }
}
impl ::std::default::Default for DescriptorGetInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            ty: DescriptorType::default(),
            data: DescriptorDataEXT::default(),
        }
    }
}
unsafe impl TaggedStructure for DescriptorGetInfoEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_GET_INFO_EXT;
}
impl DescriptorGetInfoEXT {
    pub fn builder<'a>() -> DescriptorGetInfoEXTBuilder<'a> {
        DescriptorGetInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct DescriptorGetInfoEXTBuilder<'a> {
    inner: DescriptorGetInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for DescriptorGetInfoEXTBuilder<'a> {
    type Target = DescriptorGetInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for DescriptorGetInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// NOTE(review): the builder does not enforce that `ty` matches the union
// variant stored in `data`; the caller must keep them consistent.
impl<'a> DescriptorGetInfoEXTBuilder<'a> {
    #[inline]
    pub fn ty(mut self, ty: DescriptorType) -> Self {
        self.inner.ty = ty;
        self
    }
    #[inline]
    pub fn data(mut self, data: DescriptorDataEXT) -> Self {
        self.inner.data = data;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> DescriptorGetInfoEXT {
        self.inner
    }
}
// Generated binding for VkBufferCaptureDescriptorDataInfoEXT: wraps the Buffer
// whose opaque capture-replay descriptor data is being queried. Note p_next is
// `*const` here (input-only struct), unlike the `*mut` output structs above.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferCaptureDescriptorDataInfoEXT.html>"]
pub struct BufferCaptureDescriptorDataInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub buffer: Buffer,
}
impl ::std::default::Default for BufferCaptureDescriptorDataInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            buffer: Buffer::default(),
        }
    }
}
unsafe impl TaggedStructure for BufferCaptureDescriptorDataInfoEXT {
    const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT;
}
impl BufferCaptureDescriptorDataInfoEXT {
    pub fn builder<'a>() -> BufferCaptureDescriptorDataInfoEXTBuilder<'a> {
        BufferCaptureDescriptorDataInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
#[repr(transparent)]
pub struct BufferCaptureDescriptorDataInfoEXTBuilder<'a> {
    inner: BufferCaptureDescriptorDataInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
impl<'a> ::std::ops::Deref for BufferCaptureDescriptorDataInfoEXTBuilder<'a> {
    type Target = BufferCaptureDescriptorDataInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for BufferCaptureDescriptorDataInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> BufferCaptureDescriptorDataInfoEXTBuilder<'a> {
    #[inline]
    pub fn buffer(mut self, buffer: Buffer) -> Self {
        self.inner.buffer = buffer;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> BufferCaptureDescriptorDataInfoEXT {
        self.inner
    }
}
+// Generated binding for VkImageCaptureDescriptorDataInfoEXT (URL in the
+// #[doc] attribute); same shape as the Buffer variant but keyed on an Image.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageCaptureDescriptorDataInfoEXT.html>"]
+pub struct ImageCaptureDescriptorDataInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub image: Image,
+}
+impl ::std::default::Default for ImageCaptureDescriptorDataInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ image: Image::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImageCaptureDescriptorDataInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT;
+}
+impl ImageCaptureDescriptorDataInfoEXT {
+ pub fn builder<'a>() -> ImageCaptureDescriptorDataInfoEXTBuilder<'a> {
+ ImageCaptureDescriptorDataInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImageCaptureDescriptorDataInfoEXTBuilder<'a> {
+ inner: ImageCaptureDescriptorDataInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImageCaptureDescriptorDataInfoEXTBuilder<'a> {
+ type Target = ImageCaptureDescriptorDataInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageCaptureDescriptorDataInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImageCaptureDescriptorDataInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn image(mut self, image: Image) -> Self {
+ self.inner.image = image;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageCaptureDescriptorDataInfoEXT {
+ self.inner
+ }
+}
+// Generated binding for VkImageViewCaptureDescriptorDataInfoEXT (URL in the
+// #[doc] attribute); carries the ImageView handle for capture-data queries.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageViewCaptureDescriptorDataInfoEXT.html>"]
+pub struct ImageViewCaptureDescriptorDataInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub image_view: ImageView,
+}
+impl ::std::default::Default for ImageViewCaptureDescriptorDataInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ image_view: ImageView::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImageViewCaptureDescriptorDataInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT;
+}
+impl ImageViewCaptureDescriptorDataInfoEXT {
+ pub fn builder<'a>() -> ImageViewCaptureDescriptorDataInfoEXTBuilder<'a> {
+ ImageViewCaptureDescriptorDataInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImageViewCaptureDescriptorDataInfoEXTBuilder<'a> {
+ inner: ImageViewCaptureDescriptorDataInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImageViewCaptureDescriptorDataInfoEXTBuilder<'a> {
+ type Target = ImageViewCaptureDescriptorDataInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageViewCaptureDescriptorDataInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImageViewCaptureDescriptorDataInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn image_view(mut self, image_view: ImageView) -> Self {
+ self.inner.image_view = image_view;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageViewCaptureDescriptorDataInfoEXT {
+ self.inner
+ }
+}
+// Generated binding for VkSamplerCaptureDescriptorDataInfoEXT (URL in the
+// #[doc] attribute); carries the Sampler handle for capture-data queries.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSamplerCaptureDescriptorDataInfoEXT.html>"]
+pub struct SamplerCaptureDescriptorDataInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub sampler: Sampler,
+}
+impl ::std::default::Default for SamplerCaptureDescriptorDataInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ sampler: Sampler::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SamplerCaptureDescriptorDataInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT;
+}
+impl SamplerCaptureDescriptorDataInfoEXT {
+ pub fn builder<'a>() -> SamplerCaptureDescriptorDataInfoEXTBuilder<'a> {
+ SamplerCaptureDescriptorDataInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SamplerCaptureDescriptorDataInfoEXTBuilder<'a> {
+ inner: SamplerCaptureDescriptorDataInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SamplerCaptureDescriptorDataInfoEXTBuilder<'a> {
+ type Target = SamplerCaptureDescriptorDataInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SamplerCaptureDescriptorDataInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SamplerCaptureDescriptorDataInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn sampler(mut self, sampler: Sampler) -> Self {
+ self.inner.sampler = sampler;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SamplerCaptureDescriptorDataInfoEXT {
+ self.inner
+ }
+}
+// Generated binding for VkAccelerationStructureCaptureDescriptorDataInfoEXT
+// (URL in the #[doc] attribute). Holds BOTH the KHR and NV acceleration
+// structure handles; presumably exactly one is meant to be set per call —
+// confirm against the spec's valid-usage rules.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureCaptureDescriptorDataInfoEXT.html>"]
+pub struct AccelerationStructureCaptureDescriptorDataInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub acceleration_structure: AccelerationStructureKHR,
+ pub acceleration_structure_nv: AccelerationStructureNV,
+}
+impl ::std::default::Default for AccelerationStructureCaptureDescriptorDataInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ acceleration_structure: AccelerationStructureKHR::default(),
+ acceleration_structure_nv: AccelerationStructureNV::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for AccelerationStructureCaptureDescriptorDataInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT;
+}
+impl AccelerationStructureCaptureDescriptorDataInfoEXT {
+ pub fn builder<'a>() -> AccelerationStructureCaptureDescriptorDataInfoEXTBuilder<'a> {
+ AccelerationStructureCaptureDescriptorDataInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct AccelerationStructureCaptureDescriptorDataInfoEXTBuilder<'a> {
+ inner: AccelerationStructureCaptureDescriptorDataInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AccelerationStructureCaptureDescriptorDataInfoEXTBuilder<'a> {
+ type Target = AccelerationStructureCaptureDescriptorDataInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureCaptureDescriptorDataInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> AccelerationStructureCaptureDescriptorDataInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn acceleration_structure(
+ mut self,
+ acceleration_structure: AccelerationStructureKHR,
+ ) -> Self {
+ self.inner.acceleration_structure = acceleration_structure;
+ self
+ }
+ #[inline]
+ pub fn acceleration_structure_nv(
+ mut self,
+ acceleration_structure_nv: AccelerationStructureNV,
+ ) -> Self {
+ self.inner.acceleration_structure_nv = acceleration_structure_nv;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> AccelerationStructureCaptureDescriptorDataInfoEXT {
+ self.inner
+ }
+}
+// Generated binding for VkOpaqueCaptureDescriptorDataCreateInfoEXT (URL in
+// the #[doc] attribute). Unlike the *CaptureDescriptorDataInfoEXT structs
+// above, this one is a p_next-chain extension: the Extends* marker impls
+// below let it extend Buffer/Image/ImageView/Sampler and both acceleration-
+// structure create-info structs.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOpaqueCaptureDescriptorDataCreateInfoEXT.html>"]
+pub struct OpaqueCaptureDescriptorDataCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub opaque_capture_descriptor_data: *const c_void,
+}
+impl ::std::default::Default for OpaqueCaptureDescriptorDataCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ opaque_capture_descriptor_data: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for OpaqueCaptureDescriptorDataCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT;
+}
+impl OpaqueCaptureDescriptorDataCreateInfoEXT {
+ pub fn builder<'a>() -> OpaqueCaptureDescriptorDataCreateInfoEXTBuilder<'a> {
+ OpaqueCaptureDescriptorDataCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct OpaqueCaptureDescriptorDataCreateInfoEXTBuilder<'a> {
+ inner: OpaqueCaptureDescriptorDataCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls declared for both the builder and the raw struct so either
+// form can be pushed onto the relevant create-info p_next chains.
+unsafe impl ExtendsBufferCreateInfo for OpaqueCaptureDescriptorDataCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsBufferCreateInfo for OpaqueCaptureDescriptorDataCreateInfoEXT {}
+unsafe impl ExtendsImageCreateInfo for OpaqueCaptureDescriptorDataCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsImageCreateInfo for OpaqueCaptureDescriptorDataCreateInfoEXT {}
+unsafe impl ExtendsImageViewCreateInfo for OpaqueCaptureDescriptorDataCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsImageViewCreateInfo for OpaqueCaptureDescriptorDataCreateInfoEXT {}
+unsafe impl ExtendsSamplerCreateInfo for OpaqueCaptureDescriptorDataCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsSamplerCreateInfo for OpaqueCaptureDescriptorDataCreateInfoEXT {}
+unsafe impl ExtendsAccelerationStructureCreateInfoKHR
+ for OpaqueCaptureDescriptorDataCreateInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsAccelerationStructureCreateInfoKHR for OpaqueCaptureDescriptorDataCreateInfoEXT {}
+unsafe impl ExtendsAccelerationStructureCreateInfoNV
+ for OpaqueCaptureDescriptorDataCreateInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsAccelerationStructureCreateInfoNV for OpaqueCaptureDescriptorDataCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for OpaqueCaptureDescriptorDataCreateInfoEXTBuilder<'a> {
+ type Target = OpaqueCaptureDescriptorDataCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for OpaqueCaptureDescriptorDataCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> OpaqueCaptureDescriptorDataCreateInfoEXTBuilder<'a> {
+ // Raw pointer setter: the builder does NOT tie this pointer to 'a, so the
+ // caller must keep the pointed-to data alive for the struct's use.
+ #[inline]
+ pub fn opaque_capture_descriptor_data(
+ mut self,
+ opaque_capture_descriptor_data: *const c_void,
+ ) -> Self {
+ self.inner.opaque_capture_descriptor_data = opaque_capture_descriptor_data;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> OpaqueCaptureDescriptorDataCreateInfoEXT {
+ self.inner
+ }
+}
+// Generated binding for VkPhysicalDeviceShaderIntegerDotProductFeatures (URL
+// in the #[doc] attribute). `p_next` is *mut because feature structs are
+// written by the driver when chained into PhysicalDeviceFeatures2; the
+// Extends* impls below also allow chaining into DeviceCreateInfo to enable
+// the feature.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderIntegerDotProductFeatures.html>"]
+pub struct PhysicalDeviceShaderIntegerDotProductFeatures {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_integer_dot_product: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceShaderIntegerDotProductFeatures {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shader_integer_dot_product: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceShaderIntegerDotProductFeatures {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES;
+}
+impl PhysicalDeviceShaderIntegerDotProductFeatures {
+ pub fn builder<'a>() -> PhysicalDeviceShaderIntegerDotProductFeaturesBuilder<'a> {
+ PhysicalDeviceShaderIntegerDotProductFeaturesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderIntegerDotProductFeaturesBuilder<'a> {
+ inner: PhysicalDeviceShaderIntegerDotProductFeatures,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceShaderIntegerDotProductFeaturesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderIntegerDotProductFeatures {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderIntegerDotProductFeaturesBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderIntegerDotProductFeatures {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderIntegerDotProductFeaturesBuilder<'a> {
+ type Target = PhysicalDeviceShaderIntegerDotProductFeatures;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderIntegerDotProductFeaturesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceShaderIntegerDotProductFeaturesBuilder<'a> {
+ // Setter takes an idiomatic Rust bool and widens it to the FFI Bool32.
+ #[inline]
+ pub fn shader_integer_dot_product(mut self, shader_integer_dot_product: bool) -> Self {
+ self.inner.shader_integer_dot_product = shader_integer_dot_product.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceShaderIntegerDotProductFeatures {
+ self.inner
+ }
+}
+// Generated binding for VkPhysicalDeviceShaderIntegerDotProductProperties
+// (URL in the #[doc] attribute). Thirty Bool32 capability flags, one per
+// (width x signedness x accumulating-saturating) combination; driver-filled
+// when chained into PhysicalDeviceProperties2 (see Extends impl below).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderIntegerDotProductProperties.html>"]
+pub struct PhysicalDeviceShaderIntegerDotProductProperties {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub integer_dot_product8_bit_unsigned_accelerated: Bool32,
+ pub integer_dot_product8_bit_signed_accelerated: Bool32,
+ pub integer_dot_product8_bit_mixed_signedness_accelerated: Bool32,
+ pub integer_dot_product4x8_bit_packed_unsigned_accelerated: Bool32,
+ pub integer_dot_product4x8_bit_packed_signed_accelerated: Bool32,
+ pub integer_dot_product4x8_bit_packed_mixed_signedness_accelerated: Bool32,
+ pub integer_dot_product16_bit_unsigned_accelerated: Bool32,
+ pub integer_dot_product16_bit_signed_accelerated: Bool32,
+ pub integer_dot_product16_bit_mixed_signedness_accelerated: Bool32,
+ pub integer_dot_product32_bit_unsigned_accelerated: Bool32,
+ pub integer_dot_product32_bit_signed_accelerated: Bool32,
+ pub integer_dot_product32_bit_mixed_signedness_accelerated: Bool32,
+ pub integer_dot_product64_bit_unsigned_accelerated: Bool32,
+ pub integer_dot_product64_bit_signed_accelerated: Bool32,
+ pub integer_dot_product64_bit_mixed_signedness_accelerated: Bool32,
+ pub integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated: Bool32,
+ pub integer_dot_product_accumulating_saturating8_bit_signed_accelerated: Bool32,
+ pub integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated: Bool32,
+ pub integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated: Bool32,
+ pub integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated: Bool32,
+ pub integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated:
+ Bool32,
+ pub integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated: Bool32,
+ pub integer_dot_product_accumulating_saturating16_bit_signed_accelerated: Bool32,
+ pub integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated: Bool32,
+ pub integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated: Bool32,
+ pub integer_dot_product_accumulating_saturating32_bit_signed_accelerated: Bool32,
+ pub integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated: Bool32,
+ pub integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated: Bool32,
+ pub integer_dot_product_accumulating_saturating64_bit_signed_accelerated: Bool32,
+ pub integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceShaderIntegerDotProductProperties {
+ #[inline]
+ fn default() -> Self {
+ // NOTE(review): initializer left on one giant line by the generator
+ // (rustfmt gives up past its width limit); every flag defaults to 0.
+ Self { s_type : Self :: STRUCTURE_TYPE , p_next : :: std :: ptr :: null_mut () , integer_dot_product8_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product8_bit_signed_accelerated : Bool32 :: default () , integer_dot_product8_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product4x8_bit_packed_unsigned_accelerated : Bool32 :: default () , integer_dot_product4x8_bit_packed_signed_accelerated : Bool32 :: default () , integer_dot_product4x8_bit_packed_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product16_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product16_bit_signed_accelerated : Bool32 :: default () , integer_dot_product16_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product32_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product32_bit_signed_accelerated : Bool32 :: default () , integer_dot_product32_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product64_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product64_bit_signed_accelerated : Bool32 :: default () , integer_dot_product64_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating8_bit_signed_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating16_bit_signed_accelerated : Bool32 :: 
default () , integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating32_bit_signed_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating64_bit_signed_accelerated : Bool32 :: default () , integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated : Bool32 :: default () }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceShaderIntegerDotProductProperties {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES;
+}
+impl PhysicalDeviceShaderIntegerDotProductProperties {
+ pub fn builder<'a>() -> PhysicalDeviceShaderIntegerDotProductPropertiesBuilder<'a> {
+ PhysicalDeviceShaderIntegerDotProductPropertiesBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderIntegerDotProductPropertiesBuilder<'a> {
+ inner: PhysicalDeviceShaderIntegerDotProductProperties,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceShaderIntegerDotProductPropertiesBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderIntegerDotProductProperties {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderIntegerDotProductPropertiesBuilder<'a> {
+ type Target = PhysicalDeviceShaderIntegerDotProductProperties;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderIntegerDotProductPropertiesBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceShaderIntegerDotProductPropertiesBuilder<'a> {
+ #[inline]
+ pub fn integer_dot_product8_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product8_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product8_bit_unsigned_accelerated =
+ integer_dot_product8_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product8_bit_signed_accelerated(
+ mut self,
+ integer_dot_product8_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product8_bit_signed_accelerated =
+ integer_dot_product8_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product8_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product8_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product8_bit_mixed_signedness_accelerated =
+ integer_dot_product8_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product4x8_bit_packed_unsigned_accelerated(
+ mut self,
+ integer_dot_product4x8_bit_packed_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product4x8_bit_packed_unsigned_accelerated =
+ integer_dot_product4x8_bit_packed_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product4x8_bit_packed_signed_accelerated(
+ mut self,
+ integer_dot_product4x8_bit_packed_signed_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product4x8_bit_packed_signed_accelerated =
+ integer_dot_product4x8_bit_packed_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product4x8_bit_packed_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product4x8_bit_packed_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product4x8_bit_packed_mixed_signedness_accelerated =
+ integer_dot_product4x8_bit_packed_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product16_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product16_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product16_bit_unsigned_accelerated =
+ integer_dot_product16_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product16_bit_signed_accelerated(
+ mut self,
+ integer_dot_product16_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product16_bit_signed_accelerated =
+ integer_dot_product16_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product16_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product16_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product16_bit_mixed_signedness_accelerated =
+ integer_dot_product16_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product32_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product32_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product32_bit_unsigned_accelerated =
+ integer_dot_product32_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product32_bit_signed_accelerated(
+ mut self,
+ integer_dot_product32_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product32_bit_signed_accelerated =
+ integer_dot_product32_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product32_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product32_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product32_bit_mixed_signedness_accelerated =
+ integer_dot_product32_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product64_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product64_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product64_bit_unsigned_accelerated =
+ integer_dot_product64_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product64_bit_signed_accelerated(
+ mut self,
+ integer_dot_product64_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner.integer_dot_product64_bit_signed_accelerated =
+ integer_dot_product64_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product64_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product64_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product64_bit_mixed_signedness_accelerated =
+ integer_dot_product64_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated =
+ integer_dot_product_accumulating_saturating8_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating8_bit_signed_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating8_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating8_bit_signed_accelerated =
+ integer_dot_product_accumulating_saturating8_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated =
+ integer_dot_product_accumulating_saturating8_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated =
+ integer_dot_product_accumulating_saturating4x8_bit_packed_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated =
+ integer_dot_product_accumulating_saturating4x8_bit_packed_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated : bool,
+ ) -> Self {
+ self . inner . integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated = integer_dot_product_accumulating_saturating4x8_bit_packed_mixed_signedness_accelerated . into () ;
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated =
+ integer_dot_product_accumulating_saturating16_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating16_bit_signed_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating16_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating16_bit_signed_accelerated =
+ integer_dot_product_accumulating_saturating16_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated =
+ integer_dot_product_accumulating_saturating16_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated =
+ integer_dot_product_accumulating_saturating32_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating32_bit_signed_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating32_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating32_bit_signed_accelerated =
+ integer_dot_product_accumulating_saturating32_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated =
+ integer_dot_product_accumulating_saturating32_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated =
+ integer_dot_product_accumulating_saturating64_bit_unsigned_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating64_bit_signed_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating64_bit_signed_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating64_bit_signed_accelerated =
+ integer_dot_product_accumulating_saturating64_bit_signed_accelerated.into();
+ self
+ }
+ #[inline]
+ pub fn integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated(
+ mut self,
+ integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated: bool,
+ ) -> Self {
+ self.inner
+ .integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated =
+ integer_dot_product_accumulating_saturating64_bit_mixed_signedness_accelerated.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceShaderIntegerDotProductProperties {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDrmPropertiesEXT.html>"]
+pub struct PhysicalDeviceDrmPropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub has_primary: Bool32,
+ pub has_render: Bool32,
+ pub primary_major: i64,
+ pub primary_minor: i64,
+ pub render_major: i64,
+ pub render_minor: i64,
+}
+impl ::std::default::Default for PhysicalDeviceDrmPropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ has_primary: Bool32::default(),
+ has_render: Bool32::default(),
+ primary_major: i64::default(),
+ primary_minor: i64::default(),
+ render_major: i64::default(),
+ render_minor: i64::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceDrmPropertiesEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DRM_PROPERTIES_EXT;
+}
+impl PhysicalDeviceDrmPropertiesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceDrmPropertiesEXTBuilder<'a> {
+ PhysicalDeviceDrmPropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceDrmPropertiesEXTBuilder<'a> {
+ inner: PhysicalDeviceDrmPropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDrmPropertiesEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceDrmPropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceDrmPropertiesEXTBuilder<'a> {
+ type Target = PhysicalDeviceDrmPropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceDrmPropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceDrmPropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn has_primary(mut self, has_primary: bool) -> Self {
+ self.inner.has_primary = has_primary.into();
+ self
+ }
+ #[inline]
+ pub fn has_render(mut self, has_render: bool) -> Self {
+ self.inner.has_render = has_render.into();
+ self
+ }
+ #[inline]
+ pub fn primary_major(mut self, primary_major: i64) -> Self {
+ self.inner.primary_major = primary_major;
+ self
+ }
+ #[inline]
+ pub fn primary_minor(mut self, primary_minor: i64) -> Self {
+ self.inner.primary_minor = primary_minor;
+ self
+ }
+ #[inline]
+ pub fn render_major(mut self, render_major: i64) -> Self {
+ self.inner.render_major = render_major;
+ self
+ }
+ #[inline]
+ pub fn render_minor(mut self, render_minor: i64) -> Self {
+ self.inner.render_minor = render_minor;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceDrmPropertiesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR.html>"]
+pub struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub fragment_shader_barycentric: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceFragmentShaderBarycentricFeaturesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ fragment_shader_barycentric: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceFragmentShaderBarycentricFeaturesKHR {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR;
+}
+impl PhysicalDeviceFragmentShaderBarycentricFeaturesKHR {
+ pub fn builder<'a>() -> PhysicalDeviceFragmentShaderBarycentricFeaturesKHRBuilder<'a> {
+ PhysicalDeviceFragmentShaderBarycentricFeaturesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceFragmentShaderBarycentricFeaturesKHRBuilder<'a> {
+ inner: PhysicalDeviceFragmentShaderBarycentricFeaturesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceFragmentShaderBarycentricFeaturesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFragmentShaderBarycentricFeaturesKHR {}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceFragmentShaderBarycentricFeaturesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFragmentShaderBarycentricFeaturesKHR {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentShaderBarycentricFeaturesKHRBuilder<'a> {
+ type Target = PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentShaderBarycentricFeaturesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceFragmentShaderBarycentricFeaturesKHRBuilder<'a> {
+ #[inline]
+ pub fn fragment_shader_barycentric(mut self, fragment_shader_barycentric: bool) -> Self {
+ self.inner.fragment_shader_barycentric = fragment_shader_barycentric.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceFragmentShaderBarycentricFeaturesKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR.html>"]
+pub struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHR {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub tri_strip_vertex_order_independent_of_provoking_vertex: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceFragmentShaderBarycentricPropertiesKHR {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ tri_strip_vertex_order_independent_of_provoking_vertex: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceFragmentShaderBarycentricPropertiesKHR {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR;
+}
+impl PhysicalDeviceFragmentShaderBarycentricPropertiesKHR {
+ pub fn builder<'a>() -> PhysicalDeviceFragmentShaderBarycentricPropertiesKHRBuilder<'a> {
+ PhysicalDeviceFragmentShaderBarycentricPropertiesKHRBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceFragmentShaderBarycentricPropertiesKHRBuilder<'a> {
+ inner: PhysicalDeviceFragmentShaderBarycentricPropertiesKHR,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceFragmentShaderBarycentricPropertiesKHRBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceFragmentShaderBarycentricPropertiesKHR
+{
+}
+impl<'a> ::std::ops::Deref for PhysicalDeviceFragmentShaderBarycentricPropertiesKHRBuilder<'a> {
+ type Target = PhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceFragmentShaderBarycentricPropertiesKHRBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceFragmentShaderBarycentricPropertiesKHRBuilder<'a> {
+ #[inline]
+ pub fn tri_strip_vertex_order_independent_of_provoking_vertex(
+ mut self,
+ tri_strip_vertex_order_independent_of_provoking_vertex: bool,
+ ) -> Self {
+ self.inner
+ .tri_strip_vertex_order_independent_of_provoking_vertex =
+ tri_strip_vertex_order_independent_of_provoking_vertex.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceFragmentShaderBarycentricPropertiesKHR {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceRayTracingMotionBlurFeaturesNV.html>"]
+pub struct PhysicalDeviceRayTracingMotionBlurFeaturesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub ray_tracing_motion_blur: Bool32,
+ pub ray_tracing_motion_blur_pipeline_trace_rays_indirect: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceRayTracingMotionBlurFeaturesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ ray_tracing_motion_blur: Bool32::default(),
+ ray_tracing_motion_blur_pipeline_trace_rays_indirect: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceRayTracingMotionBlurFeaturesNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV;
+}
+impl PhysicalDeviceRayTracingMotionBlurFeaturesNV {
+ pub fn builder<'a>() -> PhysicalDeviceRayTracingMotionBlurFeaturesNVBuilder<'a> {
+ PhysicalDeviceRayTracingMotionBlurFeaturesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceRayTracingMotionBlurFeaturesNVBuilder<'a> {
+ inner: PhysicalDeviceRayTracingMotionBlurFeaturesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceRayTracingMotionBlurFeaturesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRayTracingMotionBlurFeaturesNV {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayTracingMotionBlurFeaturesNVBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayTracingMotionBlurFeaturesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceRayTracingMotionBlurFeaturesNVBuilder<'a> {
+ type Target = PhysicalDeviceRayTracingMotionBlurFeaturesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceRayTracingMotionBlurFeaturesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceRayTracingMotionBlurFeaturesNVBuilder<'a> {
+ #[inline]
+ pub fn ray_tracing_motion_blur(mut self, ray_tracing_motion_blur: bool) -> Self {
+ self.inner.ray_tracing_motion_blur = ray_tracing_motion_blur.into();
+ self
+ }
+ #[inline]
+ pub fn ray_tracing_motion_blur_pipeline_trace_rays_indirect(
+ mut self,
+ ray_tracing_motion_blur_pipeline_trace_rays_indirect: bool,
+ ) -> Self {
+ self.inner
+ .ray_tracing_motion_blur_pipeline_trace_rays_indirect =
+ ray_tracing_motion_blur_pipeline_trace_rays_indirect.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceRayTracingMotionBlurFeaturesNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureGeometryMotionTrianglesDataNV.html>"]
+pub struct AccelerationStructureGeometryMotionTrianglesDataNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub vertex_data: DeviceOrHostAddressConstKHR,
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for AccelerationStructureGeometryMotionTrianglesDataNV {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("AccelerationStructureGeometryMotionTrianglesDataNV")
+ .field("s_type", &self.s_type)
+ .field("p_next", &self.p_next)
+ .field("vertex_data", &"union")
+ .finish()
+ }
+}
+impl ::std::default::Default for AccelerationStructureGeometryMotionTrianglesDataNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ vertex_data: DeviceOrHostAddressConstKHR::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for AccelerationStructureGeometryMotionTrianglesDataNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV;
+}
+impl AccelerationStructureGeometryMotionTrianglesDataNV {
+ pub fn builder<'a>() -> AccelerationStructureGeometryMotionTrianglesDataNVBuilder<'a> {
+ AccelerationStructureGeometryMotionTrianglesDataNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct AccelerationStructureGeometryMotionTrianglesDataNVBuilder<'a> {
+ inner: AccelerationStructureGeometryMotionTrianglesDataNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsAccelerationStructureGeometryTrianglesDataKHR
+ for AccelerationStructureGeometryMotionTrianglesDataNVBuilder<'_>
+{
+}
+unsafe impl ExtendsAccelerationStructureGeometryTrianglesDataKHR
+ for AccelerationStructureGeometryMotionTrianglesDataNV
+{
+}
+impl<'a> ::std::ops::Deref for AccelerationStructureGeometryMotionTrianglesDataNVBuilder<'a> {
+ type Target = AccelerationStructureGeometryMotionTrianglesDataNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureGeometryMotionTrianglesDataNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> AccelerationStructureGeometryMotionTrianglesDataNVBuilder<'a> {
+ #[inline]
+ pub fn vertex_data(mut self, vertex_data: DeviceOrHostAddressConstKHR) -> Self {
+ self.inner.vertex_data = vertex_data;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> AccelerationStructureGeometryMotionTrianglesDataNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureMotionInfoNV.html>"]
+pub struct AccelerationStructureMotionInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub max_instances: u32,
+ pub flags: AccelerationStructureMotionInfoFlagsNV,
+}
+impl ::std::default::Default for AccelerationStructureMotionInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ max_instances: u32::default(),
+ flags: AccelerationStructureMotionInfoFlagsNV::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for AccelerationStructureMotionInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::ACCELERATION_STRUCTURE_MOTION_INFO_NV;
+}
+impl AccelerationStructureMotionInfoNV {
+ pub fn builder<'a>() -> AccelerationStructureMotionInfoNVBuilder<'a> {
+ AccelerationStructureMotionInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct AccelerationStructureMotionInfoNVBuilder<'a> {
+ inner: AccelerationStructureMotionInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsAccelerationStructureCreateInfoKHR
+ for AccelerationStructureMotionInfoNVBuilder<'_>
+{
+}
+unsafe impl ExtendsAccelerationStructureCreateInfoKHR for AccelerationStructureMotionInfoNV {}
+impl<'a> ::std::ops::Deref for AccelerationStructureMotionInfoNVBuilder<'a> {
+ type Target = AccelerationStructureMotionInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureMotionInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> AccelerationStructureMotionInfoNVBuilder<'a> {
+ #[inline]
+ pub fn max_instances(mut self, max_instances: u32) -> Self {
+ self.inner.max_instances = max_instances;
+ self
+ }
+ #[inline]
+ pub fn flags(mut self, flags: AccelerationStructureMotionInfoFlagsNV) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> AccelerationStructureMotionInfoNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSRTDataNV.html>"]
+pub struct SRTDataNV {
+ pub sx: f32,
+ pub a: f32,
+ pub b: f32,
+ pub pvx: f32,
+ pub sy: f32,
+ pub c: f32,
+ pub pvy: f32,
+ pub sz: f32,
+ pub pvz: f32,
+ pub qx: f32,
+ pub qy: f32,
+ pub qz: f32,
+ pub qw: f32,
+ pub tx: f32,
+ pub ty: f32,
+ pub tz: f32,
+}
+impl SRTDataNV {
+ pub fn builder<'a>() -> SRTDataNVBuilder<'a> {
+ SRTDataNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SRTDataNVBuilder<'a> {
+ inner: SRTDataNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SRTDataNVBuilder<'a> {
+ type Target = SRTDataNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SRTDataNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SRTDataNVBuilder<'a> {
+ #[inline]
+ pub fn sx(mut self, sx: f32) -> Self {
+ self.inner.sx = sx;
+ self
+ }
+ #[inline]
+ pub fn a(mut self, a: f32) -> Self {
+ self.inner.a = a;
+ self
+ }
+ #[inline]
+ pub fn b(mut self, b: f32) -> Self {
+ self.inner.b = b;
+ self
+ }
+ #[inline]
+ pub fn pvx(mut self, pvx: f32) -> Self {
+ self.inner.pvx = pvx;
+ self
+ }
+ #[inline]
+ pub fn sy(mut self, sy: f32) -> Self {
+ self.inner.sy = sy;
+ self
+ }
+ #[inline]
+ pub fn c(mut self, c: f32) -> Self {
+ self.inner.c = c;
+ self
+ }
+ #[inline]
+ pub fn pvy(mut self, pvy: f32) -> Self {
+ self.inner.pvy = pvy;
+ self
+ }
+ #[inline]
+ pub fn sz(mut self, sz: f32) -> Self {
+ self.inner.sz = sz;
+ self
+ }
+ #[inline]
+ pub fn pvz(mut self, pvz: f32) -> Self {
+ self.inner.pvz = pvz;
+ self
+ }
+ #[inline]
+ pub fn qx(mut self, qx: f32) -> Self {
+ self.inner.qx = qx;
+ self
+ }
+ #[inline]
+ pub fn qy(mut self, qy: f32) -> Self {
+ self.inner.qy = qy;
+ self
+ }
+ #[inline]
+ pub fn qz(mut self, qz: f32) -> Self {
+ self.inner.qz = qz;
+ self
+ }
+ #[inline]
+ pub fn qw(mut self, qw: f32) -> Self {
+ self.inner.qw = qw;
+ self
+ }
+ #[inline]
+ pub fn tx(mut self, tx: f32) -> Self {
+ self.inner.tx = tx;
+ self
+ }
+ #[inline]
+ pub fn ty(mut self, ty: f32) -> Self {
+ self.inner.ty = ty;
+ self
+ }
+ #[inline]
+ pub fn tz(mut self, tz: f32) -> Self {
+ self.inner.tz = tz;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SRTDataNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureSRTMotionInstanceNV.html>"]
+pub struct AccelerationStructureSRTMotionInstanceNV {
+ pub transform_t0: SRTDataNV,
+ pub transform_t1: SRTDataNV,
+ #[doc = r" Use [`Packed24_8::new(instance_custom_index, mask)`][Packed24_8::new()] to construct this field"]
+ pub instance_custom_index_and_mask: Packed24_8,
+ #[doc = r" Use [`Packed24_8::new(instance_shader_binding_table_record_offset, flags)`][Packed24_8::new()] to construct this field"]
+ pub instance_shader_binding_table_record_offset_and_flags: Packed24_8,
+ pub acceleration_structure_reference: AccelerationStructureReferenceKHR,
+}
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureMatrixMotionInstanceNV.html>"]
+pub struct AccelerationStructureMatrixMotionInstanceNV {
+ pub transform_t0: TransformMatrixKHR,
+ pub transform_t1: TransformMatrixKHR,
+ #[doc = r" Use [`Packed24_8::new(instance_custom_index, mask)`][Packed24_8::new()] to construct this field"]
+ pub instance_custom_index_and_mask: Packed24_8,
+ #[doc = r" Use [`Packed24_8::new(instance_shader_binding_table_record_offset, flags)`][Packed24_8::new()] to construct this field"]
+ pub instance_shader_binding_table_record_offset_and_flags: Packed24_8,
+ pub acceleration_structure_reference: AccelerationStructureReferenceKHR,
+}
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureMotionInstanceDataNV.html>"]
+pub union AccelerationStructureMotionInstanceDataNV {
+ pub static_instance: AccelerationStructureInstanceKHR,
+ pub matrix_motion_instance: AccelerationStructureMatrixMotionInstanceNV,
+ pub srt_motion_instance: AccelerationStructureSRTMotionInstanceNV,
+}
+impl ::std::default::Default for AccelerationStructureMotionInstanceDataNV {
+ #[inline]
+ fn default() -> Self {
+ unsafe { ::std::mem::zeroed() }
+ }
+}
+#[repr(C)]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureMotionInstanceNV.html>"]
+pub struct AccelerationStructureMotionInstanceNV {
+ pub ty: AccelerationStructureMotionInstanceTypeNV,
+ pub flags: AccelerationStructureMotionInstanceFlagsNV,
+ pub data: AccelerationStructureMotionInstanceDataNV,
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for AccelerationStructureMotionInstanceNV {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("AccelerationStructureMotionInstanceNV")
+ .field("ty", &self.ty)
+ .field("flags", &self.flags)
+ .field("data", &"union")
+ .finish()
+ }
+}
+impl AccelerationStructureMotionInstanceNV {
+ pub fn builder<'a>() -> AccelerationStructureMotionInstanceNVBuilder<'a> {
+ AccelerationStructureMotionInstanceNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct AccelerationStructureMotionInstanceNVBuilder<'a> {
+ inner: AccelerationStructureMotionInstanceNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for AccelerationStructureMotionInstanceNVBuilder<'a> {
+ type Target = AccelerationStructureMotionInstanceNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureMotionInstanceNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> AccelerationStructureMotionInstanceNVBuilder<'a> {
+ #[inline]
+ pub fn ty(mut self, ty: AccelerationStructureMotionInstanceTypeNV) -> Self {
+ self.inner.ty = ty;
+ self
+ }
+ #[inline]
+ pub fn flags(mut self, flags: AccelerationStructureMotionInstanceFlagsNV) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn data(mut self, data: AccelerationStructureMotionInstanceDataNV) -> Self {
+ self.inner.data = data;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> AccelerationStructureMotionInstanceNV {
+ self.inner
+ }
+}
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRemoteAddressNV.html>"]
+pub type RemoteAddressNV = c_void;
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryGetRemoteAddressInfoNV.html>"]
+pub struct MemoryGetRemoteAddressInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub memory: DeviceMemory,
+ pub handle_type: ExternalMemoryHandleTypeFlags,
+}
+impl ::std::default::Default for MemoryGetRemoteAddressInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ memory: DeviceMemory::default(),
+ handle_type: ExternalMemoryHandleTypeFlags::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MemoryGetRemoteAddressInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::MEMORY_GET_REMOTE_ADDRESS_INFO_NV;
+}
+impl MemoryGetRemoteAddressInfoNV {
+ pub fn builder<'a>() -> MemoryGetRemoteAddressInfoNVBuilder<'a> {
+ MemoryGetRemoteAddressInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MemoryGetRemoteAddressInfoNVBuilder<'a> {
+ inner: MemoryGetRemoteAddressInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MemoryGetRemoteAddressInfoNVBuilder<'a> {
+ type Target = MemoryGetRemoteAddressInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MemoryGetRemoteAddressInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MemoryGetRemoteAddressInfoNVBuilder<'a> {
+ #[inline]
+ pub fn memory(mut self, memory: DeviceMemory) -> Self {
+ self.inner.memory = memory;
+ self
+ }
+ #[inline]
+ pub fn handle_type(mut self, handle_type: ExternalMemoryHandleTypeFlags) -> Self {
+ self.inner.handle_type = handle_type;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MemoryGetRemoteAddressInfoNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportMemoryBufferCollectionFUCHSIA.html>"]
+pub struct ImportMemoryBufferCollectionFUCHSIA {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub collection: BufferCollectionFUCHSIA,
+ pub index: u32,
+}
+impl ::std::default::Default for ImportMemoryBufferCollectionFUCHSIA {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ collection: BufferCollectionFUCHSIA::default(),
+ index: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImportMemoryBufferCollectionFUCHSIA {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA;
+}
+impl ImportMemoryBufferCollectionFUCHSIA {
+ pub fn builder<'a>() -> ImportMemoryBufferCollectionFUCHSIABuilder<'a> {
+ ImportMemoryBufferCollectionFUCHSIABuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImportMemoryBufferCollectionFUCHSIABuilder<'a> {
+ inner: ImportMemoryBufferCollectionFUCHSIA,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryBufferCollectionFUCHSIABuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for ImportMemoryBufferCollectionFUCHSIA {}
+impl<'a> ::std::ops::Deref for ImportMemoryBufferCollectionFUCHSIABuilder<'a> {
+ type Target = ImportMemoryBufferCollectionFUCHSIA;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImportMemoryBufferCollectionFUCHSIABuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImportMemoryBufferCollectionFUCHSIABuilder<'a> {
+ #[inline]
+ pub fn collection(mut self, collection: BufferCollectionFUCHSIA) -> Self {
+ self.inner.collection = collection;
+ self
+ }
+ #[inline]
+ pub fn index(mut self, index: u32) -> Self {
+ self.inner.index = index;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImportMemoryBufferCollectionFUCHSIA {
+ self.inner
+ }
+}
+// Raw FFI struct for VkBufferCollectionImageCreateInfoFUCHSIA: identifies an
+// image by `index` within a Fuchsia buffer `collection` at image-creation time.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferCollectionImageCreateInfoFUCHSIA.html>"]
+pub struct BufferCollectionImageCreateInfoFUCHSIA {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub collection: BufferCollectionFUCHSIA,
+    pub index: u32,
+}
+// Default pre-fills `s_type` with the correct tag and nulls `p_next`, so a
+// default value is already well-formed for Vulkan.
+impl ::std::default::Default for BufferCollectionImageCreateInfoFUCHSIA {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            collection: BufferCollectionFUCHSIA::default(),
+            index: u32::default(),
+        }
+    }
+}
+// Associates the struct with its `VkStructureType` discriminant.
+unsafe impl TaggedStructure for BufferCollectionImageCreateInfoFUCHSIA {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA;
+}
+impl BufferCollectionImageCreateInfoFUCHSIA {
+    // Entry point for the fluent builder below.
+    pub fn builder<'a>() -> BufferCollectionImageCreateInfoFUCHSIABuilder<'a> {
+        BufferCollectionImageCreateInfoFUCHSIABuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-transparent builder; the lifetime keeps borrowed arguments alive.
+#[repr(transparent)]
+pub struct BufferCollectionImageCreateInfoFUCHSIABuilder<'a> {
+    inner: BufferCollectionImageCreateInfoFUCHSIA,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Both forms may be chained into `ImageCreateInfo::p_next`.
+unsafe impl ExtendsImageCreateInfo for BufferCollectionImageCreateInfoFUCHSIABuilder<'_> {}
+unsafe impl ExtendsImageCreateInfo for BufferCollectionImageCreateInfoFUCHSIA {}
+impl<'a> ::std::ops::Deref for BufferCollectionImageCreateInfoFUCHSIABuilder<'a> {
+    type Target = BufferCollectionImageCreateInfoFUCHSIA;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for BufferCollectionImageCreateInfoFUCHSIABuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> BufferCollectionImageCreateInfoFUCHSIABuilder<'a> {
+    #[inline]
+    pub fn collection(mut self, collection: BufferCollectionFUCHSIA) -> Self {
+        self.inner.collection = collection;
+        self
+    }
+    #[inline]
+    pub fn index(mut self, index: u32) -> Self {
+        self.inner.index = index;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> BufferCollectionImageCreateInfoFUCHSIA {
+        self.inner
+    }
+}
+// Raw FFI struct for VkBufferCollectionBufferCreateInfoFUCHSIA: identifies a
+// buffer by `index` within a Fuchsia buffer `collection` at buffer creation.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferCollectionBufferCreateInfoFUCHSIA.html>"]
+pub struct BufferCollectionBufferCreateInfoFUCHSIA {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub collection: BufferCollectionFUCHSIA,
+    pub index: u32,
+}
+// Default pre-fills `s_type` and nulls `p_next`, so the default is valid.
+impl ::std::default::Default for BufferCollectionBufferCreateInfoFUCHSIA {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            collection: BufferCollectionFUCHSIA::default(),
+            index: u32::default(),
+        }
+    }
+}
+// Associates the struct with its `VkStructureType` discriminant.
+unsafe impl TaggedStructure for BufferCollectionBufferCreateInfoFUCHSIA {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA;
+}
+impl BufferCollectionBufferCreateInfoFUCHSIA {
+    // Entry point for the fluent builder below.
+    pub fn builder<'a>() -> BufferCollectionBufferCreateInfoFUCHSIABuilder<'a> {
+        BufferCollectionBufferCreateInfoFUCHSIABuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-transparent builder; the lifetime keeps borrowed arguments alive.
+#[repr(transparent)]
+pub struct BufferCollectionBufferCreateInfoFUCHSIABuilder<'a> {
+    inner: BufferCollectionBufferCreateInfoFUCHSIA,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Both forms may be chained into `BufferCreateInfo::p_next`.
+unsafe impl ExtendsBufferCreateInfo for BufferCollectionBufferCreateInfoFUCHSIABuilder<'_> {}
+unsafe impl ExtendsBufferCreateInfo for BufferCollectionBufferCreateInfoFUCHSIA {}
+impl<'a> ::std::ops::Deref for BufferCollectionBufferCreateInfoFUCHSIABuilder<'a> {
+    type Target = BufferCollectionBufferCreateInfoFUCHSIA;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for BufferCollectionBufferCreateInfoFUCHSIABuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> BufferCollectionBufferCreateInfoFUCHSIABuilder<'a> {
+    #[inline]
+    pub fn collection(mut self, collection: BufferCollectionFUCHSIA) -> Self {
+        self.inner.collection = collection;
+        self
+    }
+    #[inline]
+    pub fn index(mut self, index: u32) -> Self {
+        self.inner.index = index;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> BufferCollectionBufferCreateInfoFUCHSIA {
+        self.inner
+    }
+}
+// Raw FFI struct for VkBufferCollectionCreateInfoFUCHSIA: wraps the Zircon
+// sysmem token (`zx_handle_t`) used to create a buffer collection.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferCollectionCreateInfoFUCHSIA.html>"]
+pub struct BufferCollectionCreateInfoFUCHSIA {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub collection_token: zx_handle_t,
+}
+// Default pre-fills `s_type` and nulls `p_next`, so the default is valid.
+impl ::std::default::Default for BufferCollectionCreateInfoFUCHSIA {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            collection_token: zx_handle_t::default(),
+        }
+    }
+}
+// Associates the struct with its `VkStructureType` discriminant.
+unsafe impl TaggedStructure for BufferCollectionCreateInfoFUCHSIA {
+    const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_COLLECTION_CREATE_INFO_FUCHSIA;
+}
+impl BufferCollectionCreateInfoFUCHSIA {
+    // Entry point for the fluent builder below.
+    pub fn builder<'a>() -> BufferCollectionCreateInfoFUCHSIABuilder<'a> {
+        BufferCollectionCreateInfoFUCHSIABuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-transparent builder; no `Extends*` impls — this struct heads its own
+// chain rather than extending another create-info.
+#[repr(transparent)]
+pub struct BufferCollectionCreateInfoFUCHSIABuilder<'a> {
+    inner: BufferCollectionCreateInfoFUCHSIA,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BufferCollectionCreateInfoFUCHSIABuilder<'a> {
+    type Target = BufferCollectionCreateInfoFUCHSIA;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for BufferCollectionCreateInfoFUCHSIABuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> BufferCollectionCreateInfoFUCHSIABuilder<'a> {
+    #[inline]
+    pub fn collection_token(mut self, collection_token: zx_handle_t) -> Self {
+        self.inner.collection_token = collection_token;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> BufferCollectionCreateInfoFUCHSIA {
+        self.inner
+    }
+}
+// Raw FFI struct for VkBufferCollectionPropertiesFUCHSIA. Note `p_next` is
+// `*mut c_void` here (unlike the `*const` input structs), so the chain can be
+// written through when the struct is filled in by a query.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferCollectionPropertiesFUCHSIA.html>"]
+pub struct BufferCollectionPropertiesFUCHSIA {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub memory_type_bits: u32,
+    pub buffer_count: u32,
+    pub create_info_index: u32,
+    pub sysmem_pixel_format: u64,
+    pub format_features: FormatFeatureFlags,
+    pub sysmem_color_space_index: SysmemColorSpaceFUCHSIA,
+    pub sampler_ycbcr_conversion_components: ComponentMapping,
+    pub suggested_ycbcr_model: SamplerYcbcrModelConversion,
+    pub suggested_ycbcr_range: SamplerYcbcrRange,
+    pub suggested_x_chroma_offset: ChromaLocation,
+    pub suggested_y_chroma_offset: ChromaLocation,
+}
+// Default pre-fills `s_type`, nulls `p_next`, and zero-initializes every field.
+impl ::std::default::Default for BufferCollectionPropertiesFUCHSIA {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            memory_type_bits: u32::default(),
+            buffer_count: u32::default(),
+            create_info_index: u32::default(),
+            sysmem_pixel_format: u64::default(),
+            format_features: FormatFeatureFlags::default(),
+            sysmem_color_space_index: SysmemColorSpaceFUCHSIA::default(),
+            sampler_ycbcr_conversion_components: ComponentMapping::default(),
+            suggested_ycbcr_model: SamplerYcbcrModelConversion::default(),
+            suggested_ycbcr_range: SamplerYcbcrRange::default(),
+            suggested_x_chroma_offset: ChromaLocation::default(),
+            suggested_y_chroma_offset: ChromaLocation::default(),
+        }
+    }
+}
+// Associates the struct with its `VkStructureType` discriminant.
+unsafe impl TaggedStructure for BufferCollectionPropertiesFUCHSIA {
+    const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_COLLECTION_PROPERTIES_FUCHSIA;
+}
+impl BufferCollectionPropertiesFUCHSIA {
+    // Entry point for the fluent builder below.
+    pub fn builder<'a>() -> BufferCollectionPropertiesFUCHSIABuilder<'a> {
+        BufferCollectionPropertiesFUCHSIABuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-transparent builder with one setter per field; the lifetime marker
+// follows the uniform generated-builder pattern.
+#[repr(transparent)]
+pub struct BufferCollectionPropertiesFUCHSIABuilder<'a> {
+    inner: BufferCollectionPropertiesFUCHSIA,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BufferCollectionPropertiesFUCHSIABuilder<'a> {
+    type Target = BufferCollectionPropertiesFUCHSIA;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for BufferCollectionPropertiesFUCHSIABuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> BufferCollectionPropertiesFUCHSIABuilder<'a> {
+    #[inline]
+    pub fn memory_type_bits(mut self, memory_type_bits: u32) -> Self {
+        self.inner.memory_type_bits = memory_type_bits;
+        self
+    }
+    #[inline]
+    pub fn buffer_count(mut self, buffer_count: u32) -> Self {
+        self.inner.buffer_count = buffer_count;
+        self
+    }
+    #[inline]
+    pub fn create_info_index(mut self, create_info_index: u32) -> Self {
+        self.inner.create_info_index = create_info_index;
+        self
+    }
+    #[inline]
+    pub fn sysmem_pixel_format(mut self, sysmem_pixel_format: u64) -> Self {
+        self.inner.sysmem_pixel_format = sysmem_pixel_format;
+        self
+    }
+    #[inline]
+    pub fn format_features(mut self, format_features: FormatFeatureFlags) -> Self {
+        self.inner.format_features = format_features;
+        self
+    }
+    #[inline]
+    pub fn sysmem_color_space_index(
+        mut self,
+        sysmem_color_space_index: SysmemColorSpaceFUCHSIA,
+    ) -> Self {
+        self.inner.sysmem_color_space_index = sysmem_color_space_index;
+        self
+    }
+    #[inline]
+    pub fn sampler_ycbcr_conversion_components(
+        mut self,
+        sampler_ycbcr_conversion_components: ComponentMapping,
+    ) -> Self {
+        self.inner.sampler_ycbcr_conversion_components = sampler_ycbcr_conversion_components;
+        self
+    }
+    #[inline]
+    pub fn suggested_ycbcr_model(
+        mut self,
+        suggested_ycbcr_model: SamplerYcbcrModelConversion,
+    ) -> Self {
+        self.inner.suggested_ycbcr_model = suggested_ycbcr_model;
+        self
+    }
+    #[inline]
+    pub fn suggested_ycbcr_range(mut self, suggested_ycbcr_range: SamplerYcbcrRange) -> Self {
+        self.inner.suggested_ycbcr_range = suggested_ycbcr_range;
+        self
+    }
+    #[inline]
+    pub fn suggested_x_chroma_offset(mut self, suggested_x_chroma_offset: ChromaLocation) -> Self {
+        self.inner.suggested_x_chroma_offset = suggested_x_chroma_offset;
+        self
+    }
+    #[inline]
+    pub fn suggested_y_chroma_offset(mut self, suggested_y_chroma_offset: ChromaLocation) -> Self {
+        self.inner.suggested_y_chroma_offset = suggested_y_chroma_offset;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> BufferCollectionPropertiesFUCHSIA {
+        self.inner
+    }
+}
+// Raw FFI struct for VkBufferConstraintsInfoFUCHSIA: bundles a buffer
+// create-info with required format features and collection constraints.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferConstraintsInfoFUCHSIA.html>"]
+pub struct BufferConstraintsInfoFUCHSIA {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub create_info: BufferCreateInfo,
+    pub required_format_features: FormatFeatureFlags,
+    pub buffer_collection_constraints: BufferCollectionConstraintsInfoFUCHSIA,
+}
+// Default pre-fills `s_type` and nulls `p_next`, so the default is valid.
+impl ::std::default::Default for BufferConstraintsInfoFUCHSIA {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            create_info: BufferCreateInfo::default(),
+            required_format_features: FormatFeatureFlags::default(),
+            buffer_collection_constraints: BufferCollectionConstraintsInfoFUCHSIA::default(),
+        }
+    }
+}
+// Associates the struct with its `VkStructureType` discriminant.
+unsafe impl TaggedStructure for BufferConstraintsInfoFUCHSIA {
+    const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_CONSTRAINTS_INFO_FUCHSIA;
+}
+impl BufferConstraintsInfoFUCHSIA {
+    // Entry point for the fluent builder below.
+    pub fn builder<'a>() -> BufferConstraintsInfoFUCHSIABuilder<'a> {
+        BufferConstraintsInfoFUCHSIABuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-transparent builder; the lifetime keeps borrowed arguments alive.
+#[repr(transparent)]
+pub struct BufferConstraintsInfoFUCHSIABuilder<'a> {
+    inner: BufferConstraintsInfoFUCHSIA,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BufferConstraintsInfoFUCHSIABuilder<'a> {
+    type Target = BufferConstraintsInfoFUCHSIA;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for BufferConstraintsInfoFUCHSIABuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> BufferConstraintsInfoFUCHSIABuilder<'a> {
+    #[inline]
+    pub fn create_info(mut self, create_info: BufferCreateInfo) -> Self {
+        self.inner.create_info = create_info;
+        self
+    }
+    #[inline]
+    pub fn required_format_features(
+        mut self,
+        required_format_features: FormatFeatureFlags,
+    ) -> Self {
+        self.inner.required_format_features = required_format_features;
+        self
+    }
+    #[inline]
+    pub fn buffer_collection_constraints(
+        mut self,
+        buffer_collection_constraints: BufferCollectionConstraintsInfoFUCHSIA,
+    ) -> Self {
+        self.inner.buffer_collection_constraints = buffer_collection_constraints;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> BufferConstraintsInfoFUCHSIA {
+        self.inner
+    }
+}
+// Raw FFI struct for VkSysmemColorSpaceFUCHSIA: carries a sysmem color-space
+// identifier as a bare `u32`.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSysmemColorSpaceFUCHSIA.html>"]
+pub struct SysmemColorSpaceFUCHSIA {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub color_space: u32,
+}
+// Default pre-fills `s_type` and nulls `p_next`, so the default is valid.
+impl ::std::default::Default for SysmemColorSpaceFUCHSIA {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            color_space: u32::default(),
+        }
+    }
+}
+// Associates the struct with its `VkStructureType` discriminant.
+unsafe impl TaggedStructure for SysmemColorSpaceFUCHSIA {
+    const STRUCTURE_TYPE: StructureType = StructureType::SYSMEM_COLOR_SPACE_FUCHSIA;
+}
+impl SysmemColorSpaceFUCHSIA {
+    // Entry point for the fluent builder below.
+    pub fn builder<'a>() -> SysmemColorSpaceFUCHSIABuilder<'a> {
+        SysmemColorSpaceFUCHSIABuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-transparent builder; the lifetime keeps borrowed arguments alive.
+#[repr(transparent)]
+pub struct SysmemColorSpaceFUCHSIABuilder<'a> {
+    inner: SysmemColorSpaceFUCHSIA,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for SysmemColorSpaceFUCHSIABuilder<'a> {
+    type Target = SysmemColorSpaceFUCHSIA;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for SysmemColorSpaceFUCHSIABuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> SysmemColorSpaceFUCHSIABuilder<'a> {
+    #[inline]
+    pub fn color_space(mut self, color_space: u32) -> Self {
+        self.inner.color_space = color_space;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> SysmemColorSpaceFUCHSIA {
+        self.inner
+    }
+}
+// Raw FFI struct for VkImageFormatConstraintsInfoFUCHSIA. Uses the usual
+// Vulkan count+pointer pair (`color_space_count` / `p_color_spaces`) for a
+// caller-owned array of color spaces.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageFormatConstraintsInfoFUCHSIA.html>"]
+pub struct ImageFormatConstraintsInfoFUCHSIA {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub image_create_info: ImageCreateInfo,
+    pub required_format_features: FormatFeatureFlags,
+    pub flags: ImageFormatConstraintsFlagsFUCHSIA,
+    pub sysmem_pixel_format: u64,
+    pub color_space_count: u32,
+    pub p_color_spaces: *const SysmemColorSpaceFUCHSIA,
+}
+// Default pre-fills `s_type` and nulls all pointers, so the default is valid.
+impl ::std::default::Default for ImageFormatConstraintsInfoFUCHSIA {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            image_create_info: ImageCreateInfo::default(),
+            required_format_features: FormatFeatureFlags::default(),
+            flags: ImageFormatConstraintsFlagsFUCHSIA::default(),
+            sysmem_pixel_format: u64::default(),
+            color_space_count: u32::default(),
+            p_color_spaces: ::std::ptr::null(),
+        }
+    }
+}
+// Associates the struct with its `VkStructureType` discriminant.
+unsafe impl TaggedStructure for ImageFormatConstraintsInfoFUCHSIA {
+    const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA;
+}
+impl ImageFormatConstraintsInfoFUCHSIA {
+    // Entry point for the fluent builder below.
+    pub fn builder<'a>() -> ImageFormatConstraintsInfoFUCHSIABuilder<'a> {
+        ImageFormatConstraintsInfoFUCHSIABuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-transparent builder; the `'a` lifetime ties the `color_spaces` slice
+// borrow to the builder so the raw pointer cannot outlive its data.
+#[repr(transparent)]
+pub struct ImageFormatConstraintsInfoFUCHSIABuilder<'a> {
+    inner: ImageFormatConstraintsInfoFUCHSIA,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImageFormatConstraintsInfoFUCHSIABuilder<'a> {
+    type Target = ImageFormatConstraintsInfoFUCHSIA;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for ImageFormatConstraintsInfoFUCHSIABuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> ImageFormatConstraintsInfoFUCHSIABuilder<'a> {
+    #[inline]
+    pub fn image_create_info(mut self, image_create_info: ImageCreateInfo) -> Self {
+        self.inner.image_create_info = image_create_info;
+        self
+    }
+    #[inline]
+    pub fn required_format_features(
+        mut self,
+        required_format_features: FormatFeatureFlags,
+    ) -> Self {
+        self.inner.required_format_features = required_format_features;
+        self
+    }
+    #[inline]
+    pub fn flags(mut self, flags: ImageFormatConstraintsFlagsFUCHSIA) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn sysmem_pixel_format(mut self, sysmem_pixel_format: u64) -> Self {
+        self.inner.sysmem_pixel_format = sysmem_pixel_format;
+        self
+    }
+    // Sets both the count and the pointer from one borrowed slice.
+    #[inline]
+    pub fn color_spaces(mut self, color_spaces: &'a [SysmemColorSpaceFUCHSIA]) -> Self {
+        self.inner.color_space_count = color_spaces.len() as _;
+        self.inner.p_color_spaces = color_spaces.as_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> ImageFormatConstraintsInfoFUCHSIA {
+        self.inner
+    }
+}
+// Raw FFI struct for VkImageConstraintsInfoFUCHSIA: an array of per-format
+// constraints (count+pointer pair) plus collection-wide constraints and flags.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageConstraintsInfoFUCHSIA.html>"]
+pub struct ImageConstraintsInfoFUCHSIA {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub format_constraints_count: u32,
+    pub p_format_constraints: *const ImageFormatConstraintsInfoFUCHSIA,
+    pub buffer_collection_constraints: BufferCollectionConstraintsInfoFUCHSIA,
+    pub flags: ImageConstraintsInfoFlagsFUCHSIA,
+}
+// Default pre-fills `s_type` and nulls all pointers, so the default is valid.
+impl ::std::default::Default for ImageConstraintsInfoFUCHSIA {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            format_constraints_count: u32::default(),
+            p_format_constraints: ::std::ptr::null(),
+            buffer_collection_constraints: BufferCollectionConstraintsInfoFUCHSIA::default(),
+            flags: ImageConstraintsInfoFlagsFUCHSIA::default(),
+        }
+    }
+}
+// Associates the struct with its `VkStructureType` discriminant.
+unsafe impl TaggedStructure for ImageConstraintsInfoFUCHSIA {
+    const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_CONSTRAINTS_INFO_FUCHSIA;
+}
+impl ImageConstraintsInfoFUCHSIA {
+    // Entry point for the fluent builder below.
+    pub fn builder<'a>() -> ImageConstraintsInfoFUCHSIABuilder<'a> {
+        ImageConstraintsInfoFUCHSIABuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-transparent builder; the `'a` lifetime ties the `format_constraints`
+// slice borrow to the builder so the raw pointer cannot outlive its data.
+#[repr(transparent)]
+pub struct ImageConstraintsInfoFUCHSIABuilder<'a> {
+    inner: ImageConstraintsInfoFUCHSIA,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImageConstraintsInfoFUCHSIABuilder<'a> {
+    type Target = ImageConstraintsInfoFUCHSIA;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for ImageConstraintsInfoFUCHSIABuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> ImageConstraintsInfoFUCHSIABuilder<'a> {
+    // Sets both the count and the pointer from one borrowed slice.
+    #[inline]
+    pub fn format_constraints(
+        mut self,
+        format_constraints: &'a [ImageFormatConstraintsInfoFUCHSIA],
+    ) -> Self {
+        self.inner.format_constraints_count = format_constraints.len() as _;
+        self.inner.p_format_constraints = format_constraints.as_ptr();
+        self
+    }
+    #[inline]
+    pub fn buffer_collection_constraints(
+        mut self,
+        buffer_collection_constraints: BufferCollectionConstraintsInfoFUCHSIA,
+    ) -> Self {
+        self.inner.buffer_collection_constraints = buffer_collection_constraints;
+        self
+    }
+    #[inline]
+    pub fn flags(mut self, flags: ImageConstraintsInfoFlagsFUCHSIA) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> ImageConstraintsInfoFUCHSIA {
+        self.inner
+    }
+}
+// Raw FFI struct for VkBufferCollectionConstraintsInfoFUCHSIA: min/max buffer
+// counts constraining a Fuchsia buffer collection.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBufferCollectionConstraintsInfoFUCHSIA.html>"]
+pub struct BufferCollectionConstraintsInfoFUCHSIA {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub min_buffer_count: u32,
+    pub max_buffer_count: u32,
+    pub min_buffer_count_for_camping: u32,
+    pub min_buffer_count_for_dedicated_slack: u32,
+    pub min_buffer_count_for_shared_slack: u32,
+}
+// Default pre-fills `s_type`, nulls `p_next`, and zeroes every count.
+impl ::std::default::Default for BufferCollectionConstraintsInfoFUCHSIA {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            min_buffer_count: u32::default(),
+            max_buffer_count: u32::default(),
+            min_buffer_count_for_camping: u32::default(),
+            min_buffer_count_for_dedicated_slack: u32::default(),
+            min_buffer_count_for_shared_slack: u32::default(),
+        }
+    }
+}
+// Associates the struct with its `VkStructureType` discriminant.
+unsafe impl TaggedStructure for BufferCollectionConstraintsInfoFUCHSIA {
+    const STRUCTURE_TYPE: StructureType = StructureType::BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA;
+}
+impl BufferCollectionConstraintsInfoFUCHSIA {
+    // Entry point for the fluent builder below.
+    pub fn builder<'a>() -> BufferCollectionConstraintsInfoFUCHSIABuilder<'a> {
+        BufferCollectionConstraintsInfoFUCHSIABuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-transparent builder with one setter per count field.
+#[repr(transparent)]
+pub struct BufferCollectionConstraintsInfoFUCHSIABuilder<'a> {
+    inner: BufferCollectionConstraintsInfoFUCHSIA,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for BufferCollectionConstraintsInfoFUCHSIABuilder<'a> {
+    type Target = BufferCollectionConstraintsInfoFUCHSIA;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for BufferCollectionConstraintsInfoFUCHSIABuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> BufferCollectionConstraintsInfoFUCHSIABuilder<'a> {
+    #[inline]
+    pub fn min_buffer_count(mut self, min_buffer_count: u32) -> Self {
+        self.inner.min_buffer_count = min_buffer_count;
+        self
+    }
+    #[inline]
+    pub fn max_buffer_count(mut self, max_buffer_count: u32) -> Self {
+        self.inner.max_buffer_count = max_buffer_count;
+        self
+    }
+    #[inline]
+    pub fn min_buffer_count_for_camping(mut self, min_buffer_count_for_camping: u32) -> Self {
+        self.inner.min_buffer_count_for_camping = min_buffer_count_for_camping;
+        self
+    }
+    #[inline]
+    pub fn min_buffer_count_for_dedicated_slack(
+        mut self,
+        min_buffer_count_for_dedicated_slack: u32,
+    ) -> Self {
+        self.inner.min_buffer_count_for_dedicated_slack = min_buffer_count_for_dedicated_slack;
+        self
+    }
+    #[inline]
+    pub fn min_buffer_count_for_shared_slack(
+        mut self,
+        min_buffer_count_for_shared_slack: u32,
+    ) -> Self {
+        self.inner.min_buffer_count_for_shared_slack = min_buffer_count_for_shared_slack;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> BufferCollectionConstraintsInfoFUCHSIA {
+        self.inner
+    }
+}
+// Raw FFI struct for VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT: single
+// `Bool32` feature flag; `p_next` is `*mut` so the chain can be written
+// through during feature queries.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT.html>"]
+pub struct PhysicalDeviceRGBA10X6FormatsFeaturesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub format_rgba10x6_without_y_cb_cr_sampler: Bool32,
+}
+// Default pre-fills `s_type` and nulls `p_next`, so the default is valid.
+impl ::std::default::Default for PhysicalDeviceRGBA10X6FormatsFeaturesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            format_rgba10x6_without_y_cb_cr_sampler: Bool32::default(),
+        }
+    }
+}
+// Associates the struct with its `VkStructureType` discriminant.
+unsafe impl TaggedStructure for PhysicalDeviceRGBA10X6FormatsFeaturesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT;
+}
+impl PhysicalDeviceRGBA10X6FormatsFeaturesEXT {
+    // Entry point for the fluent builder below.
+    pub fn builder<'a>() -> PhysicalDeviceRGBA10X6FormatsFeaturesEXTBuilder<'a> {
+        PhysicalDeviceRGBA10X6FormatsFeaturesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-transparent builder; the lifetime follows the uniform pattern.
+#[repr(transparent)]
+pub struct PhysicalDeviceRGBA10X6FormatsFeaturesEXTBuilder<'a> {
+    inner: PhysicalDeviceRGBA10X6FormatsFeaturesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Chainable into both `PhysicalDeviceFeatures2` (query) and
+// `DeviceCreateInfo` (enable), in builder and raw form.
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRGBA10X6FormatsFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRGBA10X6FormatsFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRGBA10X6FormatsFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRGBA10X6FormatsFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceRGBA10X6FormatsFeaturesEXTBuilder<'a> {
+    type Target = PhysicalDeviceRGBA10X6FormatsFeaturesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceRGBA10X6FormatsFeaturesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceRGBA10X6FormatsFeaturesEXTBuilder<'a> {
+    // Accepts a Rust `bool` and converts it into the FFI `Bool32` via `into()`.
+    #[inline]
+    pub fn format_rgba10x6_without_y_cb_cr_sampler(
+        mut self,
+        format_rgba10x6_without_y_cb_cr_sampler: bool,
+    ) -> Self {
+        self.inner.format_rgba10x6_without_y_cb_cr_sampler =
+            format_rgba10x6_without_y_cb_cr_sampler.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceRGBA10X6FormatsFeaturesEXT {
+        self.inner
+    }
+}
+// Raw FFI struct for VkFormatProperties3: 64-bit `FormatFeatureFlags2`
+// variants of the linear/optimal/buffer feature masks.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFormatProperties3.html>"]
+pub struct FormatProperties3 {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub linear_tiling_features: FormatFeatureFlags2,
+    pub optimal_tiling_features: FormatFeatureFlags2,
+    pub buffer_features: FormatFeatureFlags2,
+}
+// Default pre-fills `s_type` and nulls `p_next`, so the default is valid.
+impl ::std::default::Default for FormatProperties3 {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            linear_tiling_features: FormatFeatureFlags2::default(),
+            optimal_tiling_features: FormatFeatureFlags2::default(),
+            buffer_features: FormatFeatureFlags2::default(),
+        }
+    }
+}
+// Associates the struct with its `VkStructureType` discriminant.
+unsafe impl TaggedStructure for FormatProperties3 {
+    const STRUCTURE_TYPE: StructureType = StructureType::FORMAT_PROPERTIES_3;
+}
+impl FormatProperties3 {
+    // Entry point for the fluent builder below.
+    pub fn builder<'a>() -> FormatProperties3Builder<'a> {
+        FormatProperties3Builder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-transparent builder; the lifetime follows the uniform pattern.
+#[repr(transparent)]
+pub struct FormatProperties3Builder<'a> {
+    inner: FormatProperties3,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Both forms may be chained into `FormatProperties2::p_next`.
+unsafe impl ExtendsFormatProperties2 for FormatProperties3Builder<'_> {}
+unsafe impl ExtendsFormatProperties2 for FormatProperties3 {}
+impl<'a> ::std::ops::Deref for FormatProperties3Builder<'a> {
+    type Target = FormatProperties3;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for FormatProperties3Builder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> FormatProperties3Builder<'a> {
+    #[inline]
+    pub fn linear_tiling_features(mut self, linear_tiling_features: FormatFeatureFlags2) -> Self {
+        self.inner.linear_tiling_features = linear_tiling_features;
+        self
+    }
+    #[inline]
+    pub fn optimal_tiling_features(mut self, optimal_tiling_features: FormatFeatureFlags2) -> Self {
+        self.inner.optimal_tiling_features = optimal_tiling_features;
+        self
+    }
+    #[inline]
+    pub fn buffer_features(mut self, buffer_features: FormatFeatureFlags2) -> Self {
+        self.inner.buffer_features = buffer_features;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> FormatProperties3 {
+        self.inner
+    }
+}
+// Raw FFI struct for VkDrmFormatModifierPropertiesList2EXT. The properties
+// pointer is `*mut` — the array is written into by the implementation — so
+// the builder setter below takes a `&mut` slice.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDrmFormatModifierPropertiesList2EXT.html>"]
+pub struct DrmFormatModifierPropertiesList2EXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub drm_format_modifier_count: u32,
+    pub p_drm_format_modifier_properties: *mut DrmFormatModifierProperties2EXT,
+}
+// Default pre-fills `s_type` and nulls all pointers, so the default is valid.
+impl ::std::default::Default for DrmFormatModifierPropertiesList2EXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            drm_format_modifier_count: u32::default(),
+            p_drm_format_modifier_properties: ::std::ptr::null_mut(),
+        }
+    }
+}
+// Associates the struct with its `VkStructureType` discriminant.
+unsafe impl TaggedStructure for DrmFormatModifierPropertiesList2EXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT;
+}
+impl DrmFormatModifierPropertiesList2EXT {
+    // Entry point for the fluent builder below.
+    pub fn builder<'a>() -> DrmFormatModifierPropertiesList2EXTBuilder<'a> {
+        DrmFormatModifierPropertiesList2EXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// Layout-transparent builder; the `'a` lifetime ties the mutable slice borrow
+// to the builder so the raw pointer cannot outlive its data.
+#[repr(transparent)]
+pub struct DrmFormatModifierPropertiesList2EXTBuilder<'a> {
+    inner: DrmFormatModifierPropertiesList2EXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Both forms may be chained into `FormatProperties2::p_next`.
+unsafe impl ExtendsFormatProperties2 for DrmFormatModifierPropertiesList2EXTBuilder<'_> {}
+unsafe impl ExtendsFormatProperties2 for DrmFormatModifierPropertiesList2EXT {}
+impl<'a> ::std::ops::Deref for DrmFormatModifierPropertiesList2EXTBuilder<'a> {
+    type Target = DrmFormatModifierPropertiesList2EXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DrmFormatModifierPropertiesList2EXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DrmFormatModifierPropertiesList2EXTBuilder<'a> {
+    // Sets both the count and the (mutable) pointer from one borrowed slice.
+    #[inline]
+    pub fn drm_format_modifier_properties(
+        mut self,
+        drm_format_modifier_properties: &'a mut [DrmFormatModifierProperties2EXT],
+    ) -> Self {
+        self.inner.drm_format_modifier_count = drm_format_modifier_properties.len() as _;
+        self.inner.p_drm_format_modifier_properties = drm_format_modifier_properties.as_mut_ptr();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DrmFormatModifierPropertiesList2EXT {
+        self.inner
+    }
+}
// C-layout mirror of VkDrmFormatModifierProperties2EXT (plain data — no s_type/p_next
// header, so Default can be derived). See the #[doc] link for field semantics.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone, Default)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDrmFormatModifierProperties2EXT.html>"]
pub struct DrmFormatModifierProperties2EXT {
    pub drm_format_modifier: u64,
    pub drm_format_modifier_plane_count: u32,
    pub drm_format_modifier_tiling_features: FormatFeatureFlags2,
}
impl DrmFormatModifierProperties2EXT {
    pub fn builder<'a>() -> DrmFormatModifierProperties2EXTBuilder<'a> {
        DrmFormatModifierProperties2EXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder wrapper; the PhantomData lifetime is unused here (no borrowed fields)
// but kept for uniformity with the other generated builders.
#[repr(transparent)]
pub struct DrmFormatModifierProperties2EXTBuilder<'a> {
    inner: DrmFormatModifierProperties2EXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Deref/DerefMut let a &builder be passed where a &struct is expected.
impl<'a> ::std::ops::Deref for DrmFormatModifierProperties2EXTBuilder<'a> {
    type Target = DrmFormatModifierProperties2EXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for DrmFormatModifierProperties2EXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Chainable by-value setters, one per struct field.
impl<'a> DrmFormatModifierProperties2EXTBuilder<'a> {
    #[inline]
    pub fn drm_format_modifier(mut self, drm_format_modifier: u64) -> Self {
        self.inner.drm_format_modifier = drm_format_modifier;
        self
    }
    #[inline]
    pub fn drm_format_modifier_plane_count(mut self, drm_format_modifier_plane_count: u32) -> Self {
        self.inner.drm_format_modifier_plane_count = drm_format_modifier_plane_count;
        self
    }
    #[inline]
    pub fn drm_format_modifier_tiling_features(
        mut self,
        drm_format_modifier_tiling_features: FormatFeatureFlags2,
    ) -> Self {
        self.inner.drm_format_modifier_tiling_features = drm_format_modifier_tiling_features;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> DrmFormatModifierProperties2EXT {
        self.inner
    }
}
// C-layout mirror of VkAndroidHardwareBufferFormatProperties2ANDROID; see the
// #[doc] link for the spec page. Generated code — field order matches the C struct.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAndroidHardwareBufferFormatProperties2ANDROID.html>"]
pub struct AndroidHardwareBufferFormatProperties2ANDROID {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    pub format: Format,
    pub external_format: u64,
    pub format_features: FormatFeatureFlags2,
    pub sampler_ycbcr_conversion_components: ComponentMapping,
    pub suggested_ycbcr_model: SamplerYcbcrModelConversion,
    pub suggested_ycbcr_range: SamplerYcbcrRange,
    pub suggested_x_chroma_offset: ChromaLocation,
    pub suggested_y_chroma_offset: ChromaLocation,
}
// Default pre-tags `s_type` and nulls the p_next chain; everything else is zeroed
// via each field type's Default.
impl ::std::default::Default for AndroidHardwareBufferFormatProperties2ANDROID {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            format: Format::default(),
            external_format: u64::default(),
            format_features: FormatFeatureFlags2::default(),
            sampler_ycbcr_conversion_components: ComponentMapping::default(),
            suggested_ycbcr_model: SamplerYcbcrModelConversion::default(),
            suggested_ycbcr_range: SamplerYcbcrRange::default(),
            suggested_x_chroma_offset: ChromaLocation::default(),
            suggested_y_chroma_offset: ChromaLocation::default(),
        }
    }
}
// Associates the struct with the VkStructureType value written into `s_type`.
unsafe impl TaggedStructure for AndroidHardwareBufferFormatProperties2ANDROID {
    const STRUCTURE_TYPE: StructureType =
        StructureType::ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID;
}
impl AndroidHardwareBufferFormatProperties2ANDROID {
    pub fn builder<'a>() -> AndroidHardwareBufferFormatProperties2ANDROIDBuilder<'a> {
        AndroidHardwareBufferFormatProperties2ANDROIDBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder wrapper; PhantomData carries the 'a lifetime used by the setters.
#[repr(transparent)]
pub struct AndroidHardwareBufferFormatProperties2ANDROIDBuilder<'a> {
    inner: AndroidHardwareBufferFormatProperties2ANDROID,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct is a valid p_next extension for
// AndroidHardwareBufferPropertiesANDROID chains.
unsafe impl ExtendsAndroidHardwareBufferPropertiesANDROID
    for AndroidHardwareBufferFormatProperties2ANDROIDBuilder<'_>
{
}
unsafe impl ExtendsAndroidHardwareBufferPropertiesANDROID
    for AndroidHardwareBufferFormatProperties2ANDROID
{
}
// Deref/DerefMut let a &builder be passed where a &struct is expected.
impl<'a> ::std::ops::Deref for AndroidHardwareBufferFormatProperties2ANDROIDBuilder<'a> {
    type Target = AndroidHardwareBufferFormatProperties2ANDROID;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for AndroidHardwareBufferFormatProperties2ANDROIDBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Chainable by-value setters, one per struct field.
impl<'a> AndroidHardwareBufferFormatProperties2ANDROIDBuilder<'a> {
    #[inline]
    pub fn format(mut self, format: Format) -> Self {
        self.inner.format = format;
        self
    }
    #[inline]
    pub fn external_format(mut self, external_format: u64) -> Self {
        self.inner.external_format = external_format;
        self
    }
    #[inline]
    pub fn format_features(mut self, format_features: FormatFeatureFlags2) -> Self {
        self.inner.format_features = format_features;
        self
    }
    #[inline]
    pub fn sampler_ycbcr_conversion_components(
        mut self,
        sampler_ycbcr_conversion_components: ComponentMapping,
    ) -> Self {
        self.inner.sampler_ycbcr_conversion_components = sampler_ycbcr_conversion_components;
        self
    }
    #[inline]
    pub fn suggested_ycbcr_model(
        mut self,
        suggested_ycbcr_model: SamplerYcbcrModelConversion,
    ) -> Self {
        self.inner.suggested_ycbcr_model = suggested_ycbcr_model;
        self
    }
    #[inline]
    pub fn suggested_ycbcr_range(mut self, suggested_ycbcr_range: SamplerYcbcrRange) -> Self {
        self.inner.suggested_ycbcr_range = suggested_ycbcr_range;
        self
    }
    #[inline]
    pub fn suggested_x_chroma_offset(mut self, suggested_x_chroma_offset: ChromaLocation) -> Self {
        self.inner.suggested_x_chroma_offset = suggested_x_chroma_offset;
        self
    }
    #[inline]
    pub fn suggested_y_chroma_offset(mut self, suggested_y_chroma_offset: ChromaLocation) -> Self {
        self.inner.suggested_y_chroma_offset = suggested_y_chroma_offset;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> AndroidHardwareBufferFormatProperties2ANDROID {
        self.inner
    }
}
// C-layout mirror of VkPipelineRenderingCreateInfo; see the #[doc] link for the
// spec page. Generated code — field order matches the C struct exactly.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRenderingCreateInfo.html>"]
pub struct PipelineRenderingCreateInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub view_mask: u32,
    // Pointer/length pair: the builder's `color_attachment_formats` setter fills
    // both fields from a single slice.
    pub color_attachment_count: u32,
    pub p_color_attachment_formats: *const Format,
    pub depth_attachment_format: Format,
    pub stencil_attachment_format: Format,
}
// Default pre-tags `s_type` and nulls all pointers.
impl ::std::default::Default for PipelineRenderingCreateInfo {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            view_mask: u32::default(),
            color_attachment_count: u32::default(),
            p_color_attachment_formats: ::std::ptr::null(),
            depth_attachment_format: Format::default(),
            stencil_attachment_format: Format::default(),
        }
    }
}
// Associates the struct with the VkStructureType value written into `s_type`.
unsafe impl TaggedStructure for PipelineRenderingCreateInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_RENDERING_CREATE_INFO;
}
impl PipelineRenderingCreateInfo {
    pub fn builder<'a>() -> PipelineRenderingCreateInfoBuilder<'a> {
        PipelineRenderingCreateInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder wrapper; PhantomData ties borrowed pointers stored in `inner` to 'a.
#[repr(transparent)]
pub struct PipelineRenderingCreateInfoBuilder<'a> {
    inner: PipelineRenderingCreateInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct is a valid p_next extension for GraphicsPipelineCreateInfo.
unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineRenderingCreateInfoBuilder<'_> {}
unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineRenderingCreateInfo {}
// Deref/DerefMut let a &builder be passed where a &struct is expected.
impl<'a> ::std::ops::Deref for PipelineRenderingCreateInfoBuilder<'a> {
    type Target = PipelineRenderingCreateInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PipelineRenderingCreateInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PipelineRenderingCreateInfoBuilder<'a> {
    #[inline]
    pub fn view_mask(mut self, view_mask: u32) -> Self {
        self.inner.view_mask = view_mask;
        self
    }
    // Stores the slice's length and pointer; the slice must outlive 'a.
    #[inline]
    pub fn color_attachment_formats(mut self, color_attachment_formats: &'a [Format]) -> Self {
        self.inner.color_attachment_count = color_attachment_formats.len() as _;
        self.inner.p_color_attachment_formats = color_attachment_formats.as_ptr();
        self
    }
    #[inline]
    pub fn depth_attachment_format(mut self, depth_attachment_format: Format) -> Self {
        self.inner.depth_attachment_format = depth_attachment_format;
        self
    }
    #[inline]
    pub fn stencil_attachment_format(mut self, stencil_attachment_format: Format) -> Self {
        self.inner.stencil_attachment_format = stencil_attachment_format;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PipelineRenderingCreateInfo {
        self.inner
    }
}
// C-layout mirror of VkRenderingInfo; see the #[doc] link for the spec page.
// Generated code — field order matches the C struct exactly.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderingInfo.html>"]
pub struct RenderingInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: RenderingFlags,
    pub render_area: Rect2D,
    pub layer_count: u32,
    pub view_mask: u32,
    // Pointer/length pair: the builder's `color_attachments` setter fills both
    // fields from a single slice.
    pub color_attachment_count: u32,
    pub p_color_attachments: *const RenderingAttachmentInfo,
    pub p_depth_attachment: *const RenderingAttachmentInfo,
    pub p_stencil_attachment: *const RenderingAttachmentInfo,
}
// Default pre-tags `s_type` and nulls all pointers.
impl ::std::default::Default for RenderingInfo {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: RenderingFlags::default(),
            render_area: Rect2D::default(),
            layer_count: u32::default(),
            view_mask: u32::default(),
            color_attachment_count: u32::default(),
            p_color_attachments: ::std::ptr::null(),
            p_depth_attachment: ::std::ptr::null(),
            p_stencil_attachment: ::std::ptr::null(),
        }
    }
}
// Associates the struct with the VkStructureType value written into `s_type`.
unsafe impl TaggedStructure for RenderingInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::RENDERING_INFO;
}
impl RenderingInfo {
    pub fn builder<'a>() -> RenderingInfoBuilder<'a> {
        RenderingInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder wrapper; PhantomData ties borrowed pointers stored in `inner` to 'a.
#[repr(transparent)]
pub struct RenderingInfoBuilder<'a> {
    inner: RenderingInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker trait implemented by structs that may be pushed onto a RenderingInfo
// p_next chain via `push_next` below. Unsafe: implementors are trusted to start
// with the s_type/p_next header the raw pointer casts in `push_next` rely on.
pub unsafe trait ExtendsRenderingInfo {}
// Deref/DerefMut let a &builder be passed where a &struct is expected.
impl<'a> ::std::ops::Deref for RenderingInfoBuilder<'a> {
    type Target = RenderingInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for RenderingInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> RenderingInfoBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: RenderingFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn render_area(mut self, render_area: Rect2D) -> Self {
        self.inner.render_area = render_area;
        self
    }
    #[inline]
    pub fn layer_count(mut self, layer_count: u32) -> Self {
        self.inner.layer_count = layer_count;
        self
    }
    #[inline]
    pub fn view_mask(mut self, view_mask: u32) -> Self {
        self.inner.view_mask = view_mask;
        self
    }
    // Stores the slice's length and pointer; the slice must outlive 'a.
    #[inline]
    pub fn color_attachments(mut self, color_attachments: &'a [RenderingAttachmentInfo]) -> Self {
        self.inner.color_attachment_count = color_attachments.len() as _;
        self.inner.p_color_attachments = color_attachments.as_ptr();
        self
    }
    #[inline]
    pub fn depth_attachment(mut self, depth_attachment: &'a RenderingAttachmentInfo) -> Self {
        self.inner.p_depth_attachment = depth_attachment;
        self
    }
    #[inline]
    pub fn stencil_attachment(mut self, stencil_attachment: &'a RenderingAttachmentInfo) -> Self {
        self.inner.p_stencil_attachment = stencil_attachment;
        self
    }
    #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
    #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
    #[doc = r" valid extension structs can be pushed into the chain."]
    #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
    #[doc = r" chain will look like `A -> D -> B -> C`."]
    pub fn push_next<T: ExtendsRenderingInfo>(mut self, next: &'a mut T) -> Self {
        unsafe {
            // SAFETY: T implements ExtendsRenderingInfo, so it is presumed to begin
            // with the standard s_type/p_next header — an invariant upheld by the
            // generated marker impls (NOTE(review): not checkable from this file).
            let next_ptr = <*const T>::cast(next);
            // Walk `next`'s own chain to its last element, splice the current head
            // after it, then make `next` the new head of this builder's chain.
            let last_next = ptr_chain_iter(next).last().unwrap();
            (*last_next).p_next = self.inner.p_next as _;
            self.inner.p_next = next_ptr;
        }
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> RenderingInfo {
        self.inner
    }
}
// C-layout mirror of VkRenderingAttachmentInfo; see the #[doc] link for the spec
// page. Debug is implemented by hand below instead of derived because
// `clear_value` (a ClearValue union) has no derivable Debug representation.
#[repr(C)]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderingAttachmentInfo.html>"]
pub struct RenderingAttachmentInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub image_view: ImageView,
    pub image_layout: ImageLayout,
    pub resolve_mode: ResolveModeFlags,
    pub resolve_image_view: ImageView,
    pub resolve_image_layout: ImageLayout,
    pub load_op: AttachmentLoadOp,
    pub store_op: AttachmentStoreOp,
    pub clear_value: ClearValue,
}
// Manual Debug: prints every field normally, but renders `clear_value` as the
// literal placeholder "union" since the active union variant is not tracked.
#[cfg(feature = "debug")]
impl fmt::Debug for RenderingAttachmentInfo {
    fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
        fmt.debug_struct("RenderingAttachmentInfo")
            .field("s_type", &self.s_type)
            .field("p_next", &self.p_next)
            .field("image_view", &self.image_view)
            .field("image_layout", &self.image_layout)
            .field("resolve_mode", &self.resolve_mode)
            .field("resolve_image_view", &self.resolve_image_view)
            .field("resolve_image_layout", &self.resolve_image_layout)
            .field("load_op", &self.load_op)
            .field("store_op", &self.store_op)
            .field("clear_value", &"union")
            .finish()
    }
}
// Default pre-tags `s_type` and nulls the p_next chain.
impl ::std::default::Default for RenderingAttachmentInfo {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            image_view: ImageView::default(),
            image_layout: ImageLayout::default(),
            resolve_mode: ResolveModeFlags::default(),
            resolve_image_view: ImageView::default(),
            resolve_image_layout: ImageLayout::default(),
            load_op: AttachmentLoadOp::default(),
            store_op: AttachmentStoreOp::default(),
            clear_value: ClearValue::default(),
        }
    }
}
// Associates the struct with the VkStructureType value written into `s_type`.
unsafe impl TaggedStructure for RenderingAttachmentInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::RENDERING_ATTACHMENT_INFO;
}
impl RenderingAttachmentInfo {
    pub fn builder<'a>() -> RenderingAttachmentInfoBuilder<'a> {
        RenderingAttachmentInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder wrapper; the PhantomData lifetime is kept for uniformity with the
// other generated builders (no borrowed fields are stored by these setters).
#[repr(transparent)]
pub struct RenderingAttachmentInfoBuilder<'a> {
    inner: RenderingAttachmentInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Deref/DerefMut let a &builder be passed where a &struct is expected.
impl<'a> ::std::ops::Deref for RenderingAttachmentInfoBuilder<'a> {
    type Target = RenderingAttachmentInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for RenderingAttachmentInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Chainable by-value setters, one per struct field.
impl<'a> RenderingAttachmentInfoBuilder<'a> {
    #[inline]
    pub fn image_view(mut self, image_view: ImageView) -> Self {
        self.inner.image_view = image_view;
        self
    }
    #[inline]
    pub fn image_layout(mut self, image_layout: ImageLayout) -> Self {
        self.inner.image_layout = image_layout;
        self
    }
    #[inline]
    pub fn resolve_mode(mut self, resolve_mode: ResolveModeFlags) -> Self {
        self.inner.resolve_mode = resolve_mode;
        self
    }
    #[inline]
    pub fn resolve_image_view(mut self, resolve_image_view: ImageView) -> Self {
        self.inner.resolve_image_view = resolve_image_view;
        self
    }
    #[inline]
    pub fn resolve_image_layout(mut self, resolve_image_layout: ImageLayout) -> Self {
        self.inner.resolve_image_layout = resolve_image_layout;
        self
    }
    #[inline]
    pub fn load_op(mut self, load_op: AttachmentLoadOp) -> Self {
        self.inner.load_op = load_op;
        self
    }
    #[inline]
    pub fn store_op(mut self, store_op: AttachmentStoreOp) -> Self {
        self.inner.store_op = store_op;
        self
    }
    #[inline]
    pub fn clear_value(mut self, clear_value: ClearValue) -> Self {
        self.inner.clear_value = clear_value;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> RenderingAttachmentInfo {
        self.inner
    }
}
// C-layout mirror of VkRenderingFragmentShadingRateAttachmentInfoKHR; see the
// #[doc] link for the spec page.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderingFragmentShadingRateAttachmentInfoKHR.html>"]
pub struct RenderingFragmentShadingRateAttachmentInfoKHR {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub image_view: ImageView,
    pub image_layout: ImageLayout,
    pub shading_rate_attachment_texel_size: Extent2D,
}
// Default pre-tags `s_type` and nulls the p_next chain.
impl ::std::default::Default for RenderingFragmentShadingRateAttachmentInfoKHR {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            image_view: ImageView::default(),
            image_layout: ImageLayout::default(),
            shading_rate_attachment_texel_size: Extent2D::default(),
        }
    }
}
// Associates the struct with the VkStructureType value written into `s_type`.
unsafe impl TaggedStructure for RenderingFragmentShadingRateAttachmentInfoKHR {
    const STRUCTURE_TYPE: StructureType =
        StructureType::RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR;
}
impl RenderingFragmentShadingRateAttachmentInfoKHR {
    pub fn builder<'a>() -> RenderingFragmentShadingRateAttachmentInfoKHRBuilder<'a> {
        RenderingFragmentShadingRateAttachmentInfoKHRBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder wrapper; PhantomData kept for uniformity with the other builders.
#[repr(transparent)]
pub struct RenderingFragmentShadingRateAttachmentInfoKHRBuilder<'a> {
    inner: RenderingFragmentShadingRateAttachmentInfoKHR,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct may be pushed onto a RenderingInfo p_next chain.
unsafe impl ExtendsRenderingInfo for RenderingFragmentShadingRateAttachmentInfoKHRBuilder<'_> {}
unsafe impl ExtendsRenderingInfo for RenderingFragmentShadingRateAttachmentInfoKHR {}
// Deref/DerefMut let a &builder be passed where a &struct is expected.
impl<'a> ::std::ops::Deref for RenderingFragmentShadingRateAttachmentInfoKHRBuilder<'a> {
    type Target = RenderingFragmentShadingRateAttachmentInfoKHR;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for RenderingFragmentShadingRateAttachmentInfoKHRBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Chainable by-value setters, one per struct field.
impl<'a> RenderingFragmentShadingRateAttachmentInfoKHRBuilder<'a> {
    #[inline]
    pub fn image_view(mut self, image_view: ImageView) -> Self {
        self.inner.image_view = image_view;
        self
    }
    #[inline]
    pub fn image_layout(mut self, image_layout: ImageLayout) -> Self {
        self.inner.image_layout = image_layout;
        self
    }
    #[inline]
    pub fn shading_rate_attachment_texel_size(
        mut self,
        shading_rate_attachment_texel_size: Extent2D,
    ) -> Self {
        self.inner.shading_rate_attachment_texel_size = shading_rate_attachment_texel_size;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> RenderingFragmentShadingRateAttachmentInfoKHR {
        self.inner
    }
}
// C-layout mirror of VkRenderingFragmentDensityMapAttachmentInfoEXT; see the
// #[doc] link for the spec page.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderingFragmentDensityMapAttachmentInfoEXT.html>"]
pub struct RenderingFragmentDensityMapAttachmentInfoEXT {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub image_view: ImageView,
    pub image_layout: ImageLayout,
}
// Default pre-tags `s_type` and nulls the p_next chain.
impl ::std::default::Default for RenderingFragmentDensityMapAttachmentInfoEXT {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            image_view: ImageView::default(),
            image_layout: ImageLayout::default(),
        }
    }
}
// Associates the struct with the VkStructureType value written into `s_type`.
unsafe impl TaggedStructure for RenderingFragmentDensityMapAttachmentInfoEXT {
    const STRUCTURE_TYPE: StructureType =
        StructureType::RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT;
}
impl RenderingFragmentDensityMapAttachmentInfoEXT {
    pub fn builder<'a>() -> RenderingFragmentDensityMapAttachmentInfoEXTBuilder<'a> {
        RenderingFragmentDensityMapAttachmentInfoEXTBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder wrapper; PhantomData kept for uniformity with the other builders.
#[repr(transparent)]
pub struct RenderingFragmentDensityMapAttachmentInfoEXTBuilder<'a> {
    inner: RenderingFragmentDensityMapAttachmentInfoEXT,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct may be pushed onto a RenderingInfo p_next chain.
unsafe impl ExtendsRenderingInfo for RenderingFragmentDensityMapAttachmentInfoEXTBuilder<'_> {}
unsafe impl ExtendsRenderingInfo for RenderingFragmentDensityMapAttachmentInfoEXT {}
// Deref/DerefMut let a &builder be passed where a &struct is expected.
impl<'a> ::std::ops::Deref for RenderingFragmentDensityMapAttachmentInfoEXTBuilder<'a> {
    type Target = RenderingFragmentDensityMapAttachmentInfoEXT;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for RenderingFragmentDensityMapAttachmentInfoEXTBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
// Chainable by-value setters, one per struct field.
impl<'a> RenderingFragmentDensityMapAttachmentInfoEXTBuilder<'a> {
    #[inline]
    pub fn image_view(mut self, image_view: ImageView) -> Self {
        self.inner.image_view = image_view;
        self
    }
    #[inline]
    pub fn image_layout(mut self, image_layout: ImageLayout) -> Self {
        self.inner.image_layout = image_layout;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> RenderingFragmentDensityMapAttachmentInfoEXT {
        self.inner
    }
}
// C-layout mirror of VkPhysicalDeviceDynamicRenderingFeatures; see the #[doc]
// link for the spec page.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDynamicRenderingFeatures.html>"]
pub struct PhysicalDeviceDynamicRenderingFeatures {
    pub s_type: StructureType,
    pub p_next: *mut c_void,
    // Bool32, not bool: matches the C ABI; the builder setter converts from bool.
    pub dynamic_rendering: Bool32,
}
// Default pre-tags `s_type` and nulls the p_next chain.
impl ::std::default::Default for PhysicalDeviceDynamicRenderingFeatures {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null_mut(),
            dynamic_rendering: Bool32::default(),
        }
    }
}
// Associates the struct with the VkStructureType value written into `s_type`.
unsafe impl TaggedStructure for PhysicalDeviceDynamicRenderingFeatures {
    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES;
}
impl PhysicalDeviceDynamicRenderingFeatures {
    pub fn builder<'a>() -> PhysicalDeviceDynamicRenderingFeaturesBuilder<'a> {
        PhysicalDeviceDynamicRenderingFeaturesBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder wrapper; PhantomData kept for uniformity with the other builders.
#[repr(transparent)]
pub struct PhysicalDeviceDynamicRenderingFeaturesBuilder<'a> {
    inner: PhysicalDeviceDynamicRenderingFeatures,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct may extend both PhysicalDeviceFeatures2 and
// DeviceCreateInfo p_next chains.
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDynamicRenderingFeaturesBuilder<'_> {}
unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDynamicRenderingFeatures {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDynamicRenderingFeaturesBuilder<'_> {}
unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDynamicRenderingFeatures {}
// Deref/DerefMut let a &builder be passed where a &struct is expected.
impl<'a> ::std::ops::Deref for PhysicalDeviceDynamicRenderingFeaturesBuilder<'a> {
    type Target = PhysicalDeviceDynamicRenderingFeatures;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for PhysicalDeviceDynamicRenderingFeaturesBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> PhysicalDeviceDynamicRenderingFeaturesBuilder<'a> {
    // Accepts a Rust bool and stores it as the FFI Bool32 via Into.
    #[inline]
    pub fn dynamic_rendering(mut self, dynamic_rendering: bool) -> Self {
        self.inner.dynamic_rendering = dynamic_rendering.into();
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> PhysicalDeviceDynamicRenderingFeatures {
        self.inner
    }
}
// C-layout mirror of VkCommandBufferInheritanceRenderingInfo; see the #[doc]
// link for the spec page. Generated code — field order matches the C struct.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandBufferInheritanceRenderingInfo.html>"]
pub struct CommandBufferInheritanceRenderingInfo {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    pub flags: RenderingFlags,
    pub view_mask: u32,
    // Pointer/length pair: the builder's `color_attachment_formats` setter fills
    // both fields from a single slice.
    pub color_attachment_count: u32,
    pub p_color_attachment_formats: *const Format,
    pub depth_attachment_format: Format,
    pub stencil_attachment_format: Format,
    pub rasterization_samples: SampleCountFlags,
}
// Default pre-tags `s_type` and nulls all pointers.
impl ::std::default::Default for CommandBufferInheritanceRenderingInfo {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            flags: RenderingFlags::default(),
            view_mask: u32::default(),
            color_attachment_count: u32::default(),
            p_color_attachment_formats: ::std::ptr::null(),
            depth_attachment_format: Format::default(),
            stencil_attachment_format: Format::default(),
            rasterization_samples: SampleCountFlags::default(),
        }
    }
}
// Associates the struct with the VkStructureType value written into `s_type`.
unsafe impl TaggedStructure for CommandBufferInheritanceRenderingInfo {
    const STRUCTURE_TYPE: StructureType = StructureType::COMMAND_BUFFER_INHERITANCE_RENDERING_INFO;
}
impl CommandBufferInheritanceRenderingInfo {
    pub fn builder<'a>() -> CommandBufferInheritanceRenderingInfoBuilder<'a> {
        CommandBufferInheritanceRenderingInfoBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder wrapper; PhantomData ties borrowed pointers stored in `inner` to 'a.
#[repr(transparent)]
pub struct CommandBufferInheritanceRenderingInfoBuilder<'a> {
    inner: CommandBufferInheritanceRenderingInfo,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct may extend a CommandBufferInheritanceInfo p_next chain.
unsafe impl ExtendsCommandBufferInheritanceInfo
    for CommandBufferInheritanceRenderingInfoBuilder<'_>
{
}
unsafe impl ExtendsCommandBufferInheritanceInfo for CommandBufferInheritanceRenderingInfo {}
// Deref/DerefMut let a &builder be passed where a &struct is expected.
impl<'a> ::std::ops::Deref for CommandBufferInheritanceRenderingInfoBuilder<'a> {
    type Target = CommandBufferInheritanceRenderingInfo;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for CommandBufferInheritanceRenderingInfoBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> CommandBufferInheritanceRenderingInfoBuilder<'a> {
    #[inline]
    pub fn flags(mut self, flags: RenderingFlags) -> Self {
        self.inner.flags = flags;
        self
    }
    #[inline]
    pub fn view_mask(mut self, view_mask: u32) -> Self {
        self.inner.view_mask = view_mask;
        self
    }
    // Stores the slice's length and pointer; the slice must outlive 'a.
    #[inline]
    pub fn color_attachment_formats(mut self, color_attachment_formats: &'a [Format]) -> Self {
        self.inner.color_attachment_count = color_attachment_formats.len() as _;
        self.inner.p_color_attachment_formats = color_attachment_formats.as_ptr();
        self
    }
    #[inline]
    pub fn depth_attachment_format(mut self, depth_attachment_format: Format) -> Self {
        self.inner.depth_attachment_format = depth_attachment_format;
        self
    }
    #[inline]
    pub fn stencil_attachment_format(mut self, stencil_attachment_format: Format) -> Self {
        self.inner.stencil_attachment_format = stencil_attachment_format;
        self
    }
    #[inline]
    pub fn rasterization_samples(mut self, rasterization_samples: SampleCountFlags) -> Self {
        self.inner.rasterization_samples = rasterization_samples;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> CommandBufferInheritanceRenderingInfo {
        self.inner
    }
}
// C-layout mirror of VkAttachmentSampleCountInfoAMD; see the #[doc] link for the
// spec page.
#[repr(C)]
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(Copy, Clone)]
#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAttachmentSampleCountInfoAMD.html>"]
pub struct AttachmentSampleCountInfoAMD {
    pub s_type: StructureType,
    pub p_next: *const c_void,
    // Pointer/length pair: the builder's `color_attachment_samples` setter fills
    // both fields from a single slice.
    pub color_attachment_count: u32,
    pub p_color_attachment_samples: *const SampleCountFlags,
    pub depth_stencil_attachment_samples: SampleCountFlags,
}
// Default pre-tags `s_type` and nulls all pointers.
impl ::std::default::Default for AttachmentSampleCountInfoAMD {
    #[inline]
    fn default() -> Self {
        Self {
            s_type: Self::STRUCTURE_TYPE,
            p_next: ::std::ptr::null(),
            color_attachment_count: u32::default(),
            p_color_attachment_samples: ::std::ptr::null(),
            depth_stencil_attachment_samples: SampleCountFlags::default(),
        }
    }
}
// Associates the struct with the VkStructureType value written into `s_type`.
unsafe impl TaggedStructure for AttachmentSampleCountInfoAMD {
    const STRUCTURE_TYPE: StructureType = StructureType::ATTACHMENT_SAMPLE_COUNT_INFO_AMD;
}
impl AttachmentSampleCountInfoAMD {
    pub fn builder<'a>() -> AttachmentSampleCountInfoAMDBuilder<'a> {
        AttachmentSampleCountInfoAMDBuilder {
            inner: Self::default(),
            marker: ::std::marker::PhantomData,
        }
    }
}
// Builder wrapper; PhantomData ties borrowed pointers stored in `inner` to 'a.
#[repr(transparent)]
pub struct AttachmentSampleCountInfoAMDBuilder<'a> {
    inner: AttachmentSampleCountInfoAMD,
    marker: ::std::marker::PhantomData<&'a ()>,
}
// Marker impls: this struct may extend both CommandBufferInheritanceInfo and
// GraphicsPipelineCreateInfo p_next chains.
unsafe impl ExtendsCommandBufferInheritanceInfo for AttachmentSampleCountInfoAMDBuilder<'_> {}
unsafe impl ExtendsCommandBufferInheritanceInfo for AttachmentSampleCountInfoAMD {}
unsafe impl ExtendsGraphicsPipelineCreateInfo for AttachmentSampleCountInfoAMDBuilder<'_> {}
unsafe impl ExtendsGraphicsPipelineCreateInfo for AttachmentSampleCountInfoAMD {}
// Deref/DerefMut let a &builder be passed where a &struct is expected.
impl<'a> ::std::ops::Deref for AttachmentSampleCountInfoAMDBuilder<'a> {
    type Target = AttachmentSampleCountInfoAMD;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl<'a> ::std::ops::DerefMut for AttachmentSampleCountInfoAMDBuilder<'a> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
impl<'a> AttachmentSampleCountInfoAMDBuilder<'a> {
    // Stores the slice's length and pointer; the slice must outlive 'a.
    #[inline]
    pub fn color_attachment_samples(
        mut self,
        color_attachment_samples: &'a [SampleCountFlags],
    ) -> Self {
        self.inner.color_attachment_count = color_attachment_samples.len() as _;
        self.inner.p_color_attachment_samples = color_attachment_samples.as_ptr();
        self
    }
    #[inline]
    pub fn depth_stencil_attachment_samples(
        mut self,
        depth_stencil_attachment_samples: SampleCountFlags,
    ) -> Self {
        self.inner.depth_stencil_attachment_samples = depth_stencil_attachment_samples;
        self
    }
    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
    pub fn build(self) -> AttachmentSampleCountInfoAMD {
        self.inner
    }
}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMultiviewPerViewAttributesInfoNVX.html>"]
+pub struct MultiviewPerViewAttributesInfoNVX {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub per_view_attributes: Bool32,
+ pub per_view_attributes_position_x_only: Bool32,
+}
+impl ::std::default::Default for MultiviewPerViewAttributesInfoNVX {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ per_view_attributes: Bool32::default(),
+ per_view_attributes_position_x_only: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MultiviewPerViewAttributesInfoNVX {
+ const STRUCTURE_TYPE: StructureType = StructureType::MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX;
+}
+impl MultiviewPerViewAttributesInfoNVX {
+ pub fn builder<'a>() -> MultiviewPerViewAttributesInfoNVXBuilder<'a> {
+ MultiviewPerViewAttributesInfoNVXBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MultiviewPerViewAttributesInfoNVXBuilder<'a> {
+ inner: MultiviewPerViewAttributesInfoNVX,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsCommandBufferInheritanceInfo for MultiviewPerViewAttributesInfoNVXBuilder<'_> {}
+unsafe impl ExtendsCommandBufferInheritanceInfo for MultiviewPerViewAttributesInfoNVX {}
+unsafe impl ExtendsGraphicsPipelineCreateInfo for MultiviewPerViewAttributesInfoNVXBuilder<'_> {}
+unsafe impl ExtendsGraphicsPipelineCreateInfo for MultiviewPerViewAttributesInfoNVX {}
+unsafe impl ExtendsRenderingInfo for MultiviewPerViewAttributesInfoNVXBuilder<'_> {}
+unsafe impl ExtendsRenderingInfo for MultiviewPerViewAttributesInfoNVX {}
+impl<'a> ::std::ops::Deref for MultiviewPerViewAttributesInfoNVXBuilder<'a> {
+ type Target = MultiviewPerViewAttributesInfoNVX;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MultiviewPerViewAttributesInfoNVXBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MultiviewPerViewAttributesInfoNVXBuilder<'a> {
+ #[inline]
+ pub fn per_view_attributes(mut self, per_view_attributes: bool) -> Self {
+ self.inner.per_view_attributes = per_view_attributes.into();
+ self
+ }
+ #[inline]
+ pub fn per_view_attributes_position_x_only(
+ mut self,
+ per_view_attributes_position_x_only: bool,
+ ) -> Self {
+ self.inner.per_view_attributes_position_x_only = per_view_attributes_position_x_only.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MultiviewPerViewAttributesInfoNVX {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceImageViewMinLodFeaturesEXT.html>"]
+pub struct PhysicalDeviceImageViewMinLodFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub min_lod: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceImageViewMinLodFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ min_lod: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceImageViewMinLodFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT;
+}
+impl PhysicalDeviceImageViewMinLodFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceImageViewMinLodFeaturesEXTBuilder<'a> {
+ PhysicalDeviceImageViewMinLodFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceImageViewMinLodFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceImageViewMinLodFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImageViewMinLodFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImageViewMinLodFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImageViewMinLodFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImageViewMinLodFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceImageViewMinLodFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceImageViewMinLodFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceImageViewMinLodFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceImageViewMinLodFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn min_lod(mut self, min_lod: bool) -> Self {
+ self.inner.min_lod = min_lod.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceImageViewMinLodFeaturesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageViewMinLodCreateInfoEXT.html>"]
+pub struct ImageViewMinLodCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub min_lod: f32,
+}
+impl ::std::default::Default for ImageViewMinLodCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ min_lod: f32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImageViewMinLodCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT;
+}
+impl ImageViewMinLodCreateInfoEXT {
+ pub fn builder<'a>() -> ImageViewMinLodCreateInfoEXTBuilder<'a> {
+ ImageViewMinLodCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImageViewMinLodCreateInfoEXTBuilder<'a> {
+ inner: ImageViewMinLodCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsImageViewCreateInfo for ImageViewMinLodCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsImageViewCreateInfo for ImageViewMinLodCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for ImageViewMinLodCreateInfoEXTBuilder<'a> {
+ type Target = ImageViewMinLodCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageViewMinLodCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImageViewMinLodCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn min_lod(mut self, min_lod: f32) -> Self {
+ self.inner.min_lod = min_lod;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageViewMinLodCreateInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT.html>"]
+pub struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub rasterization_order_color_attachment_access: Bool32,
+ pub rasterization_order_depth_attachment_access: Bool32,
+ pub rasterization_order_stencil_attachment_access: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ rasterization_order_color_attachment_access: Bool32::default(),
+ rasterization_order_depth_attachment_access: Bool32::default(),
+ rasterization_order_stencil_attachment_access: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT;
+}
+impl PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXTBuilder<'a> {
+ PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT
+{
+}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT
+{
+}
+impl<'a> ::std::ops::Deref
+ for PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXTBuilder<'a>
+{
+ type Target = PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut
+ for PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXTBuilder<'a>
+{
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn rasterization_order_color_attachment_access(
+ mut self,
+ rasterization_order_color_attachment_access: bool,
+ ) -> Self {
+ self.inner.rasterization_order_color_attachment_access =
+ rasterization_order_color_attachment_access.into();
+ self
+ }
+ #[inline]
+ pub fn rasterization_order_depth_attachment_access(
+ mut self,
+ rasterization_order_depth_attachment_access: bool,
+ ) -> Self {
+ self.inner.rasterization_order_depth_attachment_access =
+ rasterization_order_depth_attachment_access.into();
+ self
+ }
+ #[inline]
+ pub fn rasterization_order_stencil_attachment_access(
+ mut self,
+ rasterization_order_stencil_attachment_access: bool,
+ ) -> Self {
+ self.inner.rasterization_order_stencil_attachment_access =
+ rasterization_order_stencil_attachment_access.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceLinearColorAttachmentFeaturesNV.html>"]
+pub struct PhysicalDeviceLinearColorAttachmentFeaturesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub linear_color_attachment: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceLinearColorAttachmentFeaturesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ linear_color_attachment: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceLinearColorAttachmentFeaturesNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV;
+}
+impl PhysicalDeviceLinearColorAttachmentFeaturesNV {
+ pub fn builder<'a>() -> PhysicalDeviceLinearColorAttachmentFeaturesNVBuilder<'a> {
+ PhysicalDeviceLinearColorAttachmentFeaturesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceLinearColorAttachmentFeaturesNVBuilder<'a> {
+ inner: PhysicalDeviceLinearColorAttachmentFeaturesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceLinearColorAttachmentFeaturesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceLinearColorAttachmentFeaturesNV {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceLinearColorAttachmentFeaturesNVBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceLinearColorAttachmentFeaturesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceLinearColorAttachmentFeaturesNVBuilder<'a> {
+ type Target = PhysicalDeviceLinearColorAttachmentFeaturesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceLinearColorAttachmentFeaturesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceLinearColorAttachmentFeaturesNVBuilder<'a> {
+ #[inline]
+ pub fn linear_color_attachment(mut self, linear_color_attachment: bool) -> Self {
+ self.inner.linear_color_attachment = linear_color_attachment.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceLinearColorAttachmentFeaturesNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT.html>"]
+pub struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub graphics_pipeline_library: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ graphics_pipeline_library: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT;
+}
+impl PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceGraphicsPipelineLibraryFeaturesEXTBuilder<'a> {
+ PhysicalDeviceGraphicsPipelineLibraryFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceGraphicsPipelineLibraryFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceGraphicsPipelineLibraryFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceGraphicsPipelineLibraryFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceGraphicsPipelineLibraryFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceGraphicsPipelineLibraryFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceGraphicsPipelineLibraryFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn graphics_pipeline_library(mut self, graphics_pipeline_library: bool) -> Self {
+ self.inner.graphics_pipeline_library = graphics_pipeline_library.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT.html>"]
+pub struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub graphics_pipeline_library_fast_linking: Bool32,
+ pub graphics_pipeline_library_independent_interpolation_decoration: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ graphics_pipeline_library_fast_linking: Bool32::default(),
+ graphics_pipeline_library_independent_interpolation_decoration: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT;
+}
+impl PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceGraphicsPipelineLibraryPropertiesEXTBuilder<'a> {
+ PhysicalDeviceGraphicsPipelineLibraryPropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceGraphicsPipelineLibraryPropertiesEXTBuilder<'a> {
+ inner: PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceGraphicsPipelineLibraryPropertiesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT
+{
+}
+impl<'a> ::std::ops::Deref for PhysicalDeviceGraphicsPipelineLibraryPropertiesEXTBuilder<'a> {
+ type Target = PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceGraphicsPipelineLibraryPropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceGraphicsPipelineLibraryPropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn graphics_pipeline_library_fast_linking(
+ mut self,
+ graphics_pipeline_library_fast_linking: bool,
+ ) -> Self {
+ self.inner.graphics_pipeline_library_fast_linking =
+ graphics_pipeline_library_fast_linking.into();
+ self
+ }
+ #[inline]
+ pub fn graphics_pipeline_library_independent_interpolation_decoration(
+ mut self,
+ graphics_pipeline_library_independent_interpolation_decoration: bool,
+ ) -> Self {
+ self.inner
+ .graphics_pipeline_library_independent_interpolation_decoration =
+ graphics_pipeline_library_independent_interpolation_decoration.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkGraphicsPipelineLibraryCreateInfoEXT.html>"]
+pub struct GraphicsPipelineLibraryCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub flags: GraphicsPipelineLibraryFlagsEXT,
+}
+impl ::std::default::Default for GraphicsPipelineLibraryCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ flags: GraphicsPipelineLibraryFlagsEXT::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for GraphicsPipelineLibraryCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT;
+}
+impl GraphicsPipelineLibraryCreateInfoEXT {
+ pub fn builder<'a>() -> GraphicsPipelineLibraryCreateInfoEXTBuilder<'a> {
+ GraphicsPipelineLibraryCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct GraphicsPipelineLibraryCreateInfoEXTBuilder<'a> {
+ inner: GraphicsPipelineLibraryCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsGraphicsPipelineCreateInfo for GraphicsPipelineLibraryCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsGraphicsPipelineCreateInfo for GraphicsPipelineLibraryCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for GraphicsPipelineLibraryCreateInfoEXTBuilder<'a> {
+ type Target = GraphicsPipelineLibraryCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for GraphicsPipelineLibraryCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> GraphicsPipelineLibraryCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: GraphicsPipelineLibraryFlagsEXT) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> GraphicsPipelineLibraryCreateInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE.html>"]
+pub struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub descriptor_set_host_mapping: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ descriptor_set_host_mapping: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE;
+}
+impl PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE {
+ pub fn builder<'a>() -> PhysicalDeviceDescriptorSetHostMappingFeaturesVALVEBuilder<'a> {
+ PhysicalDeviceDescriptorSetHostMappingFeaturesVALVEBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceDescriptorSetHostMappingFeaturesVALVEBuilder<'a> {
+ inner: PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceDescriptorSetHostMappingFeaturesVALVEBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE {}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceDescriptorSetHostMappingFeaturesVALVEBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceDescriptorSetHostMappingFeaturesVALVEBuilder<'a> {
+ type Target = PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceDescriptorSetHostMappingFeaturesVALVEBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceDescriptorSetHostMappingFeaturesVALVEBuilder<'a> {
+ #[inline]
+ pub fn descriptor_set_host_mapping(mut self, descriptor_set_host_mapping: bool) -> Self {
+ self.inner.descriptor_set_host_mapping = descriptor_set_host_mapping.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorSetBindingReferenceVALVE.html>"]
+pub struct DescriptorSetBindingReferenceVALVE {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub descriptor_set_layout: DescriptorSetLayout,
+ pub binding: u32,
+}
+impl ::std::default::Default for DescriptorSetBindingReferenceVALVE {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ descriptor_set_layout: DescriptorSetLayout::default(),
+ binding: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DescriptorSetBindingReferenceVALVE {
+ const STRUCTURE_TYPE: StructureType = StructureType::DESCRIPTOR_SET_BINDING_REFERENCE_VALVE;
+}
+impl DescriptorSetBindingReferenceVALVE {
+ pub fn builder<'a>() -> DescriptorSetBindingReferenceVALVEBuilder<'a> {
+ DescriptorSetBindingReferenceVALVEBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DescriptorSetBindingReferenceVALVEBuilder<'a> {
+ inner: DescriptorSetBindingReferenceVALVE,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DescriptorSetBindingReferenceVALVEBuilder<'a> {
+ type Target = DescriptorSetBindingReferenceVALVE;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DescriptorSetBindingReferenceVALVEBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DescriptorSetBindingReferenceVALVEBuilder<'a> {
+ #[inline]
+ pub fn descriptor_set_layout(mut self, descriptor_set_layout: DescriptorSetLayout) -> Self {
+ self.inner.descriptor_set_layout = descriptor_set_layout;
+ self
+ }
+ #[inline]
+ pub fn binding(mut self, binding: u32) -> Self {
+ self.inner.binding = binding;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DescriptorSetBindingReferenceVALVE {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorSetLayoutHostMappingInfoVALVE.html>"]
+pub struct DescriptorSetLayoutHostMappingInfoVALVE {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub descriptor_offset: usize,
+ pub descriptor_size: u32,
+}
+impl ::std::default::Default for DescriptorSetLayoutHostMappingInfoVALVE {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ descriptor_offset: usize::default(),
+ descriptor_size: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DescriptorSetLayoutHostMappingInfoVALVE {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE;
+}
+impl DescriptorSetLayoutHostMappingInfoVALVE {
+ pub fn builder<'a>() -> DescriptorSetLayoutHostMappingInfoVALVEBuilder<'a> {
+ DescriptorSetLayoutHostMappingInfoVALVEBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DescriptorSetLayoutHostMappingInfoVALVEBuilder<'a> {
+ inner: DescriptorSetLayoutHostMappingInfoVALVE,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DescriptorSetLayoutHostMappingInfoVALVEBuilder<'a> {
+ type Target = DescriptorSetLayoutHostMappingInfoVALVE;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DescriptorSetLayoutHostMappingInfoVALVEBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DescriptorSetLayoutHostMappingInfoVALVEBuilder<'a> {
+ #[inline]
+ pub fn descriptor_offset(mut self, descriptor_offset: usize) -> Self {
+ self.inner.descriptor_offset = descriptor_offset;
+ self
+ }
+ #[inline]
+ pub fn descriptor_size(mut self, descriptor_size: u32) -> Self {
+ self.inner.descriptor_size = descriptor_size;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DescriptorSetLayoutHostMappingInfoVALVE {
+ self.inner
+ }
+}
+// NOTE(review): VK_EXT_shader_module_identifier feature-query/enable struct.
+// The Extends* impls below allow chaining it into PhysicalDeviceFeatures2 and DeviceCreateInfo p_next chains.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT.html>"]
+pub struct PhysicalDeviceShaderModuleIdentifierFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_module_identifier: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceShaderModuleIdentifierFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shader_module_identifier: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceShaderModuleIdentifierFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT;
+}
+impl PhysicalDeviceShaderModuleIdentifierFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceShaderModuleIdentifierFeaturesEXTBuilder<'a> {
+ PhysicalDeviceShaderModuleIdentifierFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderModuleIdentifierFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceShaderModuleIdentifierFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceShaderModuleIdentifierFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderModuleIdentifierFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderModuleIdentifierFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderModuleIdentifierFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderModuleIdentifierFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderModuleIdentifierFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceShaderModuleIdentifierFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn shader_module_identifier(mut self, shader_module_identifier: bool) -> Self {
+ // bool -> Bool32 (VK_TRUE/VK_FALSE) conversion via Into.
+ self.inner.shader_module_identifier = shader_module_identifier.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceShaderModuleIdentifierFeaturesEXT {
+ self.inner
+ }
+}
+// NOTE(review): VK_EXT_shader_module_identifier properties struct; chains into
+// PhysicalDeviceProperties2 (see ExtendsPhysicalDeviceProperties2 impls below) —
+// presumably filled in by the driver during vkGetPhysicalDeviceProperties2; verify against spec.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT.html>"]
+pub struct PhysicalDeviceShaderModuleIdentifierPropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_module_identifier_algorithm_uuid: [u8; UUID_SIZE],
+}
+impl ::std::default::Default for PhysicalDeviceShaderModuleIdentifierPropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ // SAFETY: zeroing a plain [u8; N] is always a valid value.
+ shader_module_identifier_algorithm_uuid: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceShaderModuleIdentifierPropertiesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT;
+}
+impl PhysicalDeviceShaderModuleIdentifierPropertiesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceShaderModuleIdentifierPropertiesEXTBuilder<'a> {
+ PhysicalDeviceShaderModuleIdentifierPropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderModuleIdentifierPropertiesEXTBuilder<'a> {
+ inner: PhysicalDeviceShaderModuleIdentifierPropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceShaderModuleIdentifierPropertiesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderModuleIdentifierPropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderModuleIdentifierPropertiesEXTBuilder<'a> {
+ type Target = PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderModuleIdentifierPropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceShaderModuleIdentifierPropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn shader_module_identifier_algorithm_uuid(
+ mut self,
+ shader_module_identifier_algorithm_uuid: [u8; UUID_SIZE],
+ ) -> Self {
+ self.inner.shader_module_identifier_algorithm_uuid =
+ shader_module_identifier_algorithm_uuid;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceShaderModuleIdentifierPropertiesEXT {
+ self.inner
+ }
+}
+// NOTE(review): chains into PipelineShaderStageCreateInfo (see Extends impls below) to supply a
+// previously-recorded shader module identifier instead of SPIR-V.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineShaderStageModuleIdentifierCreateInfoEXT.html>"]
+pub struct PipelineShaderStageModuleIdentifierCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub identifier_size: u32,
+ pub p_identifier: *const u8,
+}
+impl ::std::default::Default for PipelineShaderStageModuleIdentifierCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ identifier_size: u32::default(),
+ p_identifier: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineShaderStageModuleIdentifierCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT;
+}
+impl PipelineShaderStageModuleIdentifierCreateInfoEXT {
+ pub fn builder<'a>() -> PipelineShaderStageModuleIdentifierCreateInfoEXTBuilder<'a> {
+ PipelineShaderStageModuleIdentifierCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineShaderStageModuleIdentifierCreateInfoEXTBuilder<'a> {
+ inner: PipelineShaderStageModuleIdentifierCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPipelineShaderStageCreateInfo
+ for PipelineShaderStageModuleIdentifierCreateInfoEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPipelineShaderStageCreateInfo
+ for PipelineShaderStageModuleIdentifierCreateInfoEXT
+{
+}
+impl<'a> ::std::ops::Deref for PipelineShaderStageModuleIdentifierCreateInfoEXTBuilder<'a> {
+ type Target = PipelineShaderStageModuleIdentifierCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineShaderStageModuleIdentifierCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineShaderStageModuleIdentifierCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn identifier(mut self, identifier: &'a [u8]) -> Self {
+ // Slice setter: records both the length and the pointer; the 'a lifetime on the
+ // builder keeps the borrowed bytes alive while the builder is in use.
+ self.inner.identifier_size = identifier.len() as _;
+ self.inner.p_identifier = identifier.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineShaderStageModuleIdentifierCreateInfoEXT {
+ self.inner
+ }
+}
+// NOTE(review): fixed-capacity identifier buffer; identifier_size says how many of the
+// MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT bytes are valid.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkShaderModuleIdentifierEXT.html>"]
+pub struct ShaderModuleIdentifierEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub identifier_size: u32,
+ pub identifier: [u8; MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT],
+}
+impl ::std::default::Default for ShaderModuleIdentifierEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ identifier_size: u32::default(),
+ // SAFETY: zeroing a plain [u8; N] is always a valid value.
+ identifier: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for ShaderModuleIdentifierEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::SHADER_MODULE_IDENTIFIER_EXT;
+}
+impl ShaderModuleIdentifierEXT {
+ pub fn builder<'a>() -> ShaderModuleIdentifierEXTBuilder<'a> {
+ ShaderModuleIdentifierEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ShaderModuleIdentifierEXTBuilder<'a> {
+ inner: ShaderModuleIdentifierEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ShaderModuleIdentifierEXTBuilder<'a> {
+ type Target = ShaderModuleIdentifierEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ShaderModuleIdentifierEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ShaderModuleIdentifierEXTBuilder<'a> {
+ #[inline]
+ pub fn identifier_size(mut self, identifier_size: u32) -> Self {
+ self.inner.identifier_size = identifier_size;
+ self
+ }
+ #[inline]
+ pub fn identifier(mut self, identifier: [u8; MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT]) -> Self {
+ self.inner.identifier = identifier;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ShaderModuleIdentifierEXT {
+ self.inner
+ }
+}
+// NOTE(review): VK_EXT_image_compression_control input struct; the Extends impls below let it
+// chain into ImageCreateInfo, SwapchainCreateInfoKHR and PhysicalDeviceImageFormatInfo2.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageCompressionControlEXT.html>"]
+pub struct ImageCompressionControlEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub flags: ImageCompressionFlagsEXT,
+ pub compression_control_plane_count: u32,
+ pub p_fixed_rate_flags: *mut ImageCompressionFixedRateFlagsEXT,
+}
+impl ::std::default::Default for ImageCompressionControlEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ flags: ImageCompressionFlagsEXT::default(),
+ compression_control_plane_count: u32::default(),
+ p_fixed_rate_flags: ::std::ptr::null_mut(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImageCompressionControlEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_COMPRESSION_CONTROL_EXT;
+}
+impl ImageCompressionControlEXT {
+ pub fn builder<'a>() -> ImageCompressionControlEXTBuilder<'a> {
+ ImageCompressionControlEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImageCompressionControlEXTBuilder<'a> {
+ inner: ImageCompressionControlEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsImageCreateInfo for ImageCompressionControlEXTBuilder<'_> {}
+unsafe impl ExtendsImageCreateInfo for ImageCompressionControlEXT {}
+unsafe impl ExtendsSwapchainCreateInfoKHR for ImageCompressionControlEXTBuilder<'_> {}
+unsafe impl ExtendsSwapchainCreateInfoKHR for ImageCompressionControlEXT {}
+unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for ImageCompressionControlEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for ImageCompressionControlEXT {}
+impl<'a> ::std::ops::Deref for ImageCompressionControlEXTBuilder<'a> {
+ type Target = ImageCompressionControlEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageCompressionControlEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImageCompressionControlEXTBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: ImageCompressionFlagsEXT) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn fixed_rate_flags(
+ mut self,
+ fixed_rate_flags: &'a mut [ImageCompressionFixedRateFlagsEXT],
+ ) -> Self {
+ // Slice setter: per-plane fixed-rate flags; length goes into
+ // compression_control_plane_count, pointer borrowed for 'a.
+ self.inner.compression_control_plane_count = fixed_rate_flags.len() as _;
+ self.inner.p_fixed_rate_flags = fixed_rate_flags.as_mut_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageCompressionControlEXT {
+ self.inner
+ }
+}
+// NOTE(review): VK_EXT_image_compression_control feature-enable struct; chains into
+// PhysicalDeviceFeatures2 and DeviceCreateInfo (see Extends impls below).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceImageCompressionControlFeaturesEXT.html>"]
+pub struct PhysicalDeviceImageCompressionControlFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub image_compression_control: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceImageCompressionControlFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ image_compression_control: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceImageCompressionControlFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT;
+}
+impl PhysicalDeviceImageCompressionControlFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceImageCompressionControlFeaturesEXTBuilder<'a> {
+ PhysicalDeviceImageCompressionControlFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceImageCompressionControlFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceImageCompressionControlFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceImageCompressionControlFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImageCompressionControlFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceImageCompressionControlFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImageCompressionControlFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceImageCompressionControlFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceImageCompressionControlFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceImageCompressionControlFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceImageCompressionControlFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn image_compression_control(mut self, image_compression_control: bool) -> Self {
+ // bool -> Bool32 (VK_TRUE/VK_FALSE) conversion via Into.
+ self.inner.image_compression_control = image_compression_control.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceImageCompressionControlFeaturesEXT {
+ self.inner
+ }
+}
+// NOTE(review): output struct; can chain into ImageFormatProperties2, SurfaceFormat2KHR and
+// SubresourceLayout2EXT (see Extends impls below).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageCompressionPropertiesEXT.html>"]
+pub struct ImageCompressionPropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub image_compression_flags: ImageCompressionFlagsEXT,
+ pub image_compression_fixed_rate_flags: ImageCompressionFixedRateFlagsEXT,
+}
+impl ::std::default::Default for ImageCompressionPropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ image_compression_flags: ImageCompressionFlagsEXT::default(),
+ image_compression_fixed_rate_flags: ImageCompressionFixedRateFlagsEXT::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImageCompressionPropertiesEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_COMPRESSION_PROPERTIES_EXT;
+}
+impl ImageCompressionPropertiesEXT {
+ pub fn builder<'a>() -> ImageCompressionPropertiesEXTBuilder<'a> {
+ ImageCompressionPropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImageCompressionPropertiesEXTBuilder<'a> {
+ inner: ImageCompressionPropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsImageFormatProperties2 for ImageCompressionPropertiesEXTBuilder<'_> {}
+unsafe impl ExtendsImageFormatProperties2 for ImageCompressionPropertiesEXT {}
+unsafe impl ExtendsSurfaceFormat2KHR for ImageCompressionPropertiesEXTBuilder<'_> {}
+unsafe impl ExtendsSurfaceFormat2KHR for ImageCompressionPropertiesEXT {}
+unsafe impl ExtendsSubresourceLayout2EXT for ImageCompressionPropertiesEXTBuilder<'_> {}
+unsafe impl ExtendsSubresourceLayout2EXT for ImageCompressionPropertiesEXT {}
+impl<'a> ::std::ops::Deref for ImageCompressionPropertiesEXTBuilder<'a> {
+ type Target = ImageCompressionPropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageCompressionPropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImageCompressionPropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn image_compression_flags(
+ mut self,
+ image_compression_flags: ImageCompressionFlagsEXT,
+ ) -> Self {
+ self.inner.image_compression_flags = image_compression_flags;
+ self
+ }
+ #[inline]
+ pub fn image_compression_fixed_rate_flags(
+ mut self,
+ image_compression_fixed_rate_flags: ImageCompressionFixedRateFlagsEXT,
+ ) -> Self {
+ self.inner.image_compression_fixed_rate_flags = image_compression_fixed_rate_flags;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageCompressionPropertiesEXT {
+ self.inner
+ }
+}
+// NOTE(review): VK_EXT_image_compression_control_swapchain feature-enable struct; chains into
+// PhysicalDeviceFeatures2 and DeviceCreateInfo (see Extends impls below).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT.html>"]
+pub struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub image_compression_control_swapchain: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ image_compression_control_swapchain: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT;
+}
+impl PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceImageCompressionControlSwapchainFeaturesEXTBuilder<'a> {
+ PhysicalDeviceImageCompressionControlSwapchainFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceImageCompressionControlSwapchainFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceImageCompressionControlSwapchainFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT
+{
+}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceImageCompressionControlSwapchainFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT {}
+impl<'a> ::std::ops::Deref
+ for PhysicalDeviceImageCompressionControlSwapchainFeaturesEXTBuilder<'a>
+{
+ type Target = PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut
+ for PhysicalDeviceImageCompressionControlSwapchainFeaturesEXTBuilder<'a>
+{
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceImageCompressionControlSwapchainFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn image_compression_control_swapchain(
+ mut self,
+ image_compression_control_swapchain: bool,
+ ) -> Self {
+ // bool -> Bool32 (VK_TRUE/VK_FALSE) conversion via Into.
+ self.inner.image_compression_control_swapchain = image_compression_control_swapchain.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT {
+ self.inner
+ }
+}
+// NOTE(review): extensible (s_type/p_next) wrapper around ImageSubresource.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageSubresource2EXT.html>"]
+pub struct ImageSubresource2EXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub image_subresource: ImageSubresource,
+}
+impl ::std::default::Default for ImageSubresource2EXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ image_subresource: ImageSubresource::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImageSubresource2EXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_SUBRESOURCE_2_EXT;
+}
+impl ImageSubresource2EXT {
+ pub fn builder<'a>() -> ImageSubresource2EXTBuilder<'a> {
+ ImageSubresource2EXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImageSubresource2EXTBuilder<'a> {
+ inner: ImageSubresource2EXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ImageSubresource2EXTBuilder<'a> {
+ type Target = ImageSubresource2EXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageSubresource2EXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImageSubresource2EXTBuilder<'a> {
+ #[inline]
+ pub fn image_subresource(mut self, image_subresource: ImageSubresource) -> Self {
+ self.inner.image_subresource = image_subresource;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageSubresource2EXT {
+ self.inner
+ }
+}
+// NOTE(review): extensible wrapper around SubresourceLayout. This unit also declares the
+// ExtendsSubresourceLayout2EXT marker trait, which gates what push_next accepts below.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubresourceLayout2EXT.html>"]
+pub struct SubresourceLayout2EXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub subresource_layout: SubresourceLayout,
+}
+impl ::std::default::Default for SubresourceLayout2EXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ subresource_layout: SubresourceLayout::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for SubresourceLayout2EXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::SUBRESOURCE_LAYOUT_2_EXT;
+}
+impl SubresourceLayout2EXT {
+ pub fn builder<'a>() -> SubresourceLayout2EXTBuilder<'a> {
+ SubresourceLayout2EXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct SubresourceLayout2EXTBuilder<'a> {
+ inner: SubresourceLayout2EXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker trait: implemented (unsafely) by structs that may legally appear in a
+// SubresourceLayout2EXT p_next chain.
+pub unsafe trait ExtendsSubresourceLayout2EXT {}
+impl<'a> ::std::ops::Deref for SubresourceLayout2EXTBuilder<'a> {
+ type Target = SubresourceLayout2EXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SubresourceLayout2EXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SubresourceLayout2EXTBuilder<'a> {
+ #[inline]
+ pub fn subresource_layout(mut self, subresource_layout: SubresourceLayout) -> Self {
+ self.inner.subresource_layout = subresource_layout;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsSubresourceLayout2EXT>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*mut T>::cast(next);
+ // Walk to the tail of `next`'s own chain, splice the existing chain after it,
+ // then make `next` the new head.
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SubresourceLayout2EXT {
+ self.inner
+ }
+}
+// NOTE(review): VK_EXT_subpass_merge_feedback control struct; chains into RenderPassCreateInfo2
+// and SubpassDescription2 (see Extends impls below).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPassCreationControlEXT.html>"]
+pub struct RenderPassCreationControlEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub disallow_merging: Bool32,
+}
+impl ::std::default::Default for RenderPassCreationControlEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ disallow_merging: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for RenderPassCreationControlEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::RENDER_PASS_CREATION_CONTROL_EXT;
+}
+impl RenderPassCreationControlEXT {
+ pub fn builder<'a>() -> RenderPassCreationControlEXTBuilder<'a> {
+ RenderPassCreationControlEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct RenderPassCreationControlEXTBuilder<'a> {
+ inner: RenderPassCreationControlEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsRenderPassCreateInfo2 for RenderPassCreationControlEXTBuilder<'_> {}
+unsafe impl ExtendsRenderPassCreateInfo2 for RenderPassCreationControlEXT {}
+unsafe impl ExtendsSubpassDescription2 for RenderPassCreationControlEXTBuilder<'_> {}
+unsafe impl ExtendsSubpassDescription2 for RenderPassCreationControlEXT {}
+impl<'a> ::std::ops::Deref for RenderPassCreationControlEXTBuilder<'a> {
+ type Target = RenderPassCreationControlEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for RenderPassCreationControlEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> RenderPassCreationControlEXTBuilder<'a> {
+ #[inline]
+ pub fn disallow_merging(mut self, disallow_merging: bool) -> Self {
+ // bool -> Bool32 (VK_TRUE/VK_FALSE) conversion via Into.
+ self.inner.disallow_merging = disallow_merging.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> RenderPassCreationControlEXT {
+ self.inner
+ }
+}
+// NOTE(review): plain (no s_type/p_next) feedback payload; referenced by pointer from
+// RenderPassCreationFeedbackCreateInfoEXT.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPassCreationFeedbackInfoEXT.html>"]
+pub struct RenderPassCreationFeedbackInfoEXT {
+ pub post_merge_subpass_count: u32,
+}
+impl RenderPassCreationFeedbackInfoEXT {
+ pub fn builder<'a>() -> RenderPassCreationFeedbackInfoEXTBuilder<'a> {
+ RenderPassCreationFeedbackInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct RenderPassCreationFeedbackInfoEXTBuilder<'a> {
+ inner: RenderPassCreationFeedbackInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for RenderPassCreationFeedbackInfoEXTBuilder<'a> {
+ type Target = RenderPassCreationFeedbackInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for RenderPassCreationFeedbackInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> RenderPassCreationFeedbackInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn post_merge_subpass_count(mut self, post_merge_subpass_count: u32) -> Self {
+ self.inner.post_merge_subpass_count = post_merge_subpass_count;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> RenderPassCreationFeedbackInfoEXT {
+ self.inner
+ }
+}
+// NOTE(review): chains into RenderPassCreateInfo2 (see Extends impls below); carries a *mut
+// feedback struct — presumably written by the implementation during render pass creation; verify against spec.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPassCreationFeedbackCreateInfoEXT.html>"]
+pub struct RenderPassCreationFeedbackCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub p_render_pass_feedback: *mut RenderPassCreationFeedbackInfoEXT,
+}
+impl ::std::default::Default for RenderPassCreationFeedbackCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ p_render_pass_feedback: ::std::ptr::null_mut(),
+ }
+ }
+}
+unsafe impl TaggedStructure for RenderPassCreationFeedbackCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT;
+}
+impl RenderPassCreationFeedbackCreateInfoEXT {
+ pub fn builder<'a>() -> RenderPassCreationFeedbackCreateInfoEXTBuilder<'a> {
+ RenderPassCreationFeedbackCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct RenderPassCreationFeedbackCreateInfoEXTBuilder<'a> {
+ inner: RenderPassCreationFeedbackCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsRenderPassCreateInfo2 for RenderPassCreationFeedbackCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsRenderPassCreateInfo2 for RenderPassCreationFeedbackCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for RenderPassCreationFeedbackCreateInfoEXTBuilder<'a> {
+ type Target = RenderPassCreationFeedbackCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for RenderPassCreationFeedbackCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> RenderPassCreationFeedbackCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn render_pass_feedback(
+ mut self,
+ render_pass_feedback: &'a mut RenderPassCreationFeedbackInfoEXT,
+ ) -> Self {
+ // &mut coerces to *mut; the 'a borrow keeps the target alive while the builder lives.
+ self.inner.p_render_pass_feedback = render_pass_feedback;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> RenderPassCreationFeedbackCreateInfoEXT {
+ self.inner
+ }
+}
+// --- VkRenderPassSubpassFeedbackInfoEXT (generated) ---
+// Output struct filled by the driver with per-subpass merge feedback; plain
+// C-layout data, no s_type/p_next header.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPassSubpassFeedbackInfoEXT.html>"]
+pub struct RenderPassSubpassFeedbackInfoEXT {
+ pub subpass_merge_status: SubpassMergeStatusEXT,
+ pub description: [c_char; MAX_DESCRIPTION_SIZE],
+ pub post_merge_index: u32,
+}
+// Hand-rolled Debug because the fixed c_char array is printed as a C string.
+#[cfg(feature = "debug")]
+impl fmt::Debug for RenderPassSubpassFeedbackInfoEXT {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("RenderPassSubpassFeedbackInfoEXT")
+ .field("subpass_merge_status", &self.subpass_merge_status)
+ // NOTE(review): assumes `description` is NUL-terminated within
+ // MAX_DESCRIPTION_SIZE (Default zeroes it; driver-written values
+ // presumably terminate it) — confirm before relying on Debug output.
+ .field("description", &unsafe {
+ ::std::ffi::CStr::from_ptr(self.description.as_ptr())
+ })
+ .field("post_merge_index", &self.post_merge_index)
+ .finish()
+ }
+}
+impl ::std::default::Default for RenderPassSubpassFeedbackInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ subpass_merge_status: SubpassMergeStatusEXT::default(),
+ // Zeroing a c_char array is sound and yields an empty C string.
+ description: unsafe { ::std::mem::zeroed() },
+ post_merge_index: u32::default(),
+ }
+ }
+}
+impl RenderPassSubpassFeedbackInfoEXT {
+ pub fn builder<'a>() -> RenderPassSubpassFeedbackInfoEXTBuilder<'a> {
+ RenderPassSubpassFeedbackInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// Layout-compatible builder wrapper; see the builder pattern notes above.
+#[repr(transparent)]
+pub struct RenderPassSubpassFeedbackInfoEXTBuilder<'a> {
+ inner: RenderPassSubpassFeedbackInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for RenderPassSubpassFeedbackInfoEXTBuilder<'a> {
+ type Target = RenderPassSubpassFeedbackInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for RenderPassSubpassFeedbackInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> RenderPassSubpassFeedbackInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn subpass_merge_status(mut self, subpass_merge_status: SubpassMergeStatusEXT) -> Self {
+ self.inner.subpass_merge_status = subpass_merge_status;
+ self
+ }
+ // Takes the whole fixed-size array by value (c_char arrays are Copy).
+ #[inline]
+ pub fn description(mut self, description: [c_char; MAX_DESCRIPTION_SIZE]) -> Self {
+ self.inner.description = description;
+ self
+ }
+ #[inline]
+ pub fn post_merge_index(mut self, post_merge_index: u32) -> Self {
+ self.inner.post_merge_index = post_merge_index;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> RenderPassSubpassFeedbackInfoEXT {
+ self.inner
+ }
+}
+// --- VkRenderPassSubpassFeedbackCreateInfoEXT (generated) ---
+// p_next-chain struct pointing at caller-owned storage that the driver fills
+// with per-subpass feedback.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRenderPassSubpassFeedbackCreateInfoEXT.html>"]
+pub struct RenderPassSubpassFeedbackCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub p_subpass_feedback: *mut RenderPassSubpassFeedbackInfoEXT,
+}
+impl ::std::default::Default for RenderPassSubpassFeedbackCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ p_subpass_feedback: ::std::ptr::null_mut(),
+ }
+ }
+}
+unsafe impl TaggedStructure for RenderPassSubpassFeedbackCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT;
+}
+impl RenderPassSubpassFeedbackCreateInfoEXT {
+ pub fn builder<'a>() -> RenderPassSubpassFeedbackCreateInfoEXTBuilder<'a> {
+ RenderPassSubpassFeedbackCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct RenderPassSubpassFeedbackCreateInfoEXTBuilder<'a> {
+ inner: RenderPassSubpassFeedbackCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: this struct may extend VkSubpassDescription2 via p_next.
+unsafe impl ExtendsSubpassDescription2 for RenderPassSubpassFeedbackCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsSubpassDescription2 for RenderPassSubpassFeedbackCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for RenderPassSubpassFeedbackCreateInfoEXTBuilder<'a> {
+ type Target = RenderPassSubpassFeedbackCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for RenderPassSubpassFeedbackCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> RenderPassSubpassFeedbackCreateInfoEXTBuilder<'a> {
+ // Borrow is held for 'a; build() discards that tracking (see doc below).
+ #[inline]
+ pub fn subpass_feedback(
+ mut self,
+ subpass_feedback: &'a mut RenderPassSubpassFeedbackInfoEXT,
+ ) -> Self {
+ self.inner.p_subpass_feedback = subpass_feedback;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> RenderPassSubpassFeedbackCreateInfoEXT {
+ self.inner
+ }
+}
+// --- VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT (generated) ---
+// Feature-query/enable struct for VK_EXT_subpass_merge_feedback.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT.html>"]
+pub struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub subpass_merge_feedback: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceSubpassMergeFeedbackFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ subpass_merge_feedback: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceSubpassMergeFeedbackFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT;
+}
+impl PhysicalDeviceSubpassMergeFeedbackFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceSubpassMergeFeedbackFeaturesEXTBuilder<'a> {
+ PhysicalDeviceSubpassMergeFeedbackFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceSubpassMergeFeedbackFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceSubpassMergeFeedbackFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// May extend VkPhysicalDeviceFeatures2 (query) and VkDeviceCreateInfo (enable).
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceSubpassMergeFeedbackFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSubpassMergeFeedbackFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSubpassMergeFeedbackFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSubpassMergeFeedbackFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceSubpassMergeFeedbackFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceSubpassMergeFeedbackFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceSubpassMergeFeedbackFeaturesEXTBuilder<'a> {
+ // bool -> Bool32 (VK_TRUE/VK_FALSE) conversion via Into.
+ #[inline]
+ pub fn subpass_merge_feedback(mut self, subpass_merge_feedback: bool) -> Self {
+ self.inner.subpass_merge_feedback = subpass_merge_feedback.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceSubpassMergeFeedbackFeaturesEXT {
+ self.inner
+ }
+}
+// --- VkMicromapBuildInfoEXT (generated, VK_EXT_opacity_micromap) ---
+// Describes a micromap build: type/flags/mode, destination handle, usage
+// counts (either a flat array or an array of pointers), and device-or-host
+// addresses for input data, scratch memory and the triangle array.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMicromapBuildInfoEXT.html>"]
+pub struct MicromapBuildInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub ty: MicromapTypeEXT,
+ pub flags: BuildMicromapFlagsEXT,
+ pub mode: BuildMicromapModeEXT,
+ pub dst_micromap: MicromapEXT,
+ pub usage_counts_count: u32,
+ pub p_usage_counts: *const MicromapUsageEXT,
+ pub pp_usage_counts: *const *const MicromapUsageEXT,
+ pub data: DeviceOrHostAddressConstKHR,
+ pub scratch_data: DeviceOrHostAddressKHR,
+ pub triangle_array: DeviceOrHostAddressConstKHR,
+ pub triangle_array_stride: DeviceSize,
+}
+// Hand-rolled Debug: the address fields are C unions, which cannot be
+// printed field-by-field, so they are rendered as the literal "union".
+#[cfg(feature = "debug")]
+impl fmt::Debug for MicromapBuildInfoEXT {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("MicromapBuildInfoEXT")
+ .field("s_type", &self.s_type)
+ .field("p_next", &self.p_next)
+ .field("ty", &self.ty)
+ .field("flags", &self.flags)
+ .field("mode", &self.mode)
+ .field("dst_micromap", &self.dst_micromap)
+ .field("usage_counts_count", &self.usage_counts_count)
+ .field("p_usage_counts", &self.p_usage_counts)
+ .field("pp_usage_counts", &self.pp_usage_counts)
+ .field("data", &"union")
+ .field("scratch_data", &"union")
+ .field("triangle_array", &"union")
+ .field("triangle_array_stride", &self.triangle_array_stride)
+ .finish()
+ }
+}
+impl ::std::default::Default for MicromapBuildInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ ty: MicromapTypeEXT::default(),
+ flags: BuildMicromapFlagsEXT::default(),
+ mode: BuildMicromapModeEXT::default(),
+ dst_micromap: MicromapEXT::default(),
+ usage_counts_count: u32::default(),
+ p_usage_counts: ::std::ptr::null(),
+ pp_usage_counts: ::std::ptr::null(),
+ data: DeviceOrHostAddressConstKHR::default(),
+ scratch_data: DeviceOrHostAddressKHR::default(),
+ triangle_array: DeviceOrHostAddressConstKHR::default(),
+ triangle_array_stride: DeviceSize::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MicromapBuildInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::MICROMAP_BUILD_INFO_EXT;
+}
+impl MicromapBuildInfoEXT {
+ pub fn builder<'a>() -> MicromapBuildInfoEXTBuilder<'a> {
+ MicromapBuildInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MicromapBuildInfoEXTBuilder<'a> {
+ inner: MicromapBuildInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MicromapBuildInfoEXTBuilder<'a> {
+ type Target = MicromapBuildInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MicromapBuildInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MicromapBuildInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn ty(mut self, ty: MicromapTypeEXT) -> Self {
+ self.inner.ty = ty;
+ self
+ }
+ #[inline]
+ pub fn flags(mut self, flags: BuildMicromapFlagsEXT) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn mode(mut self, mode: BuildMicromapModeEXT) -> Self {
+ self.inner.mode = mode;
+ self
+ }
+ #[inline]
+ pub fn dst_micromap(mut self, dst_micromap: MicromapEXT) -> Self {
+ self.inner.dst_micromap = dst_micromap;
+ self
+ }
+ // Flat-array form: sets the shared count plus p_usage_counts.
+ // NOTE(review): this setter and usage_counts_ptrs both write
+ // usage_counts_count — the last one called wins the count.
+ #[inline]
+ pub fn usage_counts(mut self, usage_counts: &'a [MicromapUsageEXT]) -> Self {
+ self.inner.usage_counts_count = usage_counts.len() as _;
+ self.inner.p_usage_counts = usage_counts.as_ptr();
+ self
+ }
+ // Pointer-array form: &MicromapUsageEXT and *const MicromapUsageEXT share
+ // a layout, so the slice pointer can be cast directly.
+ #[inline]
+ pub fn usage_counts_ptrs(mut self, usage_counts_ptrs: &'a [&'a MicromapUsageEXT]) -> Self {
+ self.inner.usage_counts_count = usage_counts_ptrs.len() as _;
+ self.inner.pp_usage_counts = usage_counts_ptrs.as_ptr().cast();
+ self
+ }
+ #[inline]
+ pub fn data(mut self, data: DeviceOrHostAddressConstKHR) -> Self {
+ self.inner.data = data;
+ self
+ }
+ #[inline]
+ pub fn scratch_data(mut self, scratch_data: DeviceOrHostAddressKHR) -> Self {
+ self.inner.scratch_data = scratch_data;
+ self
+ }
+ #[inline]
+ pub fn triangle_array(mut self, triangle_array: DeviceOrHostAddressConstKHR) -> Self {
+ self.inner.triangle_array = triangle_array;
+ self
+ }
+ #[inline]
+ pub fn triangle_array_stride(mut self, triangle_array_stride: DeviceSize) -> Self {
+ self.inner.triangle_array_stride = triangle_array_stride;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MicromapBuildInfoEXT {
+ self.inner
+ }
+}
+// --- VkMicromapCreateInfoEXT (generated) ---
+// Parameters for vkCreateMicromapEXT: backing buffer region, micromap type,
+// and an optional capture/replay device address.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMicromapCreateInfoEXT.html>"]
+pub struct MicromapCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub create_flags: MicromapCreateFlagsEXT,
+ pub buffer: Buffer,
+ pub offset: DeviceSize,
+ pub size: DeviceSize,
+ pub ty: MicromapTypeEXT,
+ pub device_address: DeviceAddress,
+}
+impl ::std::default::Default for MicromapCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ create_flags: MicromapCreateFlagsEXT::default(),
+ buffer: Buffer::default(),
+ offset: DeviceSize::default(),
+ size: DeviceSize::default(),
+ ty: MicromapTypeEXT::default(),
+ device_address: DeviceAddress::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MicromapCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::MICROMAP_CREATE_INFO_EXT;
+}
+impl MicromapCreateInfoEXT {
+ pub fn builder<'a>() -> MicromapCreateInfoEXTBuilder<'a> {
+ MicromapCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MicromapCreateInfoEXTBuilder<'a> {
+ inner: MicromapCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MicromapCreateInfoEXTBuilder<'a> {
+ type Target = MicromapCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MicromapCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Simple by-value setters; each returns Self for chaining.
+impl<'a> MicromapCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn create_flags(mut self, create_flags: MicromapCreateFlagsEXT) -> Self {
+ self.inner.create_flags = create_flags;
+ self
+ }
+ #[inline]
+ pub fn buffer(mut self, buffer: Buffer) -> Self {
+ self.inner.buffer = buffer;
+ self
+ }
+ #[inline]
+ pub fn offset(mut self, offset: DeviceSize) -> Self {
+ self.inner.offset = offset;
+ self
+ }
+ #[inline]
+ pub fn size(mut self, size: DeviceSize) -> Self {
+ self.inner.size = size;
+ self
+ }
+ #[inline]
+ pub fn ty(mut self, ty: MicromapTypeEXT) -> Self {
+ self.inner.ty = ty;
+ self
+ }
+ #[inline]
+ pub fn device_address(mut self, device_address: DeviceAddress) -> Self {
+ self.inner.device_address = device_address;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MicromapCreateInfoEXT {
+ self.inner
+ }
+}
+// --- VkMicromapVersionInfoEXT (generated) ---
+// Wraps a pointer to 2*VK_UUID_SIZE bytes of version/compatibility data.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMicromapVersionInfoEXT.html>"]
+pub struct MicromapVersionInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub p_version_data: *const [u8; 2 * UUID_SIZE],
+}
+impl ::std::default::Default for MicromapVersionInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ p_version_data: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MicromapVersionInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::MICROMAP_VERSION_INFO_EXT;
+}
+impl MicromapVersionInfoEXT {
+ pub fn builder<'a>() -> MicromapVersionInfoEXTBuilder<'a> {
+ MicromapVersionInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MicromapVersionInfoEXTBuilder<'a> {
+ inner: MicromapVersionInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MicromapVersionInfoEXTBuilder<'a> {
+ type Target = MicromapVersionInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MicromapVersionInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MicromapVersionInfoEXTBuilder<'a> {
+ // The &'a array reference coerces to the raw pointer; 'a keeps it valid.
+ #[inline]
+ pub fn version_data(mut self, version_data: &'a [u8; 2 * UUID_SIZE]) -> Self {
+ self.inner.p_version_data = version_data;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MicromapVersionInfoEXT {
+ self.inner
+ }
+}
+// --- VkCopyMicromapInfoEXT (generated) ---
+// Micromap-to-micromap copy parameters (src handle, dst handle, copy mode).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyMicromapInfoEXT.html>"]
+pub struct CopyMicromapInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src: MicromapEXT,
+ pub dst: MicromapEXT,
+ pub mode: CopyMicromapModeEXT,
+}
+impl ::std::default::Default for CopyMicromapInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src: MicromapEXT::default(),
+ dst: MicromapEXT::default(),
+ mode: CopyMicromapModeEXT::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for CopyMicromapInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::COPY_MICROMAP_INFO_EXT;
+}
+impl CopyMicromapInfoEXT {
+ pub fn builder<'a>() -> CopyMicromapInfoEXTBuilder<'a> {
+ CopyMicromapInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct CopyMicromapInfoEXTBuilder<'a> {
+ inner: CopyMicromapInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CopyMicromapInfoEXTBuilder<'a> {
+ type Target = CopyMicromapInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for CopyMicromapInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Simple by-value setters; each returns Self for chaining.
+impl<'a> CopyMicromapInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn src(mut self, src: MicromapEXT) -> Self {
+ self.inner.src = src;
+ self
+ }
+ #[inline]
+ pub fn dst(mut self, dst: MicromapEXT) -> Self {
+ self.inner.dst = dst;
+ self
+ }
+ #[inline]
+ pub fn mode(mut self, mode: CopyMicromapModeEXT) -> Self {
+ self.inner.mode = mode;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> CopyMicromapInfoEXT {
+ self.inner
+ }
+}
+// --- VkCopyMicromapToMemoryInfoEXT (generated) ---
+// Serialize/copy a micromap out to a device-or-host memory address.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyMicromapToMemoryInfoEXT.html>"]
+pub struct CopyMicromapToMemoryInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src: MicromapEXT,
+ pub dst: DeviceOrHostAddressKHR,
+ pub mode: CopyMicromapModeEXT,
+}
+// Hand-rolled Debug: `dst` is a C union and is rendered as "union".
+#[cfg(feature = "debug")]
+impl fmt::Debug for CopyMicromapToMemoryInfoEXT {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("CopyMicromapToMemoryInfoEXT")
+ .field("s_type", &self.s_type)
+ .field("p_next", &self.p_next)
+ .field("src", &self.src)
+ .field("dst", &"union")
+ .field("mode", &self.mode)
+ .finish()
+ }
+}
+impl ::std::default::Default for CopyMicromapToMemoryInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src: MicromapEXT::default(),
+ dst: DeviceOrHostAddressKHR::default(),
+ mode: CopyMicromapModeEXT::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for CopyMicromapToMemoryInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::COPY_MICROMAP_TO_MEMORY_INFO_EXT;
+}
+impl CopyMicromapToMemoryInfoEXT {
+ pub fn builder<'a>() -> CopyMicromapToMemoryInfoEXTBuilder<'a> {
+ CopyMicromapToMemoryInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct CopyMicromapToMemoryInfoEXTBuilder<'a> {
+ inner: CopyMicromapToMemoryInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CopyMicromapToMemoryInfoEXTBuilder<'a> {
+ type Target = CopyMicromapToMemoryInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for CopyMicromapToMemoryInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Simple by-value setters; each returns Self for chaining.
+impl<'a> CopyMicromapToMemoryInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn src(mut self, src: MicromapEXT) -> Self {
+ self.inner.src = src;
+ self
+ }
+ #[inline]
+ pub fn dst(mut self, dst: DeviceOrHostAddressKHR) -> Self {
+ self.inner.dst = dst;
+ self
+ }
+ #[inline]
+ pub fn mode(mut self, mode: CopyMicromapModeEXT) -> Self {
+ self.inner.mode = mode;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> CopyMicromapToMemoryInfoEXT {
+ self.inner
+ }
+}
+// --- VkCopyMemoryToMicromapInfoEXT (generated) ---
+// Deserialize/copy from a device-or-host memory address into a micromap;
+// mirror image of CopyMicromapToMemoryInfoEXT.
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyMemoryToMicromapInfoEXT.html>"]
+pub struct CopyMemoryToMicromapInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub src: DeviceOrHostAddressConstKHR,
+ pub dst: MicromapEXT,
+ pub mode: CopyMicromapModeEXT,
+}
+// Hand-rolled Debug: `src` is a C union and is rendered as "union".
+#[cfg(feature = "debug")]
+impl fmt::Debug for CopyMemoryToMicromapInfoEXT {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("CopyMemoryToMicromapInfoEXT")
+ .field("s_type", &self.s_type)
+ .field("p_next", &self.p_next)
+ .field("src", &"union")
+ .field("dst", &self.dst)
+ .field("mode", &self.mode)
+ .finish()
+ }
+}
+impl ::std::default::Default for CopyMemoryToMicromapInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ src: DeviceOrHostAddressConstKHR::default(),
+ dst: MicromapEXT::default(),
+ mode: CopyMicromapModeEXT::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for CopyMemoryToMicromapInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::COPY_MEMORY_TO_MICROMAP_INFO_EXT;
+}
+impl CopyMemoryToMicromapInfoEXT {
+ pub fn builder<'a>() -> CopyMemoryToMicromapInfoEXTBuilder<'a> {
+ CopyMemoryToMicromapInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct CopyMemoryToMicromapInfoEXTBuilder<'a> {
+ inner: CopyMemoryToMicromapInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for CopyMemoryToMicromapInfoEXTBuilder<'a> {
+ type Target = CopyMemoryToMicromapInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for CopyMemoryToMicromapInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Simple by-value setters; each returns Self for chaining.
+impl<'a> CopyMemoryToMicromapInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn src(mut self, src: DeviceOrHostAddressConstKHR) -> Self {
+ self.inner.src = src;
+ self
+ }
+ #[inline]
+ pub fn dst(mut self, dst: MicromapEXT) -> Self {
+ self.inner.dst = dst;
+ self
+ }
+ #[inline]
+ pub fn mode(mut self, mode: CopyMicromapModeEXT) -> Self {
+ self.inner.mode = mode;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> CopyMemoryToMicromapInfoEXT {
+ self.inner
+ }
+}
+// --- VkMicromapBuildSizesInfoEXT (generated) ---
+// Output of vkGetMicromapBuildSizesEXT: required micromap/scratch sizes
+// and whether the input data may be discarded after the build.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMicromapBuildSizesInfoEXT.html>"]
+pub struct MicromapBuildSizesInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub micromap_size: DeviceSize,
+ pub build_scratch_size: DeviceSize,
+ pub discardable: Bool32,
+}
+impl ::std::default::Default for MicromapBuildSizesInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ micromap_size: DeviceSize::default(),
+ build_scratch_size: DeviceSize::default(),
+ discardable: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for MicromapBuildSizesInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::MICROMAP_BUILD_SIZES_INFO_EXT;
+}
+impl MicromapBuildSizesInfoEXT {
+ pub fn builder<'a>() -> MicromapBuildSizesInfoEXTBuilder<'a> {
+ MicromapBuildSizesInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MicromapBuildSizesInfoEXTBuilder<'a> {
+ inner: MicromapBuildSizesInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MicromapBuildSizesInfoEXTBuilder<'a> {
+ type Target = MicromapBuildSizesInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MicromapBuildSizesInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> MicromapBuildSizesInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn micromap_size(mut self, micromap_size: DeviceSize) -> Self {
+ self.inner.micromap_size = micromap_size;
+ self
+ }
+ #[inline]
+ pub fn build_scratch_size(mut self, build_scratch_size: DeviceSize) -> Self {
+ self.inner.build_scratch_size = build_scratch_size;
+ self
+ }
+ // bool -> Bool32 (VK_TRUE/VK_FALSE) conversion via Into.
+ #[inline]
+ pub fn discardable(mut self, discardable: bool) -> Self {
+ self.inner.discardable = discardable.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MicromapBuildSizesInfoEXT {
+ self.inner
+ }
+}
+// --- VkMicromapUsageEXT (generated) ---
+// Plain usage-count record (no s_type header, so Default can be derived).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMicromapUsageEXT.html>"]
+pub struct MicromapUsageEXT {
+ pub count: u32,
+ pub subdivision_level: u32,
+ pub format: u32,
+}
+impl MicromapUsageEXT {
+ pub fn builder<'a>() -> MicromapUsageEXTBuilder<'a> {
+ MicromapUsageEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MicromapUsageEXTBuilder<'a> {
+ inner: MicromapUsageEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MicromapUsageEXTBuilder<'a> {
+ type Target = MicromapUsageEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MicromapUsageEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Simple by-value setters; each returns Self for chaining.
+impl<'a> MicromapUsageEXTBuilder<'a> {
+ #[inline]
+ pub fn count(mut self, count: u32) -> Self {
+ self.inner.count = count;
+ self
+ }
+ #[inline]
+ pub fn subdivision_level(mut self, subdivision_level: u32) -> Self {
+ self.inner.subdivision_level = subdivision_level;
+ self
+ }
+ #[inline]
+ pub fn format(mut self, format: u32) -> Self {
+ self.inner.format = format;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MicromapUsageEXT {
+ self.inner
+ }
+}
+// --- VkMicromapTriangleEXT (generated) ---
+// Plain per-triangle record (no s_type header, so Default can be derived).
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMicromapTriangleEXT.html>"]
+pub struct MicromapTriangleEXT {
+ pub data_offset: u32,
+ pub subdivision_level: u16,
+ pub format: u16,
+}
+impl MicromapTriangleEXT {
+ pub fn builder<'a>() -> MicromapTriangleEXTBuilder<'a> {
+ MicromapTriangleEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct MicromapTriangleEXTBuilder<'a> {
+ inner: MicromapTriangleEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for MicromapTriangleEXTBuilder<'a> {
+ type Target = MicromapTriangleEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for MicromapTriangleEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Simple by-value setters; each returns Self for chaining.
+impl<'a> MicromapTriangleEXTBuilder<'a> {
+ #[inline]
+ pub fn data_offset(mut self, data_offset: u32) -> Self {
+ self.inner.data_offset = data_offset;
+ self
+ }
+ #[inline]
+ pub fn subdivision_level(mut self, subdivision_level: u16) -> Self {
+ self.inner.subdivision_level = subdivision_level;
+ self
+ }
+ #[inline]
+ pub fn format(mut self, format: u16) -> Self {
+ self.inner.format = format;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> MicromapTriangleEXT {
+ self.inner
+ }
+}
+// --- VkPhysicalDeviceOpacityMicromapFeaturesEXT (generated) ---
+// Feature-query/enable struct for VK_EXT_opacity_micromap.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceOpacityMicromapFeaturesEXT.html>"]
+pub struct PhysicalDeviceOpacityMicromapFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub micromap: Bool32,
+ pub micromap_capture_replay: Bool32,
+ pub micromap_host_commands: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceOpacityMicromapFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ micromap: Bool32::default(),
+ micromap_capture_replay: Bool32::default(),
+ micromap_host_commands: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceOpacityMicromapFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT;
+}
+impl PhysicalDeviceOpacityMicromapFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceOpacityMicromapFeaturesEXTBuilder<'a> {
+ PhysicalDeviceOpacityMicromapFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceOpacityMicromapFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceOpacityMicromapFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// May extend VkPhysicalDeviceFeatures2 (query) and VkDeviceCreateInfo (enable).
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceOpacityMicromapFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceOpacityMicromapFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceOpacityMicromapFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceOpacityMicromapFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceOpacityMicromapFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceOpacityMicromapFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceOpacityMicromapFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Setters convert bool -> Bool32 (VK_TRUE/VK_FALSE) via Into.
+impl<'a> PhysicalDeviceOpacityMicromapFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn micromap(mut self, micromap: bool) -> Self {
+ self.inner.micromap = micromap.into();
+ self
+ }
+ #[inline]
+ pub fn micromap_capture_replay(mut self, micromap_capture_replay: bool) -> Self {
+ self.inner.micromap_capture_replay = micromap_capture_replay.into();
+ self
+ }
+ #[inline]
+ pub fn micromap_host_commands(mut self, micromap_host_commands: bool) -> Self {
+ self.inner.micromap_host_commands = micromap_host_commands.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceOpacityMicromapFeaturesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceOpacityMicromapPropertiesEXT.html>"]
+pub struct PhysicalDeviceOpacityMicromapPropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub max_opacity2_state_subdivision_level: u32,
+ pub max_opacity4_state_subdivision_level: u32,
+}
+impl ::std::default::Default for PhysicalDeviceOpacityMicromapPropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ max_opacity2_state_subdivision_level: u32::default(),
+ max_opacity4_state_subdivision_level: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceOpacityMicromapPropertiesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT;
+}
+impl PhysicalDeviceOpacityMicromapPropertiesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceOpacityMicromapPropertiesEXTBuilder<'a> {
+ PhysicalDeviceOpacityMicromapPropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceOpacityMicromapPropertiesEXTBuilder<'a> {
+ inner: PhysicalDeviceOpacityMicromapPropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceOpacityMicromapPropertiesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceOpacityMicromapPropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceOpacityMicromapPropertiesEXTBuilder<'a> {
+ type Target = PhysicalDeviceOpacityMicromapPropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceOpacityMicromapPropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceOpacityMicromapPropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn max_opacity2_state_subdivision_level(
+ mut self,
+ max_opacity2_state_subdivision_level: u32,
+ ) -> Self {
+ self.inner.max_opacity2_state_subdivision_level = max_opacity2_state_subdivision_level;
+ self
+ }
+ #[inline]
+ pub fn max_opacity4_state_subdivision_level(
+ mut self,
+ max_opacity4_state_subdivision_level: u32,
+ ) -> Self {
+ self.inner.max_opacity4_state_subdivision_level = max_opacity4_state_subdivision_level;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceOpacityMicromapPropertiesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureTrianglesOpacityMicromapEXT.html>"]
+pub struct AccelerationStructureTrianglesOpacityMicromapEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub index_type: IndexType,
+ pub index_buffer: DeviceOrHostAddressConstKHR,
+ pub index_stride: DeviceSize,
+ pub base_triangle: u32,
+ pub usage_counts_count: u32,
+ pub p_usage_counts: *const MicromapUsageEXT,
+ pub pp_usage_counts: *const *const MicromapUsageEXT,
+ pub micromap: MicromapEXT,
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for AccelerationStructureTrianglesOpacityMicromapEXT {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("AccelerationStructureTrianglesOpacityMicromapEXT")
+ .field("s_type", &self.s_type)
+ .field("p_next", &self.p_next)
+ .field("index_type", &self.index_type)
+ .field("index_buffer", &"union")
+ .field("index_stride", &self.index_stride)
+ .field("base_triangle", &self.base_triangle)
+ .field("usage_counts_count", &self.usage_counts_count)
+ .field("p_usage_counts", &self.p_usage_counts)
+ .field("pp_usage_counts", &self.pp_usage_counts)
+ .field("micromap", &self.micromap)
+ .finish()
+ }
+}
+impl ::std::default::Default for AccelerationStructureTrianglesOpacityMicromapEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ index_type: IndexType::default(),
+ index_buffer: DeviceOrHostAddressConstKHR::default(),
+ index_stride: DeviceSize::default(),
+ base_triangle: u32::default(),
+ usage_counts_count: u32::default(),
+ p_usage_counts: ::std::ptr::null(),
+ pp_usage_counts: ::std::ptr::null(),
+ micromap: MicromapEXT::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for AccelerationStructureTrianglesOpacityMicromapEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT;
+}
+impl AccelerationStructureTrianglesOpacityMicromapEXT {
+ pub fn builder<'a>() -> AccelerationStructureTrianglesOpacityMicromapEXTBuilder<'a> {
+ AccelerationStructureTrianglesOpacityMicromapEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct AccelerationStructureTrianglesOpacityMicromapEXTBuilder<'a> {
+ inner: AccelerationStructureTrianglesOpacityMicromapEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsAccelerationStructureGeometryTrianglesDataKHR
+ for AccelerationStructureTrianglesOpacityMicromapEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsAccelerationStructureGeometryTrianglesDataKHR
+ for AccelerationStructureTrianglesOpacityMicromapEXT
+{
+}
+impl<'a> ::std::ops::Deref for AccelerationStructureTrianglesOpacityMicromapEXTBuilder<'a> {
+ type Target = AccelerationStructureTrianglesOpacityMicromapEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for AccelerationStructureTrianglesOpacityMicromapEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> AccelerationStructureTrianglesOpacityMicromapEXTBuilder<'a> {
+ #[inline]
+ pub fn index_type(mut self, index_type: IndexType) -> Self {
+ self.inner.index_type = index_type;
+ self
+ }
+ #[inline]
+ pub fn index_buffer(mut self, index_buffer: DeviceOrHostAddressConstKHR) -> Self {
+ self.inner.index_buffer = index_buffer;
+ self
+ }
+ #[inline]
+ pub fn index_stride(mut self, index_stride: DeviceSize) -> Self {
+ self.inner.index_stride = index_stride;
+ self
+ }
+ #[inline]
+ pub fn base_triangle(mut self, base_triangle: u32) -> Self {
+ self.inner.base_triangle = base_triangle;
+ self
+ }
+ #[inline]
+ pub fn usage_counts(mut self, usage_counts: &'a [MicromapUsageEXT]) -> Self {
+ self.inner.usage_counts_count = usage_counts.len() as _;
+ self.inner.p_usage_counts = usage_counts.as_ptr();
+ self
+ }
+ #[inline]
+ pub fn usage_counts_ptrs(mut self, usage_counts_ptrs: &'a [&'a MicromapUsageEXT]) -> Self {
+ self.inner.usage_counts_count = usage_counts_ptrs.len() as _;
+ self.inner.pp_usage_counts = usage_counts_ptrs.as_ptr().cast();
+ self
+ }
+ #[inline]
+ pub fn micromap(mut self, micromap: MicromapEXT) -> Self {
+ self.inner.micromap = micromap;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> AccelerationStructureTrianglesOpacityMicromapEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelinePropertiesIdentifierEXT.html>"]
+pub struct PipelinePropertiesIdentifierEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub pipeline_identifier: [u8; UUID_SIZE],
+}
+impl ::std::default::Default for PipelinePropertiesIdentifierEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ pipeline_identifier: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelinePropertiesIdentifierEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_PROPERTIES_IDENTIFIER_EXT;
+}
+impl PipelinePropertiesIdentifierEXT {
+ pub fn builder<'a>() -> PipelinePropertiesIdentifierEXTBuilder<'a> {
+ PipelinePropertiesIdentifierEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelinePropertiesIdentifierEXTBuilder<'a> {
+ inner: PipelinePropertiesIdentifierEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for PipelinePropertiesIdentifierEXTBuilder<'a> {
+ type Target = PipelinePropertiesIdentifierEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelinePropertiesIdentifierEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelinePropertiesIdentifierEXTBuilder<'a> {
+ #[inline]
+ pub fn pipeline_identifier(mut self, pipeline_identifier: [u8; UUID_SIZE]) -> Self {
+ self.inner.pipeline_identifier = pipeline_identifier;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelinePropertiesIdentifierEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePipelinePropertiesFeaturesEXT.html>"]
+pub struct PhysicalDevicePipelinePropertiesFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub pipeline_properties_identifier: Bool32,
+}
+impl ::std::default::Default for PhysicalDevicePipelinePropertiesFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ pipeline_properties_identifier: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDevicePipelinePropertiesFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT;
+}
+impl PhysicalDevicePipelinePropertiesFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDevicePipelinePropertiesFeaturesEXTBuilder<'a> {
+ PhysicalDevicePipelinePropertiesFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDevicePipelinePropertiesFeaturesEXTBuilder<'a> {
+ inner: PhysicalDevicePipelinePropertiesFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDevicePipelinePropertiesFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePipelinePropertiesFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePipelinePropertiesFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePipelinePropertiesFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePipelinePropertiesFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDevicePipelinePropertiesFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePipelinePropertiesFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDevicePipelinePropertiesFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn pipeline_properties_identifier(mut self, pipeline_properties_identifier: bool) -> Self {
+ self.inner.pipeline_properties_identifier = pipeline_properties_identifier.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDevicePipelinePropertiesFeaturesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD.html>"]
+pub struct PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_early_and_late_fragment_tests: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shader_early_and_late_fragment_tests: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD;
+}
+impl PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD {
+ pub fn builder<'a>() -> PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMDBuilder<'a> {
+ PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMDBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMDBuilder<'a> {
+ inner: PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMDBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD
+{
+}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMDBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMDBuilder<'a> {
+ type Target = PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut
+ for PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMDBuilder<'a>
+{
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMDBuilder<'a> {
+ #[inline]
+ pub fn shader_early_and_late_fragment_tests(
+ mut self,
+ shader_early_and_late_fragment_tests: bool,
+ ) -> Self {
+ self.inner.shader_early_and_late_fragment_tests =
+ shader_early_and_late_fragment_tests.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportMetalObjectCreateInfoEXT.html>"]
+pub struct ExportMetalObjectCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub export_object_type: ExportMetalObjectTypeFlagsEXT,
+}
+impl ::std::default::Default for ExportMetalObjectCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ export_object_type: ExportMetalObjectTypeFlagsEXT::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ExportMetalObjectCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_METAL_OBJECT_CREATE_INFO_EXT;
+}
+impl ExportMetalObjectCreateInfoEXT {
+ pub fn builder<'a>() -> ExportMetalObjectCreateInfoEXTBuilder<'a> {
+ ExportMetalObjectCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExportMetalObjectCreateInfoEXTBuilder<'a> {
+ inner: ExportMetalObjectCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsInstanceCreateInfo for ExportMetalObjectCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsInstanceCreateInfo for ExportMetalObjectCreateInfoEXT {}
+unsafe impl ExtendsMemoryAllocateInfo for ExportMetalObjectCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for ExportMetalObjectCreateInfoEXT {}
+unsafe impl ExtendsImageCreateInfo for ExportMetalObjectCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsImageCreateInfo for ExportMetalObjectCreateInfoEXT {}
+unsafe impl ExtendsImageViewCreateInfo for ExportMetalObjectCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsImageViewCreateInfo for ExportMetalObjectCreateInfoEXT {}
+unsafe impl ExtendsBufferViewCreateInfo for ExportMetalObjectCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsBufferViewCreateInfo for ExportMetalObjectCreateInfoEXT {}
+unsafe impl ExtendsSemaphoreCreateInfo for ExportMetalObjectCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsSemaphoreCreateInfo for ExportMetalObjectCreateInfoEXT {}
+unsafe impl ExtendsEventCreateInfo for ExportMetalObjectCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsEventCreateInfo for ExportMetalObjectCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for ExportMetalObjectCreateInfoEXTBuilder<'a> {
+ type Target = ExportMetalObjectCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExportMetalObjectCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExportMetalObjectCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn export_object_type(mut self, export_object_type: ExportMetalObjectTypeFlagsEXT) -> Self {
+ self.inner.export_object_type = export_object_type;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExportMetalObjectCreateInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportMetalObjectsInfoEXT.html>"]
+pub struct ExportMetalObjectsInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+}
+impl ::std::default::Default for ExportMetalObjectsInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ExportMetalObjectsInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_METAL_OBJECTS_INFO_EXT;
+}
+impl ExportMetalObjectsInfoEXT {
+ pub fn builder<'a>() -> ExportMetalObjectsInfoEXTBuilder<'a> {
+ ExportMetalObjectsInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExportMetalObjectsInfoEXTBuilder<'a> {
+ inner: ExportMetalObjectsInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsExportMetalObjectsInfoEXT {}
+impl<'a> ::std::ops::Deref for ExportMetalObjectsInfoEXTBuilder<'a> {
+ type Target = ExportMetalObjectsInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExportMetalObjectsInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExportMetalObjectsInfoEXTBuilder<'a> {
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsExportMetalObjectsInfoEXT>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*const T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExportMetalObjectsInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportMetalDeviceInfoEXT.html>"]
+pub struct ExportMetalDeviceInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub mtl_device: MTLDevice_id,
+}
+impl ::std::default::Default for ExportMetalDeviceInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ mtl_device: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for ExportMetalDeviceInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_METAL_DEVICE_INFO_EXT;
+}
+impl ExportMetalDeviceInfoEXT {
+ pub fn builder<'a>() -> ExportMetalDeviceInfoEXTBuilder<'a> {
+ ExportMetalDeviceInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExportMetalDeviceInfoEXTBuilder<'a> {
+ inner: ExportMetalDeviceInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalDeviceInfoEXTBuilder<'_> {}
+unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalDeviceInfoEXT {}
+impl<'a> ::std::ops::Deref for ExportMetalDeviceInfoEXTBuilder<'a> {
+ type Target = ExportMetalDeviceInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExportMetalDeviceInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExportMetalDeviceInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn mtl_device(mut self, mtl_device: MTLDevice_id) -> Self {
+ self.inner.mtl_device = mtl_device;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExportMetalDeviceInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportMetalCommandQueueInfoEXT.html>"]
+pub struct ExportMetalCommandQueueInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub queue: Queue,
+ pub mtl_command_queue: MTLCommandQueue_id,
+}
+impl ::std::default::Default for ExportMetalCommandQueueInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ queue: Queue::default(),
+ mtl_command_queue: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for ExportMetalCommandQueueInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_METAL_COMMAND_QUEUE_INFO_EXT;
+}
+impl ExportMetalCommandQueueInfoEXT {
+ pub fn builder<'a>() -> ExportMetalCommandQueueInfoEXTBuilder<'a> {
+ ExportMetalCommandQueueInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExportMetalCommandQueueInfoEXTBuilder<'a> {
+ inner: ExportMetalCommandQueueInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalCommandQueueInfoEXTBuilder<'_> {}
+unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalCommandQueueInfoEXT {}
+impl<'a> ::std::ops::Deref for ExportMetalCommandQueueInfoEXTBuilder<'a> {
+ type Target = ExportMetalCommandQueueInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExportMetalCommandQueueInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExportMetalCommandQueueInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn queue(mut self, queue: Queue) -> Self {
+ self.inner.queue = queue;
+ self
+ }
+ #[inline]
+ pub fn mtl_command_queue(mut self, mtl_command_queue: MTLCommandQueue_id) -> Self {
+ self.inner.mtl_command_queue = mtl_command_queue;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExportMetalCommandQueueInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportMetalBufferInfoEXT.html>"]
+pub struct ExportMetalBufferInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub memory: DeviceMemory,
+ pub mtl_buffer: MTLBuffer_id,
+}
+impl ::std::default::Default for ExportMetalBufferInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ memory: DeviceMemory::default(),
+ mtl_buffer: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for ExportMetalBufferInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_METAL_BUFFER_INFO_EXT;
+}
+impl ExportMetalBufferInfoEXT {
+ pub fn builder<'a>() -> ExportMetalBufferInfoEXTBuilder<'a> {
+ ExportMetalBufferInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExportMetalBufferInfoEXTBuilder<'a> {
+ inner: ExportMetalBufferInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalBufferInfoEXTBuilder<'_> {}
+unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalBufferInfoEXT {}
+impl<'a> ::std::ops::Deref for ExportMetalBufferInfoEXTBuilder<'a> {
+ type Target = ExportMetalBufferInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExportMetalBufferInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExportMetalBufferInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn memory(mut self, memory: DeviceMemory) -> Self {
+ self.inner.memory = memory;
+ self
+ }
+ #[inline]
+ pub fn mtl_buffer(mut self, mtl_buffer: MTLBuffer_id) -> Self {
+ self.inner.mtl_buffer = mtl_buffer;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExportMetalBufferInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportMetalBufferInfoEXT.html>"]
+pub struct ImportMetalBufferInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub mtl_buffer: MTLBuffer_id,
+}
+impl ::std::default::Default for ImportMetalBufferInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ mtl_buffer: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for ImportMetalBufferInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_METAL_BUFFER_INFO_EXT;
+}
+impl ImportMetalBufferInfoEXT {
+ pub fn builder<'a>() -> ImportMetalBufferInfoEXTBuilder<'a> {
+ ImportMetalBufferInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImportMetalBufferInfoEXTBuilder<'a> {
+ inner: ImportMetalBufferInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsMemoryAllocateInfo for ImportMetalBufferInfoEXTBuilder<'_> {}
+unsafe impl ExtendsMemoryAllocateInfo for ImportMetalBufferInfoEXT {}
+impl<'a> ::std::ops::Deref for ImportMetalBufferInfoEXTBuilder<'a> {
+ type Target = ImportMetalBufferInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImportMetalBufferInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImportMetalBufferInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn mtl_buffer(mut self, mtl_buffer: MTLBuffer_id) -> Self {
+ self.inner.mtl_buffer = mtl_buffer;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImportMetalBufferInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportMetalTextureInfoEXT.html>"]
+pub struct ExportMetalTextureInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub image: Image,
+ pub image_view: ImageView,
+ pub buffer_view: BufferView,
+ pub plane: ImageAspectFlags,
+ pub mtl_texture: MTLTexture_id,
+}
+impl ::std::default::Default for ExportMetalTextureInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ image: Image::default(),
+ image_view: ImageView::default(),
+ buffer_view: BufferView::default(),
+ plane: ImageAspectFlags::default(),
+ mtl_texture: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for ExportMetalTextureInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_METAL_TEXTURE_INFO_EXT;
+}
+impl ExportMetalTextureInfoEXT {
+ pub fn builder<'a>() -> ExportMetalTextureInfoEXTBuilder<'a> {
+ ExportMetalTextureInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExportMetalTextureInfoEXTBuilder<'a> {
+ inner: ExportMetalTextureInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalTextureInfoEXTBuilder<'_> {}
+unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalTextureInfoEXT {}
+impl<'a> ::std::ops::Deref for ExportMetalTextureInfoEXTBuilder<'a> {
+ type Target = ExportMetalTextureInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExportMetalTextureInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExportMetalTextureInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn image(mut self, image: Image) -> Self {
+ self.inner.image = image;
+ self
+ }
+ #[inline]
+ pub fn image_view(mut self, image_view: ImageView) -> Self {
+ self.inner.image_view = image_view;
+ self
+ }
+ #[inline]
+ pub fn buffer_view(mut self, buffer_view: BufferView) -> Self {
+ self.inner.buffer_view = buffer_view;
+ self
+ }
+ #[inline]
+ pub fn plane(mut self, plane: ImageAspectFlags) -> Self {
+ self.inner.plane = plane;
+ self
+ }
+ #[inline]
+ pub fn mtl_texture(mut self, mtl_texture: MTLTexture_id) -> Self {
+ self.inner.mtl_texture = mtl_texture;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExportMetalTextureInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportMetalTextureInfoEXT.html>"]
+pub struct ImportMetalTextureInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub plane: ImageAspectFlags,
+ pub mtl_texture: MTLTexture_id,
+}
+impl ::std::default::Default for ImportMetalTextureInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ plane: ImageAspectFlags::default(),
+ mtl_texture: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for ImportMetalTextureInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_METAL_TEXTURE_INFO_EXT;
+}
+impl ImportMetalTextureInfoEXT {
+ pub fn builder<'a>() -> ImportMetalTextureInfoEXTBuilder<'a> {
+ ImportMetalTextureInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImportMetalTextureInfoEXTBuilder<'a> {
+ inner: ImportMetalTextureInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsImageCreateInfo for ImportMetalTextureInfoEXTBuilder<'_> {}
+unsafe impl ExtendsImageCreateInfo for ImportMetalTextureInfoEXT {}
+impl<'a> ::std::ops::Deref for ImportMetalTextureInfoEXTBuilder<'a> {
+ type Target = ImportMetalTextureInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImportMetalTextureInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImportMetalTextureInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn plane(mut self, plane: ImageAspectFlags) -> Self {
+ self.inner.plane = plane;
+ self
+ }
+ #[inline]
+ pub fn mtl_texture(mut self, mtl_texture: MTLTexture_id) -> Self {
+ self.inner.mtl_texture = mtl_texture;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImportMetalTextureInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportMetalIOSurfaceInfoEXT.html>"]
+pub struct ExportMetalIOSurfaceInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub image: Image,
+ pub io_surface: IOSurfaceRef,
+}
+impl ::std::default::Default for ExportMetalIOSurfaceInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ image: Image::default(),
+ io_surface: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for ExportMetalIOSurfaceInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_METAL_IO_SURFACE_INFO_EXT;
+}
+impl ExportMetalIOSurfaceInfoEXT {
+ pub fn builder<'a>() -> ExportMetalIOSurfaceInfoEXTBuilder<'a> {
+ ExportMetalIOSurfaceInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExportMetalIOSurfaceInfoEXTBuilder<'a> {
+ inner: ExportMetalIOSurfaceInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalIOSurfaceInfoEXTBuilder<'_> {}
+unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalIOSurfaceInfoEXT {}
+impl<'a> ::std::ops::Deref for ExportMetalIOSurfaceInfoEXTBuilder<'a> {
+ type Target = ExportMetalIOSurfaceInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExportMetalIOSurfaceInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExportMetalIOSurfaceInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn image(mut self, image: Image) -> Self {
+ self.inner.image = image;
+ self
+ }
+ #[inline]
+ pub fn io_surface(mut self, io_surface: IOSurfaceRef) -> Self {
+ self.inner.io_surface = io_surface;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExportMetalIOSurfaceInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportMetalIOSurfaceInfoEXT.html>"]
+pub struct ImportMetalIOSurfaceInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub io_surface: IOSurfaceRef,
+}
+impl ::std::default::Default for ImportMetalIOSurfaceInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ io_surface: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for ImportMetalIOSurfaceInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_METAL_IO_SURFACE_INFO_EXT;
+}
+impl ImportMetalIOSurfaceInfoEXT {
+ pub fn builder<'a>() -> ImportMetalIOSurfaceInfoEXTBuilder<'a> {
+ ImportMetalIOSurfaceInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImportMetalIOSurfaceInfoEXTBuilder<'a> {
+ inner: ImportMetalIOSurfaceInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsImageCreateInfo for ImportMetalIOSurfaceInfoEXTBuilder<'_> {}
+unsafe impl ExtendsImageCreateInfo for ImportMetalIOSurfaceInfoEXT {}
+impl<'a> ::std::ops::Deref for ImportMetalIOSurfaceInfoEXTBuilder<'a> {
+ type Target = ImportMetalIOSurfaceInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImportMetalIOSurfaceInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImportMetalIOSurfaceInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn io_surface(mut self, io_surface: IOSurfaceRef) -> Self {
+ self.inner.io_surface = io_surface;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImportMetalIOSurfaceInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkExportMetalSharedEventInfoEXT.html>"]
+pub struct ExportMetalSharedEventInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub semaphore: Semaphore,
+ pub event: Event,
+ pub mtl_shared_event: MTLSharedEvent_id,
+}
+impl ::std::default::Default for ExportMetalSharedEventInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ semaphore: Semaphore::default(),
+ event: Event::default(),
+ mtl_shared_event: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for ExportMetalSharedEventInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::EXPORT_METAL_SHARED_EVENT_INFO_EXT;
+}
+impl ExportMetalSharedEventInfoEXT {
+ pub fn builder<'a>() -> ExportMetalSharedEventInfoEXTBuilder<'a> {
+ ExportMetalSharedEventInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ExportMetalSharedEventInfoEXTBuilder<'a> {
+ inner: ExportMetalSharedEventInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalSharedEventInfoEXTBuilder<'_> {}
+unsafe impl ExtendsExportMetalObjectsInfoEXT for ExportMetalSharedEventInfoEXT {}
+impl<'a> ::std::ops::Deref for ExportMetalSharedEventInfoEXTBuilder<'a> {
+ type Target = ExportMetalSharedEventInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ExportMetalSharedEventInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ExportMetalSharedEventInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn semaphore(mut self, semaphore: Semaphore) -> Self {
+ self.inner.semaphore = semaphore;
+ self
+ }
+ #[inline]
+ pub fn event(mut self, event: Event) -> Self {
+ self.inner.event = event;
+ self
+ }
+ #[inline]
+ pub fn mtl_shared_event(mut self, mtl_shared_event: MTLSharedEvent_id) -> Self {
+ self.inner.mtl_shared_event = mtl_shared_event;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ExportMetalSharedEventInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImportMetalSharedEventInfoEXT.html>"]
+pub struct ImportMetalSharedEventInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub mtl_shared_event: MTLSharedEvent_id,
+}
+impl ::std::default::Default for ImportMetalSharedEventInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ mtl_shared_event: unsafe { ::std::mem::zeroed() },
+ }
+ }
+}
+unsafe impl TaggedStructure for ImportMetalSharedEventInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMPORT_METAL_SHARED_EVENT_INFO_EXT;
+}
+impl ImportMetalSharedEventInfoEXT {
+ pub fn builder<'a>() -> ImportMetalSharedEventInfoEXTBuilder<'a> {
+ ImportMetalSharedEventInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImportMetalSharedEventInfoEXTBuilder<'a> {
+ inner: ImportMetalSharedEventInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsSemaphoreCreateInfo for ImportMetalSharedEventInfoEXTBuilder<'_> {}
+unsafe impl ExtendsSemaphoreCreateInfo for ImportMetalSharedEventInfoEXT {}
+unsafe impl ExtendsEventCreateInfo for ImportMetalSharedEventInfoEXTBuilder<'_> {}
+unsafe impl ExtendsEventCreateInfo for ImportMetalSharedEventInfoEXT {}
+impl<'a> ::std::ops::Deref for ImportMetalSharedEventInfoEXTBuilder<'a> {
+ type Target = ImportMetalSharedEventInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImportMetalSharedEventInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImportMetalSharedEventInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn mtl_shared_event(mut self, mtl_shared_event: MTLSharedEvent_id) -> Self {
+ self.inner.mtl_shared_event = mtl_shared_event;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImportMetalSharedEventInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT.html>"]
+pub struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub non_seamless_cube_map: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceNonSeamlessCubeMapFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ non_seamless_cube_map: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceNonSeamlessCubeMapFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT;
+}
+impl PhysicalDeviceNonSeamlessCubeMapFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceNonSeamlessCubeMapFeaturesEXTBuilder<'a> {
+ PhysicalDeviceNonSeamlessCubeMapFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceNonSeamlessCubeMapFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceNonSeamlessCubeMapFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceNonSeamlessCubeMapFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceNonSeamlessCubeMapFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceNonSeamlessCubeMapFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceNonSeamlessCubeMapFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceNonSeamlessCubeMapFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceNonSeamlessCubeMapFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceNonSeamlessCubeMapFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn non_seamless_cube_map(mut self, non_seamless_cube_map: bool) -> Self {
+ self.inner.non_seamless_cube_map = non_seamless_cube_map.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceNonSeamlessCubeMapFeaturesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePipelineRobustnessFeaturesEXT.html>"]
+pub struct PhysicalDevicePipelineRobustnessFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub pipeline_robustness: Bool32,
+}
+impl ::std::default::Default for PhysicalDevicePipelineRobustnessFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ pipeline_robustness: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDevicePipelineRobustnessFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT;
+}
+impl PhysicalDevicePipelineRobustnessFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDevicePipelineRobustnessFeaturesEXTBuilder<'a> {
+ PhysicalDevicePipelineRobustnessFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDevicePipelineRobustnessFeaturesEXTBuilder<'a> {
+ inner: PhysicalDevicePipelineRobustnessFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDevicePipelineRobustnessFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDevicePipelineRobustnessFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePipelineRobustnessFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDevicePipelineRobustnessFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePipelineRobustnessFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDevicePipelineRobustnessFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePipelineRobustnessFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDevicePipelineRobustnessFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn pipeline_robustness(mut self, pipeline_robustness: bool) -> Self {
+ self.inner.pipeline_robustness = pipeline_robustness.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDevicePipelineRobustnessFeaturesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRobustnessCreateInfoEXT.html>"]
+pub struct PipelineRobustnessCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub storage_buffers: PipelineRobustnessBufferBehaviorEXT,
+ pub uniform_buffers: PipelineRobustnessBufferBehaviorEXT,
+ pub vertex_inputs: PipelineRobustnessBufferBehaviorEXT,
+ pub images: PipelineRobustnessImageBehaviorEXT,
+}
+impl ::std::default::Default for PipelineRobustnessCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ storage_buffers: PipelineRobustnessBufferBehaviorEXT::default(),
+ uniform_buffers: PipelineRobustnessBufferBehaviorEXT::default(),
+ vertex_inputs: PipelineRobustnessBufferBehaviorEXT::default(),
+ images: PipelineRobustnessImageBehaviorEXT::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PipelineRobustnessCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::PIPELINE_ROBUSTNESS_CREATE_INFO_EXT;
+}
+impl PipelineRobustnessCreateInfoEXT {
+ pub fn builder<'a>() -> PipelineRobustnessCreateInfoEXTBuilder<'a> {
+ PipelineRobustnessCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PipelineRobustnessCreateInfoEXTBuilder<'a> {
+ inner: PipelineRobustnessCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineRobustnessCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsGraphicsPipelineCreateInfo for PipelineRobustnessCreateInfoEXT {}
+unsafe impl ExtendsComputePipelineCreateInfo for PipelineRobustnessCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsComputePipelineCreateInfo for PipelineRobustnessCreateInfoEXT {}
+unsafe impl ExtendsPipelineShaderStageCreateInfo for PipelineRobustnessCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsPipelineShaderStageCreateInfo for PipelineRobustnessCreateInfoEXT {}
+unsafe impl ExtendsRayTracingPipelineCreateInfoKHR for PipelineRobustnessCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsRayTracingPipelineCreateInfoKHR for PipelineRobustnessCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for PipelineRobustnessCreateInfoEXTBuilder<'a> {
+ type Target = PipelineRobustnessCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PipelineRobustnessCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PipelineRobustnessCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn storage_buffers(mut self, storage_buffers: PipelineRobustnessBufferBehaviorEXT) -> Self {
+ self.inner.storage_buffers = storage_buffers;
+ self
+ }
+ #[inline]
+ pub fn uniform_buffers(mut self, uniform_buffers: PipelineRobustnessBufferBehaviorEXT) -> Self {
+ self.inner.uniform_buffers = uniform_buffers;
+ self
+ }
+ #[inline]
+ pub fn vertex_inputs(mut self, vertex_inputs: PipelineRobustnessBufferBehaviorEXT) -> Self {
+ self.inner.vertex_inputs = vertex_inputs;
+ self
+ }
+ #[inline]
+ pub fn images(mut self, images: PipelineRobustnessImageBehaviorEXT) -> Self {
+ self.inner.images = images;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PipelineRobustnessCreateInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDevicePipelineRobustnessPropertiesEXT.html>"]
+pub struct PhysicalDevicePipelineRobustnessPropertiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub default_robustness_storage_buffers: PipelineRobustnessBufferBehaviorEXT,
+ pub default_robustness_uniform_buffers: PipelineRobustnessBufferBehaviorEXT,
+ pub default_robustness_vertex_inputs: PipelineRobustnessBufferBehaviorEXT,
+ pub default_robustness_images: PipelineRobustnessImageBehaviorEXT,
+}
+impl ::std::default::Default for PhysicalDevicePipelineRobustnessPropertiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ default_robustness_storage_buffers: PipelineRobustnessBufferBehaviorEXT::default(),
+ default_robustness_uniform_buffers: PipelineRobustnessBufferBehaviorEXT::default(),
+ default_robustness_vertex_inputs: PipelineRobustnessBufferBehaviorEXT::default(),
+ default_robustness_images: PipelineRobustnessImageBehaviorEXT::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDevicePipelineRobustnessPropertiesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT;
+}
+impl PhysicalDevicePipelineRobustnessPropertiesEXT {
+ pub fn builder<'a>() -> PhysicalDevicePipelineRobustnessPropertiesEXTBuilder<'a> {
+ PhysicalDevicePipelineRobustnessPropertiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDevicePipelineRobustnessPropertiesEXTBuilder<'a> {
+ inner: PhysicalDevicePipelineRobustnessPropertiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDevicePipelineRobustnessPropertiesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDevicePipelineRobustnessPropertiesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDevicePipelineRobustnessPropertiesEXTBuilder<'a> {
+ type Target = PhysicalDevicePipelineRobustnessPropertiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDevicePipelineRobustnessPropertiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDevicePipelineRobustnessPropertiesEXTBuilder<'a> {
+ #[inline]
+ pub fn default_robustness_storage_buffers(
+ mut self,
+ default_robustness_storage_buffers: PipelineRobustnessBufferBehaviorEXT,
+ ) -> Self {
+ self.inner.default_robustness_storage_buffers = default_robustness_storage_buffers;
+ self
+ }
+ #[inline]
+ pub fn default_robustness_uniform_buffers(
+ mut self,
+ default_robustness_uniform_buffers: PipelineRobustnessBufferBehaviorEXT,
+ ) -> Self {
+ self.inner.default_robustness_uniform_buffers = default_robustness_uniform_buffers;
+ self
+ }
+ #[inline]
+ pub fn default_robustness_vertex_inputs(
+ mut self,
+ default_robustness_vertex_inputs: PipelineRobustnessBufferBehaviorEXT,
+ ) -> Self {
+ self.inner.default_robustness_vertex_inputs = default_robustness_vertex_inputs;
+ self
+ }
+ #[inline]
+ pub fn default_robustness_images(
+ mut self,
+ default_robustness_images: PipelineRobustnessImageBehaviorEXT,
+ ) -> Self {
+ self.inner.default_robustness_images = default_robustness_images;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDevicePipelineRobustnessPropertiesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageViewSampleWeightCreateInfoQCOM.html>"]
+pub struct ImageViewSampleWeightCreateInfoQCOM {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub filter_center: Offset2D,
+ pub filter_size: Extent2D,
+ pub num_phases: u32,
+}
+impl ::std::default::Default for ImageViewSampleWeightCreateInfoQCOM {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ filter_center: Offset2D::default(),
+ filter_size: Extent2D::default(),
+ num_phases: u32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for ImageViewSampleWeightCreateInfoQCOM {
+ const STRUCTURE_TYPE: StructureType = StructureType::IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM;
+}
+impl ImageViewSampleWeightCreateInfoQCOM {
+ pub fn builder<'a>() -> ImageViewSampleWeightCreateInfoQCOMBuilder<'a> {
+ ImageViewSampleWeightCreateInfoQCOMBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct ImageViewSampleWeightCreateInfoQCOMBuilder<'a> {
+ inner: ImageViewSampleWeightCreateInfoQCOM,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsImageViewCreateInfo for ImageViewSampleWeightCreateInfoQCOMBuilder<'_> {}
+unsafe impl ExtendsImageViewCreateInfo for ImageViewSampleWeightCreateInfoQCOM {}
+impl<'a> ::std::ops::Deref for ImageViewSampleWeightCreateInfoQCOMBuilder<'a> {
+ type Target = ImageViewSampleWeightCreateInfoQCOM;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ImageViewSampleWeightCreateInfoQCOMBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ImageViewSampleWeightCreateInfoQCOMBuilder<'a> {
+ #[inline]
+ pub fn filter_center(mut self, filter_center: Offset2D) -> Self {
+ self.inner.filter_center = filter_center;
+ self
+ }
+ #[inline]
+ pub fn filter_size(mut self, filter_size: Extent2D) -> Self {
+ self.inner.filter_size = filter_size;
+ self
+ }
+ #[inline]
+ pub fn num_phases(mut self, num_phases: u32) -> Self {
+ self.inner.num_phases = num_phases;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ImageViewSampleWeightCreateInfoQCOM {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceImageProcessingFeaturesQCOM.html>"]
+pub struct PhysicalDeviceImageProcessingFeaturesQCOM {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub texture_sample_weighted: Bool32,
+ pub texture_box_filter: Bool32,
+ pub texture_block_match: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceImageProcessingFeaturesQCOM {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ texture_sample_weighted: Bool32::default(),
+ texture_box_filter: Bool32::default(),
+ texture_block_match: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceImageProcessingFeaturesQCOM {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM;
+}
+impl PhysicalDeviceImageProcessingFeaturesQCOM {
+ pub fn builder<'a>() -> PhysicalDeviceImageProcessingFeaturesQCOMBuilder<'a> {
+ PhysicalDeviceImageProcessingFeaturesQCOMBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceImageProcessingFeaturesQCOMBuilder<'a> {
+ inner: PhysicalDeviceImageProcessingFeaturesQCOM,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceImageProcessingFeaturesQCOMBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceImageProcessingFeaturesQCOM {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImageProcessingFeaturesQCOMBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceImageProcessingFeaturesQCOM {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceImageProcessingFeaturesQCOMBuilder<'a> {
+ type Target = PhysicalDeviceImageProcessingFeaturesQCOM;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceImageProcessingFeaturesQCOMBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceImageProcessingFeaturesQCOMBuilder<'a> {
+ #[inline]
+ pub fn texture_sample_weighted(mut self, texture_sample_weighted: bool) -> Self {
+ self.inner.texture_sample_weighted = texture_sample_weighted.into();
+ self
+ }
+ #[inline]
+ pub fn texture_box_filter(mut self, texture_box_filter: bool) -> Self {
+ self.inner.texture_box_filter = texture_box_filter.into();
+ self
+ }
+ #[inline]
+ pub fn texture_block_match(mut self, texture_block_match: bool) -> Self {
+ self.inner.texture_block_match = texture_block_match.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceImageProcessingFeaturesQCOM {
+ self.inner
+ }
+}
+// Generated C-ABI mirror of VkPhysicalDeviceImageProcessingPropertiesQCOM
+// (VK_QCOM_image_processing limits); read back via vkGetPhysicalDeviceProperties2.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceImageProcessingPropertiesQCOM.html>"]
+pub struct PhysicalDeviceImageProcessingPropertiesQCOM {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub max_weight_filter_phases: u32,
+    pub max_weight_filter_dimension: Extent2D,
+    pub max_block_match_region: Extent2D,
+    pub max_box_filter_block_size: Extent2D,
+}
+// Default pre-fills s_type with the matching VkStructureType tag and a null p_next.
+impl ::std::default::Default for PhysicalDeviceImageProcessingPropertiesQCOM {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            max_weight_filter_phases: u32::default(),
+            max_weight_filter_dimension: Extent2D::default(),
+            max_block_match_region: Extent2D::default(),
+            max_box_filter_block_size: Extent2D::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceImageProcessingPropertiesQCOM {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM;
+}
+impl PhysicalDeviceImageProcessingPropertiesQCOM {
+    pub fn builder<'a>() -> PhysicalDeviceImageProcessingPropertiesQCOMBuilder<'a> {
+        PhysicalDeviceImageProcessingPropertiesQCOMBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent) wrapper: identical layout to the inner struct; the PhantomData
+// lifetime tracks borrows added through the builder.
+#[repr(transparent)]
+pub struct PhysicalDeviceImageProcessingPropertiesQCOMBuilder<'a> {
+    inner: PhysicalDeviceImageProcessingPropertiesQCOM,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid in the p_next chain of VkPhysicalDeviceProperties2.
+unsafe impl ExtendsPhysicalDeviceProperties2
+    for PhysicalDeviceImageProcessingPropertiesQCOMBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceImageProcessingPropertiesQCOM {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceImageProcessingPropertiesQCOMBuilder<'a> {
+    type Target = PhysicalDeviceImageProcessingPropertiesQCOM;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceImageProcessingPropertiesQCOMBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceImageProcessingPropertiesQCOMBuilder<'a> {
+    #[inline]
+    pub fn max_weight_filter_phases(mut self, max_weight_filter_phases: u32) -> Self {
+        self.inner.max_weight_filter_phases = max_weight_filter_phases;
+        self
+    }
+    #[inline]
+    pub fn max_weight_filter_dimension(mut self, max_weight_filter_dimension: Extent2D) -> Self {
+        self.inner.max_weight_filter_dimension = max_weight_filter_dimension;
+        self
+    }
+    #[inline]
+    pub fn max_block_match_region(mut self, max_block_match_region: Extent2D) -> Self {
+        self.inner.max_block_match_region = max_block_match_region;
+        self
+    }
+    #[inline]
+    pub fn max_box_filter_block_size(mut self, max_box_filter_block_size: Extent2D) -> Self {
+        self.inner.max_box_filter_block_size = max_box_filter_block_size;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceImageProcessingPropertiesQCOM {
+        self.inner
+    }
+}
+// Generated C-ABI mirror of VkPhysicalDeviceTilePropertiesFeaturesQCOM: single
+// Bool32 feature toggle, chainable into Features2 queries and device creation.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceTilePropertiesFeaturesQCOM.html>"]
+pub struct PhysicalDeviceTilePropertiesFeaturesQCOM {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub tile_properties: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceTilePropertiesFeaturesQCOM {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            tile_properties: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceTilePropertiesFeaturesQCOM {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM;
+}
+impl PhysicalDeviceTilePropertiesFeaturesQCOM {
+    pub fn builder<'a>() -> PhysicalDeviceTilePropertiesFeaturesQCOMBuilder<'a> {
+        PhysicalDeviceTilePropertiesFeaturesQCOMBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent): builder has the same layout as the raw struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceTilePropertiesFeaturesQCOMBuilder<'a> {
+    inner: PhysicalDeviceTilePropertiesFeaturesQCOM,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceTilePropertiesFeaturesQCOMBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceTilePropertiesFeaturesQCOM {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTilePropertiesFeaturesQCOMBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceTilePropertiesFeaturesQCOM {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceTilePropertiesFeaturesQCOMBuilder<'a> {
+    type Target = PhysicalDeviceTilePropertiesFeaturesQCOM;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceTilePropertiesFeaturesQCOMBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceTilePropertiesFeaturesQCOMBuilder<'a> {
+    #[inline]
+    pub fn tile_properties(mut self, tile_properties: bool) -> Self {
+        self.inner.tile_properties = tile_properties.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceTilePropertiesFeaturesQCOM {
+        self.inner
+    }
+}
+// Generated C-ABI mirror of VkTilePropertiesQCOM (tile size/apron/origin output
+// struct). No Extends* impls: it is a top-level output, not a p_next extension.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkTilePropertiesQCOM.html>"]
+pub struct TilePropertiesQCOM {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub tile_size: Extent3D,
+    pub apron_size: Extent2D,
+    pub origin: Offset2D,
+}
+impl ::std::default::Default for TilePropertiesQCOM {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            tile_size: Extent3D::default(),
+            apron_size: Extent2D::default(),
+            origin: Offset2D::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for TilePropertiesQCOM {
+    const STRUCTURE_TYPE: StructureType = StructureType::TILE_PROPERTIES_QCOM;
+}
+impl TilePropertiesQCOM {
+    pub fn builder<'a>() -> TilePropertiesQCOMBuilder<'a> {
+        TilePropertiesQCOMBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent): builder has the same layout as the raw struct.
+#[repr(transparent)]
+pub struct TilePropertiesQCOMBuilder<'a> {
+    inner: TilePropertiesQCOM,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for TilePropertiesQCOMBuilder<'a> {
+    type Target = TilePropertiesQCOM;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for TilePropertiesQCOMBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> TilePropertiesQCOMBuilder<'a> {
+    #[inline]
+    pub fn tile_size(mut self, tile_size: Extent3D) -> Self {
+        self.inner.tile_size = tile_size;
+        self
+    }
+    #[inline]
+    pub fn apron_size(mut self, apron_size: Extent2D) -> Self {
+        self.inner.apron_size = apron_size;
+        self
+    }
+    #[inline]
+    pub fn origin(mut self, origin: Offset2D) -> Self {
+        self.inner.origin = origin;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> TilePropertiesQCOM {
+        self.inner
+    }
+}
+// Generated C-ABI mirror of VkPhysicalDeviceAmigoProfilingFeaturesSEC: single
+// Bool32 feature toggle, chainable into Features2 queries and device creation.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceAmigoProfilingFeaturesSEC.html>"]
+pub struct PhysicalDeviceAmigoProfilingFeaturesSEC {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub amigo_profiling: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceAmigoProfilingFeaturesSEC {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            amigo_profiling: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceAmigoProfilingFeaturesSEC {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC;
+}
+impl PhysicalDeviceAmigoProfilingFeaturesSEC {
+    pub fn builder<'a>() -> PhysicalDeviceAmigoProfilingFeaturesSECBuilder<'a> {
+        PhysicalDeviceAmigoProfilingFeaturesSECBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent): builder has the same layout as the raw struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceAmigoProfilingFeaturesSECBuilder<'a> {
+    inner: PhysicalDeviceAmigoProfilingFeaturesSEC,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceAmigoProfilingFeaturesSECBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceAmigoProfilingFeaturesSEC {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceAmigoProfilingFeaturesSECBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceAmigoProfilingFeaturesSEC {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceAmigoProfilingFeaturesSECBuilder<'a> {
+    type Target = PhysicalDeviceAmigoProfilingFeaturesSEC;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceAmigoProfilingFeaturesSECBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceAmigoProfilingFeaturesSECBuilder<'a> {
+    #[inline]
+    pub fn amigo_profiling(mut self, amigo_profiling: bool) -> Self {
+        self.inner.amigo_profiling = amigo_profiling.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceAmigoProfilingFeaturesSEC {
+        self.inner
+    }
+}
+// Generated C-ABI mirror of VkAmigoProfilingSubmitInfoSEC. Input-only struct
+// (p_next is *const) that extends VkSubmitInfo with two u64 timestamps.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAmigoProfilingSubmitInfoSEC.html>"]
+pub struct AmigoProfilingSubmitInfoSEC {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub first_draw_timestamp: u64,
+    pub swap_buffer_timestamp: u64,
+}
+impl ::std::default::Default for AmigoProfilingSubmitInfoSEC {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            first_draw_timestamp: u64::default(),
+            swap_buffer_timestamp: u64::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for AmigoProfilingSubmitInfoSEC {
+    const STRUCTURE_TYPE: StructureType = StructureType::AMIGO_PROFILING_SUBMIT_INFO_SEC;
+}
+impl AmigoProfilingSubmitInfoSEC {
+    pub fn builder<'a>() -> AmigoProfilingSubmitInfoSECBuilder<'a> {
+        AmigoProfilingSubmitInfoSECBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent): builder has the same layout as the raw struct.
+#[repr(transparent)]
+pub struct AmigoProfilingSubmitInfoSECBuilder<'a> {
+    inner: AmigoProfilingSubmitInfoSEC,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid in the p_next chain of VkSubmitInfo.
+unsafe impl ExtendsSubmitInfo for AmigoProfilingSubmitInfoSECBuilder<'_> {}
+unsafe impl ExtendsSubmitInfo for AmigoProfilingSubmitInfoSEC {}
+impl<'a> ::std::ops::Deref for AmigoProfilingSubmitInfoSECBuilder<'a> {
+    type Target = AmigoProfilingSubmitInfoSEC;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for AmigoProfilingSubmitInfoSECBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> AmigoProfilingSubmitInfoSECBuilder<'a> {
+    #[inline]
+    pub fn first_draw_timestamp(mut self, first_draw_timestamp: u64) -> Self {
+        self.inner.first_draw_timestamp = first_draw_timestamp;
+        self
+    }
+    #[inline]
+    pub fn swap_buffer_timestamp(mut self, swap_buffer_timestamp: u64) -> Self {
+        self.inner.swap_buffer_timestamp = swap_buffer_timestamp;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> AmigoProfilingSubmitInfoSEC {
+        self.inner
+    }
+}
+// Generated C-ABI mirror of VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT:
+// single Bool32 feature toggle, chainable into Features2 queries and device creation.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT.html>"]
+pub struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub attachment_feedback_loop_layout: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            attachment_feedback_loop_layout: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT;
+}
+impl PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXTBuilder<'a> {
+        PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent): builder has the same layout as the raw struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXTBuilder<'a> {
+    inner: PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT
+{
+}
+unsafe impl ExtendsDeviceCreateInfo
+    for PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXTBuilder<'a> {
+    type Target = PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXTBuilder<'a> {
+    #[inline]
+    pub fn attachment_feedback_loop_layout(
+        mut self,
+        attachment_feedback_loop_layout: bool,
+    ) -> Self {
+        self.inner.attachment_feedback_loop_layout = attachment_feedback_loop_layout.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT {
+        self.inner
+    }
+}
+// Generated C-ABI mirror of VkPhysicalDeviceDepthClampZeroOneFeaturesEXT: single
+// Bool32 feature toggle, chainable into Features2 queries and device creation.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceDepthClampZeroOneFeaturesEXT.html>"]
+pub struct PhysicalDeviceDepthClampZeroOneFeaturesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub depth_clamp_zero_one: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceDepthClampZeroOneFeaturesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            depth_clamp_zero_one: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceDepthClampZeroOneFeaturesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT;
+}
+impl PhysicalDeviceDepthClampZeroOneFeaturesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceDepthClampZeroOneFeaturesEXTBuilder<'a> {
+        PhysicalDeviceDepthClampZeroOneFeaturesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent): builder has the same layout as the raw struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceDepthClampZeroOneFeaturesEXTBuilder<'a> {
+    inner: PhysicalDeviceDepthClampZeroOneFeaturesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceDepthClampZeroOneFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceDepthClampZeroOneFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDepthClampZeroOneFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceDepthClampZeroOneFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceDepthClampZeroOneFeaturesEXTBuilder<'a> {
+    type Target = PhysicalDeviceDepthClampZeroOneFeaturesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceDepthClampZeroOneFeaturesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceDepthClampZeroOneFeaturesEXTBuilder<'a> {
+    #[inline]
+    pub fn depth_clamp_zero_one(mut self, depth_clamp_zero_one: bool) -> Self {
+        self.inner.depth_clamp_zero_one = depth_clamp_zero_one.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceDepthClampZeroOneFeaturesEXT {
+        self.inner
+    }
+}
+// Generated C-ABI mirror of VkPhysicalDeviceAddressBindingReportFeaturesEXT: single
+// Bool32 feature toggle, chainable into Features2 queries and device creation.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceAddressBindingReportFeaturesEXT.html>"]
+pub struct PhysicalDeviceAddressBindingReportFeaturesEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub report_address_binding: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceAddressBindingReportFeaturesEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            report_address_binding: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceAddressBindingReportFeaturesEXT {
+    const STRUCTURE_TYPE: StructureType =
+        StructureType::PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT;
+}
+impl PhysicalDeviceAddressBindingReportFeaturesEXT {
+    pub fn builder<'a>() -> PhysicalDeviceAddressBindingReportFeaturesEXTBuilder<'a> {
+        PhysicalDeviceAddressBindingReportFeaturesEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent): builder has the same layout as the raw struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceAddressBindingReportFeaturesEXTBuilder<'a> {
+    inner: PhysicalDeviceAddressBindingReportFeaturesEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+    for PhysicalDeviceAddressBindingReportFeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceAddressBindingReportFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceAddressBindingReportFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceAddressBindingReportFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceAddressBindingReportFeaturesEXTBuilder<'a> {
+    type Target = PhysicalDeviceAddressBindingReportFeaturesEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceAddressBindingReportFeaturesEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceAddressBindingReportFeaturesEXTBuilder<'a> {
+    #[inline]
+    pub fn report_address_binding(mut self, report_address_binding: bool) -> Self {
+        self.inner.report_address_binding = report_address_binding.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceAddressBindingReportFeaturesEXT {
+        self.inner
+    }
+}
+// Generated C-ABI mirror of VkDeviceAddressBindingCallbackDataEXT: payload
+// delivered through the debug-utils messenger callback chain.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceAddressBindingCallbackDataEXT.html>"]
+pub struct DeviceAddressBindingCallbackDataEXT {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub flags: DeviceAddressBindingFlagsEXT,
+    pub base_address: DeviceAddress,
+    pub size: DeviceSize,
+    pub binding_type: DeviceAddressBindingTypeEXT,
+}
+impl ::std::default::Default for DeviceAddressBindingCallbackDataEXT {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            flags: DeviceAddressBindingFlagsEXT::default(),
+            base_address: DeviceAddress::default(),
+            size: DeviceSize::default(),
+            binding_type: DeviceAddressBindingTypeEXT::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for DeviceAddressBindingCallbackDataEXT {
+    const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT;
+}
+impl DeviceAddressBindingCallbackDataEXT {
+    pub fn builder<'a>() -> DeviceAddressBindingCallbackDataEXTBuilder<'a> {
+        DeviceAddressBindingCallbackDataEXTBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent): builder has the same layout as the raw struct.
+#[repr(transparent)]
+pub struct DeviceAddressBindingCallbackDataEXTBuilder<'a> {
+    inner: DeviceAddressBindingCallbackDataEXT,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid in the p_next chain of VkDebugUtilsMessengerCallbackDataEXT.
+unsafe impl ExtendsDebugUtilsMessengerCallbackDataEXT
+    for DeviceAddressBindingCallbackDataEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsDebugUtilsMessengerCallbackDataEXT for DeviceAddressBindingCallbackDataEXT {}
+impl<'a> ::std::ops::Deref for DeviceAddressBindingCallbackDataEXTBuilder<'a> {
+    type Target = DeviceAddressBindingCallbackDataEXT;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for DeviceAddressBindingCallbackDataEXTBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> DeviceAddressBindingCallbackDataEXTBuilder<'a> {
+    #[inline]
+    pub fn flags(mut self, flags: DeviceAddressBindingFlagsEXT) -> Self {
+        self.inner.flags = flags;
+        self
+    }
+    #[inline]
+    pub fn base_address(mut self, base_address: DeviceAddress) -> Self {
+        self.inner.base_address = base_address;
+        self
+    }
+    #[inline]
+    pub fn size(mut self, size: DeviceSize) -> Self {
+        self.inner.size = size;
+        self
+    }
+    #[inline]
+    pub fn binding_type(mut self, binding_type: DeviceAddressBindingTypeEXT) -> Self {
+        self.inner.binding_type = binding_type;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> DeviceAddressBindingCallbackDataEXT {
+        self.inner
+    }
+}
+// Generated C-ABI mirror of VkPhysicalDeviceOpticalFlowFeaturesNV: single Bool32
+// feature toggle, chainable into Features2 queries and device creation.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceOpticalFlowFeaturesNV.html>"]
+pub struct PhysicalDeviceOpticalFlowFeaturesNV {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub optical_flow: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceOpticalFlowFeaturesNV {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            optical_flow: Bool32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceOpticalFlowFeaturesNV {
+    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV;
+}
+impl PhysicalDeviceOpticalFlowFeaturesNV {
+    pub fn builder<'a>() -> PhysicalDeviceOpticalFlowFeaturesNVBuilder<'a> {
+        PhysicalDeviceOpticalFlowFeaturesNVBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent): builder has the same layout as the raw struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceOpticalFlowFeaturesNVBuilder<'a> {
+    inner: PhysicalDeviceOpticalFlowFeaturesNV,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceOpticalFlowFeaturesNVBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceOpticalFlowFeaturesNV {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceOpticalFlowFeaturesNVBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceOpticalFlowFeaturesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceOpticalFlowFeaturesNVBuilder<'a> {
+    type Target = PhysicalDeviceOpticalFlowFeaturesNV;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceOpticalFlowFeaturesNVBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceOpticalFlowFeaturesNVBuilder<'a> {
+    #[inline]
+    pub fn optical_flow(mut self, optical_flow: bool) -> Self {
+        self.inner.optical_flow = optical_flow.into();
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceOpticalFlowFeaturesNV {
+        self.inner
+    }
+}
+// Generated C-ABI mirror of VkPhysicalDeviceOpticalFlowPropertiesNV
+// (VK_NV_optical_flow limits/capabilities); read back via vkGetPhysicalDeviceProperties2.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceOpticalFlowPropertiesNV.html>"]
+pub struct PhysicalDeviceOpticalFlowPropertiesNV {
+    pub s_type: StructureType,
+    pub p_next: *mut c_void,
+    pub supported_output_grid_sizes: OpticalFlowGridSizeFlagsNV,
+    pub supported_hint_grid_sizes: OpticalFlowGridSizeFlagsNV,
+    pub hint_supported: Bool32,
+    pub cost_supported: Bool32,
+    pub bidirectional_flow_supported: Bool32,
+    pub global_flow_supported: Bool32,
+    pub min_width: u32,
+    pub min_height: u32,
+    pub max_width: u32,
+    pub max_height: u32,
+    pub max_num_regions_of_interest: u32,
+}
+impl ::std::default::Default for PhysicalDeviceOpticalFlowPropertiesNV {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null_mut(),
+            supported_output_grid_sizes: OpticalFlowGridSizeFlagsNV::default(),
+            supported_hint_grid_sizes: OpticalFlowGridSizeFlagsNV::default(),
+            hint_supported: Bool32::default(),
+            cost_supported: Bool32::default(),
+            bidirectional_flow_supported: Bool32::default(),
+            global_flow_supported: Bool32::default(),
+            min_width: u32::default(),
+            min_height: u32::default(),
+            max_width: u32::default(),
+            max_height: u32::default(),
+            max_num_regions_of_interest: u32::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for PhysicalDeviceOpticalFlowPropertiesNV {
+    const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV;
+}
+impl PhysicalDeviceOpticalFlowPropertiesNV {
+    pub fn builder<'a>() -> PhysicalDeviceOpticalFlowPropertiesNVBuilder<'a> {
+        PhysicalDeviceOpticalFlowPropertiesNVBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent): builder has the same layout as the raw struct.
+#[repr(transparent)]
+pub struct PhysicalDeviceOpticalFlowPropertiesNVBuilder<'a> {
+    inner: PhysicalDeviceOpticalFlowPropertiesNV,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid in the p_next chain of VkPhysicalDeviceProperties2.
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceOpticalFlowPropertiesNVBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceOpticalFlowPropertiesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceOpticalFlowPropertiesNVBuilder<'a> {
+    type Target = PhysicalDeviceOpticalFlowPropertiesNV;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceOpticalFlowPropertiesNVBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> PhysicalDeviceOpticalFlowPropertiesNVBuilder<'a> {
+    #[inline]
+    pub fn supported_output_grid_sizes(
+        mut self,
+        supported_output_grid_sizes: OpticalFlowGridSizeFlagsNV,
+    ) -> Self {
+        self.inner.supported_output_grid_sizes = supported_output_grid_sizes;
+        self
+    }
+    #[inline]
+    pub fn supported_hint_grid_sizes(
+        mut self,
+        supported_hint_grid_sizes: OpticalFlowGridSizeFlagsNV,
+    ) -> Self {
+        self.inner.supported_hint_grid_sizes = supported_hint_grid_sizes;
+        self
+    }
+    #[inline]
+    pub fn hint_supported(mut self, hint_supported: bool) -> Self {
+        self.inner.hint_supported = hint_supported.into();
+        self
+    }
+    #[inline]
+    pub fn cost_supported(mut self, cost_supported: bool) -> Self {
+        self.inner.cost_supported = cost_supported.into();
+        self
+    }
+    #[inline]
+    pub fn bidirectional_flow_supported(mut self, bidirectional_flow_supported: bool) -> Self {
+        self.inner.bidirectional_flow_supported = bidirectional_flow_supported.into();
+        self
+    }
+    #[inline]
+    pub fn global_flow_supported(mut self, global_flow_supported: bool) -> Self {
+        self.inner.global_flow_supported = global_flow_supported.into();
+        self
+    }
+    #[inline]
+    pub fn min_width(mut self, min_width: u32) -> Self {
+        self.inner.min_width = min_width;
+        self
+    }
+    #[inline]
+    pub fn min_height(mut self, min_height: u32) -> Self {
+        self.inner.min_height = min_height;
+        self
+    }
+    #[inline]
+    pub fn max_width(mut self, max_width: u32) -> Self {
+        self.inner.max_width = max_width;
+        self
+    }
+    #[inline]
+    pub fn max_height(mut self, max_height: u32) -> Self {
+        self.inner.max_height = max_height;
+        self
+    }
+    #[inline]
+    pub fn max_num_regions_of_interest(mut self, max_num_regions_of_interest: u32) -> Self {
+        self.inner.max_num_regions_of_interest = max_num_regions_of_interest;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> PhysicalDeviceOpticalFlowPropertiesNV {
+        self.inner
+    }
+}
+// Generated C-ABI mirror of VkOpticalFlowImageFormatInfoNV. Input-only struct
+// (p_next is *const) carrying the intended optical-flow usage flags.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOpticalFlowImageFormatInfoNV.html>"]
+pub struct OpticalFlowImageFormatInfoNV {
+    pub s_type: StructureType,
+    pub p_next: *const c_void,
+    pub usage: OpticalFlowUsageFlagsNV,
+}
+impl ::std::default::Default for OpticalFlowImageFormatInfoNV {
+    #[inline]
+    fn default() -> Self {
+        Self {
+            s_type: Self::STRUCTURE_TYPE,
+            p_next: ::std::ptr::null(),
+            usage: OpticalFlowUsageFlagsNV::default(),
+        }
+    }
+}
+unsafe impl TaggedStructure for OpticalFlowImageFormatInfoNV {
+    const STRUCTURE_TYPE: StructureType = StructureType::OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV;
+}
+impl OpticalFlowImageFormatInfoNV {
+    pub fn builder<'a>() -> OpticalFlowImageFormatInfoNVBuilder<'a> {
+        OpticalFlowImageFormatInfoNVBuilder {
+            inner: Self::default(),
+            marker: ::std::marker::PhantomData,
+        }
+    }
+}
+// repr(transparent): builder has the same layout as the raw struct.
+#[repr(transparent)]
+pub struct OpticalFlowImageFormatInfoNVBuilder<'a> {
+    inner: OpticalFlowImageFormatInfoNV,
+    marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid in the p_next chains of both VkPhysicalDeviceImageFormatInfo2
+// and VkImageCreateInfo.
+unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for OpticalFlowImageFormatInfoNVBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceImageFormatInfo2 for OpticalFlowImageFormatInfoNV {}
+unsafe impl ExtendsImageCreateInfo for OpticalFlowImageFormatInfoNVBuilder<'_> {}
+unsafe impl ExtendsImageCreateInfo for OpticalFlowImageFormatInfoNV {}
+impl<'a> ::std::ops::Deref for OpticalFlowImageFormatInfoNVBuilder<'a> {
+    type Target = OpticalFlowImageFormatInfoNV;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+impl<'a> ::std::ops::DerefMut for OpticalFlowImageFormatInfoNVBuilder<'a> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.inner
+    }
+}
+impl<'a> OpticalFlowImageFormatInfoNVBuilder<'a> {
+    #[inline]
+    pub fn usage(mut self, usage: OpticalFlowUsageFlagsNV) -> Self {
+        self.inner.usage = usage;
+        self
+    }
+    #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+    #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+    #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+    pub fn build(self) -> OpticalFlowImageFormatInfoNV {
+        self.inner
+    }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOpticalFlowImageFormatPropertiesNV.html>"]
+pub struct OpticalFlowImageFormatPropertiesNV {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub format: Format,
+}
+impl ::std::default::Default for OpticalFlowImageFormatPropertiesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ format: Format::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for OpticalFlowImageFormatPropertiesNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV;
+}
+impl OpticalFlowImageFormatPropertiesNV {
+ pub fn builder<'a>() -> OpticalFlowImageFormatPropertiesNVBuilder<'a> {
+ OpticalFlowImageFormatPropertiesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct OpticalFlowImageFormatPropertiesNVBuilder<'a> {
+ inner: OpticalFlowImageFormatPropertiesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for OpticalFlowImageFormatPropertiesNVBuilder<'a> {
+ type Target = OpticalFlowImageFormatPropertiesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for OpticalFlowImageFormatPropertiesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> OpticalFlowImageFormatPropertiesNVBuilder<'a> {
+ #[inline]
+ pub fn format(mut self, format: Format) -> Self {
+ self.inner.format = format;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> OpticalFlowImageFormatPropertiesNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOpticalFlowSessionCreateInfoNV.html>"]
+pub struct OpticalFlowSessionCreateInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub width: u32,
+ pub height: u32,
+ pub image_format: Format,
+ pub flow_vector_format: Format,
+ pub cost_format: Format,
+ pub output_grid_size: OpticalFlowGridSizeFlagsNV,
+ pub hint_grid_size: OpticalFlowGridSizeFlagsNV,
+ pub performance_level: OpticalFlowPerformanceLevelNV,
+ pub flags: OpticalFlowSessionCreateFlagsNV,
+}
+impl ::std::default::Default for OpticalFlowSessionCreateInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ width: u32::default(),
+ height: u32::default(),
+ image_format: Format::default(),
+ flow_vector_format: Format::default(),
+ cost_format: Format::default(),
+ output_grid_size: OpticalFlowGridSizeFlagsNV::default(),
+ hint_grid_size: OpticalFlowGridSizeFlagsNV::default(),
+ performance_level: OpticalFlowPerformanceLevelNV::default(),
+ flags: OpticalFlowSessionCreateFlagsNV::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for OpticalFlowSessionCreateInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::OPTICAL_FLOW_SESSION_CREATE_INFO_NV;
+}
+impl OpticalFlowSessionCreateInfoNV {
+ pub fn builder<'a>() -> OpticalFlowSessionCreateInfoNVBuilder<'a> {
+ OpticalFlowSessionCreateInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct OpticalFlowSessionCreateInfoNVBuilder<'a> {
+ inner: OpticalFlowSessionCreateInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+pub unsafe trait ExtendsOpticalFlowSessionCreateInfoNV {}
+impl<'a> ::std::ops::Deref for OpticalFlowSessionCreateInfoNVBuilder<'a> {
+ type Target = OpticalFlowSessionCreateInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for OpticalFlowSessionCreateInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> OpticalFlowSessionCreateInfoNVBuilder<'a> {
+ #[inline]
+ pub fn width(mut self, width: u32) -> Self {
+ self.inner.width = width;
+ self
+ }
+ #[inline]
+ pub fn height(mut self, height: u32) -> Self {
+ self.inner.height = height;
+ self
+ }
+ #[inline]
+ pub fn image_format(mut self, image_format: Format) -> Self {
+ self.inner.image_format = image_format;
+ self
+ }
+ #[inline]
+ pub fn flow_vector_format(mut self, flow_vector_format: Format) -> Self {
+ self.inner.flow_vector_format = flow_vector_format;
+ self
+ }
+ #[inline]
+ pub fn cost_format(mut self, cost_format: Format) -> Self {
+ self.inner.cost_format = cost_format;
+ self
+ }
+ #[inline]
+ pub fn output_grid_size(mut self, output_grid_size: OpticalFlowGridSizeFlagsNV) -> Self {
+ self.inner.output_grid_size = output_grid_size;
+ self
+ }
+ #[inline]
+ pub fn hint_grid_size(mut self, hint_grid_size: OpticalFlowGridSizeFlagsNV) -> Self {
+ self.inner.hint_grid_size = hint_grid_size;
+ self
+ }
+ #[inline]
+ pub fn performance_level(mut self, performance_level: OpticalFlowPerformanceLevelNV) -> Self {
+ self.inner.performance_level = performance_level;
+ self
+ }
+ #[inline]
+ pub fn flags(mut self, flags: OpticalFlowSessionCreateFlagsNV) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[doc = r" Prepends the given extension struct between the root and the first pointer. This"]
+ #[doc = r" method only exists on structs that can be passed to a function directly. Only"]
+ #[doc = r" valid extension structs can be pushed into the chain."]
+ #[doc = r" If the chain looks like `A -> B -> C`, and you call `builder.push_next(&mut D)`, then the"]
+ #[doc = r" chain will look like `A -> D -> B -> C`."]
+ pub fn push_next<T: ExtendsOpticalFlowSessionCreateInfoNV>(mut self, next: &'a mut T) -> Self {
+ unsafe {
+ let next_ptr = <*mut T>::cast(next);
+ let last_next = ptr_chain_iter(next).last().unwrap();
+ (*last_next).p_next = self.inner.p_next as _;
+ self.inner.p_next = next_ptr;
+ }
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> OpticalFlowSessionCreateInfoNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOpticalFlowSessionCreatePrivateDataInfoNV.html>"]
+pub struct OpticalFlowSessionCreatePrivateDataInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub id: u32,
+ pub size: u32,
+ pub p_private_data: *const c_void,
+}
+impl ::std::default::Default for OpticalFlowSessionCreatePrivateDataInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ id: u32::default(),
+ size: u32::default(),
+ p_private_data: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for OpticalFlowSessionCreatePrivateDataInfoNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV;
+}
+impl OpticalFlowSessionCreatePrivateDataInfoNV {
+ pub fn builder<'a>() -> OpticalFlowSessionCreatePrivateDataInfoNVBuilder<'a> {
+ OpticalFlowSessionCreatePrivateDataInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct OpticalFlowSessionCreatePrivateDataInfoNVBuilder<'a> {
+ inner: OpticalFlowSessionCreatePrivateDataInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsOpticalFlowSessionCreateInfoNV
+ for OpticalFlowSessionCreatePrivateDataInfoNVBuilder<'_>
+{
+}
+unsafe impl ExtendsOpticalFlowSessionCreateInfoNV for OpticalFlowSessionCreatePrivateDataInfoNV {}
+impl<'a> ::std::ops::Deref for OpticalFlowSessionCreatePrivateDataInfoNVBuilder<'a> {
+ type Target = OpticalFlowSessionCreatePrivateDataInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for OpticalFlowSessionCreatePrivateDataInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> OpticalFlowSessionCreatePrivateDataInfoNVBuilder<'a> {
+ #[inline]
+ pub fn id(mut self, id: u32) -> Self {
+ self.inner.id = id;
+ self
+ }
+ #[inline]
+ pub fn size(mut self, size: u32) -> Self {
+ self.inner.size = size;
+ self
+ }
+ #[inline]
+ pub fn private_data(mut self, private_data: *const c_void) -> Self {
+ self.inner.p_private_data = private_data;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> OpticalFlowSessionCreatePrivateDataInfoNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOpticalFlowExecuteInfoNV.html>"]
+pub struct OpticalFlowExecuteInfoNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub flags: OpticalFlowExecuteFlagsNV,
+ pub region_count: u32,
+ pub p_regions: *const Rect2D,
+}
+impl ::std::default::Default for OpticalFlowExecuteInfoNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ flags: OpticalFlowExecuteFlagsNV::default(),
+ region_count: u32::default(),
+ p_regions: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for OpticalFlowExecuteInfoNV {
+ const STRUCTURE_TYPE: StructureType = StructureType::OPTICAL_FLOW_EXECUTE_INFO_NV;
+}
+impl OpticalFlowExecuteInfoNV {
+ pub fn builder<'a>() -> OpticalFlowExecuteInfoNVBuilder<'a> {
+ OpticalFlowExecuteInfoNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct OpticalFlowExecuteInfoNVBuilder<'a> {
+ inner: OpticalFlowExecuteInfoNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for OpticalFlowExecuteInfoNVBuilder<'a> {
+ type Target = OpticalFlowExecuteInfoNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for OpticalFlowExecuteInfoNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> OpticalFlowExecuteInfoNVBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: OpticalFlowExecuteFlagsNV) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn regions(mut self, regions: &'a [Rect2D]) -> Self {
+ self.inner.region_count = regions.len() as _;
+ self.inner.p_regions = regions.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> OpticalFlowExecuteInfoNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceFaultFeaturesEXT.html>"]
+pub struct PhysicalDeviceFaultFeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub device_fault: Bool32,
+ pub device_fault_vendor_binary: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceFaultFeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ device_fault: Bool32::default(),
+ device_fault_vendor_binary: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceFaultFeaturesEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::PHYSICAL_DEVICE_FAULT_FEATURES_EXT;
+}
+impl PhysicalDeviceFaultFeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceFaultFeaturesEXTBuilder<'a> {
+ PhysicalDeviceFaultFeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceFaultFeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceFaultFeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFaultFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceFaultFeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFaultFeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceFaultFeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceFaultFeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceFaultFeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceFaultFeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceFaultFeaturesEXTBuilder<'a> {
+ #[inline]
+ pub fn device_fault(mut self, device_fault: bool) -> Self {
+ self.inner.device_fault = device_fault.into();
+ self
+ }
+ #[inline]
+ pub fn device_fault_vendor_binary(mut self, device_fault_vendor_binary: bool) -> Self {
+ self.inner.device_fault_vendor_binary = device_fault_vendor_binary.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceFaultFeaturesEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceFaultAddressInfoEXT.html>"]
+pub struct DeviceFaultAddressInfoEXT {
+ pub address_type: DeviceFaultAddressTypeEXT,
+ pub reported_address: DeviceAddress,
+ pub address_precision: DeviceSize,
+}
+impl DeviceFaultAddressInfoEXT {
+ pub fn builder<'a>() -> DeviceFaultAddressInfoEXTBuilder<'a> {
+ DeviceFaultAddressInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DeviceFaultAddressInfoEXTBuilder<'a> {
+ inner: DeviceFaultAddressInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DeviceFaultAddressInfoEXTBuilder<'a> {
+ type Target = DeviceFaultAddressInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceFaultAddressInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceFaultAddressInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn address_type(mut self, address_type: DeviceFaultAddressTypeEXT) -> Self {
+ self.inner.address_type = address_type;
+ self
+ }
+ #[inline]
+ pub fn reported_address(mut self, reported_address: DeviceAddress) -> Self {
+ self.inner.reported_address = reported_address;
+ self
+ }
+ #[inline]
+ pub fn address_precision(mut self, address_precision: DeviceSize) -> Self {
+ self.inner.address_precision = address_precision;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceFaultAddressInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceFaultVendorInfoEXT.html>"]
+pub struct DeviceFaultVendorInfoEXT {
+ pub description: [c_char; MAX_DESCRIPTION_SIZE],
+ pub vendor_fault_code: u64,
+ pub vendor_fault_data: u64,
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for DeviceFaultVendorInfoEXT {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("DeviceFaultVendorInfoEXT")
+ .field("description", &unsafe {
+ ::std::ffi::CStr::from_ptr(self.description.as_ptr())
+ })
+ .field("vendor_fault_code", &self.vendor_fault_code)
+ .field("vendor_fault_data", &self.vendor_fault_data)
+ .finish()
+ }
+}
+impl ::std::default::Default for DeviceFaultVendorInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ description: unsafe { ::std::mem::zeroed() },
+ vendor_fault_code: u64::default(),
+ vendor_fault_data: u64::default(),
+ }
+ }
+}
+impl DeviceFaultVendorInfoEXT {
+ pub fn builder<'a>() -> DeviceFaultVendorInfoEXTBuilder<'a> {
+ DeviceFaultVendorInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DeviceFaultVendorInfoEXTBuilder<'a> {
+ inner: DeviceFaultVendorInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DeviceFaultVendorInfoEXTBuilder<'a> {
+ type Target = DeviceFaultVendorInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceFaultVendorInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceFaultVendorInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn description(mut self, description: [c_char; MAX_DESCRIPTION_SIZE]) -> Self {
+ self.inner.description = description;
+ self
+ }
+ #[inline]
+ pub fn vendor_fault_code(mut self, vendor_fault_code: u64) -> Self {
+ self.inner.vendor_fault_code = vendor_fault_code;
+ self
+ }
+ #[inline]
+ pub fn vendor_fault_data(mut self, vendor_fault_data: u64) -> Self {
+ self.inner.vendor_fault_data = vendor_fault_data;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceFaultVendorInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceFaultCountsEXT.html>"]
+pub struct DeviceFaultCountsEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub address_info_count: u32,
+ pub vendor_info_count: u32,
+ pub vendor_binary_size: DeviceSize,
+}
+impl ::std::default::Default for DeviceFaultCountsEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ address_info_count: u32::default(),
+ vendor_info_count: u32::default(),
+ vendor_binary_size: DeviceSize::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DeviceFaultCountsEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_FAULT_COUNTS_EXT;
+}
+impl DeviceFaultCountsEXT {
+ pub fn builder<'a>() -> DeviceFaultCountsEXTBuilder<'a> {
+ DeviceFaultCountsEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DeviceFaultCountsEXTBuilder<'a> {
+ inner: DeviceFaultCountsEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DeviceFaultCountsEXTBuilder<'a> {
+ type Target = DeviceFaultCountsEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceFaultCountsEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceFaultCountsEXTBuilder<'a> {
+ #[inline]
+ pub fn address_info_count(mut self, address_info_count: u32) -> Self {
+ self.inner.address_info_count = address_info_count;
+ self
+ }
+ #[inline]
+ pub fn vendor_info_count(mut self, vendor_info_count: u32) -> Self {
+ self.inner.vendor_info_count = vendor_info_count;
+ self
+ }
+ #[inline]
+ pub fn vendor_binary_size(mut self, vendor_binary_size: DeviceSize) -> Self {
+ self.inner.vendor_binary_size = vendor_binary_size;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceFaultCountsEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceFaultInfoEXT.html>"]
+pub struct DeviceFaultInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub description: [c_char; MAX_DESCRIPTION_SIZE],
+ pub p_address_infos: *mut DeviceFaultAddressInfoEXT,
+ pub p_vendor_infos: *mut DeviceFaultVendorInfoEXT,
+ pub p_vendor_binary_data: *mut c_void,
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for DeviceFaultInfoEXT {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("DeviceFaultInfoEXT")
+ .field("s_type", &self.s_type)
+ .field("p_next", &self.p_next)
+ .field("description", &unsafe {
+ ::std::ffi::CStr::from_ptr(self.description.as_ptr())
+ })
+ .field("p_address_infos", &self.p_address_infos)
+ .field("p_vendor_infos", &self.p_vendor_infos)
+ .field("p_vendor_binary_data", &self.p_vendor_binary_data)
+ .finish()
+ }
+}
+impl ::std::default::Default for DeviceFaultInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ description: unsafe { ::std::mem::zeroed() },
+ p_address_infos: ::std::ptr::null_mut(),
+ p_vendor_infos: ::std::ptr::null_mut(),
+ p_vendor_binary_data: ::std::ptr::null_mut(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DeviceFaultInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::DEVICE_FAULT_INFO_EXT;
+}
+impl DeviceFaultInfoEXT {
+ pub fn builder<'a>() -> DeviceFaultInfoEXTBuilder<'a> {
+ DeviceFaultInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DeviceFaultInfoEXTBuilder<'a> {
+ inner: DeviceFaultInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DeviceFaultInfoEXTBuilder<'a> {
+ type Target = DeviceFaultInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceFaultInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceFaultInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn description(mut self, description: [c_char; MAX_DESCRIPTION_SIZE]) -> Self {
+ self.inner.description = description;
+ self
+ }
+ #[inline]
+ pub fn address_infos(mut self, address_infos: &'a mut DeviceFaultAddressInfoEXT) -> Self {
+ self.inner.p_address_infos = address_infos;
+ self
+ }
+ #[inline]
+ pub fn vendor_infos(mut self, vendor_infos: &'a mut DeviceFaultVendorInfoEXT) -> Self {
+ self.inner.p_vendor_infos = vendor_infos;
+ self
+ }
+ #[inline]
+ pub fn vendor_binary_data(mut self, vendor_binary_data: *mut c_void) -> Self {
+ self.inner.p_vendor_binary_data = vendor_binary_data;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceFaultInfoEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceFaultVendorBinaryHeaderVersionOneEXT.html>"]
+pub struct DeviceFaultVendorBinaryHeaderVersionOneEXT {
+ pub header_size: u32,
+ pub header_version: DeviceFaultVendorBinaryHeaderVersionEXT,
+ pub vendor_id: u32,
+ pub device_id: u32,
+ pub driver_version: u32,
+ pub pipeline_cache_uuid: [u8; UUID_SIZE],
+ pub application_name_offset: u32,
+ pub application_version: u32,
+ pub engine_name_offset: u32,
+}
+impl ::std::default::Default for DeviceFaultVendorBinaryHeaderVersionOneEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ header_size: u32::default(),
+ header_version: DeviceFaultVendorBinaryHeaderVersionEXT::default(),
+ vendor_id: u32::default(),
+ device_id: u32::default(),
+ driver_version: u32::default(),
+ pipeline_cache_uuid: unsafe { ::std::mem::zeroed() },
+ application_name_offset: u32::default(),
+ application_version: u32::default(),
+ engine_name_offset: u32::default(),
+ }
+ }
+}
+impl DeviceFaultVendorBinaryHeaderVersionOneEXT {
+ pub fn builder<'a>() -> DeviceFaultVendorBinaryHeaderVersionOneEXTBuilder<'a> {
+ DeviceFaultVendorBinaryHeaderVersionOneEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DeviceFaultVendorBinaryHeaderVersionOneEXTBuilder<'a> {
+ inner: DeviceFaultVendorBinaryHeaderVersionOneEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DeviceFaultVendorBinaryHeaderVersionOneEXTBuilder<'a> {
+ type Target = DeviceFaultVendorBinaryHeaderVersionOneEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DeviceFaultVendorBinaryHeaderVersionOneEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DeviceFaultVendorBinaryHeaderVersionOneEXTBuilder<'a> {
+ #[inline]
+ pub fn header_size(mut self, header_size: u32) -> Self {
+ self.inner.header_size = header_size;
+ self
+ }
+ #[inline]
+ pub fn header_version(
+ mut self,
+ header_version: DeviceFaultVendorBinaryHeaderVersionEXT,
+ ) -> Self {
+ self.inner.header_version = header_version;
+ self
+ }
+ #[inline]
+ pub fn vendor_id(mut self, vendor_id: u32) -> Self {
+ self.inner.vendor_id = vendor_id;
+ self
+ }
+ #[inline]
+ pub fn device_id(mut self, device_id: u32) -> Self {
+ self.inner.device_id = device_id;
+ self
+ }
+ #[inline]
+ pub fn driver_version(mut self, driver_version: u32) -> Self {
+ self.inner.driver_version = driver_version;
+ self
+ }
+ #[inline]
+ pub fn pipeline_cache_uuid(mut self, pipeline_cache_uuid: [u8; UUID_SIZE]) -> Self {
+ self.inner.pipeline_cache_uuid = pipeline_cache_uuid;
+ self
+ }
+ #[inline]
+ pub fn application_name_offset(mut self, application_name_offset: u32) -> Self {
+ self.inner.application_name_offset = application_name_offset;
+ self
+ }
+ #[inline]
+ pub fn application_version(mut self, application_version: u32) -> Self {
+ self.inner.application_version = application_version;
+ self
+ }
+ #[inline]
+ pub fn engine_name_offset(mut self, engine_name_offset: u32) -> Self {
+ self.inner.engine_name_offset = engine_name_offset;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DeviceFaultVendorBinaryHeaderVersionOneEXT {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone, Default)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDecompressMemoryRegionNV.html>"]
+pub struct DecompressMemoryRegionNV {
+ pub src_address: DeviceAddress,
+ pub dst_address: DeviceAddress,
+ pub compressed_size: DeviceSize,
+ pub decompressed_size: DeviceSize,
+ pub decompression_method: MemoryDecompressionMethodFlagsNV,
+}
+impl DecompressMemoryRegionNV {
+ pub fn builder<'a>() -> DecompressMemoryRegionNVBuilder<'a> {
+ DecompressMemoryRegionNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DecompressMemoryRegionNVBuilder<'a> {
+ inner: DecompressMemoryRegionNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DecompressMemoryRegionNVBuilder<'a> {
+ type Target = DecompressMemoryRegionNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DecompressMemoryRegionNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DecompressMemoryRegionNVBuilder<'a> {
+ #[inline]
+ pub fn src_address(mut self, src_address: DeviceAddress) -> Self {
+ self.inner.src_address = src_address;
+ self
+ }
+ #[inline]
+ pub fn dst_address(mut self, dst_address: DeviceAddress) -> Self {
+ self.inner.dst_address = dst_address;
+ self
+ }
+ #[inline]
+ pub fn compressed_size(mut self, compressed_size: DeviceSize) -> Self {
+ self.inner.compressed_size = compressed_size;
+ self
+ }
+ #[inline]
+ pub fn decompressed_size(mut self, decompressed_size: DeviceSize) -> Self {
+ self.inner.decompressed_size = decompressed_size;
+ self
+ }
+ #[inline]
+ pub fn decompression_method(
+ mut self,
+ decompression_method: MemoryDecompressionMethodFlagsNV,
+ ) -> Self {
+ self.inner.decompression_method = decompression_method;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DecompressMemoryRegionNV {
+ self.inner
+ }
+}
+// Generated binding for VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM
+// (VK_ARM_shader_core_builtins); #[repr(C)] preserves the C struct layout.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM.html>"]
+pub struct PhysicalDeviceShaderCoreBuiltinsPropertiesARM {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_core_mask: u64,
+ pub shader_core_count: u32,
+ pub shader_warps_per_core: u32,
+}
+// Default pre-tags `s_type` and nulls `p_next`, so a default value is chain-ready.
+impl ::std::default::Default for PhysicalDeviceShaderCoreBuiltinsPropertiesARM {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shader_core_mask: u64::default(),
+ shader_core_count: u32::default(),
+ shader_warps_per_core: u32::default(),
+ }
+ }
+}
+// SAFETY: the constant below is this struct's own VkStructureType tag.
+unsafe impl TaggedStructure for PhysicalDeviceShaderCoreBuiltinsPropertiesARM {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM;
+}
+impl PhysicalDeviceShaderCoreBuiltinsPropertiesARM {
+ pub fn builder<'a>() -> PhysicalDeviceShaderCoreBuiltinsPropertiesARMBuilder<'a> {
+ PhysicalDeviceShaderCoreBuiltinsPropertiesARMBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// #[repr(transparent)] lets a &Builder be used where a &struct is expected (via Deref below).
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderCoreBuiltinsPropertiesARMBuilder<'a> {
+ inner: PhysicalDeviceShaderCoreBuiltinsPropertiesARM,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: this struct may appear in the p_next chain of PhysicalDeviceProperties2.
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceShaderCoreBuiltinsPropertiesARMBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2 for PhysicalDeviceShaderCoreBuiltinsPropertiesARM {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderCoreBuiltinsPropertiesARMBuilder<'a> {
+ type Target = PhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderCoreBuiltinsPropertiesARMBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Fluent by-value setters; each returns the builder for chaining.
+impl<'a> PhysicalDeviceShaderCoreBuiltinsPropertiesARMBuilder<'a> {
+ #[inline]
+ pub fn shader_core_mask(mut self, shader_core_mask: u64) -> Self {
+ self.inner.shader_core_mask = shader_core_mask;
+ self
+ }
+ #[inline]
+ pub fn shader_core_count(mut self, shader_core_count: u32) -> Self {
+ self.inner.shader_core_count = shader_core_count;
+ self
+ }
+ #[inline]
+ pub fn shader_warps_per_core(mut self, shader_warps_per_core: u32) -> Self {
+ self.inner.shader_warps_per_core = shader_warps_per_core;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceShaderCoreBuiltinsPropertiesARM {
+ self.inner
+ }
+}
+// Generated binding for VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM
+// (VK_ARM_shader_core_builtins); #[repr(C)] preserves the C struct layout.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM.html>"]
+pub struct PhysicalDeviceShaderCoreBuiltinsFeaturesARM {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub shader_core_builtins: Bool32,
+}
+// Default pre-tags `s_type` and nulls `p_next`, so a default value is chain-ready.
+impl ::std::default::Default for PhysicalDeviceShaderCoreBuiltinsFeaturesARM {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ shader_core_builtins: Bool32::default(),
+ }
+ }
+}
+// SAFETY: the constant below is this struct's own VkStructureType tag.
+unsafe impl TaggedStructure for PhysicalDeviceShaderCoreBuiltinsFeaturesARM {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM;
+}
+impl PhysicalDeviceShaderCoreBuiltinsFeaturesARM {
+ pub fn builder<'a>() -> PhysicalDeviceShaderCoreBuiltinsFeaturesARMBuilder<'a> {
+ PhysicalDeviceShaderCoreBuiltinsFeaturesARMBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// #[repr(transparent)] lets a &Builder be used where a &struct is expected (via Deref below).
+#[repr(transparent)]
+pub struct PhysicalDeviceShaderCoreBuiltinsFeaturesARMBuilder<'a> {
+ inner: PhysicalDeviceShaderCoreBuiltinsFeaturesARM,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid in the p_next chain of PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceShaderCoreBuiltinsFeaturesARMBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceShaderCoreBuiltinsFeaturesARM {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderCoreBuiltinsFeaturesARMBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceShaderCoreBuiltinsFeaturesARM {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceShaderCoreBuiltinsFeaturesARMBuilder<'a> {
+ type Target = PhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceShaderCoreBuiltinsFeaturesARMBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceShaderCoreBuiltinsFeaturesARMBuilder<'a> {
+ #[inline]
+ // Accepts a Rust bool and stores it as a Vulkan Bool32 via `.into()`.
+ pub fn shader_core_builtins(mut self, shader_core_builtins: bool) -> Self {
+ self.inner.shader_core_builtins = shader_core_builtins.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceShaderCoreBuiltinsFeaturesARM {
+ self.inner
+ }
+}
+// Generated binding for VkSurfacePresentModeEXT (VK_EXT_surface_maintenance1);
+// #[repr(C)] preserves the C struct layout.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfacePresentModeEXT.html>"]
+pub struct SurfacePresentModeEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub present_mode: PresentModeKHR,
+}
+// Default pre-tags `s_type` and nulls `p_next`, so a default value is chain-ready.
+impl ::std::default::Default for SurfacePresentModeEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ present_mode: PresentModeKHR::default(),
+ }
+ }
+}
+// SAFETY: the constant below is this struct's own VkStructureType tag.
+unsafe impl TaggedStructure for SurfacePresentModeEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_PRESENT_MODE_EXT;
+}
+impl SurfacePresentModeEXT {
+ pub fn builder<'a>() -> SurfacePresentModeEXTBuilder<'a> {
+ SurfacePresentModeEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// #[repr(transparent)] lets a &Builder be used where a &struct is expected (via Deref below).
+#[repr(transparent)]
+pub struct SurfacePresentModeEXTBuilder<'a> {
+ inner: SurfacePresentModeEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid in the p_next chain of PhysicalDeviceSurfaceInfo2KHR.
+unsafe impl ExtendsPhysicalDeviceSurfaceInfo2KHR for SurfacePresentModeEXTBuilder<'_> {}
+unsafe impl ExtendsPhysicalDeviceSurfaceInfo2KHR for SurfacePresentModeEXT {}
+impl<'a> ::std::ops::Deref for SurfacePresentModeEXTBuilder<'a> {
+ type Target = SurfacePresentModeEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SurfacePresentModeEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SurfacePresentModeEXTBuilder<'a> {
+ #[inline]
+ pub fn present_mode(mut self, present_mode: PresentModeKHR) -> Self {
+ self.inner.present_mode = present_mode;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SurfacePresentModeEXT {
+ self.inner
+ }
+}
+// Generated binding for VkSurfacePresentScalingCapabilitiesEXT
+// (VK_EXT_surface_maintenance1); #[repr(C)] preserves the C struct layout.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfacePresentScalingCapabilitiesEXT.html>"]
+pub struct SurfacePresentScalingCapabilitiesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub supported_present_scaling: PresentScalingFlagsEXT,
+ pub supported_present_gravity_x: PresentGravityFlagsEXT,
+ pub supported_present_gravity_y: PresentGravityFlagsEXT,
+ pub min_scaled_image_extent: Extent2D,
+ pub max_scaled_image_extent: Extent2D,
+}
+// Default pre-tags `s_type` and nulls `p_next`, so a default value is chain-ready.
+impl ::std::default::Default for SurfacePresentScalingCapabilitiesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ supported_present_scaling: PresentScalingFlagsEXT::default(),
+ supported_present_gravity_x: PresentGravityFlagsEXT::default(),
+ supported_present_gravity_y: PresentGravityFlagsEXT::default(),
+ min_scaled_image_extent: Extent2D::default(),
+ max_scaled_image_extent: Extent2D::default(),
+ }
+ }
+}
+// SAFETY: the constant below is this struct's own VkStructureType tag.
+unsafe impl TaggedStructure for SurfacePresentScalingCapabilitiesEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_PRESENT_SCALING_CAPABILITIES_EXT;
+}
+impl SurfacePresentScalingCapabilitiesEXT {
+ pub fn builder<'a>() -> SurfacePresentScalingCapabilitiesEXTBuilder<'a> {
+ SurfacePresentScalingCapabilitiesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// #[repr(transparent)] lets a &Builder be used where a &struct is expected (via Deref below).
+#[repr(transparent)]
+pub struct SurfacePresentScalingCapabilitiesEXTBuilder<'a> {
+ inner: SurfacePresentScalingCapabilitiesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid in the p_next chain of SurfaceCapabilities2KHR.
+unsafe impl ExtendsSurfaceCapabilities2KHR for SurfacePresentScalingCapabilitiesEXTBuilder<'_> {}
+unsafe impl ExtendsSurfaceCapabilities2KHR for SurfacePresentScalingCapabilitiesEXT {}
+impl<'a> ::std::ops::Deref for SurfacePresentScalingCapabilitiesEXTBuilder<'a> {
+ type Target = SurfacePresentScalingCapabilitiesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SurfacePresentScalingCapabilitiesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Fluent by-value setters; each returns the builder for chaining.
+impl<'a> SurfacePresentScalingCapabilitiesEXTBuilder<'a> {
+ #[inline]
+ pub fn supported_present_scaling(
+ mut self,
+ supported_present_scaling: PresentScalingFlagsEXT,
+ ) -> Self {
+ self.inner.supported_present_scaling = supported_present_scaling;
+ self
+ }
+ #[inline]
+ pub fn supported_present_gravity_x(
+ mut self,
+ supported_present_gravity_x: PresentGravityFlagsEXT,
+ ) -> Self {
+ self.inner.supported_present_gravity_x = supported_present_gravity_x;
+ self
+ }
+ #[inline]
+ pub fn supported_present_gravity_y(
+ mut self,
+ supported_present_gravity_y: PresentGravityFlagsEXT,
+ ) -> Self {
+ self.inner.supported_present_gravity_y = supported_present_gravity_y;
+ self
+ }
+ #[inline]
+ pub fn min_scaled_image_extent(mut self, min_scaled_image_extent: Extent2D) -> Self {
+ self.inner.min_scaled_image_extent = min_scaled_image_extent;
+ self
+ }
+ #[inline]
+ pub fn max_scaled_image_extent(mut self, max_scaled_image_extent: Extent2D) -> Self {
+ self.inner.max_scaled_image_extent = max_scaled_image_extent;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SurfacePresentScalingCapabilitiesEXT {
+ self.inner
+ }
+}
+// Generated binding for VkSurfacePresentModeCompatibilityEXT
+// (VK_EXT_surface_maintenance1); #[repr(C)] preserves the C struct layout.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSurfacePresentModeCompatibilityEXT.html>"]
+pub struct SurfacePresentModeCompatibilityEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub present_mode_count: u32,
+ pub p_present_modes: *mut PresentModeKHR,
+}
+// Default pre-tags `s_type` and nulls both pointers, so a default value is chain-ready.
+impl ::std::default::Default for SurfacePresentModeCompatibilityEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ present_mode_count: u32::default(),
+ p_present_modes: ::std::ptr::null_mut(),
+ }
+ }
+}
+// SAFETY: the constant below is this struct's own VkStructureType tag.
+unsafe impl TaggedStructure for SurfacePresentModeCompatibilityEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::SURFACE_PRESENT_MODE_COMPATIBILITY_EXT;
+}
+impl SurfacePresentModeCompatibilityEXT {
+ pub fn builder<'a>() -> SurfacePresentModeCompatibilityEXTBuilder<'a> {
+ SurfacePresentModeCompatibilityEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// #[repr(transparent)] lets a &Builder be used where a &struct is expected (via Deref below).
+#[repr(transparent)]
+pub struct SurfacePresentModeCompatibilityEXTBuilder<'a> {
+ inner: SurfacePresentModeCompatibilityEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid in the p_next chain of SurfaceCapabilities2KHR.
+unsafe impl ExtendsSurfaceCapabilities2KHR for SurfacePresentModeCompatibilityEXTBuilder<'_> {}
+unsafe impl ExtendsSurfaceCapabilities2KHR for SurfacePresentModeCompatibilityEXT {}
+impl<'a> ::std::ops::Deref for SurfacePresentModeCompatibilityEXTBuilder<'a> {
+ type Target = SurfacePresentModeCompatibilityEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SurfacePresentModeCompatibilityEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SurfacePresentModeCompatibilityEXTBuilder<'a> {
+ #[inline]
+ // Takes a *mutable* slice (note the *mut pointer field): records its length and
+ // pointer together; lifetime 'a ties the borrow to the builder.
+ pub fn present_modes(mut self, present_modes: &'a mut [PresentModeKHR]) -> Self {
+ self.inner.present_mode_count = present_modes.len() as _;
+ self.inner.p_present_modes = present_modes.as_mut_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SurfacePresentModeCompatibilityEXT {
+ self.inner
+ }
+}
+// Generated binding for VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT
+// (VK_EXT_swapchain_maintenance1); #[repr(C)] preserves the C struct layout.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT.html>"]
+pub struct PhysicalDeviceSwapchainMaintenance1FeaturesEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub swapchain_maintenance1: Bool32,
+}
+// Default pre-tags `s_type` and nulls `p_next`, so a default value is chain-ready.
+impl ::std::default::Default for PhysicalDeviceSwapchainMaintenance1FeaturesEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ swapchain_maintenance1: Bool32::default(),
+ }
+ }
+}
+// SAFETY: the constant below is this struct's own VkStructureType tag.
+unsafe impl TaggedStructure for PhysicalDeviceSwapchainMaintenance1FeaturesEXT {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT;
+}
+impl PhysicalDeviceSwapchainMaintenance1FeaturesEXT {
+ pub fn builder<'a>() -> PhysicalDeviceSwapchainMaintenance1FeaturesEXTBuilder<'a> {
+ PhysicalDeviceSwapchainMaintenance1FeaturesEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// #[repr(transparent)] lets a &Builder be used where a &struct is expected (via Deref below).
+#[repr(transparent)]
+pub struct PhysicalDeviceSwapchainMaintenance1FeaturesEXTBuilder<'a> {
+ inner: PhysicalDeviceSwapchainMaintenance1FeaturesEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid in the p_next chain of PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceSwapchainMaintenance1FeaturesEXTBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceSwapchainMaintenance1FeaturesEXT {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSwapchainMaintenance1FeaturesEXTBuilder<'_> {}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceSwapchainMaintenance1FeaturesEXT {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceSwapchainMaintenance1FeaturesEXTBuilder<'a> {
+ type Target = PhysicalDeviceSwapchainMaintenance1FeaturesEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceSwapchainMaintenance1FeaturesEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceSwapchainMaintenance1FeaturesEXTBuilder<'a> {
+ #[inline]
+ // Accepts a Rust bool and stores it as a Vulkan Bool32 via `.into()`.
+ pub fn swapchain_maintenance1(mut self, swapchain_maintenance1: bool) -> Self {
+ self.inner.swapchain_maintenance1 = swapchain_maintenance1.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceSwapchainMaintenance1FeaturesEXT {
+ self.inner
+ }
+}
+// Generated binding for VkSwapchainPresentFenceInfoEXT
+// (VK_EXT_swapchain_maintenance1); #[repr(C)] preserves the C struct layout.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSwapchainPresentFenceInfoEXT.html>"]
+pub struct SwapchainPresentFenceInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub swapchain_count: u32,
+ pub p_fences: *const Fence,
+}
+// Default pre-tags `s_type` and nulls both pointers, so a default value is chain-ready.
+impl ::std::default::Default for SwapchainPresentFenceInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ swapchain_count: u32::default(),
+ p_fences: ::std::ptr::null(),
+ }
+ }
+}
+// SAFETY: the constant below is this struct's own VkStructureType tag.
+unsafe impl TaggedStructure for SwapchainPresentFenceInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_PRESENT_FENCE_INFO_EXT;
+}
+impl SwapchainPresentFenceInfoEXT {
+ pub fn builder<'a>() -> SwapchainPresentFenceInfoEXTBuilder<'a> {
+ SwapchainPresentFenceInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// #[repr(transparent)] lets a &Builder be used where a &struct is expected (via Deref below).
+#[repr(transparent)]
+pub struct SwapchainPresentFenceInfoEXTBuilder<'a> {
+ inner: SwapchainPresentFenceInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid in the p_next chain of PresentInfoKHR.
+unsafe impl ExtendsPresentInfoKHR for SwapchainPresentFenceInfoEXTBuilder<'_> {}
+unsafe impl ExtendsPresentInfoKHR for SwapchainPresentFenceInfoEXT {}
+impl<'a> ::std::ops::Deref for SwapchainPresentFenceInfoEXTBuilder<'a> {
+ type Target = SwapchainPresentFenceInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SwapchainPresentFenceInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SwapchainPresentFenceInfoEXTBuilder<'a> {
+ #[inline]
+ // Records the slice's length (as swapchain_count) and pointer together;
+ // lifetime 'a ties the borrow to the builder.
+ pub fn fences(mut self, fences: &'a [Fence]) -> Self {
+ self.inner.swapchain_count = fences.len() as _;
+ self.inner.p_fences = fences.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SwapchainPresentFenceInfoEXT {
+ self.inner
+ }
+}
+// Generated binding for VkSwapchainPresentModesCreateInfoEXT
+// (VK_EXT_swapchain_maintenance1); #[repr(C)] preserves the C struct layout.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSwapchainPresentModesCreateInfoEXT.html>"]
+pub struct SwapchainPresentModesCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub present_mode_count: u32,
+ pub p_present_modes: *const PresentModeKHR,
+}
+// Default pre-tags `s_type` and nulls both pointers, so a default value is chain-ready.
+impl ::std::default::Default for SwapchainPresentModesCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ present_mode_count: u32::default(),
+ p_present_modes: ::std::ptr::null(),
+ }
+ }
+}
+// SAFETY: the constant below is this struct's own VkStructureType tag.
+unsafe impl TaggedStructure for SwapchainPresentModesCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT;
+}
+impl SwapchainPresentModesCreateInfoEXT {
+ pub fn builder<'a>() -> SwapchainPresentModesCreateInfoEXTBuilder<'a> {
+ SwapchainPresentModesCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// #[repr(transparent)] lets a &Builder be used where a &struct is expected (via Deref below).
+#[repr(transparent)]
+pub struct SwapchainPresentModesCreateInfoEXTBuilder<'a> {
+ inner: SwapchainPresentModesCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid in the p_next chain of SwapchainCreateInfoKHR.
+unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainPresentModesCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainPresentModesCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for SwapchainPresentModesCreateInfoEXTBuilder<'a> {
+ type Target = SwapchainPresentModesCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SwapchainPresentModesCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SwapchainPresentModesCreateInfoEXTBuilder<'a> {
+ #[inline]
+ // Records the slice's length and pointer together; lifetime 'a ties the borrow
+ // to the builder.
+ pub fn present_modes(mut self, present_modes: &'a [PresentModeKHR]) -> Self {
+ self.inner.present_mode_count = present_modes.len() as _;
+ self.inner.p_present_modes = present_modes.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SwapchainPresentModesCreateInfoEXT {
+ self.inner
+ }
+}
+// Generated binding for VkSwapchainPresentModeInfoEXT
+// (VK_EXT_swapchain_maintenance1); #[repr(C)] preserves the C struct layout.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSwapchainPresentModeInfoEXT.html>"]
+pub struct SwapchainPresentModeInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub swapchain_count: u32,
+ pub p_present_modes: *const PresentModeKHR,
+}
+// Default pre-tags `s_type` and nulls both pointers, so a default value is chain-ready.
+impl ::std::default::Default for SwapchainPresentModeInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ swapchain_count: u32::default(),
+ p_present_modes: ::std::ptr::null(),
+ }
+ }
+}
+// SAFETY: the constant below is this struct's own VkStructureType tag.
+unsafe impl TaggedStructure for SwapchainPresentModeInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_PRESENT_MODE_INFO_EXT;
+}
+impl SwapchainPresentModeInfoEXT {
+ pub fn builder<'a>() -> SwapchainPresentModeInfoEXTBuilder<'a> {
+ SwapchainPresentModeInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// #[repr(transparent)] lets a &Builder be used where a &struct is expected (via Deref below).
+#[repr(transparent)]
+pub struct SwapchainPresentModeInfoEXTBuilder<'a> {
+ inner: SwapchainPresentModeInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid in the p_next chain of PresentInfoKHR.
+unsafe impl ExtendsPresentInfoKHR for SwapchainPresentModeInfoEXTBuilder<'_> {}
+unsafe impl ExtendsPresentInfoKHR for SwapchainPresentModeInfoEXT {}
+impl<'a> ::std::ops::Deref for SwapchainPresentModeInfoEXTBuilder<'a> {
+ type Target = SwapchainPresentModeInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SwapchainPresentModeInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> SwapchainPresentModeInfoEXTBuilder<'a> {
+ #[inline]
+ // Records the slice's length (as swapchain_count: one mode per swapchain) and
+ // pointer together; lifetime 'a ties the borrow to the builder.
+ pub fn present_modes(mut self, present_modes: &'a [PresentModeKHR]) -> Self {
+ self.inner.swapchain_count = present_modes.len() as _;
+ self.inner.p_present_modes = present_modes.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SwapchainPresentModeInfoEXT {
+ self.inner
+ }
+}
+// Generated binding for VkSwapchainPresentScalingCreateInfoEXT
+// (VK_EXT_swapchain_maintenance1); #[repr(C)] preserves the C struct layout.
+// Note: this one uses a *const p_next (per the registry), unlike its siblings above.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSwapchainPresentScalingCreateInfoEXT.html>"]
+pub struct SwapchainPresentScalingCreateInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub scaling_behavior: PresentScalingFlagsEXT,
+ pub present_gravity_x: PresentGravityFlagsEXT,
+ pub present_gravity_y: PresentGravityFlagsEXT,
+}
+// Default pre-tags `s_type` and nulls `p_next`, so a default value is chain-ready.
+impl ::std::default::Default for SwapchainPresentScalingCreateInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ scaling_behavior: PresentScalingFlagsEXT::default(),
+ present_gravity_x: PresentGravityFlagsEXT::default(),
+ present_gravity_y: PresentGravityFlagsEXT::default(),
+ }
+ }
+}
+// SAFETY: the constant below is this struct's own VkStructureType tag.
+unsafe impl TaggedStructure for SwapchainPresentScalingCreateInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT;
+}
+impl SwapchainPresentScalingCreateInfoEXT {
+ pub fn builder<'a>() -> SwapchainPresentScalingCreateInfoEXTBuilder<'a> {
+ SwapchainPresentScalingCreateInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// #[repr(transparent)] lets a &Builder be used where a &struct is expected (via Deref below).
+#[repr(transparent)]
+pub struct SwapchainPresentScalingCreateInfoEXTBuilder<'a> {
+ inner: SwapchainPresentScalingCreateInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid in the p_next chain of SwapchainCreateInfoKHR.
+unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainPresentScalingCreateInfoEXTBuilder<'_> {}
+unsafe impl ExtendsSwapchainCreateInfoKHR for SwapchainPresentScalingCreateInfoEXT {}
+impl<'a> ::std::ops::Deref for SwapchainPresentScalingCreateInfoEXTBuilder<'a> {
+ type Target = SwapchainPresentScalingCreateInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for SwapchainPresentScalingCreateInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+// Fluent by-value setters; each returns the builder for chaining.
+impl<'a> SwapchainPresentScalingCreateInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn scaling_behavior(mut self, scaling_behavior: PresentScalingFlagsEXT) -> Self {
+ self.inner.scaling_behavior = scaling_behavior;
+ self
+ }
+ #[inline]
+ pub fn present_gravity_x(mut self, present_gravity_x: PresentGravityFlagsEXT) -> Self {
+ self.inner.present_gravity_x = present_gravity_x;
+ self
+ }
+ #[inline]
+ pub fn present_gravity_y(mut self, present_gravity_y: PresentGravityFlagsEXT) -> Self {
+ self.inner.present_gravity_y = present_gravity_y;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> SwapchainPresentScalingCreateInfoEXT {
+ self.inner
+ }
+}
+// Generated binding for VkReleaseSwapchainImagesInfoEXT
+// (VK_EXT_swapchain_maintenance1); #[repr(C)] preserves the C struct layout.
+// No Extends* impls: this struct is a direct parameter, not a p_next extension.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkReleaseSwapchainImagesInfoEXT.html>"]
+pub struct ReleaseSwapchainImagesInfoEXT {
+ pub s_type: StructureType,
+ pub p_next: *const c_void,
+ pub swapchain: SwapchainKHR,
+ pub image_index_count: u32,
+ pub p_image_indices: *const u32,
+}
+// Default pre-tags `s_type` and nulls both pointers, so a default value is chain-ready.
+impl ::std::default::Default for ReleaseSwapchainImagesInfoEXT {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null(),
+ swapchain: SwapchainKHR::default(),
+ image_index_count: u32::default(),
+ p_image_indices: ::std::ptr::null(),
+ }
+ }
+}
+// SAFETY: the constant below is this struct's own VkStructureType tag.
+unsafe impl TaggedStructure for ReleaseSwapchainImagesInfoEXT {
+ const STRUCTURE_TYPE: StructureType = StructureType::RELEASE_SWAPCHAIN_IMAGES_INFO_EXT;
+}
+impl ReleaseSwapchainImagesInfoEXT {
+ pub fn builder<'a>() -> ReleaseSwapchainImagesInfoEXTBuilder<'a> {
+ ReleaseSwapchainImagesInfoEXTBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// #[repr(transparent)] lets a &Builder be used where a &struct is expected (via Deref below).
+#[repr(transparent)]
+pub struct ReleaseSwapchainImagesInfoEXTBuilder<'a> {
+ inner: ReleaseSwapchainImagesInfoEXT,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for ReleaseSwapchainImagesInfoEXTBuilder<'a> {
+ type Target = ReleaseSwapchainImagesInfoEXT;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for ReleaseSwapchainImagesInfoEXTBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> ReleaseSwapchainImagesInfoEXTBuilder<'a> {
+ #[inline]
+ pub fn swapchain(mut self, swapchain: SwapchainKHR) -> Self {
+ self.inner.swapchain = swapchain;
+ self
+ }
+ #[inline]
+ // Records the slice's length and pointer together; lifetime 'a ties the borrow
+ // to the builder.
+ pub fn image_indices(mut self, image_indices: &'a [u32]) -> Self {
+ self.inner.image_index_count = image_indices.len() as _;
+ self.inner.p_image_indices = image_indices.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> ReleaseSwapchainImagesInfoEXT {
+ self.inner
+ }
+}
+// Generated binding for VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV
+// (VK_NV_ray_tracing_invocation_reorder); #[repr(C)] preserves the C struct layout.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV.html>"]
+pub struct PhysicalDeviceRayTracingInvocationReorderFeaturesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub ray_tracing_invocation_reorder: Bool32,
+}
+// Default pre-tags `s_type` and nulls `p_next`, so a default value is chain-ready.
+impl ::std::default::Default for PhysicalDeviceRayTracingInvocationReorderFeaturesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ ray_tracing_invocation_reorder: Bool32::default(),
+ }
+ }
+}
+// SAFETY: the constant below is this struct's own VkStructureType tag.
+unsafe impl TaggedStructure for PhysicalDeviceRayTracingInvocationReorderFeaturesNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV;
+}
+impl PhysicalDeviceRayTracingInvocationReorderFeaturesNV {
+ pub fn builder<'a>() -> PhysicalDeviceRayTracingInvocationReorderFeaturesNVBuilder<'a> {
+ PhysicalDeviceRayTracingInvocationReorderFeaturesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// #[repr(transparent)] lets a &Builder be used where a &struct is expected (via Deref below).
+#[repr(transparent)]
+pub struct PhysicalDeviceRayTracingInvocationReorderFeaturesNVBuilder<'a> {
+ inner: PhysicalDeviceRayTracingInvocationReorderFeaturesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: valid in the p_next chain of PhysicalDeviceFeatures2 and DeviceCreateInfo.
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceRayTracingInvocationReorderFeaturesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceRayTracingInvocationReorderFeaturesNV {}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceRayTracingInvocationReorderFeaturesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceRayTracingInvocationReorderFeaturesNV {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceRayTracingInvocationReorderFeaturesNVBuilder<'a> {
+ type Target = PhysicalDeviceRayTracingInvocationReorderFeaturesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceRayTracingInvocationReorderFeaturesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceRayTracingInvocationReorderFeaturesNVBuilder<'a> {
+ #[inline]
+ // Accepts a Rust bool and stores it as a Vulkan Bool32 via `.into()`.
+ pub fn ray_tracing_invocation_reorder(mut self, ray_tracing_invocation_reorder: bool) -> Self {
+ self.inner.ray_tracing_invocation_reorder = ray_tracing_invocation_reorder.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceRayTracingInvocationReorderFeaturesNV {
+ self.inner
+ }
+}
+// Generated binding for VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV
+// (VK_NV_ray_tracing_invocation_reorder); #[repr(C)] preserves the C struct layout.
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV.html>"]
+pub struct PhysicalDeviceRayTracingInvocationReorderPropertiesNV {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub ray_tracing_invocation_reorder_reordering_hint: RayTracingInvocationReorderModeNV,
+}
+// Default pre-tags `s_type` and nulls `p_next`, so a default value is chain-ready.
+impl ::std::default::Default for PhysicalDeviceRayTracingInvocationReorderPropertiesNV {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ ray_tracing_invocation_reorder_reordering_hint:
+ RayTracingInvocationReorderModeNV::default(),
+ }
+ }
+}
+// SAFETY: the constant below is this struct's own VkStructureType tag.
+unsafe impl TaggedStructure for PhysicalDeviceRayTracingInvocationReorderPropertiesNV {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV;
+}
+impl PhysicalDeviceRayTracingInvocationReorderPropertiesNV {
+ pub fn builder<'a>() -> PhysicalDeviceRayTracingInvocationReorderPropertiesNVBuilder<'a> {
+ PhysicalDeviceRayTracingInvocationReorderPropertiesNVBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+// #[repr(transparent)] lets a &Builder be used where a &struct is expected (via Deref below).
+#[repr(transparent)]
+pub struct PhysicalDeviceRayTracingInvocationReorderPropertiesNVBuilder<'a> {
+ inner: PhysicalDeviceRayTracingInvocationReorderPropertiesNV,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+// Marker impls: this struct may appear in the p_next chain of PhysicalDeviceProperties2.
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceRayTracingInvocationReorderPropertiesNVBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceProperties2
+ for PhysicalDeviceRayTracingInvocationReorderPropertiesNV
+{
+}
+impl<'a> ::std::ops::Deref for PhysicalDeviceRayTracingInvocationReorderPropertiesNVBuilder<'a> {
+ type Target = PhysicalDeviceRayTracingInvocationReorderPropertiesNV;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceRayTracingInvocationReorderPropertiesNVBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceRayTracingInvocationReorderPropertiesNVBuilder<'a> {
+ #[inline]
+ pub fn ray_tracing_invocation_reorder_reordering_hint(
+ mut self,
+ ray_tracing_invocation_reorder_reordering_hint: RayTracingInvocationReorderModeNV,
+ ) -> Self {
+ self.inner.ray_tracing_invocation_reorder_reordering_hint =
+ ray_tracing_invocation_reorder_reordering_hint;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceRayTracingInvocationReorderPropertiesNV {
+ self.inner
+ }
+}
+#[repr(C)]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDirectDriverLoadingInfoLUNARG.html>"]
+pub struct DirectDriverLoadingInfoLUNARG {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub flags: DirectDriverLoadingFlagsLUNARG,
+ pub pfn_get_instance_proc_addr: PFN_vkGetInstanceProcAddrLUNARG,
+}
+#[cfg(feature = "debug")]
+impl fmt::Debug for DirectDriverLoadingInfoLUNARG {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ fmt.debug_struct("DirectDriverLoadingInfoLUNARG")
+ .field("s_type", &self.s_type)
+ .field("p_next", &self.p_next)
+ .field("flags", &self.flags)
+ .field(
+ "pfn_get_instance_proc_addr",
+ &(self.pfn_get_instance_proc_addr.map(|x| x as *const ())),
+ )
+ .finish()
+ }
+}
+impl ::std::default::Default for DirectDriverLoadingInfoLUNARG {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ flags: DirectDriverLoadingFlagsLUNARG::default(),
+ pfn_get_instance_proc_addr: PFN_vkGetInstanceProcAddrLUNARG::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DirectDriverLoadingInfoLUNARG {
+ const STRUCTURE_TYPE: StructureType = StructureType::DIRECT_DRIVER_LOADING_INFO_LUNARG;
+}
+impl DirectDriverLoadingInfoLUNARG {
+ pub fn builder<'a>() -> DirectDriverLoadingInfoLUNARGBuilder<'a> {
+ DirectDriverLoadingInfoLUNARGBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DirectDriverLoadingInfoLUNARGBuilder<'a> {
+ inner: DirectDriverLoadingInfoLUNARG,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+impl<'a> ::std::ops::Deref for DirectDriverLoadingInfoLUNARGBuilder<'a> {
+ type Target = DirectDriverLoadingInfoLUNARG;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DirectDriverLoadingInfoLUNARGBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DirectDriverLoadingInfoLUNARGBuilder<'a> {
+ #[inline]
+ pub fn flags(mut self, flags: DirectDriverLoadingFlagsLUNARG) -> Self {
+ self.inner.flags = flags;
+ self
+ }
+ #[inline]
+ pub fn pfn_get_instance_proc_addr(
+ mut self,
+ pfn_get_instance_proc_addr: PFN_vkGetInstanceProcAddrLUNARG,
+ ) -> Self {
+ self.inner.pfn_get_instance_proc_addr = pfn_get_instance_proc_addr;
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DirectDriverLoadingInfoLUNARG {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDirectDriverLoadingListLUNARG.html>"]
+pub struct DirectDriverLoadingListLUNARG {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub mode: DirectDriverLoadingModeLUNARG,
+ pub driver_count: u32,
+ pub p_drivers: *const DirectDriverLoadingInfoLUNARG,
+}
+impl ::std::default::Default for DirectDriverLoadingListLUNARG {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ mode: DirectDriverLoadingModeLUNARG::default(),
+ driver_count: u32::default(),
+ p_drivers: ::std::ptr::null(),
+ }
+ }
+}
+unsafe impl TaggedStructure for DirectDriverLoadingListLUNARG {
+ const STRUCTURE_TYPE: StructureType = StructureType::DIRECT_DRIVER_LOADING_LIST_LUNARG;
+}
+impl DirectDriverLoadingListLUNARG {
+ pub fn builder<'a>() -> DirectDriverLoadingListLUNARGBuilder<'a> {
+ DirectDriverLoadingListLUNARGBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct DirectDriverLoadingListLUNARGBuilder<'a> {
+ inner: DirectDriverLoadingListLUNARG,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsInstanceCreateInfo for DirectDriverLoadingListLUNARGBuilder<'_> {}
+unsafe impl ExtendsInstanceCreateInfo for DirectDriverLoadingListLUNARG {}
+impl<'a> ::std::ops::Deref for DirectDriverLoadingListLUNARGBuilder<'a> {
+ type Target = DirectDriverLoadingListLUNARG;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for DirectDriverLoadingListLUNARGBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> DirectDriverLoadingListLUNARGBuilder<'a> {
+ #[inline]
+ pub fn mode(mut self, mode: DirectDriverLoadingModeLUNARG) -> Self {
+ self.inner.mode = mode;
+ self
+ }
+ #[inline]
+ pub fn drivers(mut self, drivers: &'a [DirectDriverLoadingInfoLUNARG]) -> Self {
+ self.inner.driver_count = drivers.len() as _;
+ self.inner.p_drivers = drivers.as_ptr();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> DirectDriverLoadingListLUNARG {
+ self.inner
+ }
+}
+#[repr(C)]
+#[cfg_attr(feature = "debug", derive(Debug))]
+#[derive(Copy, Clone)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM.html>"]
+pub struct PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM {
+ pub s_type: StructureType,
+ pub p_next: *mut c_void,
+ pub multiview_per_view_viewports: Bool32,
+}
+impl ::std::default::Default for PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM {
+ #[inline]
+ fn default() -> Self {
+ Self {
+ s_type: Self::STRUCTURE_TYPE,
+ p_next: ::std::ptr::null_mut(),
+ multiview_per_view_viewports: Bool32::default(),
+ }
+ }
+}
+unsafe impl TaggedStructure for PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM {
+ const STRUCTURE_TYPE: StructureType =
+ StructureType::PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM;
+}
+impl PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM {
+ pub fn builder<'a>() -> PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOMBuilder<'a> {
+ PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOMBuilder {
+ inner: Self::default(),
+ marker: ::std::marker::PhantomData,
+ }
+ }
+}
+#[repr(transparent)]
+pub struct PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOMBuilder<'a> {
+ inner: PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM,
+ marker: ::std::marker::PhantomData<&'a ()>,
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2
+ for PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOMBuilder<'_>
+{
+}
+unsafe impl ExtendsPhysicalDeviceFeatures2 for PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM {}
+unsafe impl ExtendsDeviceCreateInfo
+ for PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOMBuilder<'_>
+{
+}
+unsafe impl ExtendsDeviceCreateInfo for PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM {}
+impl<'a> ::std::ops::Deref for PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOMBuilder<'a> {
+ type Target = PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl<'a> ::std::ops::DerefMut for PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOMBuilder<'a> {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.inner
+ }
+}
+impl<'a> PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOMBuilder<'a> {
+ #[inline]
+ pub fn multiview_per_view_viewports(mut self, multiview_per_view_viewports: bool) -> Self {
+ self.inner.multiview_per_view_viewports = multiview_per_view_viewports.into();
+ self
+ }
+ #[doc = r" Calling build will **discard** all the lifetime information. Only call this if"]
+ #[doc = r" necessary! Builders implement `Deref` targeting their corresponding Vulkan struct,"]
+ #[doc = r" so references to builders can be passed directly to Vulkan functions."]
+ pub fn build(self) -> PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM {
+ self.inner
+ }
+}
diff --git a/third_party/rust/ash/src/vk/enums.rs b/third_party/rust/ash/src/vk/enums.rs
new file mode 100644
index 0000000000..a27a55b40c
--- /dev/null
+++ b/third_party/rust/ash/src/vk/enums.rs
@@ -0,0 +1,2917 @@
+use std::fmt;
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageLayout.html>"]
+pub struct ImageLayout(pub(crate) i32);
+impl ImageLayout {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ImageLayout {
+ #[doc = "Implicit layout an image is when its contents are undefined due to various reasons (e.g. right after creation)"]
+ pub const UNDEFINED: Self = Self(0);
+ #[doc = "General layout when image can be used for any kind of access"]
+ pub const GENERAL: Self = Self(1);
+ #[doc = "Optimal layout when image is only used for color attachment read/write"]
+ pub const COLOR_ATTACHMENT_OPTIMAL: Self = Self(2);
+ #[doc = "Optimal layout when image is only used for depth/stencil attachment read/write"]
+ pub const DEPTH_STENCIL_ATTACHMENT_OPTIMAL: Self = Self(3);
+ #[doc = "Optimal layout when image is used for read only depth/stencil attachment and shader access"]
+ pub const DEPTH_STENCIL_READ_ONLY_OPTIMAL: Self = Self(4);
+ #[doc = "Optimal layout when image is used for read only shader access"]
+ pub const SHADER_READ_ONLY_OPTIMAL: Self = Self(5);
+ #[doc = "Optimal layout when image is used only as source of transfer operations"]
+ pub const TRANSFER_SRC_OPTIMAL: Self = Self(6);
+ #[doc = "Optimal layout when image is used only as destination of transfer operations"]
+ pub const TRANSFER_DST_OPTIMAL: Self = Self(7);
+ #[doc = "Initial layout used when the data is populated by the CPU"]
+ pub const PREINITIALIZED: Self = Self(8);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAttachmentLoadOp.html>"]
+pub struct AttachmentLoadOp(pub(crate) i32);
+impl AttachmentLoadOp {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl AttachmentLoadOp {
+ pub const LOAD: Self = Self(0);
+ pub const CLEAR: Self = Self(1);
+ pub const DONT_CARE: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAttachmentStoreOp.html>"]
+pub struct AttachmentStoreOp(pub(crate) i32);
+impl AttachmentStoreOp {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl AttachmentStoreOp {
+ pub const STORE: Self = Self(0);
+ pub const DONT_CARE: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageType.html>"]
+pub struct ImageType(pub(crate) i32);
+impl ImageType {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ImageType {
+ pub const TYPE_1D: Self = Self(0);
+ pub const TYPE_2D: Self = Self(1);
+ pub const TYPE_3D: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageTiling.html>"]
+pub struct ImageTiling(pub(crate) i32);
+impl ImageTiling {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ImageTiling {
+ pub const OPTIMAL: Self = Self(0);
+ pub const LINEAR: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkImageViewType.html>"]
+pub struct ImageViewType(pub(crate) i32);
+impl ImageViewType {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ImageViewType {
+ pub const TYPE_1D: Self = Self(0);
+ pub const TYPE_2D: Self = Self(1);
+ pub const TYPE_3D: Self = Self(2);
+ pub const CUBE: Self = Self(3);
+ pub const TYPE_1D_ARRAY: Self = Self(4);
+ pub const TYPE_2D_ARRAY: Self = Self(5);
+ pub const CUBE_ARRAY: Self = Self(6);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCommandBufferLevel.html>"]
+pub struct CommandBufferLevel(pub(crate) i32);
+impl CommandBufferLevel {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl CommandBufferLevel {
+ pub const PRIMARY: Self = Self(0);
+ pub const SECONDARY: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkComponentSwizzle.html>"]
+pub struct ComponentSwizzle(pub(crate) i32);
+impl ComponentSwizzle {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ComponentSwizzle {
+ pub const IDENTITY: Self = Self(0);
+ pub const ZERO: Self = Self(1);
+ pub const ONE: Self = Self(2);
+ pub const R: Self = Self(3);
+ pub const G: Self = Self(4);
+ pub const B: Self = Self(5);
+ pub const A: Self = Self(6);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorType.html>"]
+pub struct DescriptorType(pub(crate) i32);
+impl DescriptorType {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl DescriptorType {
+ pub const SAMPLER: Self = Self(0);
+ pub const COMBINED_IMAGE_SAMPLER: Self = Self(1);
+ pub const SAMPLED_IMAGE: Self = Self(2);
+ pub const STORAGE_IMAGE: Self = Self(3);
+ pub const UNIFORM_TEXEL_BUFFER: Self = Self(4);
+ pub const STORAGE_TEXEL_BUFFER: Self = Self(5);
+ pub const UNIFORM_BUFFER: Self = Self(6);
+ pub const STORAGE_BUFFER: Self = Self(7);
+ pub const UNIFORM_BUFFER_DYNAMIC: Self = Self(8);
+ pub const STORAGE_BUFFER_DYNAMIC: Self = Self(9);
+ pub const INPUT_ATTACHMENT: Self = Self(10);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueryType.html>"]
+pub struct QueryType(pub(crate) i32);
+impl QueryType {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl QueryType {
+ pub const OCCLUSION: Self = Self(0);
+ #[doc = "Optional"]
+ pub const PIPELINE_STATISTICS: Self = Self(1);
+ pub const TIMESTAMP: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBorderColor.html>"]
+pub struct BorderColor(pub(crate) i32);
+impl BorderColor {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl BorderColor {
+ pub const FLOAT_TRANSPARENT_BLACK: Self = Self(0);
+ pub const INT_TRANSPARENT_BLACK: Self = Self(1);
+ pub const FLOAT_OPAQUE_BLACK: Self = Self(2);
+ pub const INT_OPAQUE_BLACK: Self = Self(3);
+ pub const FLOAT_OPAQUE_WHITE: Self = Self(4);
+ pub const INT_OPAQUE_WHITE: Self = Self(5);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineBindPoint.html>"]
+pub struct PipelineBindPoint(pub(crate) i32);
+impl PipelineBindPoint {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PipelineBindPoint {
+ pub const GRAPHICS: Self = Self(0);
+ pub const COMPUTE: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineCacheHeaderVersion.html>"]
+pub struct PipelineCacheHeaderVersion(pub(crate) i32);
+impl PipelineCacheHeaderVersion {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PipelineCacheHeaderVersion {
+ pub const ONE: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPrimitiveTopology.html>"]
+pub struct PrimitiveTopology(pub(crate) i32);
+impl PrimitiveTopology {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PrimitiveTopology {
+ pub const POINT_LIST: Self = Self(0);
+ pub const LINE_LIST: Self = Self(1);
+ pub const LINE_STRIP: Self = Self(2);
+ pub const TRIANGLE_LIST: Self = Self(3);
+ pub const TRIANGLE_STRIP: Self = Self(4);
+ pub const TRIANGLE_FAN: Self = Self(5);
+ pub const LINE_LIST_WITH_ADJACENCY: Self = Self(6);
+ pub const LINE_STRIP_WITH_ADJACENCY: Self = Self(7);
+ pub const TRIANGLE_LIST_WITH_ADJACENCY: Self = Self(8);
+ pub const TRIANGLE_STRIP_WITH_ADJACENCY: Self = Self(9);
+ pub const PATCH_LIST: Self = Self(10);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSharingMode.html>"]
+pub struct SharingMode(pub(crate) i32);
+impl SharingMode {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl SharingMode {
+ pub const EXCLUSIVE: Self = Self(0);
+ pub const CONCURRENT: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkIndexType.html>"]
+pub struct IndexType(pub(crate) i32);
+impl IndexType {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl IndexType {
+ pub const UINT16: Self = Self(0);
+ pub const UINT32: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFilter.html>"]
+pub struct Filter(pub(crate) i32);
+impl Filter {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl Filter {
+ pub const NEAREST: Self = Self(0);
+ pub const LINEAR: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSamplerMipmapMode.html>"]
+pub struct SamplerMipmapMode(pub(crate) i32);
+impl SamplerMipmapMode {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl SamplerMipmapMode {
+ #[doc = "Choose nearest mip level"]
+ pub const NEAREST: Self = Self(0);
+ #[doc = "Linear filter between mip levels"]
+ pub const LINEAR: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSamplerAddressMode.html>"]
+pub struct SamplerAddressMode(pub(crate) i32);
+impl SamplerAddressMode {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl SamplerAddressMode {
+ pub const REPEAT: Self = Self(0);
+ pub const MIRRORED_REPEAT: Self = Self(1);
+ pub const CLAMP_TO_EDGE: Self = Self(2);
+ pub const CLAMP_TO_BORDER: Self = Self(3);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCompareOp.html>"]
+pub struct CompareOp(pub(crate) i32);
+impl CompareOp {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl CompareOp {
+ pub const NEVER: Self = Self(0);
+ pub const LESS: Self = Self(1);
+ pub const EQUAL: Self = Self(2);
+ pub const LESS_OR_EQUAL: Self = Self(3);
+ pub const GREATER: Self = Self(4);
+ pub const NOT_EQUAL: Self = Self(5);
+ pub const GREATER_OR_EQUAL: Self = Self(6);
+ pub const ALWAYS: Self = Self(7);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPolygonMode.html>"]
+pub struct PolygonMode(pub(crate) i32);
+impl PolygonMode {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PolygonMode {
+ pub const FILL: Self = Self(0);
+ pub const LINE: Self = Self(1);
+ pub const POINT: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFrontFace.html>"]
+pub struct FrontFace(pub(crate) i32);
+impl FrontFace {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl FrontFace {
+ pub const COUNTER_CLOCKWISE: Self = Self(0);
+ pub const CLOCKWISE: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBlendFactor.html>"]
+pub struct BlendFactor(pub(crate) i32);
+impl BlendFactor {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl BlendFactor {
+ pub const ZERO: Self = Self(0);
+ pub const ONE: Self = Self(1);
+ pub const SRC_COLOR: Self = Self(2);
+ pub const ONE_MINUS_SRC_COLOR: Self = Self(3);
+ pub const DST_COLOR: Self = Self(4);
+ pub const ONE_MINUS_DST_COLOR: Self = Self(5);
+ pub const SRC_ALPHA: Self = Self(6);
+ pub const ONE_MINUS_SRC_ALPHA: Self = Self(7);
+ pub const DST_ALPHA: Self = Self(8);
+ pub const ONE_MINUS_DST_ALPHA: Self = Self(9);
+ pub const CONSTANT_COLOR: Self = Self(10);
+ pub const ONE_MINUS_CONSTANT_COLOR: Self = Self(11);
+ pub const CONSTANT_ALPHA: Self = Self(12);
+ pub const ONE_MINUS_CONSTANT_ALPHA: Self = Self(13);
+ pub const SRC_ALPHA_SATURATE: Self = Self(14);
+ pub const SRC1_COLOR: Self = Self(15);
+ pub const ONE_MINUS_SRC1_COLOR: Self = Self(16);
+ pub const SRC1_ALPHA: Self = Self(17);
+ pub const ONE_MINUS_SRC1_ALPHA: Self = Self(18);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBlendOp.html>"]
+pub struct BlendOp(pub(crate) i32);
+impl BlendOp {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl BlendOp {
+ pub const ADD: Self = Self(0);
+ pub const SUBTRACT: Self = Self(1);
+ pub const REVERSE_SUBTRACT: Self = Self(2);
+ pub const MIN: Self = Self(3);
+ pub const MAX: Self = Self(4);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkStencilOp.html>"]
+pub struct StencilOp(pub(crate) i32);
+impl StencilOp {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl StencilOp {
+ pub const KEEP: Self = Self(0);
+ pub const ZERO: Self = Self(1);
+ pub const REPLACE: Self = Self(2);
+ pub const INCREMENT_AND_CLAMP: Self = Self(3);
+ pub const DECREMENT_AND_CLAMP: Self = Self(4);
+ pub const INVERT: Self = Self(5);
+ pub const INCREMENT_AND_WRAP: Self = Self(6);
+ pub const DECREMENT_AND_WRAP: Self = Self(7);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkLogicOp.html>"]
+pub struct LogicOp(pub(crate) i32);
+impl LogicOp {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl LogicOp {
+ pub const CLEAR: Self = Self(0);
+ pub const AND: Self = Self(1);
+ pub const AND_REVERSE: Self = Self(2);
+ pub const COPY: Self = Self(3);
+ pub const AND_INVERTED: Self = Self(4);
+ pub const NO_OP: Self = Self(5);
+ pub const XOR: Self = Self(6);
+ pub const OR: Self = Self(7);
+ pub const NOR: Self = Self(8);
+ pub const EQUIVALENT: Self = Self(9);
+ pub const INVERT: Self = Self(10);
+ pub const OR_REVERSE: Self = Self(11);
+ pub const COPY_INVERTED: Self = Self(12);
+ pub const OR_INVERTED: Self = Self(13);
+ pub const NAND: Self = Self(14);
+ pub const SET: Self = Self(15);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkInternalAllocationType.html>"]
+pub struct InternalAllocationType(pub(crate) i32);
+impl InternalAllocationType {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl InternalAllocationType {
+ pub const EXECUTABLE: Self = Self(0);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSystemAllocationScope.html>"]
+pub struct SystemAllocationScope(pub(crate) i32);
+impl SystemAllocationScope {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl SystemAllocationScope {
+ pub const COMMAND: Self = Self(0);
+ pub const OBJECT: Self = Self(1);
+ pub const CACHE: Self = Self(2);
+ pub const DEVICE: Self = Self(3);
+ pub const INSTANCE: Self = Self(4);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceType.html>"]
+pub struct PhysicalDeviceType(pub(crate) i32);
+impl PhysicalDeviceType {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PhysicalDeviceType {
+ pub const OTHER: Self = Self(0);
+ pub const INTEGRATED_GPU: Self = Self(1);
+ pub const DISCRETE_GPU: Self = Self(2);
+ pub const VIRTUAL_GPU: Self = Self(3);
+ pub const CPU: Self = Self(4);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVertexInputRate.html>"]
+pub struct VertexInputRate(pub(crate) i32);
+impl VertexInputRate {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl VertexInputRate {
+ pub const VERTEX: Self = Self(0);
+ pub const INSTANCE: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFormat.html>"]
+pub struct Format(pub(crate) i32);
+impl Format {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl Format {
+ pub const UNDEFINED: Self = Self(0);
+ pub const R4G4_UNORM_PACK8: Self = Self(1);
+ pub const R4G4B4A4_UNORM_PACK16: Self = Self(2);
+ pub const B4G4R4A4_UNORM_PACK16: Self = Self(3);
+ pub const R5G6B5_UNORM_PACK16: Self = Self(4);
+ pub const B5G6R5_UNORM_PACK16: Self = Self(5);
+ pub const R5G5B5A1_UNORM_PACK16: Self = Self(6);
+ pub const B5G5R5A1_UNORM_PACK16: Self = Self(7);
+ pub const A1R5G5B5_UNORM_PACK16: Self = Self(8);
+ pub const R8_UNORM: Self = Self(9);
+ pub const R8_SNORM: Self = Self(10);
+ pub const R8_USCALED: Self = Self(11);
+ pub const R8_SSCALED: Self = Self(12);
+ pub const R8_UINT: Self = Self(13);
+ pub const R8_SINT: Self = Self(14);
+ pub const R8_SRGB: Self = Self(15);
+ pub const R8G8_UNORM: Self = Self(16);
+ pub const R8G8_SNORM: Self = Self(17);
+ pub const R8G8_USCALED: Self = Self(18);
+ pub const R8G8_SSCALED: Self = Self(19);
+ pub const R8G8_UINT: Self = Self(20);
+ pub const R8G8_SINT: Self = Self(21);
+ pub const R8G8_SRGB: Self = Self(22);
+ pub const R8G8B8_UNORM: Self = Self(23);
+ pub const R8G8B8_SNORM: Self = Self(24);
+ pub const R8G8B8_USCALED: Self = Self(25);
+ pub const R8G8B8_SSCALED: Self = Self(26);
+ pub const R8G8B8_UINT: Self = Self(27);
+ pub const R8G8B8_SINT: Self = Self(28);
+ pub const R8G8B8_SRGB: Self = Self(29);
+ pub const B8G8R8_UNORM: Self = Self(30);
+ pub const B8G8R8_SNORM: Self = Self(31);
+ pub const B8G8R8_USCALED: Self = Self(32);
+ pub const B8G8R8_SSCALED: Self = Self(33);
+ pub const B8G8R8_UINT: Self = Self(34);
+ pub const B8G8R8_SINT: Self = Self(35);
+ pub const B8G8R8_SRGB: Self = Self(36);
+ pub const R8G8B8A8_UNORM: Self = Self(37);
+ pub const R8G8B8A8_SNORM: Self = Self(38);
+ pub const R8G8B8A8_USCALED: Self = Self(39);
+ pub const R8G8B8A8_SSCALED: Self = Self(40);
+ pub const R8G8B8A8_UINT: Self = Self(41);
+ pub const R8G8B8A8_SINT: Self = Self(42);
+ pub const R8G8B8A8_SRGB: Self = Self(43);
+ pub const B8G8R8A8_UNORM: Self = Self(44);
+ pub const B8G8R8A8_SNORM: Self = Self(45);
+ pub const B8G8R8A8_USCALED: Self = Self(46);
+ pub const B8G8R8A8_SSCALED: Self = Self(47);
+ pub const B8G8R8A8_UINT: Self = Self(48);
+ pub const B8G8R8A8_SINT: Self = Self(49);
+ pub const B8G8R8A8_SRGB: Self = Self(50);
+ pub const A8B8G8R8_UNORM_PACK32: Self = Self(51);
+ pub const A8B8G8R8_SNORM_PACK32: Self = Self(52);
+ pub const A8B8G8R8_USCALED_PACK32: Self = Self(53);
+ pub const A8B8G8R8_SSCALED_PACK32: Self = Self(54);
+ pub const A8B8G8R8_UINT_PACK32: Self = Self(55);
+ pub const A8B8G8R8_SINT_PACK32: Self = Self(56);
+ pub const A8B8G8R8_SRGB_PACK32: Self = Self(57);
+ pub const A2R10G10B10_UNORM_PACK32: Self = Self(58);
+ pub const A2R10G10B10_SNORM_PACK32: Self = Self(59);
+ pub const A2R10G10B10_USCALED_PACK32: Self = Self(60);
+ pub const A2R10G10B10_SSCALED_PACK32: Self = Self(61);
+ pub const A2R10G10B10_UINT_PACK32: Self = Self(62);
+ pub const A2R10G10B10_SINT_PACK32: Self = Self(63);
+ pub const A2B10G10R10_UNORM_PACK32: Self = Self(64);
+ pub const A2B10G10R10_SNORM_PACK32: Self = Self(65);
+ pub const A2B10G10R10_USCALED_PACK32: Self = Self(66);
+ pub const A2B10G10R10_SSCALED_PACK32: Self = Self(67);
+ pub const A2B10G10R10_UINT_PACK32: Self = Self(68);
+ pub const A2B10G10R10_SINT_PACK32: Self = Self(69);
+ pub const R16_UNORM: Self = Self(70);
+ pub const R16_SNORM: Self = Self(71);
+ pub const R16_USCALED: Self = Self(72);
+ pub const R16_SSCALED: Self = Self(73);
+ pub const R16_UINT: Self = Self(74);
+ pub const R16_SINT: Self = Self(75);
+ pub const R16_SFLOAT: Self = Self(76);
+ pub const R16G16_UNORM: Self = Self(77);
+ pub const R16G16_SNORM: Self = Self(78);
+ pub const R16G16_USCALED: Self = Self(79);
+ pub const R16G16_SSCALED: Self = Self(80);
+ pub const R16G16_UINT: Self = Self(81);
+ pub const R16G16_SINT: Self = Self(82);
+ pub const R16G16_SFLOAT: Self = Self(83);
+ pub const R16G16B16_UNORM: Self = Self(84);
+ pub const R16G16B16_SNORM: Self = Self(85);
+ pub const R16G16B16_USCALED: Self = Self(86);
+ pub const R16G16B16_SSCALED: Self = Self(87);
+ pub const R16G16B16_UINT: Self = Self(88);
+ pub const R16G16B16_SINT: Self = Self(89);
+ pub const R16G16B16_SFLOAT: Self = Self(90);
+ pub const R16G16B16A16_UNORM: Self = Self(91);
+ pub const R16G16B16A16_SNORM: Self = Self(92);
+ pub const R16G16B16A16_USCALED: Self = Self(93);
+ pub const R16G16B16A16_SSCALED: Self = Self(94);
+ pub const R16G16B16A16_UINT: Self = Self(95);
+ pub const R16G16B16A16_SINT: Self = Self(96);
+ pub const R16G16B16A16_SFLOAT: Self = Self(97);
+ pub const R32_UINT: Self = Self(98);
+ pub const R32_SINT: Self = Self(99);
+ pub const R32_SFLOAT: Self = Self(100);
+ pub const R32G32_UINT: Self = Self(101);
+ pub const R32G32_SINT: Self = Self(102);
+ pub const R32G32_SFLOAT: Self = Self(103);
+ pub const R32G32B32_UINT: Self = Self(104);
+ pub const R32G32B32_SINT: Self = Self(105);
+ pub const R32G32B32_SFLOAT: Self = Self(106);
+ pub const R32G32B32A32_UINT: Self = Self(107);
+ pub const R32G32B32A32_SINT: Self = Self(108);
+ pub const R32G32B32A32_SFLOAT: Self = Self(109);
+ pub const R64_UINT: Self = Self(110);
+ pub const R64_SINT: Self = Self(111);
+ pub const R64_SFLOAT: Self = Self(112);
+ pub const R64G64_UINT: Self = Self(113);
+ pub const R64G64_SINT: Self = Self(114);
+ pub const R64G64_SFLOAT: Self = Self(115);
+ pub const R64G64B64_UINT: Self = Self(116);
+ pub const R64G64B64_SINT: Self = Self(117);
+ pub const R64G64B64_SFLOAT: Self = Self(118);
+ pub const R64G64B64A64_UINT: Self = Self(119);
+ pub const R64G64B64A64_SINT: Self = Self(120);
+ pub const R64G64B64A64_SFLOAT: Self = Self(121);
+ pub const B10G11R11_UFLOAT_PACK32: Self = Self(122);
+ pub const E5B9G9R9_UFLOAT_PACK32: Self = Self(123);
+ pub const D16_UNORM: Self = Self(124);
+ pub const X8_D24_UNORM_PACK32: Self = Self(125);
+ pub const D32_SFLOAT: Self = Self(126);
+ pub const S8_UINT: Self = Self(127);
+ pub const D16_UNORM_S8_UINT: Self = Self(128);
+ pub const D24_UNORM_S8_UINT: Self = Self(129);
+ pub const D32_SFLOAT_S8_UINT: Self = Self(130);
+ pub const BC1_RGB_UNORM_BLOCK: Self = Self(131);
+ pub const BC1_RGB_SRGB_BLOCK: Self = Self(132);
+ pub const BC1_RGBA_UNORM_BLOCK: Self = Self(133);
+ pub const BC1_RGBA_SRGB_BLOCK: Self = Self(134);
+ pub const BC2_UNORM_BLOCK: Self = Self(135);
+ pub const BC2_SRGB_BLOCK: Self = Self(136);
+ pub const BC3_UNORM_BLOCK: Self = Self(137);
+ pub const BC3_SRGB_BLOCK: Self = Self(138);
+ pub const BC4_UNORM_BLOCK: Self = Self(139);
+ pub const BC4_SNORM_BLOCK: Self = Self(140);
+ pub const BC5_UNORM_BLOCK: Self = Self(141);
+ pub const BC5_SNORM_BLOCK: Self = Self(142);
+ pub const BC6H_UFLOAT_BLOCK: Self = Self(143);
+ pub const BC6H_SFLOAT_BLOCK: Self = Self(144);
+ pub const BC7_UNORM_BLOCK: Self = Self(145);
+ pub const BC7_SRGB_BLOCK: Self = Self(146);
+ pub const ETC2_R8G8B8_UNORM_BLOCK: Self = Self(147);
+ pub const ETC2_R8G8B8_SRGB_BLOCK: Self = Self(148);
+ pub const ETC2_R8G8B8A1_UNORM_BLOCK: Self = Self(149);
+ pub const ETC2_R8G8B8A1_SRGB_BLOCK: Self = Self(150);
+ pub const ETC2_R8G8B8A8_UNORM_BLOCK: Self = Self(151);
+ pub const ETC2_R8G8B8A8_SRGB_BLOCK: Self = Self(152);
+ pub const EAC_R11_UNORM_BLOCK: Self = Self(153);
+ pub const EAC_R11_SNORM_BLOCK: Self = Self(154);
+ pub const EAC_R11G11_UNORM_BLOCK: Self = Self(155);
+ pub const EAC_R11G11_SNORM_BLOCK: Self = Self(156);
+ pub const ASTC_4X4_UNORM_BLOCK: Self = Self(157);
+ pub const ASTC_4X4_SRGB_BLOCK: Self = Self(158);
+ pub const ASTC_5X4_UNORM_BLOCK: Self = Self(159);
+ pub const ASTC_5X4_SRGB_BLOCK: Self = Self(160);
+ pub const ASTC_5X5_UNORM_BLOCK: Self = Self(161);
+ pub const ASTC_5X5_SRGB_BLOCK: Self = Self(162);
+ pub const ASTC_6X5_UNORM_BLOCK: Self = Self(163);
+ pub const ASTC_6X5_SRGB_BLOCK: Self = Self(164);
+ pub const ASTC_6X6_UNORM_BLOCK: Self = Self(165);
+ pub const ASTC_6X6_SRGB_BLOCK: Self = Self(166);
+ pub const ASTC_8X5_UNORM_BLOCK: Self = Self(167);
+ pub const ASTC_8X5_SRGB_BLOCK: Self = Self(168);
+ pub const ASTC_8X6_UNORM_BLOCK: Self = Self(169);
+ pub const ASTC_8X6_SRGB_BLOCK: Self = Self(170);
+ pub const ASTC_8X8_UNORM_BLOCK: Self = Self(171);
+ pub const ASTC_8X8_SRGB_BLOCK: Self = Self(172);
+ pub const ASTC_10X5_UNORM_BLOCK: Self = Self(173);
+ pub const ASTC_10X5_SRGB_BLOCK: Self = Self(174);
+ pub const ASTC_10X6_UNORM_BLOCK: Self = Self(175);
+ pub const ASTC_10X6_SRGB_BLOCK: Self = Self(176);
+ pub const ASTC_10X8_UNORM_BLOCK: Self = Self(177);
+ pub const ASTC_10X8_SRGB_BLOCK: Self = Self(178);
+ pub const ASTC_10X10_UNORM_BLOCK: Self = Self(179);
+ pub const ASTC_10X10_SRGB_BLOCK: Self = Self(180);
+ pub const ASTC_12X10_UNORM_BLOCK: Self = Self(181);
+ pub const ASTC_12X10_SRGB_BLOCK: Self = Self(182);
+ pub const ASTC_12X12_UNORM_BLOCK: Self = Self(183);
+ pub const ASTC_12X12_SRGB_BLOCK: Self = Self(184);
+}
+// Transparent newtype over the raw i32 value of the Vulkan enum linked in the
+// #[doc] attribute; the associated constants below carry the spec-assigned values.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkStructureType.html>"]
+pub struct StructureType(pub(crate) i32);
+impl StructureType {
+ #[inline]
+ // No validation: any i32 is representable, not just the named constants.
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ // Returns the wrapped raw i32 value unchanged.
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl StructureType {
+ pub const APPLICATION_INFO: Self = Self(0);
+ pub const INSTANCE_CREATE_INFO: Self = Self(1);
+ pub const DEVICE_QUEUE_CREATE_INFO: Self = Self(2);
+ pub const DEVICE_CREATE_INFO: Self = Self(3);
+ pub const SUBMIT_INFO: Self = Self(4);
+ pub const MEMORY_ALLOCATE_INFO: Self = Self(5);
+ pub const MAPPED_MEMORY_RANGE: Self = Self(6);
+ pub const BIND_SPARSE_INFO: Self = Self(7);
+ pub const FENCE_CREATE_INFO: Self = Self(8);
+ pub const SEMAPHORE_CREATE_INFO: Self = Self(9);
+ pub const EVENT_CREATE_INFO: Self = Self(10);
+ pub const QUERY_POOL_CREATE_INFO: Self = Self(11);
+ pub const BUFFER_CREATE_INFO: Self = Self(12);
+ pub const BUFFER_VIEW_CREATE_INFO: Self = Self(13);
+ pub const IMAGE_CREATE_INFO: Self = Self(14);
+ pub const IMAGE_VIEW_CREATE_INFO: Self = Self(15);
+ pub const SHADER_MODULE_CREATE_INFO: Self = Self(16);
+ pub const PIPELINE_CACHE_CREATE_INFO: Self = Self(17);
+ pub const PIPELINE_SHADER_STAGE_CREATE_INFO: Self = Self(18);
+ pub const PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO: Self = Self(19);
+ pub const PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO: Self = Self(20);
+ pub const PIPELINE_TESSELLATION_STATE_CREATE_INFO: Self = Self(21);
+ pub const PIPELINE_VIEWPORT_STATE_CREATE_INFO: Self = Self(22);
+ pub const PIPELINE_RASTERIZATION_STATE_CREATE_INFO: Self = Self(23);
+ pub const PIPELINE_MULTISAMPLE_STATE_CREATE_INFO: Self = Self(24);
+ pub const PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO: Self = Self(25);
+ pub const PIPELINE_COLOR_BLEND_STATE_CREATE_INFO: Self = Self(26);
+ pub const PIPELINE_DYNAMIC_STATE_CREATE_INFO: Self = Self(27);
+ pub const GRAPHICS_PIPELINE_CREATE_INFO: Self = Self(28);
+ pub const COMPUTE_PIPELINE_CREATE_INFO: Self = Self(29);
+ pub const PIPELINE_LAYOUT_CREATE_INFO: Self = Self(30);
+ pub const SAMPLER_CREATE_INFO: Self = Self(31);
+ pub const DESCRIPTOR_SET_LAYOUT_CREATE_INFO: Self = Self(32);
+ pub const DESCRIPTOR_POOL_CREATE_INFO: Self = Self(33);
+ pub const DESCRIPTOR_SET_ALLOCATE_INFO: Self = Self(34);
+ pub const WRITE_DESCRIPTOR_SET: Self = Self(35);
+ pub const COPY_DESCRIPTOR_SET: Self = Self(36);
+ pub const FRAMEBUFFER_CREATE_INFO: Self = Self(37);
+ pub const RENDER_PASS_CREATE_INFO: Self = Self(38);
+ pub const COMMAND_POOL_CREATE_INFO: Self = Self(39);
+ pub const COMMAND_BUFFER_ALLOCATE_INFO: Self = Self(40);
+ pub const COMMAND_BUFFER_INHERITANCE_INFO: Self = Self(41);
+ pub const COMMAND_BUFFER_BEGIN_INFO: Self = Self(42);
+ pub const RENDER_PASS_BEGIN_INFO: Self = Self(43);
+ pub const BUFFER_MEMORY_BARRIER: Self = Self(44);
+ pub const IMAGE_MEMORY_BARRIER: Self = Self(45);
+ pub const MEMORY_BARRIER: Self = Self(46);
+ #[doc = "Reserved for internal use by the loader, layers, and ICDs"]
+ pub const LOADER_INSTANCE_CREATE_INFO: Self = Self(47);
+ #[doc = "Reserved for internal use by the loader, layers, and ICDs"]
+ pub const LOADER_DEVICE_CREATE_INFO: Self = Self(48);
+}
+// Transparent newtype over the raw i32 value of VkSubpassContents (see #[doc] link);
+// from_raw/as_raw round-trip the raw value without validation.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubpassContents.html>"]
+pub struct SubpassContents(pub(crate) i32);
+impl SubpassContents {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl SubpassContents {
+ pub const INLINE: Self = Self(0);
+ pub const SECONDARY_COMMAND_BUFFERS: Self = Self(1);
+}
+// Transparent newtype over the raw i32 VkResult code (see #[doc] link).
+// By the constants below: zero/positive codes are non-error statuses,
+// negative codes are errors. #[must_use] makes ignoring a returned code a warning.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkResult.html>"]
+#[must_use]
+pub struct Result(pub(crate) i32);
+impl Result {
+ #[inline]
+ // No validation: codes outside the named constants are representable.
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ // Returns the wrapped raw i32 code unchanged.
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl Result {
+ #[doc = "Command completed successfully"]
+ pub const SUCCESS: Self = Self(0);
+ #[doc = "A fence or query has not yet completed"]
+ pub const NOT_READY: Self = Self(1);
+ #[doc = "A wait operation has not completed in the specified time"]
+ pub const TIMEOUT: Self = Self(2);
+ #[doc = "An event is signaled"]
+ pub const EVENT_SET: Self = Self(3);
+ #[doc = "An event is unsignaled"]
+ pub const EVENT_RESET: Self = Self(4);
+ #[doc = "A return array was too small for the result"]
+ pub const INCOMPLETE: Self = Self(5);
+ #[doc = "A host memory allocation has failed"]
+ pub const ERROR_OUT_OF_HOST_MEMORY: Self = Self(-1);
+ #[doc = "A device memory allocation has failed"]
+ pub const ERROR_OUT_OF_DEVICE_MEMORY: Self = Self(-2);
+ #[doc = "Initialization of an object has failed"]
+ pub const ERROR_INITIALIZATION_FAILED: Self = Self(-3);
+ #[doc = "The logical device has been lost. See <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#devsandqueues-lost-device>"]
+ pub const ERROR_DEVICE_LOST: Self = Self(-4);
+ #[doc = "Mapping of a memory object has failed"]
+ pub const ERROR_MEMORY_MAP_FAILED: Self = Self(-5);
+ #[doc = "Layer specified does not exist"]
+ pub const ERROR_LAYER_NOT_PRESENT: Self = Self(-6);
+ #[doc = "Extension specified does not exist"]
+ pub const ERROR_EXTENSION_NOT_PRESENT: Self = Self(-7);
+ #[doc = "Requested feature is not available on this device"]
+ pub const ERROR_FEATURE_NOT_PRESENT: Self = Self(-8);
+ #[doc = "Unable to find a Vulkan driver"]
+ pub const ERROR_INCOMPATIBLE_DRIVER: Self = Self(-9);
+ #[doc = "Too many objects of the type have already been created"]
+ pub const ERROR_TOO_MANY_OBJECTS: Self = Self(-10);
+ #[doc = "Requested format is not supported on this device"]
+ pub const ERROR_FORMAT_NOT_SUPPORTED: Self = Self(-11);
+ #[doc = "A requested pool allocation has failed due to fragmentation of the pool's memory"]
+ pub const ERROR_FRAGMENTED_POOL: Self = Self(-12);
+ #[doc = "An unknown error has occurred, due to an implementation or application bug"]
+ pub const ERROR_UNKNOWN: Self = Self(-13);
+}
+// Marker impl so Result can be used with std error handling (e.g. Box<dyn Error>).
+impl ::std::error::Error for Result {}
+// Display prints the human-readable message for the core codes listed in the
+// match below; any other code falls back to the Debug representation.
+impl fmt::Display for Result {
+ fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
+ let name = match * self { Self :: SUCCESS => Some ("Command completed successfully") , Self :: NOT_READY => Some ("A fence or query has not yet completed") , Self :: TIMEOUT => Some ("A wait operation has not completed in the specified time") , Self :: EVENT_SET => Some ("An event is signaled") , Self :: EVENT_RESET => Some ("An event is unsignaled") , Self :: INCOMPLETE => Some ("A return array was too small for the result") , Self :: ERROR_OUT_OF_HOST_MEMORY => Some ("A host memory allocation has failed") , Self :: ERROR_OUT_OF_DEVICE_MEMORY => Some ("A device memory allocation has failed") , Self :: ERROR_INITIALIZATION_FAILED => Some ("Initialization of an object has failed") , Self :: ERROR_DEVICE_LOST => Some ("The logical device has been lost. See <https://registry.khronos.org/vulkan/specs/1.3-extensions/html/vkspec.html#devsandqueues-lost-device>") , Self :: ERROR_MEMORY_MAP_FAILED => Some ("Mapping of a memory object has failed") , Self :: ERROR_LAYER_NOT_PRESENT => Some ("Layer specified does not exist") , Self :: ERROR_EXTENSION_NOT_PRESENT => Some ("Extension specified does not exist") , Self :: ERROR_FEATURE_NOT_PRESENT => Some ("Requested feature is not available on this device") , Self :: ERROR_INCOMPATIBLE_DRIVER => Some ("Unable to find a Vulkan driver") , Self :: ERROR_TOO_MANY_OBJECTS => Some ("Too many objects of the type have already been created") , Self :: ERROR_FORMAT_NOT_SUPPORTED => Some ("Requested format is not supported on this device") , Self :: ERROR_FRAGMENTED_POOL => Some ("A requested pool allocation has failed due to fragmentation of the pool's memory") , Self :: ERROR_UNKNOWN => Some ("An unknown error has occurred, due to an implementation or application bug") , _ => None , } ;
+ if let Some(x) = name {
+ fmt.write_str(x)
+ } else {
+ <Self as fmt::Debug>::fmt(self, fmt)
+ }
+ }
+}
+// Transparent newtype over the raw i32 value of VkDynamicState (see #[doc] link);
+// constants mirror the spec-assigned values.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDynamicState.html>"]
+pub struct DynamicState(pub(crate) i32);
+impl DynamicState {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl DynamicState {
+ pub const VIEWPORT: Self = Self(0);
+ pub const SCISSOR: Self = Self(1);
+ pub const LINE_WIDTH: Self = Self(2);
+ pub const DEPTH_BIAS: Self = Self(3);
+ pub const BLEND_CONSTANTS: Self = Self(4);
+ pub const DEPTH_BOUNDS: Self = Self(5);
+ pub const STENCIL_COMPARE_MASK: Self = Self(6);
+ pub const STENCIL_WRITE_MASK: Self = Self(7);
+ pub const STENCIL_REFERENCE: Self = Self(8);
+}
+// Transparent newtype over the raw i32 value of VkDescriptorUpdateTemplateType
+// (see #[doc] link); only the core value 0 is defined here.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDescriptorUpdateTemplateType.html>"]
+pub struct DescriptorUpdateTemplateType(pub(crate) i32);
+impl DescriptorUpdateTemplateType {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl DescriptorUpdateTemplateType {
+ #[doc = "Create descriptor update template for descriptor set updates"]
+ pub const DESCRIPTOR_SET: Self = Self(0);
+}
+// Transparent newtype over the raw i32 value of VkObjectType (see #[doc] link);
+// core handle-type values 0..=25 are defined here.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkObjectType.html>"]
+pub struct ObjectType(pub(crate) i32);
+impl ObjectType {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ObjectType {
+ pub const UNKNOWN: Self = Self(0);
+ pub const INSTANCE: Self = Self(1);
+ pub const PHYSICAL_DEVICE: Self = Self(2);
+ pub const DEVICE: Self = Self(3);
+ pub const QUEUE: Self = Self(4);
+ pub const SEMAPHORE: Self = Self(5);
+ pub const COMMAND_BUFFER: Self = Self(6);
+ pub const FENCE: Self = Self(7);
+ pub const DEVICE_MEMORY: Self = Self(8);
+ pub const BUFFER: Self = Self(9);
+ pub const IMAGE: Self = Self(10);
+ pub const EVENT: Self = Self(11);
+ pub const QUERY_POOL: Self = Self(12);
+ pub const BUFFER_VIEW: Self = Self(13);
+ pub const IMAGE_VIEW: Self = Self(14);
+ pub const SHADER_MODULE: Self = Self(15);
+ pub const PIPELINE_CACHE: Self = Self(16);
+ pub const PIPELINE_LAYOUT: Self = Self(17);
+ pub const RENDER_PASS: Self = Self(18);
+ pub const PIPELINE: Self = Self(19);
+ pub const DESCRIPTOR_SET_LAYOUT: Self = Self(20);
+ pub const SAMPLER: Self = Self(21);
+ pub const DESCRIPTOR_POOL: Self = Self(22);
+ pub const DESCRIPTOR_SET: Self = Self(23);
+ pub const FRAMEBUFFER: Self = Self(24);
+ pub const COMMAND_POOL: Self = Self(25);
+}
+// Transparent newtype over the raw i32 value of VkRayTracingInvocationReorderModeNV
+// (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRayTracingInvocationReorderModeNV.html>"]
+pub struct RayTracingInvocationReorderModeNV(pub(crate) i32);
+impl RayTracingInvocationReorderModeNV {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl RayTracingInvocationReorderModeNV {
+ pub const NONE: Self = Self(0);
+ pub const REORDER: Self = Self(1);
+}
+// Transparent newtype over the raw i32 value of VkDirectDriverLoadingModeLUNARG
+// (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDirectDriverLoadingModeLUNARG.html>"]
+pub struct DirectDriverLoadingModeLUNARG(pub(crate) i32);
+impl DirectDriverLoadingModeLUNARG {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl DirectDriverLoadingModeLUNARG {
+ pub const EXCLUSIVE: Self = Self(0);
+ pub const INCLUSIVE: Self = Self(1);
+}
+// Transparent newtype over the raw i32 value of VkSemaphoreType (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSemaphoreType.html>"]
+pub struct SemaphoreType(pub(crate) i32);
+impl SemaphoreType {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl SemaphoreType {
+ pub const BINARY: Self = Self(0);
+ pub const TIMELINE: Self = Self(1);
+}
+// Transparent newtype over the raw i32 value of VkPresentModeKHR (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPresentModeKHR.html>"]
+pub struct PresentModeKHR(pub(crate) i32);
+impl PresentModeKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PresentModeKHR {
+ pub const IMMEDIATE: Self = Self(0);
+ pub const MAILBOX: Self = Self(1);
+ pub const FIFO: Self = Self(2);
+ pub const FIFO_RELAXED: Self = Self(3);
+}
+// Transparent newtype over the raw i32 value of VkColorSpaceKHR (see #[doc] link);
+// only the core value 0 is defined here.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkColorSpaceKHR.html>"]
+pub struct ColorSpaceKHR(pub(crate) i32);
+impl ColorSpaceKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ColorSpaceKHR {
+ pub const SRGB_NONLINEAR: Self = Self(0);
+}
+// Transparent newtype over the raw i32 value of VkTimeDomainEXT (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkTimeDomainEXT.html>"]
+pub struct TimeDomainEXT(pub(crate) i32);
+impl TimeDomainEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl TimeDomainEXT {
+ pub const DEVICE: Self = Self(0);
+ pub const CLOCK_MONOTONIC: Self = Self(1);
+ pub const CLOCK_MONOTONIC_RAW: Self = Self(2);
+ pub const QUERY_PERFORMANCE_COUNTER: Self = Self(3);
+}
+// Transparent newtype over the raw i32 value of VkDebugReportObjectTypeEXT
+// (see #[doc] link). Note the numbering is not dense: 31 and 32 are not
+// defined here, and VALIDATION_CACHE_EXT is 33.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDebugReportObjectTypeEXT.html>"]
+pub struct DebugReportObjectTypeEXT(pub(crate) i32);
+impl DebugReportObjectTypeEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl DebugReportObjectTypeEXT {
+ pub const UNKNOWN: Self = Self(0);
+ pub const INSTANCE: Self = Self(1);
+ pub const PHYSICAL_DEVICE: Self = Self(2);
+ pub const DEVICE: Self = Self(3);
+ pub const QUEUE: Self = Self(4);
+ pub const SEMAPHORE: Self = Self(5);
+ pub const COMMAND_BUFFER: Self = Self(6);
+ pub const FENCE: Self = Self(7);
+ pub const DEVICE_MEMORY: Self = Self(8);
+ pub const BUFFER: Self = Self(9);
+ pub const IMAGE: Self = Self(10);
+ pub const EVENT: Self = Self(11);
+ pub const QUERY_POOL: Self = Self(12);
+ pub const BUFFER_VIEW: Self = Self(13);
+ pub const IMAGE_VIEW: Self = Self(14);
+ pub const SHADER_MODULE: Self = Self(15);
+ pub const PIPELINE_CACHE: Self = Self(16);
+ pub const PIPELINE_LAYOUT: Self = Self(17);
+ pub const RENDER_PASS: Self = Self(18);
+ pub const PIPELINE: Self = Self(19);
+ pub const DESCRIPTOR_SET_LAYOUT: Self = Self(20);
+ pub const SAMPLER: Self = Self(21);
+ pub const DESCRIPTOR_POOL: Self = Self(22);
+ pub const DESCRIPTOR_SET: Self = Self(23);
+ pub const FRAMEBUFFER: Self = Self(24);
+ pub const COMMAND_POOL: Self = Self(25);
+ pub const SURFACE_KHR: Self = Self(26);
+ pub const SWAPCHAIN_KHR: Self = Self(27);
+ pub const DEBUG_REPORT_CALLBACK_EXT: Self = Self(28);
+ pub const DISPLAY_KHR: Self = Self(29);
+ pub const DISPLAY_MODE_KHR: Self = Self(30);
+ pub const VALIDATION_CACHE_EXT: Self = Self(33);
+}
+// Transparent newtype over the raw i32 value of VkDeviceMemoryReportEventTypeEXT
+// (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceMemoryReportEventTypeEXT.html>"]
+pub struct DeviceMemoryReportEventTypeEXT(pub(crate) i32);
+impl DeviceMemoryReportEventTypeEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl DeviceMemoryReportEventTypeEXT {
+ pub const ALLOCATE: Self = Self(0);
+ pub const FREE: Self = Self(1);
+ pub const IMPORT: Self = Self(2);
+ pub const UNIMPORT: Self = Self(3);
+ pub const ALLOCATION_FAILED: Self = Self(4);
+}
+// Transparent newtype over the raw i32 value of VkRasterizationOrderAMD (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRasterizationOrderAMD.html>"]
+pub struct RasterizationOrderAMD(pub(crate) i32);
+impl RasterizationOrderAMD {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl RasterizationOrderAMD {
+ pub const STRICT: Self = Self(0);
+ pub const RELAXED: Self = Self(1);
+}
+// Transparent newtype over the raw i32 value of VkValidationCheckEXT (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkValidationCheckEXT.html>"]
+pub struct ValidationCheckEXT(pub(crate) i32);
+impl ValidationCheckEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ValidationCheckEXT {
+ pub const ALL: Self = Self(0);
+ pub const SHADERS: Self = Self(1);
+}
+// Transparent newtype over the raw i32 value of VkValidationFeatureEnableEXT
+// (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkValidationFeatureEnableEXT.html>"]
+pub struct ValidationFeatureEnableEXT(pub(crate) i32);
+impl ValidationFeatureEnableEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ValidationFeatureEnableEXT {
+ pub const GPU_ASSISTED: Self = Self(0);
+ pub const GPU_ASSISTED_RESERVE_BINDING_SLOT: Self = Self(1);
+ pub const BEST_PRACTICES: Self = Self(2);
+ pub const DEBUG_PRINTF: Self = Self(3);
+ pub const SYNCHRONIZATION_VALIDATION: Self = Self(4);
+}
+// Transparent newtype over the raw i32 value of VkValidationFeatureDisableEXT
+// (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkValidationFeatureDisableEXT.html>"]
+pub struct ValidationFeatureDisableEXT(pub(crate) i32);
+impl ValidationFeatureDisableEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ValidationFeatureDisableEXT {
+ pub const ALL: Self = Self(0);
+ pub const SHADERS: Self = Self(1);
+ pub const THREAD_SAFETY: Self = Self(2);
+ pub const API_PARAMETERS: Self = Self(3);
+ pub const OBJECT_LIFETIMES: Self = Self(4);
+ pub const CORE_CHECKS: Self = Self(5);
+ pub const UNIQUE_HANDLES: Self = Self(6);
+ pub const SHADER_VALIDATION_CACHE: Self = Self(7);
+}
+// Transparent newtype over the raw i32 value of VkIndirectCommandsTokenTypeNV
+// (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkIndirectCommandsTokenTypeNV.html>"]
+pub struct IndirectCommandsTokenTypeNV(pub(crate) i32);
+impl IndirectCommandsTokenTypeNV {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl IndirectCommandsTokenTypeNV {
+ pub const SHADER_GROUP: Self = Self(0);
+ pub const STATE_FLAGS: Self = Self(1);
+ pub const INDEX_BUFFER: Self = Self(2);
+ pub const VERTEX_BUFFER: Self = Self(3);
+ pub const PUSH_CONSTANT: Self = Self(4);
+ pub const DRAW_INDEXED: Self = Self(5);
+ pub const DRAW: Self = Self(6);
+ pub const DRAW_TASKS: Self = Self(7);
+}
+// Transparent newtype over the raw i32 value of VkDisplayPowerStateEXT (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayPowerStateEXT.html>"]
+pub struct DisplayPowerStateEXT(pub(crate) i32);
+impl DisplayPowerStateEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl DisplayPowerStateEXT {
+ pub const OFF: Self = Self(0);
+ pub const SUSPEND: Self = Self(1);
+ pub const ON: Self = Self(2);
+}
+// Transparent newtype over the raw i32 value of VkDeviceEventTypeEXT (see #[doc] link);
+// only the single value 0 is defined here.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceEventTypeEXT.html>"]
+pub struct DeviceEventTypeEXT(pub(crate) i32);
+impl DeviceEventTypeEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl DeviceEventTypeEXT {
+ pub const DISPLAY_HOTPLUG: Self = Self(0);
+}
+// Transparent newtype over the raw i32 value of VkDisplayEventTypeEXT (see #[doc] link);
+// only the single value 0 is defined here.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDisplayEventTypeEXT.html>"]
+pub struct DisplayEventTypeEXT(pub(crate) i32);
+impl DisplayEventTypeEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl DisplayEventTypeEXT {
+ pub const FIRST_PIXEL_OUT: Self = Self(0);
+}
+// Transparent newtype over the raw i32 value of VkViewportCoordinateSwizzleNV
+// (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkViewportCoordinateSwizzleNV.html>"]
+pub struct ViewportCoordinateSwizzleNV(pub(crate) i32);
+impl ViewportCoordinateSwizzleNV {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ViewportCoordinateSwizzleNV {
+ pub const POSITIVE_X: Self = Self(0);
+ pub const NEGATIVE_X: Self = Self(1);
+ pub const POSITIVE_Y: Self = Self(2);
+ pub const NEGATIVE_Y: Self = Self(3);
+ pub const POSITIVE_Z: Self = Self(4);
+ pub const NEGATIVE_Z: Self = Self(5);
+ pub const POSITIVE_W: Self = Self(6);
+ pub const NEGATIVE_W: Self = Self(7);
+}
+// Transparent newtype over the raw i32 value of VkDiscardRectangleModeEXT (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDiscardRectangleModeEXT.html>"]
+pub struct DiscardRectangleModeEXT(pub(crate) i32);
+impl DiscardRectangleModeEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl DiscardRectangleModeEXT {
+ pub const INCLUSIVE: Self = Self(0);
+ pub const EXCLUSIVE: Self = Self(1);
+}
+// Transparent newtype over the raw i32 value of VkPointClippingBehavior (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPointClippingBehavior.html>"]
+pub struct PointClippingBehavior(pub(crate) i32);
+impl PointClippingBehavior {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PointClippingBehavior {
+ pub const ALL_CLIP_PLANES: Self = Self(0);
+ pub const USER_CLIP_PLANES_ONLY: Self = Self(1);
+}
+// Transparent newtype over the raw i32 value of VkSamplerReductionMode (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSamplerReductionMode.html>"]
+pub struct SamplerReductionMode(pub(crate) i32);
+impl SamplerReductionMode {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl SamplerReductionMode {
+ pub const WEIGHTED_AVERAGE: Self = Self(0);
+ pub const MIN: Self = Self(1);
+ pub const MAX: Self = Self(2);
+}
+// Transparent newtype over the raw i32 value of VkTessellationDomainOrigin (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkTessellationDomainOrigin.html>"]
+pub struct TessellationDomainOrigin(pub(crate) i32);
+impl TessellationDomainOrigin {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl TessellationDomainOrigin {
+ pub const UPPER_LEFT: Self = Self(0);
+ pub const LOWER_LEFT: Self = Self(1);
+}
+// Transparent newtype over the raw i32 value of VkSamplerYcbcrModelConversion
+// (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSamplerYcbcrModelConversion.html>"]
+pub struct SamplerYcbcrModelConversion(pub(crate) i32);
+impl SamplerYcbcrModelConversion {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl SamplerYcbcrModelConversion {
+ pub const RGB_IDENTITY: Self = Self(0);
+ #[doc = "just range expansion"]
+ pub const YCBCR_IDENTITY: Self = Self(1);
+ #[doc = "aka HD YUV"]
+ pub const YCBCR_709: Self = Self(2);
+ #[doc = "aka SD YUV"]
+ pub const YCBCR_601: Self = Self(3);
+ #[doc = "aka UHD YUV"]
+ pub const YCBCR_2020: Self = Self(4);
+}
+// Transparent newtype over the raw i32 value of VkSamplerYcbcrRange (see #[doc] link).
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSamplerYcbcrRange.html>"]
+pub struct SamplerYcbcrRange(pub(crate) i32);
+impl SamplerYcbcrRange {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl SamplerYcbcrRange {
+ #[doc = "Luma 0..1 maps to 0..255, chroma -0.5..0.5 to 1..255 (clamped)"]
+ pub const ITU_FULL: Self = Self(0);
+ #[doc = "Luma 0..1 maps to 16..235, chroma -0.5..0.5 to 16..240"]
+ pub const ITU_NARROW: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkChromaLocation.html>"]
+pub struct ChromaLocation(pub(crate) i32);
+impl ChromaLocation {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ChromaLocation {
+ pub const COSITED_EVEN: Self = Self(0);
+ pub const MIDPOINT: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBlendOverlapEXT.html>"]
+pub struct BlendOverlapEXT(pub(crate) i32);
+impl BlendOverlapEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl BlendOverlapEXT {
+ pub const UNCORRELATED: Self = Self(0);
+ pub const DISJOINT: Self = Self(1);
+ pub const CONJOINT: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCoverageModulationModeNV.html>"]
+pub struct CoverageModulationModeNV(pub(crate) i32);
+impl CoverageModulationModeNV {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl CoverageModulationModeNV {
+ pub const NONE: Self = Self(0);
+ pub const RGB: Self = Self(1);
+ pub const ALPHA: Self = Self(2);
+ pub const RGBA: Self = Self(3);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCoverageReductionModeNV.html>"]
+pub struct CoverageReductionModeNV(pub(crate) i32);
+impl CoverageReductionModeNV {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl CoverageReductionModeNV {
+ pub const MERGE: Self = Self(0);
+ pub const TRUNCATE: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkValidationCacheHeaderVersionEXT.html>"]
+pub struct ValidationCacheHeaderVersionEXT(pub(crate) i32);
+impl ValidationCacheHeaderVersionEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ValidationCacheHeaderVersionEXT {
+ pub const ONE: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkShaderInfoTypeAMD.html>"]
+pub struct ShaderInfoTypeAMD(pub(crate) i32);
+impl ShaderInfoTypeAMD {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ShaderInfoTypeAMD {
+ pub const STATISTICS: Self = Self(0);
+ pub const BINARY: Self = Self(1);
+ pub const DISASSEMBLY: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueueGlobalPriorityKHR.html>"]
+pub struct QueueGlobalPriorityKHR(pub(crate) i32);
+impl QueueGlobalPriorityKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl QueueGlobalPriorityKHR {
+ pub const LOW: Self = Self(128);
+ pub const MEDIUM: Self = Self(256);
+ pub const HIGH: Self = Self(512);
+ pub const REALTIME: Self = Self(1_024);
+ pub const LOW_EXT: Self = Self::LOW;
+ pub const MEDIUM_EXT: Self = Self::MEDIUM;
+ pub const HIGH_EXT: Self = Self::HIGH;
+ pub const REALTIME_EXT: Self = Self::REALTIME;
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkConservativeRasterizationModeEXT.html>"]
+pub struct ConservativeRasterizationModeEXT(pub(crate) i32);
+impl ConservativeRasterizationModeEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ConservativeRasterizationModeEXT {
+ pub const DISABLED: Self = Self(0);
+ pub const OVERESTIMATE: Self = Self(1);
+ pub const UNDERESTIMATE: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVendorId.html>"]
+pub struct VendorId(pub(crate) i32);
+impl VendorId {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl VendorId {
+ #[doc = "Vivante vendor ID"]
+ pub const VIV: Self = Self(0x1_0001);
+ #[doc = "VeriSilicon vendor ID"]
+ pub const VSI: Self = Self(0x1_0002);
+ #[doc = "Kazan Software Renderer"]
+ pub const KAZAN: Self = Self(0x1_0003);
+ #[doc = "Codeplay Software Ltd. vendor ID"]
+ pub const CODEPLAY: Self = Self(0x1_0004);
+ #[doc = "Mesa vendor ID"]
+ pub const MESA: Self = Self(0x1_0005);
+ #[doc = "PoCL vendor ID"]
+ pub const POCL: Self = Self(0x1_0006);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDriverId.html>"]
+pub struct DriverId(pub(crate) i32);
+impl DriverId {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl DriverId {
+ #[doc = "Advanced Micro Devices, Inc."]
+ pub const AMD_PROPRIETARY: Self = Self(1);
+ #[doc = "Advanced Micro Devices, Inc."]
+ pub const AMD_OPEN_SOURCE: Self = Self(2);
+ #[doc = "Mesa open source project"]
+ pub const MESA_RADV: Self = Self(3);
+ #[doc = "NVIDIA Corporation"]
+ pub const NVIDIA_PROPRIETARY: Self = Self(4);
+ #[doc = "Intel Corporation"]
+ pub const INTEL_PROPRIETARY_WINDOWS: Self = Self(5);
+ #[doc = "Intel Corporation"]
+ pub const INTEL_OPEN_SOURCE_MESA: Self = Self(6);
+ #[doc = "Imagination Technologies"]
+ pub const IMAGINATION_PROPRIETARY: Self = Self(7);
+ #[doc = "Qualcomm Technologies, Inc."]
+ pub const QUALCOMM_PROPRIETARY: Self = Self(8);
+ #[doc = "Arm Limited"]
+ pub const ARM_PROPRIETARY: Self = Self(9);
+ #[doc = "Google LLC"]
+ pub const GOOGLE_SWIFTSHADER: Self = Self(10);
+ #[doc = "Google LLC"]
+ pub const GGP_PROPRIETARY: Self = Self(11);
+ #[doc = "Broadcom Inc."]
+ pub const BROADCOM_PROPRIETARY: Self = Self(12);
+ #[doc = "Mesa"]
+ pub const MESA_LLVMPIPE: Self = Self(13);
+ #[doc = "MoltenVK"]
+ pub const MOLTENVK: Self = Self(14);
+ #[doc = "Core Avionics & Industrial Inc."]
+ pub const COREAVI_PROPRIETARY: Self = Self(15);
+ #[doc = "Juice Technologies, Inc."]
+ pub const JUICE_PROPRIETARY: Self = Self(16);
+ #[doc = "Verisilicon, Inc."]
+ pub const VERISILICON_PROPRIETARY: Self = Self(17);
+ #[doc = "Mesa open source project"]
+ pub const MESA_TURNIP: Self = Self(18);
+ #[doc = "Mesa open source project"]
+ pub const MESA_V3DV: Self = Self(19);
+ #[doc = "Mesa open source project"]
+ pub const MESA_PANVK: Self = Self(20);
+ #[doc = "Samsung Electronics Co., Ltd."]
+ pub const SAMSUNG_PROPRIETARY: Self = Self(21);
+ #[doc = "Mesa open source project"]
+ pub const MESA_VENUS: Self = Self(22);
+ #[doc = "Mesa open source project"]
+ pub const MESA_DOZEN: Self = Self(23);
+ #[doc = "Mesa open source project"]
+ pub const MESA_NVK: Self = Self(24);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkShadingRatePaletteEntryNV.html>"]
+pub struct ShadingRatePaletteEntryNV(pub(crate) i32);
+impl ShadingRatePaletteEntryNV {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ShadingRatePaletteEntryNV {
+ pub const NO_INVOCATIONS: Self = Self(0);
+ pub const TYPE_16_INVOCATIONS_PER_PIXEL: Self = Self(1);
+ pub const TYPE_8_INVOCATIONS_PER_PIXEL: Self = Self(2);
+ pub const TYPE_4_INVOCATIONS_PER_PIXEL: Self = Self(3);
+ pub const TYPE_2_INVOCATIONS_PER_PIXEL: Self = Self(4);
+ pub const TYPE_1_INVOCATION_PER_PIXEL: Self = Self(5);
+ pub const TYPE_1_INVOCATION_PER_2X1_PIXELS: Self = Self(6);
+ pub const TYPE_1_INVOCATION_PER_1X2_PIXELS: Self = Self(7);
+ pub const TYPE_1_INVOCATION_PER_2X2_PIXELS: Self = Self(8);
+ pub const TYPE_1_INVOCATION_PER_4X2_PIXELS: Self = Self(9);
+ pub const TYPE_1_INVOCATION_PER_2X4_PIXELS: Self = Self(10);
+ pub const TYPE_1_INVOCATION_PER_4X4_PIXELS: Self = Self(11);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCoarseSampleOrderTypeNV.html>"]
+pub struct CoarseSampleOrderTypeNV(pub(crate) i32);
+impl CoarseSampleOrderTypeNV {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl CoarseSampleOrderTypeNV {
+ pub const DEFAULT: Self = Self(0);
+ pub const CUSTOM: Self = Self(1);
+ pub const PIXEL_MAJOR: Self = Self(2);
+ pub const SAMPLE_MAJOR: Self = Self(3);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyAccelerationStructureModeKHR.html>"]
+pub struct CopyAccelerationStructureModeKHR(pub(crate) i32);
+impl CopyAccelerationStructureModeKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl CopyAccelerationStructureModeKHR {
+ pub const CLONE: Self = Self(0);
+ pub const COMPACT: Self = Self(1);
+ pub const SERIALIZE: Self = Self(2);
+ pub const DESERIALIZE: Self = Self(3);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBuildAccelerationStructureModeKHR.html>"]
+pub struct BuildAccelerationStructureModeKHR(pub(crate) i32);
+impl BuildAccelerationStructureModeKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl BuildAccelerationStructureModeKHR {
+ pub const BUILD: Self = Self(0);
+ pub const UPDATE: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureTypeKHR.html>"]
+pub struct AccelerationStructureTypeKHR(pub(crate) i32);
+impl AccelerationStructureTypeKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl AccelerationStructureTypeKHR {
+ pub const TOP_LEVEL: Self = Self(0);
+ pub const BOTTOM_LEVEL: Self = Self(1);
+ pub const GENERIC: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkGeometryTypeKHR.html>"]
+pub struct GeometryTypeKHR(pub(crate) i32);
+impl GeometryTypeKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl GeometryTypeKHR {
+ pub const TRIANGLES: Self = Self(0);
+ pub const AABBS: Self = Self(1);
+ pub const INSTANCES: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureMemoryRequirementsTypeNV.html>"]
+pub struct AccelerationStructureMemoryRequirementsTypeNV(pub(crate) i32);
+impl AccelerationStructureMemoryRequirementsTypeNV {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl AccelerationStructureMemoryRequirementsTypeNV {
+ pub const OBJECT: Self = Self(0);
+ pub const BUILD_SCRATCH: Self = Self(1);
+ pub const UPDATE_SCRATCH: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureBuildTypeKHR.html>"]
+pub struct AccelerationStructureBuildTypeKHR(pub(crate) i32);
+impl AccelerationStructureBuildTypeKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl AccelerationStructureBuildTypeKHR {
+ pub const HOST: Self = Self(0);
+ pub const DEVICE: Self = Self(1);
+ pub const HOST_OR_DEVICE: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkRayTracingShaderGroupTypeKHR.html>"]
+pub struct RayTracingShaderGroupTypeKHR(pub(crate) i32);
+impl RayTracingShaderGroupTypeKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl RayTracingShaderGroupTypeKHR {
+ pub const GENERAL: Self = Self(0);
+ pub const TRIANGLES_HIT_GROUP: Self = Self(1);
+ pub const PROCEDURAL_HIT_GROUP: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureCompatibilityKHR.html>"]
+pub struct AccelerationStructureCompatibilityKHR(pub(crate) i32);
+impl AccelerationStructureCompatibilityKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl AccelerationStructureCompatibilityKHR {
+ pub const COMPATIBLE: Self = Self(0);
+ pub const INCOMPATIBLE: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkShaderGroupShaderKHR.html>"]
+pub struct ShaderGroupShaderKHR(pub(crate) i32);
+impl ShaderGroupShaderKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ShaderGroupShaderKHR {
+ pub const GENERAL: Self = Self(0);
+ pub const CLOSEST_HIT: Self = Self(1);
+ pub const ANY_HIT: Self = Self(2);
+ pub const INTERSECTION: Self = Self(3);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMemoryOverallocationBehaviorAMD.html>"]
+pub struct MemoryOverallocationBehaviorAMD(pub(crate) i32);
+impl MemoryOverallocationBehaviorAMD {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl MemoryOverallocationBehaviorAMD {
+ pub const DEFAULT: Self = Self(0);
+ pub const ALLOWED: Self = Self(1);
+ pub const DISALLOWED: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkScopeNV.html>"]
+pub struct ScopeNV(pub(crate) i32);
+impl ScopeNV {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ScopeNV {
+ pub const DEVICE: Self = Self(1);
+ pub const WORKGROUP: Self = Self(2);
+ pub const SUBGROUP: Self = Self(3);
+ pub const QUEUE_FAMILY: Self = Self(5);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkComponentTypeNV.html>"]
+pub struct ComponentTypeNV(pub(crate) i32);
+impl ComponentTypeNV {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ComponentTypeNV {
+ pub const FLOAT16: Self = Self(0);
+ pub const FLOAT32: Self = Self(1);
+ pub const FLOAT64: Self = Self(2);
+ pub const SINT8: Self = Self(3);
+ pub const SINT16: Self = Self(4);
+ pub const SINT32: Self = Self(5);
+ pub const SINT64: Self = Self(6);
+ pub const UINT8: Self = Self(7);
+ pub const UINT16: Self = Self(8);
+ pub const UINT32: Self = Self(9);
+ pub const UINT64: Self = Self(10);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFullScreenExclusiveEXT.html>"]
+pub struct FullScreenExclusiveEXT(pub(crate) i32);
+impl FullScreenExclusiveEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl FullScreenExclusiveEXT {
+ pub const DEFAULT: Self = Self(0);
+ pub const ALLOWED: Self = Self(1);
+ pub const DISALLOWED: Self = Self(2);
+ pub const APPLICATION_CONTROLLED: Self = Self(3);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceCounterScopeKHR.html>"]
+pub struct PerformanceCounterScopeKHR(pub(crate) i32);
+impl PerformanceCounterScopeKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PerformanceCounterScopeKHR {
+ pub const COMMAND_BUFFER: Self = Self(0);
+ pub const RENDER_PASS: Self = Self(1);
+ pub const COMMAND: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceCounterUnitKHR.html>"]
+pub struct PerformanceCounterUnitKHR(pub(crate) i32);
+impl PerformanceCounterUnitKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PerformanceCounterUnitKHR {
+ pub const GENERIC: Self = Self(0);
+ pub const PERCENTAGE: Self = Self(1);
+ pub const NANOSECONDS: Self = Self(2);
+ pub const BYTES: Self = Self(3);
+ pub const BYTES_PER_SECOND: Self = Self(4);
+ pub const KELVIN: Self = Self(5);
+ pub const WATTS: Self = Self(6);
+ pub const VOLTS: Self = Self(7);
+ pub const AMPS: Self = Self(8);
+ pub const HERTZ: Self = Self(9);
+ pub const CYCLES: Self = Self(10);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceCounterStorageKHR.html>"]
+pub struct PerformanceCounterStorageKHR(pub(crate) i32);
+impl PerformanceCounterStorageKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PerformanceCounterStorageKHR {
+ pub const INT32: Self = Self(0);
+ pub const INT64: Self = Self(1);
+ pub const UINT32: Self = Self(2);
+ pub const UINT64: Self = Self(3);
+ pub const FLOAT32: Self = Self(4);
+ pub const FLOAT64: Self = Self(5);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceConfigurationTypeINTEL.html>"]
+pub struct PerformanceConfigurationTypeINTEL(pub(crate) i32);
+impl PerformanceConfigurationTypeINTEL {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PerformanceConfigurationTypeINTEL {
+ pub const COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED: Self = Self(0);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueryPoolSamplingModeINTEL.html>"]
+pub struct QueryPoolSamplingModeINTEL(pub(crate) i32);
+impl QueryPoolSamplingModeINTEL {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl QueryPoolSamplingModeINTEL {
+ pub const MANUAL: Self = Self(0);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceOverrideTypeINTEL.html>"]
+pub struct PerformanceOverrideTypeINTEL(pub(crate) i32);
+impl PerformanceOverrideTypeINTEL {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PerformanceOverrideTypeINTEL {
+ pub const NULL_HARDWARE: Self = Self(0);
+ pub const FLUSH_GPU_CACHES: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceParameterTypeINTEL.html>"]
+pub struct PerformanceParameterTypeINTEL(pub(crate) i32);
+impl PerformanceParameterTypeINTEL {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PerformanceParameterTypeINTEL {
+ pub const HW_COUNTERS_SUPPORTED: Self = Self(0);
+ pub const STREAM_MARKER_VALIDS: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPerformanceValueTypeINTEL.html>"]
+pub struct PerformanceValueTypeINTEL(pub(crate) i32);
+impl PerformanceValueTypeINTEL {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PerformanceValueTypeINTEL {
+ pub const UINT32: Self = Self(0);
+ pub const UINT64: Self = Self(1);
+ pub const FLOAT: Self = Self(2);
+ pub const BOOL: Self = Self(3);
+ pub const STRING: Self = Self(4);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkShaderFloatControlsIndependence.html>"]
+pub struct ShaderFloatControlsIndependence(pub(crate) i32);
+impl ShaderFloatControlsIndependence {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ShaderFloatControlsIndependence {
+ pub const TYPE_32_ONLY: Self = Self(0);
+ pub const ALL: Self = Self(1);
+ pub const NONE: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineExecutableStatisticFormatKHR.html>"]
+pub struct PipelineExecutableStatisticFormatKHR(pub(crate) i32);
+impl PipelineExecutableStatisticFormatKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PipelineExecutableStatisticFormatKHR {
+ pub const BOOL32: Self = Self(0);
+ pub const INT64: Self = Self(1);
+ pub const UINT64: Self = Self(2);
+ pub const FLOAT64: Self = Self(3);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkLineRasterizationModeEXT.html>"]
+pub struct LineRasterizationModeEXT(pub(crate) i32);
+impl LineRasterizationModeEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl LineRasterizationModeEXT {
+ pub const DEFAULT: Self = Self(0);
+ pub const RECTANGULAR: Self = Self(1);
+ pub const BRESENHAM: Self = Self(2);
+ pub const RECTANGULAR_SMOOTH: Self = Self(3);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFragmentShadingRateCombinerOpKHR.html>"]
+pub struct FragmentShadingRateCombinerOpKHR(pub(crate) i32);
+impl FragmentShadingRateCombinerOpKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl FragmentShadingRateCombinerOpKHR {
+ pub const KEEP: Self = Self(0);
+ pub const REPLACE: Self = Self(1);
+ pub const MIN: Self = Self(2);
+ pub const MAX: Self = Self(3);
+ pub const MUL: Self = Self(4);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFragmentShadingRateNV.html>"]
+pub struct FragmentShadingRateNV(pub(crate) i32);
+impl FragmentShadingRateNV {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl FragmentShadingRateNV {
+ pub const TYPE_1_INVOCATION_PER_PIXEL: Self = Self(0);
+ pub const TYPE_1_INVOCATION_PER_1X2_PIXELS: Self = Self(1);
+ pub const TYPE_1_INVOCATION_PER_2X1_PIXELS: Self = Self(4);
+ pub const TYPE_1_INVOCATION_PER_2X2_PIXELS: Self = Self(5);
+ pub const TYPE_1_INVOCATION_PER_2X4_PIXELS: Self = Self(6);
+ pub const TYPE_1_INVOCATION_PER_4X2_PIXELS: Self = Self(9);
+ pub const TYPE_1_INVOCATION_PER_4X4_PIXELS: Self = Self(10);
+ pub const TYPE_2_INVOCATIONS_PER_PIXEL: Self = Self(11);
+ pub const TYPE_4_INVOCATIONS_PER_PIXEL: Self = Self(12);
+ pub const TYPE_8_INVOCATIONS_PER_PIXEL: Self = Self(13);
+ pub const TYPE_16_INVOCATIONS_PER_PIXEL: Self = Self(14);
+ pub const NO_INVOCATIONS: Self = Self(15);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkFragmentShadingRateTypeNV.html>"]
+pub struct FragmentShadingRateTypeNV(pub(crate) i32);
+impl FragmentShadingRateTypeNV {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl FragmentShadingRateTypeNV {
+ pub const FRAGMENT_SIZE: Self = Self(0);
+ pub const ENUMS: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkSubpassMergeStatusEXT.html>"]
+pub struct SubpassMergeStatusEXT(pub(crate) i32);
+impl SubpassMergeStatusEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl SubpassMergeStatusEXT {
+ pub const MERGED: Self = Self(0);
+ pub const DISALLOWED: Self = Self(1);
+ pub const NOT_MERGED_SIDE_EFFECTS: Self = Self(2);
+ pub const NOT_MERGED_SAMPLES_MISMATCH: Self = Self(3);
+ pub const NOT_MERGED_VIEWS_MISMATCH: Self = Self(4);
+ pub const NOT_MERGED_ALIASING: Self = Self(5);
+ pub const NOT_MERGED_DEPENDENCIES: Self = Self(6);
+ pub const NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT: Self = Self(7);
+ pub const NOT_MERGED_TOO_MANY_ATTACHMENTS: Self = Self(8);
+ pub const NOT_MERGED_INSUFFICIENT_STORAGE: Self = Self(9);
+ pub const NOT_MERGED_DEPTH_STENCIL_COUNT: Self = Self(10);
+ pub const NOT_MERGED_RESOLVE_ATTACHMENT_REUSE: Self = Self(11);
+ pub const NOT_MERGED_SINGLE_SUBPASS: Self = Self(12);
+ pub const NOT_MERGED_UNSPECIFIED: Self = Self(13);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkProvokingVertexModeEXT.html>"]
+pub struct ProvokingVertexModeEXT(pub(crate) i32);
+impl ProvokingVertexModeEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl ProvokingVertexModeEXT {
+ pub const FIRST_VERTEX: Self = Self(0);
+ pub const LAST_VERTEX: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureMotionInstanceTypeNV.html>"]
+pub struct AccelerationStructureMotionInstanceTypeNV(pub(crate) i32);
+impl AccelerationStructureMotionInstanceTypeNV {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl AccelerationStructureMotionInstanceTypeNV {
+ pub const STATIC: Self = Self(0);
+ pub const MATRIX_MOTION: Self = Self(1);
+ pub const SRT_MOTION: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceAddressBindingTypeEXT.html>"]
+pub struct DeviceAddressBindingTypeEXT(pub(crate) i32);
+impl DeviceAddressBindingTypeEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl DeviceAddressBindingTypeEXT {
+ pub const BIND: Self = Self(0);
+ pub const UNBIND: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkQueryResultStatusKHR.html>"]
+pub struct QueryResultStatusKHR(pub(crate) i32);
+impl QueryResultStatusKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl QueryResultStatusKHR {
+ pub const ERROR: Self = Self(-1);
+ pub const NOT_READY: Self = Self(0);
+ pub const COMPLETE: Self = Self(1);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeTuningModeKHR.html>"]
+pub struct VideoEncodeTuningModeKHR(pub(crate) i32);
+impl VideoEncodeTuningModeKHR {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl VideoEncodeTuningModeKHR {
+ pub const DEFAULT: Self = Self(0);
+ pub const HIGH_QUALITY: Self = Self(1);
+ pub const LOW_LATENCY: Self = Self(2);
+ pub const ULTRA_LOW_LATENCY: Self = Self(3);
+ pub const LOSSLESS: Self = Self(4);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH264RateControlStructureEXT.html>"]
+pub struct VideoEncodeH264RateControlStructureEXT(pub(crate) i32);
+impl VideoEncodeH264RateControlStructureEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl VideoEncodeH264RateControlStructureEXT {
+ pub const UNKNOWN: Self = Self(0);
+ pub const FLAT: Self = Self(1);
+ pub const DYADIC: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkVideoEncodeH265RateControlStructureEXT.html>"]
+pub struct VideoEncodeH265RateControlStructureEXT(pub(crate) i32);
+impl VideoEncodeH265RateControlStructureEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl VideoEncodeH265RateControlStructureEXT {
+ pub const UNKNOWN: Self = Self(0);
+ pub const FLAT: Self = Self(1);
+ pub const DYADIC: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRobustnessBufferBehaviorEXT.html>"]
+pub struct PipelineRobustnessBufferBehaviorEXT(pub(crate) i32);
+impl PipelineRobustnessBufferBehaviorEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PipelineRobustnessBufferBehaviorEXT {
+ pub const DEVICE_DEFAULT: Self = Self(0);
+ pub const DISABLED: Self = Self(1);
+ pub const ROBUST_BUFFER_ACCESS: Self = Self(2);
+ pub const ROBUST_BUFFER_ACCESS_2: Self = Self(3);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkPipelineRobustnessImageBehaviorEXT.html>"]
+pub struct PipelineRobustnessImageBehaviorEXT(pub(crate) i32);
+impl PipelineRobustnessImageBehaviorEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl PipelineRobustnessImageBehaviorEXT {
+ pub const DEVICE_DEFAULT: Self = Self(0);
+ pub const DISABLED: Self = Self(1);
+ pub const ROBUST_IMAGE_ACCESS: Self = Self(2);
+ pub const ROBUST_IMAGE_ACCESS_2: Self = Self(3);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOpticalFlowPerformanceLevelNV.html>"]
+pub struct OpticalFlowPerformanceLevelNV(pub(crate) i32);
+impl OpticalFlowPerformanceLevelNV {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl OpticalFlowPerformanceLevelNV {
+ pub const UNKNOWN: Self = Self(0);
+ pub const SLOW: Self = Self(1);
+ pub const MEDIUM: Self = Self(2);
+ pub const FAST: Self = Self(3);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOpticalFlowSessionBindingPointNV.html>"]
+pub struct OpticalFlowSessionBindingPointNV(pub(crate) i32);
+impl OpticalFlowSessionBindingPointNV {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl OpticalFlowSessionBindingPointNV {
+ pub const UNKNOWN: Self = Self(0);
+ pub const INPUT: Self = Self(1);
+ pub const REFERENCE: Self = Self(2);
+ pub const HINT: Self = Self(3);
+ pub const FLOW_VECTOR: Self = Self(4);
+ pub const BACKWARD_FLOW_VECTOR: Self = Self(5);
+ pub const COST: Self = Self(6);
+ pub const BACKWARD_COST: Self = Self(7);
+ pub const GLOBAL_FLOW: Self = Self(8);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkMicromapTypeEXT.html>"]
+pub struct MicromapTypeEXT(pub(crate) i32);
+impl MicromapTypeEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl MicromapTypeEXT {
+ pub const OPACITY_MICROMAP: Self = Self(0);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkCopyMicromapModeEXT.html>"]
+pub struct CopyMicromapModeEXT(pub(crate) i32);
+impl CopyMicromapModeEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl CopyMicromapModeEXT {
+ pub const CLONE: Self = Self(0);
+ pub const SERIALIZE: Self = Self(1);
+ pub const DESERIALIZE: Self = Self(2);
+ pub const COMPACT: Self = Self(3);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkBuildMicromapModeEXT.html>"]
+pub struct BuildMicromapModeEXT(pub(crate) i32);
+impl BuildMicromapModeEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl BuildMicromapModeEXT {
+ pub const BUILD: Self = Self(0);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOpacityMicromapFormatEXT.html>"]
+pub struct OpacityMicromapFormatEXT(pub(crate) i32);
+impl OpacityMicromapFormatEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl OpacityMicromapFormatEXT {
+ pub const TYPE_2_STATE: Self = Self(1);
+ pub const TYPE_4_STATE: Self = Self(2);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkOpacityMicromapSpecialIndexEXT.html>"]
+pub struct OpacityMicromapSpecialIndexEXT(pub(crate) i32);
+impl OpacityMicromapSpecialIndexEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl OpacityMicromapSpecialIndexEXT {
+ pub const FULLY_TRANSPARENT: Self = Self(-1);
+ pub const FULLY_OPAQUE: Self = Self(-2);
+ pub const FULLY_UNKNOWN_TRANSPARENT: Self = Self(-3);
+ pub const FULLY_UNKNOWN_OPAQUE: Self = Self(-4);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceFaultAddressTypeEXT.html>"]
+pub struct DeviceFaultAddressTypeEXT(pub(crate) i32);
+impl DeviceFaultAddressTypeEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl DeviceFaultAddressTypeEXT {
+ #[doc = "Currently unused"]
+ pub const NONE: Self = Self(0);
+ pub const READ_INVALID: Self = Self(1);
+ pub const WRITE_INVALID: Self = Self(2);
+ pub const EXECUTE_INVALID: Self = Self(3);
+ pub const INSTRUCTION_POINTER_UNKNOWN: Self = Self(4);
+ pub const INSTRUCTION_POINTER_INVALID: Self = Self(5);
+ pub const INSTRUCTION_POINTER_FAULT: Self = Self(6);
+}
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
+#[repr(transparent)]
+#[doc = "<https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkDeviceFaultVendorBinaryHeaderVersionEXT.html>"]
+pub struct DeviceFaultVendorBinaryHeaderVersionEXT(pub(crate) i32);
+impl DeviceFaultVendorBinaryHeaderVersionEXT {
+ #[inline]
+ pub const fn from_raw(x: i32) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> i32 {
+ self.0
+ }
+}
+impl DeviceFaultVendorBinaryHeaderVersionEXT {
+ pub const ONE: Self = Self(1);
+}
+impl fmt::Debug for ObjectType {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::UNKNOWN => Some("UNKNOWN"),
+ Self::INSTANCE => Some("INSTANCE"),
+ Self::PHYSICAL_DEVICE => Some("PHYSICAL_DEVICE"),
+ Self::DEVICE => Some("DEVICE"),
+ Self::QUEUE => Some("QUEUE"),
+ Self::SEMAPHORE => Some("SEMAPHORE"),
+ Self::COMMAND_BUFFER => Some("COMMAND_BUFFER"),
+ Self::FENCE => Some("FENCE"),
+ Self::DEVICE_MEMORY => Some("DEVICE_MEMORY"),
+ Self::BUFFER => Some("BUFFER"),
+ Self::IMAGE => Some("IMAGE"),
+ Self::EVENT => Some("EVENT"),
+ Self::QUERY_POOL => Some("QUERY_POOL"),
+ Self::BUFFER_VIEW => Some("BUFFER_VIEW"),
+ Self::IMAGE_VIEW => Some("IMAGE_VIEW"),
+ Self::SHADER_MODULE => Some("SHADER_MODULE"),
+ Self::PIPELINE_CACHE => Some("PIPELINE_CACHE"),
+ Self::PIPELINE_LAYOUT => Some("PIPELINE_LAYOUT"),
+ Self::RENDER_PASS => Some("RENDER_PASS"),
+ Self::PIPELINE => Some("PIPELINE"),
+ Self::DESCRIPTOR_SET_LAYOUT => Some("DESCRIPTOR_SET_LAYOUT"),
+ Self::SAMPLER => Some("SAMPLER"),
+ Self::DESCRIPTOR_POOL => Some("DESCRIPTOR_POOL"),
+ Self::DESCRIPTOR_SET => Some("DESCRIPTOR_SET"),
+ Self::FRAMEBUFFER => Some("FRAMEBUFFER"),
+ Self::COMMAND_POOL => Some("COMMAND_POOL"),
+ Self::SURFACE_KHR => Some("SURFACE_KHR"),
+ Self::SWAPCHAIN_KHR => Some("SWAPCHAIN_KHR"),
+ Self::DISPLAY_KHR => Some("DISPLAY_KHR"),
+ Self::DISPLAY_MODE_KHR => Some("DISPLAY_MODE_KHR"),
+ Self::DEBUG_REPORT_CALLBACK_EXT => Some("DEBUG_REPORT_CALLBACK_EXT"),
+ Self::VIDEO_SESSION_KHR => Some("VIDEO_SESSION_KHR"),
+ Self::VIDEO_SESSION_PARAMETERS_KHR => Some("VIDEO_SESSION_PARAMETERS_KHR"),
+ Self::CU_MODULE_NVX => Some("CU_MODULE_NVX"),
+ Self::CU_FUNCTION_NVX => Some("CU_FUNCTION_NVX"),
+ Self::DEBUG_UTILS_MESSENGER_EXT => Some("DEBUG_UTILS_MESSENGER_EXT"),
+ Self::ACCELERATION_STRUCTURE_KHR => Some("ACCELERATION_STRUCTURE_KHR"),
+ Self::VALIDATION_CACHE_EXT => Some("VALIDATION_CACHE_EXT"),
+ Self::ACCELERATION_STRUCTURE_NV => Some("ACCELERATION_STRUCTURE_NV"),
+ Self::PERFORMANCE_CONFIGURATION_INTEL => Some("PERFORMANCE_CONFIGURATION_INTEL"),
+ Self::DEFERRED_OPERATION_KHR => Some("DEFERRED_OPERATION_KHR"),
+ Self::INDIRECT_COMMANDS_LAYOUT_NV => Some("INDIRECT_COMMANDS_LAYOUT_NV"),
+ Self::BUFFER_COLLECTION_FUCHSIA => Some("BUFFER_COLLECTION_FUCHSIA"),
+ Self::MICROMAP_EXT => Some("MICROMAP_EXT"),
+ Self::OPTICAL_FLOW_SESSION_NV => Some("OPTICAL_FLOW_SESSION_NV"),
+ Self::SAMPLER_YCBCR_CONVERSION => Some("SAMPLER_YCBCR_CONVERSION"),
+ Self::DESCRIPTOR_UPDATE_TEMPLATE => Some("DESCRIPTOR_UPDATE_TEMPLATE"),
+ Self::PRIVATE_DATA_SLOT => Some("PRIVATE_DATA_SLOT"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
+impl fmt::Debug for Result {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let name = match *self {
+ Self::SUCCESS => Some("SUCCESS"),
+ Self::NOT_READY => Some("NOT_READY"),
+ Self::TIMEOUT => Some("TIMEOUT"),
+ Self::EVENT_SET => Some("EVENT_SET"),
+ Self::EVENT_RESET => Some("EVENT_RESET"),
+ Self::INCOMPLETE => Some("INCOMPLETE"),
+ Self::ERROR_OUT_OF_HOST_MEMORY => Some("ERROR_OUT_OF_HOST_MEMORY"),
+ Self::ERROR_OUT_OF_DEVICE_MEMORY => Some("ERROR_OUT_OF_DEVICE_MEMORY"),
+ Self::ERROR_INITIALIZATION_FAILED => Some("ERROR_INITIALIZATION_FAILED"),
+ Self::ERROR_DEVICE_LOST => Some("ERROR_DEVICE_LOST"),
+ Self::ERROR_MEMORY_MAP_FAILED => Some("ERROR_MEMORY_MAP_FAILED"),
+ Self::ERROR_LAYER_NOT_PRESENT => Some("ERROR_LAYER_NOT_PRESENT"),
+ Self::ERROR_EXTENSION_NOT_PRESENT => Some("ERROR_EXTENSION_NOT_PRESENT"),
+ Self::ERROR_FEATURE_NOT_PRESENT => Some("ERROR_FEATURE_NOT_PRESENT"),
+ Self::ERROR_INCOMPATIBLE_DRIVER => Some("ERROR_INCOMPATIBLE_DRIVER"),
+ Self::ERROR_TOO_MANY_OBJECTS => Some("ERROR_TOO_MANY_OBJECTS"),
+ Self::ERROR_FORMAT_NOT_SUPPORTED => Some("ERROR_FORMAT_NOT_SUPPORTED"),
+ Self::ERROR_FRAGMENTED_POOL => Some("ERROR_FRAGMENTED_POOL"),
+ Self::ERROR_UNKNOWN => Some("ERROR_UNKNOWN"),
+ Self::ERROR_SURFACE_LOST_KHR => Some("ERROR_SURFACE_LOST_KHR"),
+ Self::ERROR_NATIVE_WINDOW_IN_USE_KHR => Some("ERROR_NATIVE_WINDOW_IN_USE_KHR"),
+ Self::SUBOPTIMAL_KHR => Some("SUBOPTIMAL_KHR"),
+ Self::ERROR_OUT_OF_DATE_KHR => Some("ERROR_OUT_OF_DATE_KHR"),
+ Self::ERROR_INCOMPATIBLE_DISPLAY_KHR => Some("ERROR_INCOMPATIBLE_DISPLAY_KHR"),
+ Self::ERROR_VALIDATION_FAILED_EXT => Some("ERROR_VALIDATION_FAILED_EXT"),
+ Self::ERROR_INVALID_SHADER_NV => Some("ERROR_INVALID_SHADER_NV"),
+ Self::ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR => {
+ Some("ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR")
+ }
+ Self::ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR => {
+ Some("ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR")
+ }
+ Self::ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR => {
+ Some("ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR")
+ }
+ Self::ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR => {
+ Some("ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR")
+ }
+ Self::ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR => {
+ Some("ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR")
+ }
+ Self::ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR => {
+ Some("ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR")
+ }
+ Self::ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT => {
+ Some("ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT")
+ }
+ Self::ERROR_NOT_PERMITTED_KHR => Some("ERROR_NOT_PERMITTED_KHR"),
+ Self::ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT => {
+ Some("ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT")
+ }
+ Self::THREAD_IDLE_KHR => Some("THREAD_IDLE_KHR"),
+ Self::THREAD_DONE_KHR => Some("THREAD_DONE_KHR"),
+ Self::OPERATION_DEFERRED_KHR => Some("OPERATION_DEFERRED_KHR"),
+ Self::OPERATION_NOT_DEFERRED_KHR => Some("OPERATION_NOT_DEFERRED_KHR"),
+ Self::ERROR_COMPRESSION_EXHAUSTED_EXT => Some("ERROR_COMPRESSION_EXHAUSTED_EXT"),
+ Self::ERROR_OUT_OF_POOL_MEMORY => Some("ERROR_OUT_OF_POOL_MEMORY"),
+ Self::ERROR_INVALID_EXTERNAL_HANDLE => Some("ERROR_INVALID_EXTERNAL_HANDLE"),
+ Self::ERROR_FRAGMENTATION => Some("ERROR_FRAGMENTATION"),
+ Self::ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS => {
+ Some("ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS")
+ }
+ Self::PIPELINE_COMPILE_REQUIRED => Some("PIPELINE_COMPILE_REQUIRED"),
+ _ => None,
+ };
+ if let Some(x) = name {
+ f.write_str(x)
+ } else {
+ self.0.fmt(f)
+ }
+ }
+}
diff --git a/third_party/rust/ash/src/vk/extensions.rs b/third_party/rust/ash/src/vk/extensions.rs
new file mode 100644
index 0000000000..cf4936d995
--- /dev/null
+++ b/third_party/rust/ash/src/vk/extensions.rs
@@ -0,0 +1,25322 @@
+use crate::vk::aliases::*;
+use crate::vk::bitflags::*;
+use crate::vk::definitions::*;
+use crate::vk::enums::*;
+use crate::vk::platform_types::*;
+use std::os::raw::*;
+impl KhrSurfaceFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_surface\0") }
+ }
+ pub const SPEC_VERSION: u32 = 25u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroySurfaceKHR = unsafe extern "system" fn(
+ instance: Instance,
+ surface: SurfaceKHR,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceSurfaceSupportKHR = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ queue_family_index: u32,
+ surface: SurfaceKHR,
+ p_supported: *mut Bool32,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ surface: SurfaceKHR,
+ p_surface_capabilities: *mut SurfaceCapabilitiesKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceSurfaceFormatsKHR = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ surface: SurfaceKHR,
+ p_surface_format_count: *mut u32,
+ p_surface_formats: *mut SurfaceFormatKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceSurfacePresentModesKHR = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ surface: SurfaceKHR,
+ p_present_mode_count: *mut u32,
+ p_present_modes: *mut PresentModeKHR,
+) -> Result;
+#[derive(Clone)]
+pub struct KhrSurfaceFn {
+ pub destroy_surface_khr: PFN_vkDestroySurfaceKHR,
+ pub get_physical_device_surface_support_khr: PFN_vkGetPhysicalDeviceSurfaceSupportKHR,
+ pub get_physical_device_surface_capabilities_khr: PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR,
+ pub get_physical_device_surface_formats_khr: PFN_vkGetPhysicalDeviceSurfaceFormatsKHR,
+ pub get_physical_device_surface_present_modes_khr:
+ PFN_vkGetPhysicalDeviceSurfacePresentModesKHR,
+}
+unsafe impl Send for KhrSurfaceFn {}
+unsafe impl Sync for KhrSurfaceFn {}
+impl KhrSurfaceFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ destroy_surface_khr: unsafe {
+ unsafe extern "system" fn destroy_surface_khr(
+ _instance: Instance,
+ _surface: SurfaceKHR,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_surface_khr)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroySurfaceKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_surface_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_surface_support_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_surface_support_khr(
+ _physical_device: PhysicalDevice,
+ _queue_family_index: u32,
+ _surface: SurfaceKHR,
+ _p_supported: *mut Bool32,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_surface_support_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceSurfaceSupportKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_surface_support_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_surface_capabilities_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_surface_capabilities_khr(
+ _physical_device: PhysicalDevice,
+ _surface: SurfaceKHR,
+ _p_surface_capabilities: *mut SurfaceCapabilitiesKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_surface_capabilities_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceSurfaceCapabilitiesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_surface_capabilities_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_surface_formats_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_surface_formats_khr(
+ _physical_device: PhysicalDevice,
+ _surface: SurfaceKHR,
+ _p_surface_format_count: *mut u32,
+ _p_surface_formats: *mut SurfaceFormatKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_surface_formats_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceSurfaceFormatsKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_surface_formats_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_surface_present_modes_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_surface_present_modes_khr(
+ _physical_device: PhysicalDevice,
+ _surface: SurfaceKHR,
+ _p_present_mode_count: *mut u32,
+ _p_present_modes: *mut PresentModeKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_surface_present_modes_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceSurfacePresentModesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_surface_present_modes_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_surface'"]
+impl ObjectType {
+ pub const SURFACE_KHR: Self = Self(1_000_000_000);
+}
+#[doc = "Generated from 'VK_KHR_surface'"]
+impl Result {
+ pub const ERROR_SURFACE_LOST_KHR: Self = Self(-1_000_000_000);
+ pub const ERROR_NATIVE_WINDOW_IN_USE_KHR: Self = Self(-1_000_000_001);
+}
+impl KhrSwapchainFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_swapchain\0") }
+ }
+ pub const SPEC_VERSION: u32 = 70u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateSwapchainKHR = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const SwapchainCreateInfoKHR,
+ p_allocator: *const AllocationCallbacks,
+ p_swapchain: *mut SwapchainKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroySwapchainKHR = unsafe extern "system" fn(
+ device: Device,
+ swapchain: SwapchainKHR,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetSwapchainImagesKHR = unsafe extern "system" fn(
+ device: Device,
+ swapchain: SwapchainKHR,
+ p_swapchain_image_count: *mut u32,
+ p_swapchain_images: *mut Image,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkAcquireNextImageKHR = unsafe extern "system" fn(
+ device: Device,
+ swapchain: SwapchainKHR,
+ timeout: u64,
+ semaphore: Semaphore,
+ fence: Fence,
+ p_image_index: *mut u32,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkQueuePresentKHR =
+ unsafe extern "system" fn(queue: Queue, p_present_info: *const PresentInfoKHR) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDeviceGroupPresentCapabilitiesKHR = unsafe extern "system" fn(
+ device: Device,
+ p_device_group_present_capabilities: *mut DeviceGroupPresentCapabilitiesKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDeviceGroupSurfacePresentModesKHR = unsafe extern "system" fn(
+ device: Device,
+ surface: SurfaceKHR,
+ p_modes: *mut DeviceGroupPresentModeFlagsKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDevicePresentRectanglesKHR = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ surface: SurfaceKHR,
+ p_rect_count: *mut u32,
+ p_rects: *mut Rect2D,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkAcquireNextImage2KHR = unsafe extern "system" fn(
+ device: Device,
+ p_acquire_info: *const AcquireNextImageInfoKHR,
+ p_image_index: *mut u32,
+) -> Result;
+#[derive(Clone)]
+pub struct KhrSwapchainFn {
+ pub create_swapchain_khr: PFN_vkCreateSwapchainKHR,
+ pub destroy_swapchain_khr: PFN_vkDestroySwapchainKHR,
+ pub get_swapchain_images_khr: PFN_vkGetSwapchainImagesKHR,
+ pub acquire_next_image_khr: PFN_vkAcquireNextImageKHR,
+ pub queue_present_khr: PFN_vkQueuePresentKHR,
+ pub get_device_group_present_capabilities_khr: PFN_vkGetDeviceGroupPresentCapabilitiesKHR,
+ pub get_device_group_surface_present_modes_khr: PFN_vkGetDeviceGroupSurfacePresentModesKHR,
+ pub get_physical_device_present_rectangles_khr: PFN_vkGetPhysicalDevicePresentRectanglesKHR,
+ pub acquire_next_image2_khr: PFN_vkAcquireNextImage2KHR,
+}
+unsafe impl Send for KhrSwapchainFn {}
+unsafe impl Sync for KhrSwapchainFn {}
+impl KhrSwapchainFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_swapchain_khr: unsafe {
+ unsafe extern "system" fn create_swapchain_khr(
+ _device: Device,
+ _p_create_info: *const SwapchainCreateInfoKHR,
+ _p_allocator: *const AllocationCallbacks,
+ _p_swapchain: *mut SwapchainKHR,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_swapchain_khr)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateSwapchainKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_swapchain_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_swapchain_khr: unsafe {
+ unsafe extern "system" fn destroy_swapchain_khr(
+ _device: Device,
+ _swapchain: SwapchainKHR,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_swapchain_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroySwapchainKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_swapchain_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_swapchain_images_khr: unsafe {
+ unsafe extern "system" fn get_swapchain_images_khr(
+ _device: Device,
+ _swapchain: SwapchainKHR,
+ _p_swapchain_image_count: *mut u32,
+ _p_swapchain_images: *mut Image,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_swapchain_images_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetSwapchainImagesKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_swapchain_images_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ acquire_next_image_khr: unsafe {
+ unsafe extern "system" fn acquire_next_image_khr(
+ _device: Device,
+ _swapchain: SwapchainKHR,
+ _timeout: u64,
+ _semaphore: Semaphore,
+ _fence: Fence,
+ _p_image_index: *mut u32,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(acquire_next_image_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkAcquireNextImageKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ acquire_next_image_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ queue_present_khr: unsafe {
+ unsafe extern "system" fn queue_present_khr(
+ _queue: Queue,
+ _p_present_info: *const PresentInfoKHR,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(queue_present_khr)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkQueuePresentKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ queue_present_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_device_group_present_capabilities_khr: unsafe {
+ unsafe extern "system" fn get_device_group_present_capabilities_khr(
+ _device: Device,
+ _p_device_group_present_capabilities: *mut DeviceGroupPresentCapabilitiesKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_device_group_present_capabilities_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDeviceGroupPresentCapabilitiesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_group_present_capabilities_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_device_group_surface_present_modes_khr: unsafe {
+ unsafe extern "system" fn get_device_group_surface_present_modes_khr(
+ _device: Device,
+ _surface: SurfaceKHR,
+ _p_modes: *mut DeviceGroupPresentModeFlagsKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_device_group_surface_present_modes_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDeviceGroupSurfacePresentModesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_group_surface_present_modes_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_present_rectangles_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_present_rectangles_khr(
+ _physical_device: PhysicalDevice,
+ _surface: SurfaceKHR,
+ _p_rect_count: *mut u32,
+ _p_rects: *mut Rect2D,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_present_rectangles_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDevicePresentRectanglesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_present_rectangles_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ acquire_next_image2_khr: unsafe {
+ unsafe extern "system" fn acquire_next_image2_khr(
+ _device: Device,
+ _p_acquire_info: *const AcquireNextImageInfoKHR,
+ _p_image_index: *mut u32,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(acquire_next_image2_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkAcquireNextImage2KHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ acquire_next_image2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_swapchain'"]
+impl ImageLayout {
+ pub const PRESENT_SRC_KHR: Self = Self(1_000_001_002);
+}
+#[doc = "Generated from 'VK_KHR_swapchain'"]
+impl ObjectType {
+ pub const SWAPCHAIN_KHR: Self = Self(1_000_001_000);
+}
+#[doc = "Generated from 'VK_KHR_swapchain'"]
+impl Result {
+ pub const SUBOPTIMAL_KHR: Self = Self(1_000_001_003);
+ pub const ERROR_OUT_OF_DATE_KHR: Self = Self(-1_000_001_004);
+}
+#[doc = "Generated from 'VK_KHR_swapchain'"]
+impl StructureType {
+ pub const SWAPCHAIN_CREATE_INFO_KHR: Self = Self(1_000_001_000);
+ pub const PRESENT_INFO_KHR: Self = Self(1_000_001_001);
+ pub const DEVICE_GROUP_PRESENT_CAPABILITIES_KHR: Self = Self(1_000_060_007);
+ pub const IMAGE_SWAPCHAIN_CREATE_INFO_KHR: Self = Self(1_000_060_008);
+ pub const BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR: Self = Self(1_000_060_009);
+ pub const ACQUIRE_NEXT_IMAGE_INFO_KHR: Self = Self(1_000_060_010);
+ pub const DEVICE_GROUP_PRESENT_INFO_KHR: Self = Self(1_000_060_011);
+ pub const DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR: Self = Self(1_000_060_012);
+}
+#[doc = "Generated from 'VK_KHR_swapchain'"]
+impl SwapchainCreateFlagsKHR {
+ #[doc = "Allow images with VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT"]
+ pub const SPLIT_INSTANCE_BIND_REGIONS: Self = Self(0b1);
+ #[doc = "Swapchain is protected"]
+ pub const PROTECTED: Self = Self(0b10);
+}
+impl KhrDisplayFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_display\0") }
+ }
+ pub const SPEC_VERSION: u32 = 23u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceDisplayPropertiesKHR = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_property_count: *mut u32,
+ p_properties: *mut DisplayPropertiesKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_property_count: *mut u32,
+ p_properties: *mut DisplayPlanePropertiesKHR,
+) -> Result;
+// --- VK_KHR_display ---------------------------------------------------------
+// Raw C-ABI function-pointer type aliases; each alias mirrors the prototype of
+// the identically named Vulkan entry point.
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDisplayPlaneSupportedDisplaysKHR = unsafe extern "system" fn(
+    physical_device: PhysicalDevice,
+    plane_index: u32,
+    p_display_count: *mut u32,
+    p_displays: *mut DisplayKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDisplayModePropertiesKHR = unsafe extern "system" fn(
+    physical_device: PhysicalDevice,
+    display: DisplayKHR,
+    p_property_count: *mut u32,
+    p_properties: *mut DisplayModePropertiesKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateDisplayModeKHR = unsafe extern "system" fn(
+    physical_device: PhysicalDevice,
+    display: DisplayKHR,
+    p_create_info: *const DisplayModeCreateInfoKHR,
+    p_allocator: *const AllocationCallbacks,
+    p_mode: *mut DisplayModeKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDisplayPlaneCapabilitiesKHR = unsafe extern "system" fn(
+    physical_device: PhysicalDevice,
+    mode: DisplayModeKHR,
+    plane_index: u32,
+    p_capabilities: *mut DisplayPlaneCapabilitiesKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateDisplayPlaneSurfaceKHR = unsafe extern "system" fn(
+    instance: Instance,
+    p_create_info: *const DisplaySurfaceCreateInfoKHR,
+    p_allocator: *const AllocationCallbacks,
+    p_surface: *mut SurfaceKHR,
+) -> Result;
+// Dispatch table holding one resolved pointer per VK_KHR_display command;
+// filled in by `KhrDisplayFn::load` below.
+#[derive(Clone)]
+pub struct KhrDisplayFn {
+    pub get_physical_device_display_properties_khr: PFN_vkGetPhysicalDeviceDisplayPropertiesKHR,
+    pub get_physical_device_display_plane_properties_khr:
+        PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR,
+    pub get_display_plane_supported_displays_khr: PFN_vkGetDisplayPlaneSupportedDisplaysKHR,
+    pub get_display_mode_properties_khr: PFN_vkGetDisplayModePropertiesKHR,
+    pub create_display_mode_khr: PFN_vkCreateDisplayModeKHR,
+    pub get_display_plane_capabilities_khr: PFN_vkGetDisplayPlaneCapabilitiesKHR,
+    pub create_display_plane_surface_khr: PFN_vkCreateDisplayPlaneSurfaceKHR,
+}
+// SAFETY: the table contains only plain function pointers, which carry no
+// thread affinity or interior mutability.
+unsafe impl Send for KhrDisplayFn {}
+unsafe impl Sync for KhrDisplayFn {}
+impl KhrDisplayFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_physical_device_display_properties_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_display_properties_khr(
+ _physical_device: PhysicalDevice,
+ _p_property_count: *mut u32,
+ _p_properties: *mut DisplayPropertiesKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_display_properties_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceDisplayPropertiesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_display_properties_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_display_plane_properties_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_display_plane_properties_khr(
+ _physical_device: PhysicalDevice,
+ _p_property_count: *mut u32,
+ _p_properties: *mut DisplayPlanePropertiesKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_display_plane_properties_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceDisplayPlanePropertiesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_display_plane_properties_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_display_plane_supported_displays_khr: unsafe {
+ unsafe extern "system" fn get_display_plane_supported_displays_khr(
+ _physical_device: PhysicalDevice,
+ _plane_index: u32,
+ _p_display_count: *mut u32,
+ _p_displays: *mut DisplayKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_display_plane_supported_displays_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDisplayPlaneSupportedDisplaysKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_display_plane_supported_displays_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_display_mode_properties_khr: unsafe {
+ unsafe extern "system" fn get_display_mode_properties_khr(
+ _physical_device: PhysicalDevice,
+ _display: DisplayKHR,
+ _p_property_count: *mut u32,
+ _p_properties: *mut DisplayModePropertiesKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_display_mode_properties_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDisplayModePropertiesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_display_mode_properties_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_display_mode_khr: unsafe {
+ unsafe extern "system" fn create_display_mode_khr(
+ _physical_device: PhysicalDevice,
+ _display: DisplayKHR,
+ _p_create_info: *const DisplayModeCreateInfoKHR,
+ _p_allocator: *const AllocationCallbacks,
+ _p_mode: *mut DisplayModeKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_display_mode_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateDisplayModeKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_display_mode_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_display_plane_capabilities_khr: unsafe {
+ unsafe extern "system" fn get_display_plane_capabilities_khr(
+ _physical_device: PhysicalDevice,
+ _mode: DisplayModeKHR,
+ _plane_index: u32,
+ _p_capabilities: *mut DisplayPlaneCapabilitiesKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_display_plane_capabilities_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDisplayPlaneCapabilitiesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_display_plane_capabilities_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_display_plane_surface_khr: unsafe {
+ unsafe extern "system" fn create_display_plane_surface_khr(
+ _instance: Instance,
+ _p_create_info: *const DisplaySurfaceCreateInfoKHR,
+ _p_allocator: *const AllocationCallbacks,
+ _p_surface: *mut SurfaceKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_display_plane_surface_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreateDisplayPlaneSurfaceKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_display_plane_surface_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+// Enum values contributed by VK_KHR_display to the core Vulkan enums.
+#[doc = "Generated from 'VK_KHR_display'"]
+impl ObjectType {
+    pub const DISPLAY_KHR: Self = Self(1_000_002_000);
+    pub const DISPLAY_MODE_KHR: Self = Self(1_000_002_001);
+}
+#[doc = "Generated from 'VK_KHR_display'"]
+impl StructureType {
+    pub const DISPLAY_MODE_CREATE_INFO_KHR: Self = Self(1_000_002_000);
+    pub const DISPLAY_SURFACE_CREATE_INFO_KHR: Self = Self(1_000_002_001);
+}
+// --- VK_KHR_display_swapchain -----------------------------------------------
+impl KhrDisplaySwapchainFn {
+    // Extension name string and spec revision, as used when enabling the
+    // extension at device creation.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_display_swapchain\0") }
+    }
+    pub const SPEC_VERSION: u32 = 10u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateSharedSwapchainsKHR = unsafe extern "system" fn(
+    device: Device,
+    swapchain_count: u32,
+    p_create_infos: *const SwapchainCreateInfoKHR,
+    p_allocator: *const AllocationCallbacks,
+    p_swapchains: *mut SwapchainKHR,
+) -> Result;
+// Dispatch table for the single VK_KHR_display_swapchain command.
+#[derive(Clone)]
+pub struct KhrDisplaySwapchainFn {
+    pub create_shared_swapchains_khr: PFN_vkCreateSharedSwapchainsKHR,
+}
+// SAFETY: only plain function pointers are stored.
+unsafe impl Send for KhrDisplaySwapchainFn {}
+unsafe impl Sync for KhrDisplaySwapchainFn {}
+impl KhrDisplaySwapchainFn {
+    // Resolves every command of this extension through the caller-supplied
+    // loader `_f` (typically backed by vkGetInstanceProcAddr or
+    // vkGetDeviceProcAddr). When the loader returns null, the slot is filled
+    // with a stub of the identical signature that panics if invoked, so a
+    // missing entry point fails loudly at call time rather than at load time.
+    // This same generated pattern is used by every `*Fn::load` in this file.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            create_shared_swapchains_khr: unsafe {
+                // Panicking placeholder used when the entry point is absent.
+                unsafe extern "system" fn create_shared_swapchains_khr(
+                    _device: Device,
+                    _swapchain_count: u32,
+                    _p_create_infos: *const SwapchainCreateInfoKHR,
+                    _p_allocator: *const AllocationCallbacks,
+                    _p_swapchains: *mut SwapchainKHR,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(create_shared_swapchains_khr)
+                    ))
+                }
+                // The C-level symbol name queried from the loader.
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCreateSharedSwapchainsKHR\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    create_shared_swapchains_khr
+                } else {
+                    // Reinterpret the raw loader pointer as the typed fn pointer.
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+// Enum values contributed by VK_KHR_display_swapchain.
+#[doc = "Generated from 'VK_KHR_display_swapchain'"]
+impl Result {
+    pub const ERROR_INCOMPATIBLE_DISPLAY_KHR: Self = Self(-1_000_003_001);
+}
+#[doc = "Generated from 'VK_KHR_display_swapchain'"]
+impl StructureType {
+    pub const DISPLAY_PRESENT_INFO_KHR: Self = Self(1_000_003_000);
+}
+// --- VK_KHR_xlib_surface ----------------------------------------------------
+impl KhrXlibSurfaceFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_xlib_surface\0") }
+    }
+    pub const SPEC_VERSION: u32 = 6u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateXlibSurfaceKHR = unsafe extern "system" fn(
+    instance: Instance,
+    p_create_info: *const XlibSurfaceCreateInfoKHR,
+    p_allocator: *const AllocationCallbacks,
+    p_surface: *mut SurfaceKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR = unsafe extern "system" fn(
+    physical_device: PhysicalDevice,
+    queue_family_index: u32,
+    dpy: *mut Display,
+    visual_id: VisualID,
+) -> Bool32;
+// Dispatch table for the two VK_KHR_xlib_surface commands.
+#[derive(Clone)]
+pub struct KhrXlibSurfaceFn {
+    pub create_xlib_surface_khr: PFN_vkCreateXlibSurfaceKHR,
+    pub get_physical_device_xlib_presentation_support_khr:
+        PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR,
+}
+// SAFETY: only plain function pointers are stored.
+unsafe impl Send for KhrXlibSurfaceFn {}
+unsafe impl Sync for KhrXlibSurfaceFn {}
+impl KhrXlibSurfaceFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_xlib_surface_khr: unsafe {
+ unsafe extern "system" fn create_xlib_surface_khr(
+ _instance: Instance,
+ _p_create_info: *const XlibSurfaceCreateInfoKHR,
+ _p_allocator: *const AllocationCallbacks,
+ _p_surface: *mut SurfaceKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_xlib_surface_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateXlibSurfaceKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_xlib_surface_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_xlib_presentation_support_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_xlib_presentation_support_khr(
+ _physical_device: PhysicalDevice,
+ _queue_family_index: u32,
+ _dpy: *mut Display,
+ _visual_id: VisualID,
+ ) -> Bool32 {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_xlib_presentation_support_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceXlibPresentationSupportKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_xlib_presentation_support_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_xlib_surface'"]
+impl StructureType {
+    pub const XLIB_SURFACE_CREATE_INFO_KHR: Self = Self(1_000_004_000);
+}
+// --- VK_KHR_xcb_surface -----------------------------------------------------
+impl KhrXcbSurfaceFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_xcb_surface\0") }
+    }
+    pub const SPEC_VERSION: u32 = 6u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateXcbSurfaceKHR = unsafe extern "system" fn(
+    instance: Instance,
+    p_create_info: *const XcbSurfaceCreateInfoKHR,
+    p_allocator: *const AllocationCallbacks,
+    p_surface: *mut SurfaceKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR = unsafe extern "system" fn(
+    physical_device: PhysicalDevice,
+    queue_family_index: u32,
+    connection: *mut xcb_connection_t,
+    visual_id: xcb_visualid_t,
+) -> Bool32;
+// Dispatch table for the two VK_KHR_xcb_surface commands.
+#[derive(Clone)]
+pub struct KhrXcbSurfaceFn {
+    pub create_xcb_surface_khr: PFN_vkCreateXcbSurfaceKHR,
+    pub get_physical_device_xcb_presentation_support_khr:
+        PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR,
+}
+// SAFETY: only plain function pointers are stored.
+unsafe impl Send for KhrXcbSurfaceFn {}
+unsafe impl Sync for KhrXcbSurfaceFn {}
+impl KhrXcbSurfaceFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_xcb_surface_khr: unsafe {
+ unsafe extern "system" fn create_xcb_surface_khr(
+ _instance: Instance,
+ _p_create_info: *const XcbSurfaceCreateInfoKHR,
+ _p_allocator: *const AllocationCallbacks,
+ _p_surface: *mut SurfaceKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_xcb_surface_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateXcbSurfaceKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_xcb_surface_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_xcb_presentation_support_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_xcb_presentation_support_khr(
+ _physical_device: PhysicalDevice,
+ _queue_family_index: u32,
+ _connection: *mut xcb_connection_t,
+ _visual_id: xcb_visualid_t,
+ ) -> Bool32 {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_xcb_presentation_support_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceXcbPresentationSupportKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_xcb_presentation_support_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_xcb_surface'"]
+impl StructureType {
+    pub const XCB_SURFACE_CREATE_INFO_KHR: Self = Self(1_000_005_000);
+}
+// --- VK_KHR_wayland_surface -------------------------------------------------
+impl KhrWaylandSurfaceFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_wayland_surface\0") }
+    }
+    pub const SPEC_VERSION: u32 = 6u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateWaylandSurfaceKHR = unsafe extern "system" fn(
+    instance: Instance,
+    p_create_info: *const WaylandSurfaceCreateInfoKHR,
+    p_allocator: *const AllocationCallbacks,
+    p_surface: *mut SurfaceKHR,
+) -> Result;
+// NOTE(review): the odd line break below is generator/rustfmt output; kept
+// byte-identical because this file is vendored generated code.
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR = unsafe extern "system" fn(
+    physical_device: PhysicalDevice,
+    queue_family_index: u32,
+    display: *mut wl_display,
+)
+    -> Bool32;
+// Dispatch table for the two VK_KHR_wayland_surface commands.
+#[derive(Clone)]
+pub struct KhrWaylandSurfaceFn {
+    pub create_wayland_surface_khr: PFN_vkCreateWaylandSurfaceKHR,
+    pub get_physical_device_wayland_presentation_support_khr:
+        PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR,
+}
+// SAFETY: only plain function pointers are stored.
+unsafe impl Send for KhrWaylandSurfaceFn {}
+unsafe impl Sync for KhrWaylandSurfaceFn {}
+impl KhrWaylandSurfaceFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_wayland_surface_khr: unsafe {
+ unsafe extern "system" fn create_wayland_surface_khr(
+ _instance: Instance,
+ _p_create_info: *const WaylandSurfaceCreateInfoKHR,
+ _p_allocator: *const AllocationCallbacks,
+ _p_surface: *mut SurfaceKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_wayland_surface_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateWaylandSurfaceKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_wayland_surface_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_wayland_presentation_support_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_wayland_presentation_support_khr(
+ _physical_device: PhysicalDevice,
+ _queue_family_index: u32,
+ _display: *mut wl_display,
+ ) -> Bool32 {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_wayland_presentation_support_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceWaylandPresentationSupportKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_wayland_presentation_support_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_wayland_surface'"]
+impl StructureType {
+    pub const WAYLAND_SURFACE_CREATE_INFO_KHR: Self = Self(1_000_006_000);
+}
+// --- VK_KHR_mir_surface -----------------------------------------------------
+// This extension contributes no commands here, so its table is empty and
+// `load` never queries the loader.
+impl KhrMirSurfaceFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_mir_surface\0") }
+    }
+    pub const SPEC_VERSION: u32 = 4u32;
+}
+#[derive(Clone)]
+pub struct KhrMirSurfaceFn {}
+unsafe impl Send for KhrMirSurfaceFn {}
+unsafe impl Sync for KhrMirSurfaceFn {}
+impl KhrMirSurfaceFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// --- VK_KHR_android_surface -------------------------------------------------
+impl KhrAndroidSurfaceFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_android_surface\0") }
+    }
+    pub const SPEC_VERSION: u32 = 6u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateAndroidSurfaceKHR = unsafe extern "system" fn(
+    instance: Instance,
+    p_create_info: *const AndroidSurfaceCreateInfoKHR,
+    p_allocator: *const AllocationCallbacks,
+    p_surface: *mut SurfaceKHR,
+) -> Result;
+// Dispatch table for the single VK_KHR_android_surface command.
+#[derive(Clone)]
+pub struct KhrAndroidSurfaceFn {
+    pub create_android_surface_khr: PFN_vkCreateAndroidSurfaceKHR,
+}
+// SAFETY: only plain function pointers are stored.
+unsafe impl Send for KhrAndroidSurfaceFn {}
+unsafe impl Sync for KhrAndroidSurfaceFn {}
+impl KhrAndroidSurfaceFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_android_surface_khr: unsafe {
+ unsafe extern "system" fn create_android_surface_khr(
+ _instance: Instance,
+ _p_create_info: *const AndroidSurfaceCreateInfoKHR,
+ _p_allocator: *const AllocationCallbacks,
+ _p_surface: *mut SurfaceKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_android_surface_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateAndroidSurfaceKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_android_surface_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_android_surface'"]
+impl StructureType {
+    pub const ANDROID_SURFACE_CREATE_INFO_KHR: Self = Self(1_000_008_000);
+}
+// --- VK_KHR_win32_surface ---------------------------------------------------
+impl KhrWin32SurfaceFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_win32_surface\0") }
+    }
+    pub const SPEC_VERSION: u32 = 6u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateWin32SurfaceKHR = unsafe extern "system" fn(
+    instance: Instance,
+    p_create_info: *const Win32SurfaceCreateInfoKHR,
+    p_allocator: *const AllocationCallbacks,
+    p_surface: *mut SurfaceKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR =
+    unsafe extern "system" fn(physical_device: PhysicalDevice, queue_family_index: u32) -> Bool32;
+// Dispatch table for the two VK_KHR_win32_surface commands.
+#[derive(Clone)]
+pub struct KhrWin32SurfaceFn {
+    pub create_win32_surface_khr: PFN_vkCreateWin32SurfaceKHR,
+    pub get_physical_device_win32_presentation_support_khr:
+        PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR,
+}
+// SAFETY: only plain function pointers are stored.
+unsafe impl Send for KhrWin32SurfaceFn {}
+unsafe impl Sync for KhrWin32SurfaceFn {}
+impl KhrWin32SurfaceFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_win32_surface_khr: unsafe {
+ unsafe extern "system" fn create_win32_surface_khr(
+ _instance: Instance,
+ _p_create_info: *const Win32SurfaceCreateInfoKHR,
+ _p_allocator: *const AllocationCallbacks,
+ _p_surface: *mut SurfaceKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_win32_surface_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateWin32SurfaceKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_win32_surface_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_win32_presentation_support_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_win32_presentation_support_khr(
+ _physical_device: PhysicalDevice,
+ _queue_family_index: u32,
+ ) -> Bool32 {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_win32_presentation_support_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceWin32PresentationSupportKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_win32_presentation_support_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_win32_surface'"]
+impl StructureType {
+    pub const WIN32_SURFACE_CREATE_INFO_KHR: Self = Self(1_000_009_000);
+}
+// --- VK_ANDROID_native_buffer -----------------------------------------------
+impl AndroidNativeBufferFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_ANDROID_native_buffer\0") }
+    }
+    pub const SPEC_VERSION: u32 = 8u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetSwapchainGrallocUsageANDROID = unsafe extern "system" fn(
+    device: Device,
+    format: Format,
+    image_usage: ImageUsageFlags,
+    gralloc_usage: *mut c_int,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkAcquireImageANDROID = unsafe extern "system" fn(
+    device: Device,
+    image: Image,
+    native_fence_fd: c_int,
+    semaphore: Semaphore,
+    fence: Fence,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkQueueSignalReleaseImageANDROID = unsafe extern "system" fn(
+    queue: Queue,
+    wait_semaphore_count: u32,
+    p_wait_semaphores: *const Semaphore,
+    image: Image,
+    p_native_fence_fd: *mut c_int,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetSwapchainGrallocUsage2ANDROID = unsafe extern "system" fn(
+    device: Device,
+    format: Format,
+    image_usage: ImageUsageFlags,
+    swapchain_image_usage: SwapchainImageUsageFlagsANDROID,
+    gralloc_consumer_usage: *mut u64,
+    gralloc_producer_usage: *mut u64,
+) -> Result;
+// Dispatch table for the four VK_ANDROID_native_buffer commands.
+#[derive(Clone)]
+pub struct AndroidNativeBufferFn {
+    pub get_swapchain_gralloc_usage_android: PFN_vkGetSwapchainGrallocUsageANDROID,
+    pub acquire_image_android: PFN_vkAcquireImageANDROID,
+    pub queue_signal_release_image_android: PFN_vkQueueSignalReleaseImageANDROID,
+    pub get_swapchain_gralloc_usage2_android: PFN_vkGetSwapchainGrallocUsage2ANDROID,
+}
+// SAFETY: only plain function pointers are stored.
+unsafe impl Send for AndroidNativeBufferFn {}
+unsafe impl Sync for AndroidNativeBufferFn {}
+impl AndroidNativeBufferFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_swapchain_gralloc_usage_android: unsafe {
+ unsafe extern "system" fn get_swapchain_gralloc_usage_android(
+ _device: Device,
+ _format: Format,
+ _image_usage: ImageUsageFlags,
+ _gralloc_usage: *mut c_int,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_swapchain_gralloc_usage_android)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetSwapchainGrallocUsageANDROID\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_swapchain_gralloc_usage_android
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ acquire_image_android: unsafe {
+ unsafe extern "system" fn acquire_image_android(
+ _device: Device,
+ _image: Image,
+ _native_fence_fd: c_int,
+ _semaphore: Semaphore,
+ _fence: Fence,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(acquire_image_android)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkAcquireImageANDROID\0");
+ let val = _f(cname);
+ if val.is_null() {
+ acquire_image_android
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ queue_signal_release_image_android: unsafe {
+ unsafe extern "system" fn queue_signal_release_image_android(
+ _queue: Queue,
+ _wait_semaphore_count: u32,
+ _p_wait_semaphores: *const Semaphore,
+ _image: Image,
+ _p_native_fence_fd: *mut c_int,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(queue_signal_release_image_android)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkQueueSignalReleaseImageANDROID\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ queue_signal_release_image_android
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_swapchain_gralloc_usage2_android: unsafe {
+ unsafe extern "system" fn get_swapchain_gralloc_usage2_android(
+ _device: Device,
+ _format: Format,
+ _image_usage: ImageUsageFlags,
+ _swapchain_image_usage: SwapchainImageUsageFlagsANDROID,
+ _gralloc_consumer_usage: *mut u64,
+ _gralloc_producer_usage: *mut u64,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_swapchain_gralloc_usage2_android)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetSwapchainGrallocUsage2ANDROID\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_swapchain_gralloc_usage2_android
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_ANDROID_native_buffer'"]
+impl StructureType {
+    pub const NATIVE_BUFFER_ANDROID: Self = Self(1_000_010_000);
+    pub const SWAPCHAIN_IMAGE_CREATE_INFO_ANDROID: Self = Self(1_000_010_001);
+    pub const PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID: Self = Self(1_000_010_002);
+}
+// --- VK_EXT_debug_report ----------------------------------------------------
+impl ExtDebugReportFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_debug_report\0") }
+    }
+    pub const SPEC_VERSION: u32 = 10u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateDebugReportCallbackEXT = unsafe extern "system" fn(
+    instance: Instance,
+    p_create_info: *const DebugReportCallbackCreateInfoEXT,
+    p_allocator: *const AllocationCallbacks,
+    p_callback: *mut DebugReportCallbackEXT,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyDebugReportCallbackEXT = unsafe extern "system" fn(
+    instance: Instance,
+    callback: DebugReportCallbackEXT,
+    p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkDebugReportMessageEXT = unsafe extern "system" fn(
+    instance: Instance,
+    flags: DebugReportFlagsEXT,
+    object_type: DebugReportObjectTypeEXT,
+    object: u64,
+    location: usize,
+    message_code: i32,
+    p_layer_prefix: *const c_char,
+    p_message: *const c_char,
+);
+// Dispatch table for the three VK_EXT_debug_report commands.
+#[derive(Clone)]
+pub struct ExtDebugReportFn {
+    pub create_debug_report_callback_ext: PFN_vkCreateDebugReportCallbackEXT,
+    pub destroy_debug_report_callback_ext: PFN_vkDestroyDebugReportCallbackEXT,
+    pub debug_report_message_ext: PFN_vkDebugReportMessageEXT,
+}
+// SAFETY: only plain function pointers are stored.
+unsafe impl Send for ExtDebugReportFn {}
+unsafe impl Sync for ExtDebugReportFn {}
+impl ExtDebugReportFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_debug_report_callback_ext: unsafe {
+ unsafe extern "system" fn create_debug_report_callback_ext(
+ _instance: Instance,
+ _p_create_info: *const DebugReportCallbackCreateInfoEXT,
+ _p_allocator: *const AllocationCallbacks,
+ _p_callback: *mut DebugReportCallbackEXT,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_debug_report_callback_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreateDebugReportCallbackEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_debug_report_callback_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_debug_report_callback_ext: unsafe {
+ unsafe extern "system" fn destroy_debug_report_callback_ext(
+ _instance: Instance,
+ _callback: DebugReportCallbackEXT,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_debug_report_callback_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkDestroyDebugReportCallbackEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_debug_report_callback_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ debug_report_message_ext: unsafe {
+ unsafe extern "system" fn debug_report_message_ext(
+ _instance: Instance,
+ _flags: DebugReportFlagsEXT,
+ _object_type: DebugReportObjectTypeEXT,
+ _object: u64,
+ _location: usize,
+ _message_code: i32,
+ _p_layer_prefix: *const c_char,
+ _p_message: *const c_char,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(debug_report_message_ext)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDebugReportMessageEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ debug_report_message_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+// Enum values contributed by VK_EXT_debug_report.
+#[doc = "Generated from 'VK_EXT_debug_report'"]
+impl DebugReportObjectTypeEXT {
+    pub const SAMPLER_YCBCR_CONVERSION: Self = Self(1_000_156_000);
+    pub const DESCRIPTOR_UPDATE_TEMPLATE: Self = Self(1_000_085_000);
+}
+#[doc = "Generated from 'VK_EXT_debug_report'"]
+impl ObjectType {
+    pub const DEBUG_REPORT_CALLBACK_EXT: Self = Self(1_000_011_000);
+}
+#[doc = "Generated from 'VK_EXT_debug_report'"]
+impl Result {
+    pub const ERROR_VALIDATION_FAILED_EXT: Self = Self(-1_000_011_001);
+}
+#[doc = "Generated from 'VK_EXT_debug_report'"]
+impl StructureType {
+    pub const DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT: Self = Self(1_000_011_000);
+}
+// --- VK_NV_glsl_shader (no commands; table is empty) ------------------------
+impl NvGlslShaderFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_glsl_shader\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct NvGlslShaderFn {}
+unsafe impl Send for NvGlslShaderFn {}
+unsafe impl Sync for NvGlslShaderFn {}
+impl NvGlslShaderFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_NV_glsl_shader'"]
+impl Result {
+    pub const ERROR_INVALID_SHADER_NV: Self = Self(-1_000_012_000);
+}
+// --- VK_EXT_depth_range_unrestricted (no commands) --------------------------
+impl ExtDepthRangeUnrestrictedFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_depth_range_unrestricted\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtDepthRangeUnrestrictedFn {}
+unsafe impl Send for ExtDepthRangeUnrestrictedFn {}
+unsafe impl Sync for ExtDepthRangeUnrestrictedFn {}
+impl ExtDepthRangeUnrestrictedFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// --- VK_KHR_sampler_mirror_clamp_to_edge (no commands) ----------------------
+impl KhrSamplerMirrorClampToEdgeFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                b"VK_KHR_sampler_mirror_clamp_to_edge\0",
+            )
+        }
+    }
+    pub const SPEC_VERSION: u32 = 3u32;
+}
+#[derive(Clone)]
+pub struct KhrSamplerMirrorClampToEdgeFn {}
+unsafe impl Send for KhrSamplerMirrorClampToEdgeFn {}
+unsafe impl Sync for KhrSamplerMirrorClampToEdgeFn {}
+impl KhrSamplerMirrorClampToEdgeFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_KHR_sampler_mirror_clamp_to_edge'"]
+impl SamplerAddressMode {
+    #[doc = "Note that this defines what was previously a core enum, and so uses the 'value' attribute rather than 'offset', and does not have a suffix. This is a special case, and should not be repeated"]
+    pub const MIRROR_CLAMP_TO_EDGE: Self = Self(4);
+    #[deprecated = "Alias introduced for consistency with extension suffixing rules"]
+    pub const MIRROR_CLAMP_TO_EDGE_KHR: Self = Self::MIRROR_CLAMP_TO_EDGE;
+}
+// --- VK_IMG_filter_cubic (no commands; only enum aliases below) -------------
+impl ImgFilterCubicFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_IMG_filter_cubic\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ImgFilterCubicFn {}
+unsafe impl Send for ImgFilterCubicFn {}
+unsafe impl Sync for ImgFilterCubicFn {}
+impl ImgFilterCubicFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_IMG_filter_cubic'"]
+impl Filter {
+    pub const CUBIC_IMG: Self = Self::CUBIC_EXT;
+}
+#[doc = "Generated from 'VK_IMG_filter_cubic'"]
+impl FormatFeatureFlags {
+    #[doc = "Format can be filtered with VK_FILTER_CUBIC_IMG when being sampled"]
+    pub const SAMPLED_IMAGE_FILTER_CUBIC_IMG: Self = Self::SAMPLED_IMAGE_FILTER_CUBIC_EXT;
+}
+// --- VK_AMD_extension_17: no commands; SPEC_VERSION 0 — presumably a
+// reserved extension slot (TODO confirm against the Vulkan registry).
+impl AmdExtension17Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_17\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension17Fn {}
+unsafe impl Send for AmdExtension17Fn {}
+unsafe impl Sync for AmdExtension17Fn {}
+impl AmdExtension17Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// --- VK_AMD_extension_18: no commands; SPEC_VERSION 0 (see note above 17) ---
+impl AmdExtension18Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_18\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension18Fn {}
+unsafe impl Send for AmdExtension18Fn {}
+unsafe impl Sync for AmdExtension18Fn {}
+impl AmdExtension18Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// --- VK_AMD_rasterization_order (no commands) -------------------------------
+impl AmdRasterizationOrderFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_rasterization_order\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct AmdRasterizationOrderFn {}
+unsafe impl Send for AmdRasterizationOrderFn {}
+unsafe impl Sync for AmdRasterizationOrderFn {}
+impl AmdRasterizationOrderFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_AMD_rasterization_order'"]
+impl StructureType {
+    pub const PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD: Self = Self(1_000_018_000);
+}
+// --- VK_AMD_extension_20: no commands; SPEC_VERSION 0 — presumably a
+// reserved extension slot (TODO confirm against the Vulkan registry).
+impl AmdExtension20Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_20\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension20Fn {}
+unsafe impl Send for AmdExtension20Fn {}
+unsafe impl Sync for AmdExtension20Fn {}
+impl AmdExtension20Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// --- VK_AMD_shader_trinary_minmax (no commands) -----------------------------
+impl AmdShaderTrinaryMinmaxFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_trinary_minmax\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct AmdShaderTrinaryMinmaxFn {}
+unsafe impl Send for AmdShaderTrinaryMinmaxFn {}
+unsafe impl Sync for AmdShaderTrinaryMinmaxFn {}
+impl AmdShaderTrinaryMinmaxFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// --- VK_AMD_shader_explicit_vertex_parameter (no commands) ------------------
+impl AmdShaderExplicitVertexParameterFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                b"VK_AMD_shader_explicit_vertex_parameter\0",
+            )
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct AmdShaderExplicitVertexParameterFn {}
+unsafe impl Send for AmdShaderExplicitVertexParameterFn {}
+unsafe impl Sync for AmdShaderExplicitVertexParameterFn {}
+impl AmdShaderExplicitVertexParameterFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// --- VK_EXT_debug_marker ----------------------------------------------------
+impl ExtDebugMarkerFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_debug_marker\0") }
+    }
+    pub const SPEC_VERSION: u32 = 4u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkDebugMarkerSetObjectTagEXT = unsafe extern "system" fn(
+ device: Device,
+ p_tag_info: *const DebugMarkerObjectTagInfoEXT,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDebugMarkerSetObjectNameEXT = unsafe extern "system" fn(
+ device: Device,
+ p_name_info: *const DebugMarkerObjectNameInfoEXT,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDebugMarkerBeginEXT = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ p_marker_info: *const DebugMarkerMarkerInfoEXT,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDebugMarkerEndEXT = unsafe extern "system" fn(command_buffer: CommandBuffer);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDebugMarkerInsertEXT = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ p_marker_info: *const DebugMarkerMarkerInfoEXT,
+);
+#[derive(Clone)]
+pub struct ExtDebugMarkerFn {
+ pub debug_marker_set_object_tag_ext: PFN_vkDebugMarkerSetObjectTagEXT,
+ pub debug_marker_set_object_name_ext: PFN_vkDebugMarkerSetObjectNameEXT,
+ pub cmd_debug_marker_begin_ext: PFN_vkCmdDebugMarkerBeginEXT,
+ pub cmd_debug_marker_end_ext: PFN_vkCmdDebugMarkerEndEXT,
+ pub cmd_debug_marker_insert_ext: PFN_vkCmdDebugMarkerInsertEXT,
+}
+unsafe impl Send for ExtDebugMarkerFn {}
+unsafe impl Sync for ExtDebugMarkerFn {}
+impl ExtDebugMarkerFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ debug_marker_set_object_tag_ext: unsafe {
+ unsafe extern "system" fn debug_marker_set_object_tag_ext(
+ _device: Device,
+ _p_tag_info: *const DebugMarkerObjectTagInfoEXT,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(debug_marker_set_object_tag_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkDebugMarkerSetObjectTagEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ debug_marker_set_object_tag_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ debug_marker_set_object_name_ext: unsafe {
+ unsafe extern "system" fn debug_marker_set_object_name_ext(
+ _device: Device,
+ _p_name_info: *const DebugMarkerObjectNameInfoEXT,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(debug_marker_set_object_name_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkDebugMarkerSetObjectNameEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ debug_marker_set_object_name_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_debug_marker_begin_ext: unsafe {
+ unsafe extern "system" fn cmd_debug_marker_begin_ext(
+ _command_buffer: CommandBuffer,
+ _p_marker_info: *const DebugMarkerMarkerInfoEXT,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_debug_marker_begin_ext)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDebugMarkerBeginEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_debug_marker_begin_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_debug_marker_end_ext: unsafe {
+ unsafe extern "system" fn cmd_debug_marker_end_ext(_command_buffer: CommandBuffer) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_debug_marker_end_ext)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDebugMarkerEndEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_debug_marker_end_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_debug_marker_insert_ext: unsafe {
+ unsafe extern "system" fn cmd_debug_marker_insert_ext(
+ _command_buffer: CommandBuffer,
+ _p_marker_info: *const DebugMarkerMarkerInfoEXT,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_debug_marker_insert_ext)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDebugMarkerInsertEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_debug_marker_insert_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_debug_marker'"]
+impl StructureType {
+ pub const DEBUG_MARKER_OBJECT_NAME_INFO_EXT: Self = Self(1_000_022_000);
+ pub const DEBUG_MARKER_OBJECT_TAG_INFO_EXT: Self = Self(1_000_022_001);
+ pub const DEBUG_MARKER_MARKER_INFO_EXT: Self = Self(1_000_022_002);
+}
+impl KhrVideoQueueFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_video_queue\0") }
+ }
+ pub const SPEC_VERSION: u32 = 8u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_video_profile: *const VideoProfileInfoKHR,
+ p_capabilities: *mut VideoCapabilitiesKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_video_format_info: *const PhysicalDeviceVideoFormatInfoKHR,
+ p_video_format_property_count: *mut u32,
+ p_video_format_properties: *mut VideoFormatPropertiesKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateVideoSessionKHR = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const VideoSessionCreateInfoKHR,
+ p_allocator: *const AllocationCallbacks,
+ p_video_session: *mut VideoSessionKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyVideoSessionKHR = unsafe extern "system" fn(
+ device: Device,
+ video_session: VideoSessionKHR,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetVideoSessionMemoryRequirementsKHR = unsafe extern "system" fn(
+ device: Device,
+ video_session: VideoSessionKHR,
+ p_memory_requirements_count: *mut u32,
+ p_memory_requirements: *mut VideoSessionMemoryRequirementsKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkBindVideoSessionMemoryKHR = unsafe extern "system" fn(
+ device: Device,
+ video_session: VideoSessionKHR,
+ bind_session_memory_info_count: u32,
+ p_bind_session_memory_infos: *const BindVideoSessionMemoryInfoKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateVideoSessionParametersKHR = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const VideoSessionParametersCreateInfoKHR,
+ p_allocator: *const AllocationCallbacks,
+ p_video_session_parameters: *mut VideoSessionParametersKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkUpdateVideoSessionParametersKHR = unsafe extern "system" fn(
+ device: Device,
+ video_session_parameters: VideoSessionParametersKHR,
+ p_update_info: *const VideoSessionParametersUpdateInfoKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyVideoSessionParametersKHR = unsafe extern "system" fn(
+ device: Device,
+ video_session_parameters: VideoSessionParametersKHR,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBeginVideoCodingKHR = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ p_begin_info: *const VideoBeginCodingInfoKHR,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdEndVideoCodingKHR = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ p_end_coding_info: *const VideoEndCodingInfoKHR,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdControlVideoCodingKHR = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ p_coding_control_info: *const VideoCodingControlInfoKHR,
+);
+#[derive(Clone)]
+pub struct KhrVideoQueueFn {
+ pub get_physical_device_video_capabilities_khr: PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR,
+ pub get_physical_device_video_format_properties_khr:
+ PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR,
+ pub create_video_session_khr: PFN_vkCreateVideoSessionKHR,
+ pub destroy_video_session_khr: PFN_vkDestroyVideoSessionKHR,
+ pub get_video_session_memory_requirements_khr: PFN_vkGetVideoSessionMemoryRequirementsKHR,
+ pub bind_video_session_memory_khr: PFN_vkBindVideoSessionMemoryKHR,
+ pub create_video_session_parameters_khr: PFN_vkCreateVideoSessionParametersKHR,
+ pub update_video_session_parameters_khr: PFN_vkUpdateVideoSessionParametersKHR,
+ pub destroy_video_session_parameters_khr: PFN_vkDestroyVideoSessionParametersKHR,
+ pub cmd_begin_video_coding_khr: PFN_vkCmdBeginVideoCodingKHR,
+ pub cmd_end_video_coding_khr: PFN_vkCmdEndVideoCodingKHR,
+ pub cmd_control_video_coding_khr: PFN_vkCmdControlVideoCodingKHR,
+}
+unsafe impl Send for KhrVideoQueueFn {}
+unsafe impl Sync for KhrVideoQueueFn {}
+impl KhrVideoQueueFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_physical_device_video_capabilities_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_video_capabilities_khr(
+ _physical_device: PhysicalDevice,
+ _p_video_profile: *const VideoProfileInfoKHR,
+ _p_capabilities: *mut VideoCapabilitiesKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_video_capabilities_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceVideoCapabilitiesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_video_capabilities_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_video_format_properties_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_video_format_properties_khr(
+ _physical_device: PhysicalDevice,
+ _p_video_format_info: *const PhysicalDeviceVideoFormatInfoKHR,
+ _p_video_format_property_count: *mut u32,
+ _p_video_format_properties: *mut VideoFormatPropertiesKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_video_format_properties_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceVideoFormatPropertiesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_video_format_properties_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_video_session_khr: unsafe {
+ unsafe extern "system" fn create_video_session_khr(
+ _device: Device,
+ _p_create_info: *const VideoSessionCreateInfoKHR,
+ _p_allocator: *const AllocationCallbacks,
+ _p_video_session: *mut VideoSessionKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_video_session_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateVideoSessionKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_video_session_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_video_session_khr: unsafe {
+ unsafe extern "system" fn destroy_video_session_khr(
+ _device: Device,
+ _video_session: VideoSessionKHR,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_video_session_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyVideoSessionKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_video_session_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_video_session_memory_requirements_khr: unsafe {
+ unsafe extern "system" fn get_video_session_memory_requirements_khr(
+ _device: Device,
+ _video_session: VideoSessionKHR,
+ _p_memory_requirements_count: *mut u32,
+ _p_memory_requirements: *mut VideoSessionMemoryRequirementsKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_video_session_memory_requirements_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetVideoSessionMemoryRequirementsKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_video_session_memory_requirements_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ bind_video_session_memory_khr: unsafe {
+ unsafe extern "system" fn bind_video_session_memory_khr(
+ _device: Device,
+ _video_session: VideoSessionKHR,
+ _bind_session_memory_info_count: u32,
+ _p_bind_session_memory_infos: *const BindVideoSessionMemoryInfoKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(bind_video_session_memory_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkBindVideoSessionMemoryKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ bind_video_session_memory_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_video_session_parameters_khr: unsafe {
+ unsafe extern "system" fn create_video_session_parameters_khr(
+ _device: Device,
+ _p_create_info: *const VideoSessionParametersCreateInfoKHR,
+ _p_allocator: *const AllocationCallbacks,
+ _p_video_session_parameters: *mut VideoSessionParametersKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_video_session_parameters_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreateVideoSessionParametersKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_video_session_parameters_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ update_video_session_parameters_khr: unsafe {
+ unsafe extern "system" fn update_video_session_parameters_khr(
+ _device: Device,
+ _video_session_parameters: VideoSessionParametersKHR,
+ _p_update_info: *const VideoSessionParametersUpdateInfoKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(update_video_session_parameters_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkUpdateVideoSessionParametersKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ update_video_session_parameters_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_video_session_parameters_khr: unsafe {
+ unsafe extern "system" fn destroy_video_session_parameters_khr(
+ _device: Device,
+ _video_session_parameters: VideoSessionParametersKHR,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_video_session_parameters_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkDestroyVideoSessionParametersKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_video_session_parameters_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_begin_video_coding_khr: unsafe {
+ unsafe extern "system" fn cmd_begin_video_coding_khr(
+ _command_buffer: CommandBuffer,
+ _p_begin_info: *const VideoBeginCodingInfoKHR,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_begin_video_coding_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginVideoCodingKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_begin_video_coding_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_end_video_coding_khr: unsafe {
+ unsafe extern "system" fn cmd_end_video_coding_khr(
+ _command_buffer: CommandBuffer,
+ _p_end_coding_info: *const VideoEndCodingInfoKHR,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_end_video_coding_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdEndVideoCodingKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_end_video_coding_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_control_video_coding_khr: unsafe {
+ unsafe extern "system" fn cmd_control_video_coding_khr(
+ _command_buffer: CommandBuffer,
+ _p_coding_control_info: *const VideoCodingControlInfoKHR,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_control_video_coding_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdControlVideoCodingKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_control_video_coding_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_video_queue'"]
+impl ObjectType {
+ #[doc = "VkVideoSessionKHR"]
+ pub const VIDEO_SESSION_KHR: Self = Self(1_000_023_000);
+ #[doc = "VkVideoSessionParametersKHR"]
+ pub const VIDEO_SESSION_PARAMETERS_KHR: Self = Self(1_000_023_001);
+}
+#[doc = "Generated from 'VK_KHR_video_queue'"]
+impl QueryResultFlags {
+ pub const WITH_STATUS_KHR: Self = Self(0b1_0000);
+}
+#[doc = "Generated from 'VK_KHR_video_queue'"]
+impl QueryType {
+ pub const RESULT_STATUS_ONLY_KHR: Self = Self(1_000_023_000);
+}
+#[doc = "Generated from 'VK_KHR_video_queue'"]
+impl Result {
+ pub const ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR: Self = Self(-1_000_023_000);
+ pub const ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR: Self = Self(-1_000_023_001);
+ pub const ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR: Self = Self(-1_000_023_002);
+ pub const ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR: Self = Self(-1_000_023_003);
+ pub const ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR: Self = Self(-1_000_023_004);
+ pub const ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR: Self = Self(-1_000_023_005);
+}
+#[doc = "Generated from 'VK_KHR_video_queue'"]
+impl StructureType {
+ pub const VIDEO_PROFILE_INFO_KHR: Self = Self(1_000_023_000);
+ pub const VIDEO_CAPABILITIES_KHR: Self = Self(1_000_023_001);
+ pub const VIDEO_PICTURE_RESOURCE_INFO_KHR: Self = Self(1_000_023_002);
+ pub const VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR: Self = Self(1_000_023_003);
+ pub const BIND_VIDEO_SESSION_MEMORY_INFO_KHR: Self = Self(1_000_023_004);
+ pub const VIDEO_SESSION_CREATE_INFO_KHR: Self = Self(1_000_023_005);
+ pub const VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR: Self = Self(1_000_023_006);
+ pub const VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR: Self = Self(1_000_023_007);
+ pub const VIDEO_BEGIN_CODING_INFO_KHR: Self = Self(1_000_023_008);
+ pub const VIDEO_END_CODING_INFO_KHR: Self = Self(1_000_023_009);
+ pub const VIDEO_CODING_CONTROL_INFO_KHR: Self = Self(1_000_023_010);
+ pub const VIDEO_REFERENCE_SLOT_INFO_KHR: Self = Self(1_000_023_011);
+ pub const QUEUE_FAMILY_VIDEO_PROPERTIES_KHR: Self = Self(1_000_023_012);
+ pub const VIDEO_PROFILE_LIST_INFO_KHR: Self = Self(1_000_023_013);
+ pub const PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR: Self = Self(1_000_023_014);
+ pub const VIDEO_FORMAT_PROPERTIES_KHR: Self = Self(1_000_023_015);
+ pub const QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR: Self = Self(1_000_023_016);
+}
+impl KhrVideoDecodeQueueFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_video_decode_queue\0") }
+ }
+ pub const SPEC_VERSION: u32 = 7u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDecodeVideoKHR = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ p_decode_info: *const VideoDecodeInfoKHR,
+);
+#[derive(Clone)]
+pub struct KhrVideoDecodeQueueFn {
+ pub cmd_decode_video_khr: PFN_vkCmdDecodeVideoKHR,
+}
+unsafe impl Send for KhrVideoDecodeQueueFn {}
+unsafe impl Sync for KhrVideoDecodeQueueFn {}
+impl KhrVideoDecodeQueueFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_decode_video_khr: unsafe {
+ unsafe extern "system" fn cmd_decode_video_khr(
+ _command_buffer: CommandBuffer,
+ _p_decode_info: *const VideoDecodeInfoKHR,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_decode_video_khr)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDecodeVideoKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_decode_video_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_video_decode_queue'"]
+impl AccessFlags2 {
+ pub const VIDEO_DECODE_READ_KHR: Self = Self(0b1000_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const VIDEO_DECODE_WRITE_KHR: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_KHR_video_decode_queue'"]
+impl BufferUsageFlags {
+ pub const VIDEO_DECODE_SRC_KHR: Self = Self(0b10_0000_0000_0000);
+ pub const VIDEO_DECODE_DST_KHR: Self = Self(0b100_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_KHR_video_decode_queue'"]
+impl FormatFeatureFlags {
+ pub const VIDEO_DECODE_OUTPUT_KHR: Self = Self(0b10_0000_0000_0000_0000_0000_0000);
+ pub const VIDEO_DECODE_DPB_KHR: Self = Self(0b100_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_KHR_video_decode_queue'"]
+impl FormatFeatureFlags2 {
+ pub const VIDEO_DECODE_OUTPUT_KHR: Self = Self(0b10_0000_0000_0000_0000_0000_0000);
+ pub const VIDEO_DECODE_DPB_KHR: Self = Self(0b100_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_KHR_video_decode_queue'"]
+impl ImageLayout {
+ pub const VIDEO_DECODE_DST_KHR: Self = Self(1_000_024_000);
+ pub const VIDEO_DECODE_SRC_KHR: Self = Self(1_000_024_001);
+ pub const VIDEO_DECODE_DPB_KHR: Self = Self(1_000_024_002);
+}
+#[doc = "Generated from 'VK_KHR_video_decode_queue'"]
+impl ImageUsageFlags {
+ pub const VIDEO_DECODE_DST_KHR: Self = Self(0b100_0000_0000);
+ pub const VIDEO_DECODE_SRC_KHR: Self = Self(0b1000_0000_0000);
+ pub const VIDEO_DECODE_DPB_KHR: Self = Self(0b1_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_KHR_video_decode_queue'"]
+impl PipelineStageFlags2 {
+ pub const VIDEO_DECODE_KHR: Self = Self(0b100_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_KHR_video_decode_queue'"]
+impl QueueFlags {
+ pub const VIDEO_DECODE_KHR: Self = Self(0b10_0000);
+}
+#[doc = "Generated from 'VK_KHR_video_decode_queue'"]
+impl StructureType {
+ pub const VIDEO_DECODE_INFO_KHR: Self = Self(1_000_024_000);
+ pub const VIDEO_DECODE_CAPABILITIES_KHR: Self = Self(1_000_024_001);
+ pub const VIDEO_DECODE_USAGE_INFO_KHR: Self = Self(1_000_024_002);
+}
+impl AmdGcnShaderFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_gcn_shader\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct AmdGcnShaderFn {}
+unsafe impl Send for AmdGcnShaderFn {}
+unsafe impl Sync for AmdGcnShaderFn {}
+impl AmdGcnShaderFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl NvDedicatedAllocationFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_dedicated_allocation\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct NvDedicatedAllocationFn {}
+unsafe impl Send for NvDedicatedAllocationFn {}
+unsafe impl Sync for NvDedicatedAllocationFn {}
+impl NvDedicatedAllocationFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_NV_dedicated_allocation'"]
+impl StructureType {
+ pub const DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV: Self = Self(1_000_026_000);
+ pub const DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV: Self = Self(1_000_026_001);
+ pub const DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV: Self = Self(1_000_026_002);
+}
+impl ExtExtension28Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_28\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension28Fn {}
+unsafe impl Send for ExtExtension28Fn {}
+unsafe impl Sync for ExtExtension28Fn {}
+impl ExtExtension28Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtTransformFeedbackFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_transform_feedback\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBindTransformFeedbackBuffersEXT = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ first_binding: u32,
+ binding_count: u32,
+ p_buffers: *const Buffer,
+ p_offsets: *const DeviceSize,
+ p_sizes: *const DeviceSize,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBeginTransformFeedbackEXT = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ first_counter_buffer: u32,
+ counter_buffer_count: u32,
+ p_counter_buffers: *const Buffer,
+ p_counter_buffer_offsets: *const DeviceSize,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdEndTransformFeedbackEXT = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ first_counter_buffer: u32,
+ counter_buffer_count: u32,
+ p_counter_buffers: *const Buffer,
+ p_counter_buffer_offsets: *const DeviceSize,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBeginQueryIndexedEXT = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ query_pool: QueryPool,
+ query: u32,
+ flags: QueryControlFlags,
+ index: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdEndQueryIndexedEXT = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ query_pool: QueryPool,
+ query: u32,
+ index: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDrawIndirectByteCountEXT = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ instance_count: u32,
+ first_instance: u32,
+ counter_buffer: Buffer,
+ counter_buffer_offset: DeviceSize,
+ counter_offset: u32,
+ vertex_stride: u32,
+);
+#[derive(Clone)]
+pub struct ExtTransformFeedbackFn {
+ pub cmd_bind_transform_feedback_buffers_ext: PFN_vkCmdBindTransformFeedbackBuffersEXT,
+ pub cmd_begin_transform_feedback_ext: PFN_vkCmdBeginTransformFeedbackEXT,
+ pub cmd_end_transform_feedback_ext: PFN_vkCmdEndTransformFeedbackEXT,
+ pub cmd_begin_query_indexed_ext: PFN_vkCmdBeginQueryIndexedEXT,
+ pub cmd_end_query_indexed_ext: PFN_vkCmdEndQueryIndexedEXT,
+ pub cmd_draw_indirect_byte_count_ext: PFN_vkCmdDrawIndirectByteCountEXT,
+}
+unsafe impl Send for ExtTransformFeedbackFn {}
+unsafe impl Sync for ExtTransformFeedbackFn {}
+impl ExtTransformFeedbackFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_bind_transform_feedback_buffers_ext: unsafe {
+ unsafe extern "system" fn cmd_bind_transform_feedback_buffers_ext(
+ _command_buffer: CommandBuffer,
+ _first_binding: u32,
+ _binding_count: u32,
+ _p_buffers: *const Buffer,
+ _p_offsets: *const DeviceSize,
+ _p_sizes: *const DeviceSize,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_bind_transform_feedback_buffers_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdBindTransformFeedbackBuffersEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_bind_transform_feedback_buffers_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_begin_transform_feedback_ext: unsafe {
+ unsafe extern "system" fn cmd_begin_transform_feedback_ext(
+ _command_buffer: CommandBuffer,
+ _first_counter_buffer: u32,
+ _counter_buffer_count: u32,
+ _p_counter_buffers: *const Buffer,
+ _p_counter_buffer_offsets: *const DeviceSize,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_begin_transform_feedback_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdBeginTransformFeedbackEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_begin_transform_feedback_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_end_transform_feedback_ext: unsafe {
+ unsafe extern "system" fn cmd_end_transform_feedback_ext(
+ _command_buffer: CommandBuffer,
+ _first_counter_buffer: u32,
+ _counter_buffer_count: u32,
+ _p_counter_buffers: *const Buffer,
+ _p_counter_buffer_offsets: *const DeviceSize,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_end_transform_feedback_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdEndTransformFeedbackEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_end_transform_feedback_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_begin_query_indexed_ext: unsafe {
+ unsafe extern "system" fn cmd_begin_query_indexed_ext(
+ _command_buffer: CommandBuffer,
+ _query_pool: QueryPool,
+ _query: u32,
+ _flags: QueryControlFlags,
+ _index: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_begin_query_indexed_ext)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginQueryIndexedEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_begin_query_indexed_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_end_query_indexed_ext: unsafe {
+ unsafe extern "system" fn cmd_end_query_indexed_ext(
+ _command_buffer: CommandBuffer,
+ _query_pool: QueryPool,
+ _query: u32,
+ _index: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_end_query_indexed_ext)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdEndQueryIndexedEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_end_query_indexed_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_draw_indirect_byte_count_ext: unsafe {
+ unsafe extern "system" fn cmd_draw_indirect_byte_count_ext(
+ _command_buffer: CommandBuffer,
+ _instance_count: u32,
+ _first_instance: u32,
+ _counter_buffer: Buffer,
+ _counter_buffer_offset: DeviceSize,
+ _counter_offset: u32,
+ _vertex_stride: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_draw_indirect_byte_count_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdDrawIndirectByteCountEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_draw_indirect_byte_count_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_transform_feedback'"]
+impl AccessFlags {
+ pub const TRANSFORM_FEEDBACK_WRITE_EXT: Self = Self(0b10_0000_0000_0000_0000_0000_0000);
+ pub const TRANSFORM_FEEDBACK_COUNTER_READ_EXT: Self = Self(0b100_0000_0000_0000_0000_0000_0000);
+ pub const TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT: Self =
+ Self(0b1000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_transform_feedback'"]
+impl BufferUsageFlags {
+ pub const TRANSFORM_FEEDBACK_BUFFER_EXT: Self = Self(0b1000_0000_0000);
+ pub const TRANSFORM_FEEDBACK_COUNTER_BUFFER_EXT: Self = Self(0b1_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_transform_feedback'"]
+impl PipelineStageFlags {
+ pub const TRANSFORM_FEEDBACK_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_transform_feedback'"]
+impl QueryType {
+ pub const TRANSFORM_FEEDBACK_STREAM_EXT: Self = Self(1_000_028_004);
+}
+#[doc = "Generated from 'VK_EXT_transform_feedback'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT: Self = Self(1_000_028_000);
+ pub const PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT: Self = Self(1_000_028_001);
+ pub const PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT: Self = Self(1_000_028_002);
+}
+impl NvxBinaryImportFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NVX_binary_import\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateCuModuleNVX = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const CuModuleCreateInfoNVX,
+ p_allocator: *const AllocationCallbacks,
+ p_module: *mut CuModuleNVX,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateCuFunctionNVX = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const CuFunctionCreateInfoNVX,
+ p_allocator: *const AllocationCallbacks,
+ p_function: *mut CuFunctionNVX,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyCuModuleNVX = unsafe extern "system" fn(
+ device: Device,
+ module: CuModuleNVX,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyCuFunctionNVX = unsafe extern "system" fn(
+ device: Device,
+ function: CuFunctionNVX,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCuLaunchKernelNVX =
+ unsafe extern "system" fn(command_buffer: CommandBuffer, p_launch_info: *const CuLaunchInfoNVX);
+#[derive(Clone)]
+pub struct NvxBinaryImportFn {
+ pub create_cu_module_nvx: PFN_vkCreateCuModuleNVX,
+ pub create_cu_function_nvx: PFN_vkCreateCuFunctionNVX,
+ pub destroy_cu_module_nvx: PFN_vkDestroyCuModuleNVX,
+ pub destroy_cu_function_nvx: PFN_vkDestroyCuFunctionNVX,
+ pub cmd_cu_launch_kernel_nvx: PFN_vkCmdCuLaunchKernelNVX,
+}
+unsafe impl Send for NvxBinaryImportFn {}
+unsafe impl Sync for NvxBinaryImportFn {}
+impl NvxBinaryImportFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_cu_module_nvx: unsafe {
+ unsafe extern "system" fn create_cu_module_nvx(
+ _device: Device,
+ _p_create_info: *const CuModuleCreateInfoNVX,
+ _p_allocator: *const AllocationCallbacks,
+ _p_module: *mut CuModuleNVX,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_cu_module_nvx)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateCuModuleNVX\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_cu_module_nvx
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_cu_function_nvx: unsafe {
+ unsafe extern "system" fn create_cu_function_nvx(
+ _device: Device,
+ _p_create_info: *const CuFunctionCreateInfoNVX,
+ _p_allocator: *const AllocationCallbacks,
+ _p_function: *mut CuFunctionNVX,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_cu_function_nvx)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateCuFunctionNVX\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_cu_function_nvx
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_cu_module_nvx: unsafe {
+ unsafe extern "system" fn destroy_cu_module_nvx(
+ _device: Device,
+ _module: CuModuleNVX,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_cu_module_nvx)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyCuModuleNVX\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_cu_module_nvx
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_cu_function_nvx: unsafe {
+ unsafe extern "system" fn destroy_cu_function_nvx(
+ _device: Device,
+ _function: CuFunctionNVX,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_cu_function_nvx)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyCuFunctionNVX\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_cu_function_nvx
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_cu_launch_kernel_nvx: unsafe {
+ unsafe extern "system" fn cmd_cu_launch_kernel_nvx(
+ _command_buffer: CommandBuffer,
+ _p_launch_info: *const CuLaunchInfoNVX,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_cu_launch_kernel_nvx)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdCuLaunchKernelNVX\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_cu_launch_kernel_nvx
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_NVX_binary_import'"]
+impl DebugReportObjectTypeEXT {
+ pub const CU_MODULE_NVX: Self = Self(1_000_029_000);
+ pub const CU_FUNCTION_NVX: Self = Self(1_000_029_001);
+}
+#[doc = "Generated from 'VK_NVX_binary_import'"]
+impl ObjectType {
+ pub const CU_MODULE_NVX: Self = Self(1_000_029_000);
+ pub const CU_FUNCTION_NVX: Self = Self(1_000_029_001);
+}
+#[doc = "Generated from 'VK_NVX_binary_import'"]
+impl StructureType {
+ pub const CU_MODULE_CREATE_INFO_NVX: Self = Self(1_000_029_000);
+ pub const CU_FUNCTION_CREATE_INFO_NVX: Self = Self(1_000_029_001);
+ pub const CU_LAUNCH_INFO_NVX: Self = Self(1_000_029_002);
+}
+impl NvxImageViewHandleFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NVX_image_view_handle\0") }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetImageViewHandleNVX =
+ unsafe extern "system" fn(device: Device, p_info: *const ImageViewHandleInfoNVX) -> u32;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetImageViewAddressNVX = unsafe extern "system" fn(
+ device: Device,
+ image_view: ImageView,
+ p_properties: *mut ImageViewAddressPropertiesNVX,
+) -> Result;
+#[derive(Clone)]
+pub struct NvxImageViewHandleFn {
+ pub get_image_view_handle_nvx: PFN_vkGetImageViewHandleNVX,
+ pub get_image_view_address_nvx: PFN_vkGetImageViewAddressNVX,
+}
+unsafe impl Send for NvxImageViewHandleFn {}
+unsafe impl Sync for NvxImageViewHandleFn {}
+impl NvxImageViewHandleFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_image_view_handle_nvx: unsafe {
+ unsafe extern "system" fn get_image_view_handle_nvx(
+ _device: Device,
+ _p_info: *const ImageViewHandleInfoNVX,
+ ) -> u32 {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_image_view_handle_nvx)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetImageViewHandleNVX\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_image_view_handle_nvx
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_image_view_address_nvx: unsafe {
+ unsafe extern "system" fn get_image_view_address_nvx(
+ _device: Device,
+ _image_view: ImageView,
+ _p_properties: *mut ImageViewAddressPropertiesNVX,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_image_view_address_nvx)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetImageViewAddressNVX\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_image_view_address_nvx
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_NVX_image_view_handle'"]
+impl StructureType {
+ pub const IMAGE_VIEW_HANDLE_INFO_NVX: Self = Self(1_000_030_000);
+ pub const IMAGE_VIEW_ADDRESS_PROPERTIES_NVX: Self = Self(1_000_030_001);
+}
+impl AmdExtension32Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_32\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension32Fn {}
+unsafe impl Send for AmdExtension32Fn {}
+unsafe impl Sync for AmdExtension32Fn {}
+impl AmdExtension32Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdExtension33Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_33\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension33Fn {}
+unsafe impl Send for AmdExtension33Fn {}
+unsafe impl Sync for AmdExtension33Fn {}
+impl AmdExtension33Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdDrawIndirectCountFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_draw_indirect_count\0") }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDrawIndirectCount = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ buffer: Buffer,
+ offset: DeviceSize,
+ count_buffer: Buffer,
+ count_buffer_offset: DeviceSize,
+ max_draw_count: u32,
+ stride: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDrawIndexedIndirectCount = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ buffer: Buffer,
+ offset: DeviceSize,
+ count_buffer: Buffer,
+ count_buffer_offset: DeviceSize,
+ max_draw_count: u32,
+ stride: u32,
+);
+#[derive(Clone)]
+pub struct AmdDrawIndirectCountFn {
+ pub cmd_draw_indirect_count_amd: PFN_vkCmdDrawIndirectCount,
+ pub cmd_draw_indexed_indirect_count_amd: PFN_vkCmdDrawIndexedIndirectCount,
+}
+unsafe impl Send for AmdDrawIndirectCountFn {}
+unsafe impl Sync for AmdDrawIndirectCountFn {}
+impl AmdDrawIndirectCountFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_draw_indirect_count_amd: unsafe {
+ unsafe extern "system" fn cmd_draw_indirect_count_amd(
+ _command_buffer: CommandBuffer,
+ _buffer: Buffer,
+ _offset: DeviceSize,
+ _count_buffer: Buffer,
+ _count_buffer_offset: DeviceSize,
+ _max_draw_count: u32,
+ _stride: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_draw_indirect_count_amd)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndirectCountAMD\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_draw_indirect_count_amd
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_draw_indexed_indirect_count_amd: unsafe {
+ unsafe extern "system" fn cmd_draw_indexed_indirect_count_amd(
+ _command_buffer: CommandBuffer,
+ _buffer: Buffer,
+ _offset: DeviceSize,
+ _count_buffer: Buffer,
+ _count_buffer_offset: DeviceSize,
+ _max_draw_count: u32,
+ _stride: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_draw_indexed_indirect_count_amd)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdDrawIndexedIndirectCountAMD\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_draw_indexed_indirect_count_amd
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+impl AmdExtension35Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_35\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension35Fn {}
+unsafe impl Send for AmdExtension35Fn {}
+unsafe impl Sync for AmdExtension35Fn {}
+impl AmdExtension35Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdNegativeViewportHeightFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_negative_viewport_height\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct AmdNegativeViewportHeightFn {}
+unsafe impl Send for AmdNegativeViewportHeightFn {}
+unsafe impl Sync for AmdNegativeViewportHeightFn {}
+impl AmdNegativeViewportHeightFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdGpuShaderHalfFloatFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_gpu_shader_half_float\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct AmdGpuShaderHalfFloatFn {}
+unsafe impl Send for AmdGpuShaderHalfFloatFn {}
+unsafe impl Sync for AmdGpuShaderHalfFloatFn {}
+impl AmdGpuShaderHalfFloatFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdShaderBallotFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_ballot\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct AmdShaderBallotFn {}
+unsafe impl Send for AmdShaderBallotFn {}
+unsafe impl Sync for AmdShaderBallotFn {}
+impl AmdShaderBallotFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtVideoEncodeH264Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_video_encode_h264\0") }
+ }
+ pub const SPEC_VERSION: u32 = 9u32;
+}
+#[derive(Clone)]
+pub struct ExtVideoEncodeH264Fn {}
+unsafe impl Send for ExtVideoEncodeH264Fn {}
+unsafe impl Sync for ExtVideoEncodeH264Fn {}
+impl ExtVideoEncodeH264Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_video_encode_h264'"]
+impl StructureType {
+ pub const VIDEO_ENCODE_H264_CAPABILITIES_EXT: Self = Self(1_000_038_000);
+ pub const VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT: Self = Self(1_000_038_001);
+ pub const VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT: Self = Self(1_000_038_002);
+ pub const VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT: Self = Self(1_000_038_003);
+ pub const VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT: Self = Self(1_000_038_004);
+ pub const VIDEO_ENCODE_H264_NALU_SLICE_INFO_EXT: Self = Self(1_000_038_005);
+ pub const VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_INFO_EXT: Self = Self(1_000_038_006);
+ pub const VIDEO_ENCODE_H264_PROFILE_INFO_EXT: Self = Self(1_000_038_007);
+ pub const VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT: Self = Self(1_000_038_008);
+ pub const VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT: Self = Self(1_000_038_009);
+ pub const VIDEO_ENCODE_H264_REFERENCE_LISTS_INFO_EXT: Self = Self(1_000_038_010);
+}
+#[doc = "Generated from 'VK_EXT_video_encode_h264'"]
+impl VideoCodecOperationFlagsKHR {
+ pub const ENCODE_H264_EXT: Self = Self(0b1_0000_0000_0000_0000);
+}
+impl ExtVideoEncodeH265Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_video_encode_h265\0") }
+ }
+ pub const SPEC_VERSION: u32 = 9u32;
+}
+#[derive(Clone)]
+pub struct ExtVideoEncodeH265Fn {}
+unsafe impl Send for ExtVideoEncodeH265Fn {}
+unsafe impl Sync for ExtVideoEncodeH265Fn {}
+impl ExtVideoEncodeH265Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_video_encode_h265'"]
+impl StructureType {
+ pub const VIDEO_ENCODE_H265_CAPABILITIES_EXT: Self = Self(1_000_039_000);
+ pub const VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT: Self = Self(1_000_039_001);
+ pub const VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT: Self = Self(1_000_039_002);
+ pub const VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT: Self = Self(1_000_039_003);
+ pub const VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT: Self = Self(1_000_039_004);
+ pub const VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_EXT: Self = Self(1_000_039_005);
+ pub const VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_INFO_EXT: Self = Self(1_000_039_006);
+ pub const VIDEO_ENCODE_H265_PROFILE_INFO_EXT: Self = Self(1_000_039_007);
+ pub const VIDEO_ENCODE_H265_REFERENCE_LISTS_INFO_EXT: Self = Self(1_000_039_008);
+ pub const VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT: Self = Self(1_000_039_009);
+ pub const VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT: Self = Self(1_000_039_010);
+}
+#[doc = "Generated from 'VK_EXT_video_encode_h265'"]
+impl VideoCodecOperationFlagsKHR {
+ pub const ENCODE_H265_EXT: Self = Self(0b10_0000_0000_0000_0000);
+}
+impl KhrVideoDecodeH264Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_video_decode_h264\0") }
+ }
+ pub const SPEC_VERSION: u32 = 8u32;
+}
+#[derive(Clone)]
+pub struct KhrVideoDecodeH264Fn {}
+unsafe impl Send for KhrVideoDecodeH264Fn {}
+unsafe impl Sync for KhrVideoDecodeH264Fn {}
+impl KhrVideoDecodeH264Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_video_decode_h264'"]
+impl StructureType {
+ pub const VIDEO_DECODE_H264_CAPABILITIES_KHR: Self = Self(1_000_040_000);
+ pub const VIDEO_DECODE_H264_PICTURE_INFO_KHR: Self = Self(1_000_040_001);
+ pub const VIDEO_DECODE_H264_PROFILE_INFO_KHR: Self = Self(1_000_040_003);
+ pub const VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR: Self = Self(1_000_040_004);
+ pub const VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR: Self = Self(1_000_040_005);
+ pub const VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR: Self = Self(1_000_040_006);
+}
+#[doc = "Generated from 'VK_KHR_video_decode_h264'"]
+impl VideoCodecOperationFlagsKHR {
+ pub const DECODE_H264: Self = Self(0b1);
+}
+impl AmdTextureGatherBiasLodFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_texture_gather_bias_lod\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct AmdTextureGatherBiasLodFn {}
+unsafe impl Send for AmdTextureGatherBiasLodFn {}
+unsafe impl Sync for AmdTextureGatherBiasLodFn {}
+impl AmdTextureGatherBiasLodFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_AMD_texture_gather_bias_lod'"]
+impl StructureType {
+ pub const TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD: Self = Self(1_000_041_000);
+}
+impl AmdShaderInfoFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_info\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetShaderInfoAMD = unsafe extern "system" fn(
+ device: Device,
+ pipeline: Pipeline,
+ shader_stage: ShaderStageFlags,
+ info_type: ShaderInfoTypeAMD,
+ p_info_size: *mut usize,
+ p_info: *mut c_void,
+) -> Result;
+#[derive(Clone)]
+pub struct AmdShaderInfoFn {
+ pub get_shader_info_amd: PFN_vkGetShaderInfoAMD,
+}
+unsafe impl Send for AmdShaderInfoFn {}
+unsafe impl Sync for AmdShaderInfoFn {}
+impl AmdShaderInfoFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_shader_info_amd: unsafe {
+ unsafe extern "system" fn get_shader_info_amd(
+ _device: Device,
+ _pipeline: Pipeline,
+ _shader_stage: ShaderStageFlags,
+ _info_type: ShaderInfoTypeAMD,
+ _p_info_size: *mut usize,
+ _p_info: *mut c_void,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(get_shader_info_amd)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetShaderInfoAMD\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_shader_info_amd
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+impl AmdExtension44Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_44\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension44Fn {}
+unsafe impl Send for AmdExtension44Fn {}
+unsafe impl Sync for AmdExtension44Fn {}
+impl AmdExtension44Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl KhrDynamicRenderingFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_dynamic_rendering\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBeginRendering = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ p_rendering_info: *const RenderingInfo,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdEndRendering = unsafe extern "system" fn(command_buffer: CommandBuffer);
+#[derive(Clone)]
+pub struct KhrDynamicRenderingFn {
+ pub cmd_begin_rendering_khr: PFN_vkCmdBeginRendering,
+ pub cmd_end_rendering_khr: PFN_vkCmdEndRendering,
+}
+unsafe impl Send for KhrDynamicRenderingFn {}
+unsafe impl Sync for KhrDynamicRenderingFn {}
+impl KhrDynamicRenderingFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_begin_rendering_khr: unsafe {
+ unsafe extern "system" fn cmd_begin_rendering_khr(
+ _command_buffer: CommandBuffer,
+ _p_rendering_info: *const RenderingInfo,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_begin_rendering_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginRenderingKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_begin_rendering_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_end_rendering_khr: unsafe {
+ unsafe extern "system" fn cmd_end_rendering_khr(_command_buffer: CommandBuffer) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_end_rendering_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdEndRenderingKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_end_rendering_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_dynamic_rendering'"]
+impl AttachmentStoreOp {
+ pub const NONE_KHR: Self = Self::NONE;
+}
+#[doc = "Generated from 'VK_KHR_dynamic_rendering'"]
+impl PipelineCreateFlags {
+ pub const RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_KHR: Self =
+ Self(0b10_0000_0000_0000_0000_0000);
+ pub const RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_EXT: Self =
+ Self(0b100_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_KHR_dynamic_rendering'"]
+impl StructureType {
+ pub const RENDERING_INFO_KHR: Self = Self::RENDERING_INFO;
+ pub const RENDERING_ATTACHMENT_INFO_KHR: Self = Self::RENDERING_ATTACHMENT_INFO;
+ pub const PIPELINE_RENDERING_CREATE_INFO_KHR: Self = Self::PIPELINE_RENDERING_CREATE_INFO;
+ pub const PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR: Self =
+ Self::PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES;
+ pub const COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR: Self =
+ Self::COMMAND_BUFFER_INHERITANCE_RENDERING_INFO;
+ pub const RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR: Self = Self(1_000_044_006);
+ pub const RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT: Self = Self(1_000_044_007);
+ pub const ATTACHMENT_SAMPLE_COUNT_INFO_AMD: Self = Self(1_000_044_008);
+ pub const ATTACHMENT_SAMPLE_COUNT_INFO_NV: Self = Self::ATTACHMENT_SAMPLE_COUNT_INFO_AMD;
+ pub const MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX: Self = Self(1_000_044_009);
+}
+impl AmdExtension46Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_46\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension46Fn {}
+unsafe impl Send for AmdExtension46Fn {}
+unsafe impl Sync for AmdExtension46Fn {}
+impl AmdExtension46Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdShaderImageLoadStoreLodFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_image_load_store_lod\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct AmdShaderImageLoadStoreLodFn {}
+unsafe impl Send for AmdShaderImageLoadStoreLodFn {}
+unsafe impl Sync for AmdShaderImageLoadStoreLodFn {}
+impl AmdShaderImageLoadStoreLodFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl NvxExtension48Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NVX_extension_48\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvxExtension48Fn {}
+unsafe impl Send for NvxExtension48Fn {}
+unsafe impl Sync for NvxExtension48Fn {}
+impl NvxExtension48Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl GoogleExtension49Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_extension_49\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct GoogleExtension49Fn {}
+unsafe impl Send for GoogleExtension49Fn {}
+unsafe impl Sync for GoogleExtension49Fn {}
+impl GoogleExtension49Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl GgpStreamDescriptorSurfaceFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GGP_stream_descriptor_surface\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateStreamDescriptorSurfaceGGP = unsafe extern "system" fn(
+ instance: Instance,
+ p_create_info: *const StreamDescriptorSurfaceCreateInfoGGP,
+ p_allocator: *const AllocationCallbacks,
+ p_surface: *mut SurfaceKHR,
+) -> Result;
+#[derive(Clone)]
+pub struct GgpStreamDescriptorSurfaceFn {
+ pub create_stream_descriptor_surface_ggp: PFN_vkCreateStreamDescriptorSurfaceGGP,
+}
+unsafe impl Send for GgpStreamDescriptorSurfaceFn {}
+unsafe impl Sync for GgpStreamDescriptorSurfaceFn {}
+impl GgpStreamDescriptorSurfaceFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_stream_descriptor_surface_ggp: unsafe {
+ unsafe extern "system" fn create_stream_descriptor_surface_ggp(
+ _instance: Instance,
+ _p_create_info: *const StreamDescriptorSurfaceCreateInfoGGP,
+ _p_allocator: *const AllocationCallbacks,
+ _p_surface: *mut SurfaceKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_stream_descriptor_surface_ggp)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreateStreamDescriptorSurfaceGGP\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_stream_descriptor_surface_ggp
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_GGP_stream_descriptor_surface'"]
+impl StructureType {
+ pub const STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP: Self = Self(1_000_049_000);
+}
+impl NvCornerSampledImageFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_corner_sampled_image\0") }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct NvCornerSampledImageFn {}
+unsafe impl Send for NvCornerSampledImageFn {}
+unsafe impl Sync for NvCornerSampledImageFn {}
+impl NvCornerSampledImageFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_NV_corner_sampled_image'"]
+impl ImageCreateFlags {
+ pub const CORNER_SAMPLED_NV: Self = Self(0b10_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_NV_corner_sampled_image'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV: Self = Self(1_000_050_000);
+}
+impl NvExtension52Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_52\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension52Fn {}
+unsafe impl Send for NvExtension52Fn {}
+unsafe impl Sync for NvExtension52Fn {}
+impl NvExtension52Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl NvExtension53Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_53\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension53Fn {}
+unsafe impl Send for NvExtension53Fn {}
+unsafe impl Sync for NvExtension53Fn {}
+impl NvExtension53Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl KhrMultiviewFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_multiview\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrMultiviewFn {}
+unsafe impl Send for KhrMultiviewFn {}
+unsafe impl Sync for KhrMultiviewFn {}
+impl KhrMultiviewFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_multiview'"]
+impl DependencyFlags {
+ pub const VIEW_LOCAL_KHR: Self = Self::VIEW_LOCAL;
+}
+#[doc = "Generated from 'VK_KHR_multiview'"]
+impl StructureType {
+ pub const RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR: Self = Self::RENDER_PASS_MULTIVIEW_CREATE_INFO;
+ pub const PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR: Self =
+ Self::PHYSICAL_DEVICE_MULTIVIEW_FEATURES;
+ pub const PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR: Self =
+ Self::PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES;
+}
+impl ImgFormatPvrtcFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_IMG_format_pvrtc\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ImgFormatPvrtcFn {}
+unsafe impl Send for ImgFormatPvrtcFn {}
+unsafe impl Sync for ImgFormatPvrtcFn {}
+impl ImgFormatPvrtcFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_IMG_format_pvrtc'"]
+impl Format {
+ pub const PVRTC1_2BPP_UNORM_BLOCK_IMG: Self = Self(1_000_054_000);
+ pub const PVRTC1_4BPP_UNORM_BLOCK_IMG: Self = Self(1_000_054_001);
+ pub const PVRTC2_2BPP_UNORM_BLOCK_IMG: Self = Self(1_000_054_002);
+ pub const PVRTC2_4BPP_UNORM_BLOCK_IMG: Self = Self(1_000_054_003);
+ pub const PVRTC1_2BPP_SRGB_BLOCK_IMG: Self = Self(1_000_054_004);
+ pub const PVRTC1_4BPP_SRGB_BLOCK_IMG: Self = Self(1_000_054_005);
+ pub const PVRTC2_2BPP_SRGB_BLOCK_IMG: Self = Self(1_000_054_006);
+ pub const PVRTC2_4BPP_SRGB_BLOCK_IMG: Self = Self(1_000_054_007);
+}
+impl NvExternalMemoryCapabilitiesFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_external_memory_capabilities\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV =
+ unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ format: Format,
+ ty: ImageType,
+ tiling: ImageTiling,
+ usage: ImageUsageFlags,
+ flags: ImageCreateFlags,
+ external_handle_type: ExternalMemoryHandleTypeFlagsNV,
+ p_external_image_format_properties: *mut ExternalImageFormatPropertiesNV,
+ ) -> Result;
+#[derive(Clone)]
+pub struct NvExternalMemoryCapabilitiesFn {
+ pub get_physical_device_external_image_format_properties_nv:
+ PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV,
+}
+unsafe impl Send for NvExternalMemoryCapabilitiesFn {}
+unsafe impl Sync for NvExternalMemoryCapabilitiesFn {}
+impl NvExternalMemoryCapabilitiesFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_physical_device_external_image_format_properties_nv: unsafe {
+ unsafe extern "system" fn get_physical_device_external_image_format_properties_nv(
+ _physical_device: PhysicalDevice,
+ _format: Format,
+ _ty: ImageType,
+ _tiling: ImageTiling,
+ _usage: ImageUsageFlags,
+ _flags: ImageCreateFlags,
+ _external_handle_type: ExternalMemoryHandleTypeFlagsNV,
+ _p_external_image_format_properties: *mut ExternalImageFormatPropertiesNV,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_external_image_format_properties_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceExternalImageFormatPropertiesNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_external_image_format_properties_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+impl NvExternalMemoryFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_external_memory\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct NvExternalMemoryFn {}
+unsafe impl Send for NvExternalMemoryFn {}
+unsafe impl Sync for NvExternalMemoryFn {}
+impl NvExternalMemoryFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_NV_external_memory'"]
+impl StructureType {
+ pub const EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV: Self = Self(1_000_056_000);
+ pub const EXPORT_MEMORY_ALLOCATE_INFO_NV: Self = Self(1_000_056_001);
+}
+impl NvExternalMemoryWin32Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_external_memory_win32\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetMemoryWin32HandleNV = unsafe extern "system" fn(
+ device: Device,
+ memory: DeviceMemory,
+ handle_type: ExternalMemoryHandleTypeFlagsNV,
+ p_handle: *mut HANDLE,
+) -> Result;
+#[derive(Clone)]
+pub struct NvExternalMemoryWin32Fn {
+ pub get_memory_win32_handle_nv: PFN_vkGetMemoryWin32HandleNV,
+}
+unsafe impl Send for NvExternalMemoryWin32Fn {}
+unsafe impl Sync for NvExternalMemoryWin32Fn {}
+impl NvExternalMemoryWin32Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_memory_win32_handle_nv: unsafe {
+ unsafe extern "system" fn get_memory_win32_handle_nv(
+ _device: Device,
+ _memory: DeviceMemory,
+ _handle_type: ExternalMemoryHandleTypeFlagsNV,
+ _p_handle: *mut HANDLE,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_memory_win32_handle_nv)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetMemoryWin32HandleNV\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_memory_win32_handle_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_NV_external_memory_win32'"]
+impl StructureType {
+ pub const IMPORT_MEMORY_WIN32_HANDLE_INFO_NV: Self = Self(1_000_057_000);
+ pub const EXPORT_MEMORY_WIN32_HANDLE_INFO_NV: Self = Self(1_000_057_001);
+}
+impl NvWin32KeyedMutexFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_win32_keyed_mutex\0") }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct NvWin32KeyedMutexFn {}
+unsafe impl Send for NvWin32KeyedMutexFn {}
+unsafe impl Sync for NvWin32KeyedMutexFn {}
+impl NvWin32KeyedMutexFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_NV_win32_keyed_mutex'"]
+impl StructureType {
+ pub const WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV: Self = Self(1_000_058_000);
+}
+impl KhrGetPhysicalDeviceProperties2Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_KHR_get_physical_device_properties2\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceFeatures2 = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_features: *mut PhysicalDeviceFeatures2,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceProperties2 = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_properties: *mut PhysicalDeviceProperties2,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceFormatProperties2 = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ format: Format,
+ p_format_properties: *mut FormatProperties2,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceImageFormatProperties2 = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_image_format_info: *const PhysicalDeviceImageFormatInfo2,
+ p_image_format_properties: *mut ImageFormatProperties2,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceQueueFamilyProperties2 = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_queue_family_property_count: *mut u32,
+ p_queue_family_properties: *mut QueueFamilyProperties2,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceMemoryProperties2 = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_memory_properties: *mut PhysicalDeviceMemoryProperties2,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_format_info: *const PhysicalDeviceSparseImageFormatInfo2,
+ p_property_count: *mut u32,
+ p_properties: *mut SparseImageFormatProperties2,
+);
+#[derive(Clone)]
+pub struct KhrGetPhysicalDeviceProperties2Fn {
+ pub get_physical_device_features2_khr: PFN_vkGetPhysicalDeviceFeatures2,
+ pub get_physical_device_properties2_khr: PFN_vkGetPhysicalDeviceProperties2,
+ pub get_physical_device_format_properties2_khr: PFN_vkGetPhysicalDeviceFormatProperties2,
+ pub get_physical_device_image_format_properties2_khr:
+ PFN_vkGetPhysicalDeviceImageFormatProperties2,
+ pub get_physical_device_queue_family_properties2_khr:
+ PFN_vkGetPhysicalDeviceQueueFamilyProperties2,
+ pub get_physical_device_memory_properties2_khr: PFN_vkGetPhysicalDeviceMemoryProperties2,
+ pub get_physical_device_sparse_image_format_properties2_khr:
+ PFN_vkGetPhysicalDeviceSparseImageFormatProperties2,
+}
+unsafe impl Send for KhrGetPhysicalDeviceProperties2Fn {}
+unsafe impl Sync for KhrGetPhysicalDeviceProperties2Fn {}
+impl KhrGetPhysicalDeviceProperties2Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_physical_device_features2_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_features2_khr(
+ _physical_device: PhysicalDevice,
+ _p_features: *mut PhysicalDeviceFeatures2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_features2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceFeatures2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_features2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_properties2_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_properties2_khr(
+ _physical_device: PhysicalDevice,
+ _p_properties: *mut PhysicalDeviceProperties2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_properties2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceProperties2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_properties2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_format_properties2_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_format_properties2_khr(
+ _physical_device: PhysicalDevice,
+ _format: Format,
+ _p_format_properties: *mut FormatProperties2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_format_properties2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceFormatProperties2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_format_properties2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_image_format_properties2_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_image_format_properties2_khr(
+ _physical_device: PhysicalDevice,
+ _p_image_format_info: *const PhysicalDeviceImageFormatInfo2,
+ _p_image_format_properties: *mut ImageFormatProperties2,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_image_format_properties2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceImageFormatProperties2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_image_format_properties2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_queue_family_properties2_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_queue_family_properties2_khr(
+ _physical_device: PhysicalDevice,
+ _p_queue_family_property_count: *mut u32,
+ _p_queue_family_properties: *mut QueueFamilyProperties2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_queue_family_properties2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceQueueFamilyProperties2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_queue_family_properties2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_memory_properties2_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_memory_properties2_khr(
+ _physical_device: PhysicalDevice,
+ _p_memory_properties: *mut PhysicalDeviceMemoryProperties2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_memory_properties2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceMemoryProperties2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_memory_properties2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_sparse_image_format_properties2_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_sparse_image_format_properties2_khr(
+ _physical_device: PhysicalDevice,
+ _p_format_info: *const PhysicalDeviceSparseImageFormatInfo2,
+ _p_property_count: *mut u32,
+ _p_properties: *mut SparseImageFormatProperties2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_sparse_image_format_properties2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceSparseImageFormatProperties2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_sparse_image_format_properties2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_get_physical_device_properties2'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_FEATURES_2_KHR: Self = Self::PHYSICAL_DEVICE_FEATURES_2;
+ pub const PHYSICAL_DEVICE_PROPERTIES_2_KHR: Self = Self::PHYSICAL_DEVICE_PROPERTIES_2;
+ pub const FORMAT_PROPERTIES_2_KHR: Self = Self::FORMAT_PROPERTIES_2;
+ pub const IMAGE_FORMAT_PROPERTIES_2_KHR: Self = Self::IMAGE_FORMAT_PROPERTIES_2;
+ pub const PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR: Self =
+ Self::PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2;
+ pub const QUEUE_FAMILY_PROPERTIES_2_KHR: Self = Self::QUEUE_FAMILY_PROPERTIES_2;
+ pub const PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR: Self =
+ Self::PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
+ pub const SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR: Self = Self::SPARSE_IMAGE_FORMAT_PROPERTIES_2;
+ pub const PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR: Self =
+ Self::PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2;
+}
+impl KhrDeviceGroupFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_device_group\0") }
+ }
+ pub const SPEC_VERSION: u32 = 4u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDeviceGroupPeerMemoryFeatures = unsafe extern "system" fn(
+ device: Device,
+ heap_index: u32,
+ local_device_index: u32,
+ remote_device_index: u32,
+ p_peer_memory_features: *mut PeerMemoryFeatureFlags,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetDeviceMask =
+ unsafe extern "system" fn(command_buffer: CommandBuffer, device_mask: u32);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDispatchBase = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ base_group_x: u32,
+ base_group_y: u32,
+ base_group_z: u32,
+ group_count_x: u32,
+ group_count_y: u32,
+ group_count_z: u32,
+);
+#[derive(Clone)]
+pub struct KhrDeviceGroupFn {
+ pub get_device_group_peer_memory_features_khr: PFN_vkGetDeviceGroupPeerMemoryFeatures,
+ pub cmd_set_device_mask_khr: PFN_vkCmdSetDeviceMask,
+ pub cmd_dispatch_base_khr: PFN_vkCmdDispatchBase,
+ pub get_device_group_present_capabilities_khr:
+ crate::vk::PFN_vkGetDeviceGroupPresentCapabilitiesKHR,
+ pub get_device_group_surface_present_modes_khr:
+ crate::vk::PFN_vkGetDeviceGroupSurfacePresentModesKHR,
+ pub get_physical_device_present_rectangles_khr:
+ crate::vk::PFN_vkGetPhysicalDevicePresentRectanglesKHR,
+ pub acquire_next_image2_khr: crate::vk::PFN_vkAcquireNextImage2KHR,
+}
+unsafe impl Send for KhrDeviceGroupFn {}
+unsafe impl Sync for KhrDeviceGroupFn {}
+impl KhrDeviceGroupFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_device_group_peer_memory_features_khr: unsafe {
+ unsafe extern "system" fn get_device_group_peer_memory_features_khr(
+ _device: Device,
+ _heap_index: u32,
+ _local_device_index: u32,
+ _remote_device_index: u32,
+ _p_peer_memory_features: *mut PeerMemoryFeatureFlags,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_device_group_peer_memory_features_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDeviceGroupPeerMemoryFeaturesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_group_peer_memory_features_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_device_mask_khr: unsafe {
+ unsafe extern "system" fn cmd_set_device_mask_khr(
+ _command_buffer: CommandBuffer,
+ _device_mask: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_device_mask_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDeviceMaskKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_device_mask_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_dispatch_base_khr: unsafe {
+ unsafe extern "system" fn cmd_dispatch_base_khr(
+ _command_buffer: CommandBuffer,
+ _base_group_x: u32,
+ _base_group_y: u32,
+ _base_group_z: u32,
+ _group_count_x: u32,
+ _group_count_y: u32,
+ _group_count_z: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_dispatch_base_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDispatchBaseKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_dispatch_base_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_device_group_present_capabilities_khr: unsafe {
+ unsafe extern "system" fn get_device_group_present_capabilities_khr(
+ _device: Device,
+ _p_device_group_present_capabilities: *mut DeviceGroupPresentCapabilitiesKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_device_group_present_capabilities_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDeviceGroupPresentCapabilitiesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_group_present_capabilities_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_device_group_surface_present_modes_khr: unsafe {
+ unsafe extern "system" fn get_device_group_surface_present_modes_khr(
+ _device: Device,
+ _surface: SurfaceKHR,
+ _p_modes: *mut DeviceGroupPresentModeFlagsKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_device_group_surface_present_modes_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDeviceGroupSurfacePresentModesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_group_surface_present_modes_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_present_rectangles_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_present_rectangles_khr(
+ _physical_device: PhysicalDevice,
+ _surface: SurfaceKHR,
+ _p_rect_count: *mut u32,
+ _p_rects: *mut Rect2D,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_present_rectangles_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDevicePresentRectanglesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_present_rectangles_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ acquire_next_image2_khr: unsafe {
+ unsafe extern "system" fn acquire_next_image2_khr(
+ _device: Device,
+ _p_acquire_info: *const AcquireNextImageInfoKHR,
+ _p_image_index: *mut u32,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(acquire_next_image2_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkAcquireNextImage2KHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ acquire_next_image2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_device_group'"]
+impl DependencyFlags {
+ pub const DEVICE_GROUP_KHR: Self = Self::DEVICE_GROUP;
+}
+#[doc = "Generated from 'VK_KHR_device_group'"]
+impl ImageCreateFlags {
+ pub const SPLIT_INSTANCE_BIND_REGIONS_KHR: Self = Self::SPLIT_INSTANCE_BIND_REGIONS;
+}
+#[doc = "Generated from 'VK_KHR_device_group'"]
+impl MemoryAllocateFlags {
+ pub const DEVICE_MASK_KHR: Self = Self::DEVICE_MASK;
+}
+#[doc = "Generated from 'VK_KHR_device_group'"]
+impl PeerMemoryFeatureFlags {
+ pub const COPY_SRC_KHR: Self = Self::COPY_SRC;
+ pub const COPY_DST_KHR: Self = Self::COPY_DST;
+ pub const GENERIC_SRC_KHR: Self = Self::GENERIC_SRC;
+ pub const GENERIC_DST_KHR: Self = Self::GENERIC_DST;
+}
+#[doc = "Generated from 'VK_KHR_device_group'"]
+impl PipelineCreateFlags {
+ pub const VIEW_INDEX_FROM_DEVICE_INDEX_KHR: Self = Self::VIEW_INDEX_FROM_DEVICE_INDEX;
+}
+#[doc = "Generated from 'VK_KHR_device_group'"]
+impl StructureType {
+ pub const MEMORY_ALLOCATE_FLAGS_INFO_KHR: Self = Self::MEMORY_ALLOCATE_FLAGS_INFO;
+ pub const DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR: Self =
+ Self::DEVICE_GROUP_RENDER_PASS_BEGIN_INFO;
+ pub const DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR: Self =
+ Self::DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO;
+ pub const DEVICE_GROUP_SUBMIT_INFO_KHR: Self = Self::DEVICE_GROUP_SUBMIT_INFO;
+ pub const DEVICE_GROUP_BIND_SPARSE_INFO_KHR: Self = Self::DEVICE_GROUP_BIND_SPARSE_INFO;
+ pub const BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR: Self =
+ Self::BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO;
+ pub const BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR: Self =
+ Self::BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO;
+}
+impl ExtValidationFlagsFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_validation_flags\0") }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct ExtValidationFlagsFn {}
+unsafe impl Send for ExtValidationFlagsFn {}
+unsafe impl Sync for ExtValidationFlagsFn {}
+impl ExtValidationFlagsFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_validation_flags'"]
+impl StructureType {
+ pub const VALIDATION_FLAGS_EXT: Self = Self(1_000_061_000);
+}
+impl NnViSurfaceFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NN_vi_surface\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateViSurfaceNN = unsafe extern "system" fn(
+ instance: Instance,
+ p_create_info: *const ViSurfaceCreateInfoNN,
+ p_allocator: *const AllocationCallbacks,
+ p_surface: *mut SurfaceKHR,
+) -> Result;
+#[derive(Clone)]
+pub struct NnViSurfaceFn {
+ pub create_vi_surface_nn: PFN_vkCreateViSurfaceNN,
+}
+unsafe impl Send for NnViSurfaceFn {}
+unsafe impl Sync for NnViSurfaceFn {}
+impl NnViSurfaceFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_vi_surface_nn: unsafe {
+ unsafe extern "system" fn create_vi_surface_nn(
+ _instance: Instance,
+ _p_create_info: *const ViSurfaceCreateInfoNN,
+ _p_allocator: *const AllocationCallbacks,
+ _p_surface: *mut SurfaceKHR,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_vi_surface_nn)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateViSurfaceNN\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_vi_surface_nn
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_NN_vi_surface'"]
+impl StructureType {
+ pub const VI_SURFACE_CREATE_INFO_NN: Self = Self(1_000_062_000);
+}
+impl KhrShaderDrawParametersFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_draw_parameters\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrShaderDrawParametersFn {}
+unsafe impl Send for KhrShaderDrawParametersFn {}
+unsafe impl Sync for KhrShaderDrawParametersFn {}
+impl KhrShaderDrawParametersFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtShaderSubgroupBallotFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_subgroup_ballot\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtShaderSubgroupBallotFn {}
+unsafe impl Send for ExtShaderSubgroupBallotFn {}
+unsafe impl Sync for ExtShaderSubgroupBallotFn {}
+impl ExtShaderSubgroupBallotFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtShaderSubgroupVoteFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_subgroup_vote\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtShaderSubgroupVoteFn {}
+unsafe impl Send for ExtShaderSubgroupVoteFn {}
+unsafe impl Sync for ExtShaderSubgroupVoteFn {}
+impl ExtShaderSubgroupVoteFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtTextureCompressionAstcHdrFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_EXT_texture_compression_astc_hdr\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtTextureCompressionAstcHdrFn {}
+unsafe impl Send for ExtTextureCompressionAstcHdrFn {}
+unsafe impl Sync for ExtTextureCompressionAstcHdrFn {}
+impl ExtTextureCompressionAstcHdrFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_texture_compression_astc_hdr'"]
+impl Format {
+ pub const ASTC_4X4_SFLOAT_BLOCK_EXT: Self = Self::ASTC_4X4_SFLOAT_BLOCK;
+ pub const ASTC_5X4_SFLOAT_BLOCK_EXT: Self = Self::ASTC_5X4_SFLOAT_BLOCK;
+ pub const ASTC_5X5_SFLOAT_BLOCK_EXT: Self = Self::ASTC_5X5_SFLOAT_BLOCK;
+ pub const ASTC_6X5_SFLOAT_BLOCK_EXT: Self = Self::ASTC_6X5_SFLOAT_BLOCK;
+ pub const ASTC_6X6_SFLOAT_BLOCK_EXT: Self = Self::ASTC_6X6_SFLOAT_BLOCK;
+ pub const ASTC_8X5_SFLOAT_BLOCK_EXT: Self = Self::ASTC_8X5_SFLOAT_BLOCK;
+ pub const ASTC_8X6_SFLOAT_BLOCK_EXT: Self = Self::ASTC_8X6_SFLOAT_BLOCK;
+ pub const ASTC_8X8_SFLOAT_BLOCK_EXT: Self = Self::ASTC_8X8_SFLOAT_BLOCK;
+ pub const ASTC_10X5_SFLOAT_BLOCK_EXT: Self = Self::ASTC_10X5_SFLOAT_BLOCK;
+ pub const ASTC_10X6_SFLOAT_BLOCK_EXT: Self = Self::ASTC_10X6_SFLOAT_BLOCK;
+ pub const ASTC_10X8_SFLOAT_BLOCK_EXT: Self = Self::ASTC_10X8_SFLOAT_BLOCK;
+ pub const ASTC_10X10_SFLOAT_BLOCK_EXT: Self = Self::ASTC_10X10_SFLOAT_BLOCK;
+ pub const ASTC_12X10_SFLOAT_BLOCK_EXT: Self = Self::ASTC_12X10_SFLOAT_BLOCK;
+ pub const ASTC_12X12_SFLOAT_BLOCK_EXT: Self = Self::ASTC_12X12_SFLOAT_BLOCK;
+}
+#[doc = "Generated from 'VK_EXT_texture_compression_astc_hdr'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT: Self =
+ Self::PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES;
+}
+impl ExtAstcDecodeModeFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_astc_decode_mode\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtAstcDecodeModeFn {}
+unsafe impl Send for ExtAstcDecodeModeFn {}
+unsafe impl Sync for ExtAstcDecodeModeFn {}
+impl ExtAstcDecodeModeFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_astc_decode_mode'"]
+impl StructureType {
+ pub const IMAGE_VIEW_ASTC_DECODE_MODE_EXT: Self = Self(1_000_067_000);
+ pub const PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT: Self = Self(1_000_067_001);
+}
+impl ExtPipelineRobustnessFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_pipeline_robustness\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtPipelineRobustnessFn {}
+unsafe impl Send for ExtPipelineRobustnessFn {}
+unsafe impl Sync for ExtPipelineRobustnessFn {}
+impl ExtPipelineRobustnessFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_pipeline_robustness'"]
+impl StructureType {
+ pub const PIPELINE_ROBUSTNESS_CREATE_INFO_EXT: Self = Self(1_000_068_000);
+ pub const PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT: Self = Self(1_000_068_001);
+ pub const PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT: Self = Self(1_000_068_002);
+}
+impl KhrMaintenance1Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_maintenance1\0") }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkTrimCommandPool = unsafe extern "system" fn(
+ device: Device,
+ command_pool: CommandPool,
+ flags: CommandPoolTrimFlags,
+);
+#[derive(Clone)]
+pub struct KhrMaintenance1Fn {
+ pub trim_command_pool_khr: PFN_vkTrimCommandPool,
+}
+unsafe impl Send for KhrMaintenance1Fn {}
+unsafe impl Sync for KhrMaintenance1Fn {}
+impl KhrMaintenance1Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ trim_command_pool_khr: unsafe {
+ unsafe extern "system" fn trim_command_pool_khr(
+ _device: Device,
+ _command_pool: CommandPool,
+ _flags: CommandPoolTrimFlags,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(trim_command_pool_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkTrimCommandPoolKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ trim_command_pool_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_maintenance1'"]
+impl FormatFeatureFlags {
+ pub const TRANSFER_SRC_KHR: Self = Self::TRANSFER_SRC;
+ pub const TRANSFER_DST_KHR: Self = Self::TRANSFER_DST;
+}
+#[doc = "Generated from 'VK_KHR_maintenance1'"]
+impl ImageCreateFlags {
+ pub const TYPE_2D_ARRAY_COMPATIBLE_KHR: Self = Self::TYPE_2D_ARRAY_COMPATIBLE;
+}
+#[doc = "Generated from 'VK_KHR_maintenance1'"]
+impl Result {
+ pub const ERROR_OUT_OF_POOL_MEMORY_KHR: Self = Self::ERROR_OUT_OF_POOL_MEMORY;
+}
+impl KhrDeviceGroupCreationFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_device_group_creation\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkEnumeratePhysicalDeviceGroups = unsafe extern "system" fn(
+ instance: Instance,
+ p_physical_device_group_count: *mut u32,
+ p_physical_device_group_properties: *mut PhysicalDeviceGroupProperties,
+) -> Result;
+#[derive(Clone)]
+pub struct KhrDeviceGroupCreationFn {
+ pub enumerate_physical_device_groups_khr: PFN_vkEnumeratePhysicalDeviceGroups,
+}
+unsafe impl Send for KhrDeviceGroupCreationFn {}
+unsafe impl Sync for KhrDeviceGroupCreationFn {}
+impl KhrDeviceGroupCreationFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ enumerate_physical_device_groups_khr: unsafe {
+ unsafe extern "system" fn enumerate_physical_device_groups_khr(
+ _instance: Instance,
+ _p_physical_device_group_count: *mut u32,
+ _p_physical_device_group_properties: *mut PhysicalDeviceGroupProperties,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(enumerate_physical_device_groups_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkEnumeratePhysicalDeviceGroupsKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ enumerate_physical_device_groups_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_device_group_creation'"]
+impl MemoryHeapFlags {
+ pub const MULTI_INSTANCE_KHR: Self = Self::MULTI_INSTANCE;
+}
+#[doc = "Generated from 'VK_KHR_device_group_creation'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR: Self = Self::PHYSICAL_DEVICE_GROUP_PROPERTIES;
+ pub const DEVICE_GROUP_DEVICE_CREATE_INFO_KHR: Self = Self::DEVICE_GROUP_DEVICE_CREATE_INFO;
+}
+// Extension name and spec version for VK_KHR_external_memory_capabilities.
+impl KhrExternalMemoryCapabilitiesFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                b"VK_KHR_external_memory_capabilities\0",
+            )
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+// Raw Vulkan entry-point signature for this extension's single command.
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceExternalBufferProperties = unsafe extern "system" fn(
+    physical_device: PhysicalDevice,
+    p_external_buffer_info: *const PhysicalDeviceExternalBufferInfo,
+    p_external_buffer_properties: *mut ExternalBufferProperties,
+);
+// Function-pointer table holding the extension's resolved commands.
+#[derive(Clone)]
+pub struct KhrExternalMemoryCapabilitiesFn {
+    pub get_physical_device_external_buffer_properties_khr:
+        PFN_vkGetPhysicalDeviceExternalBufferProperties,
+}
+// The table contains only plain function pointers, so sharing it across
+// threads introduces no aliasing of mutable state.
+unsafe impl Send for KhrExternalMemoryCapabilitiesFn {}
+unsafe impl Sync for KhrExternalMemoryCapabilitiesFn {}
+impl KhrExternalMemoryCapabilitiesFn {
+    // Resolves each command by name through `_f`; when the loader returns
+    // null, the slot falls back to a local stub that panics on first call,
+    // so a missing entry point fails loudly at call time, not load time.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            get_physical_device_external_buffer_properties_khr: unsafe {
+                unsafe extern "system" fn get_physical_device_external_buffer_properties_khr(
+                    _physical_device: PhysicalDevice,
+                    _p_external_buffer_info: *const PhysicalDeviceExternalBufferInfo,
+                    _p_external_buffer_properties: *mut ExternalBufferProperties,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_physical_device_external_buffer_properties_khr)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetPhysicalDeviceExternalBufferPropertiesKHR\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_physical_device_external_buffer_properties_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+// KHR-suffixed aliases of the equivalent core constants.
+#[doc = "Generated from 'VK_KHR_external_memory_capabilities'"]
+impl ExternalMemoryFeatureFlags {
+    pub const DEDICATED_ONLY_KHR: Self = Self::DEDICATED_ONLY;
+    pub const EXPORTABLE_KHR: Self = Self::EXPORTABLE;
+    pub const IMPORTABLE_KHR: Self = Self::IMPORTABLE;
+}
+#[doc = "Generated from 'VK_KHR_external_memory_capabilities'"]
+impl ExternalMemoryHandleTypeFlags {
+    pub const OPAQUE_FD_KHR: Self = Self::OPAQUE_FD;
+    pub const OPAQUE_WIN32_KHR: Self = Self::OPAQUE_WIN32;
+    pub const OPAQUE_WIN32_KMT_KHR: Self = Self::OPAQUE_WIN32_KMT;
+    pub const D3D11_TEXTURE_KHR: Self = Self::D3D11_TEXTURE;
+    pub const D3D11_TEXTURE_KMT_KHR: Self = Self::D3D11_TEXTURE_KMT;
+    pub const D3D12_HEAP_KHR: Self = Self::D3D12_HEAP;
+    pub const D3D12_RESOURCE_KHR: Self = Self::D3D12_RESOURCE;
+}
+#[doc = "Generated from 'VK_KHR_external_memory_capabilities'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR: Self =
+        Self::PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO;
+    pub const EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR: Self = Self::EXTERNAL_IMAGE_FORMAT_PROPERTIES;
+    pub const PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR: Self =
+        Self::PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO;
+    pub const EXTERNAL_BUFFER_PROPERTIES_KHR: Self = Self::EXTERNAL_BUFFER_PROPERTIES;
+    pub const PHYSICAL_DEVICE_ID_PROPERTIES_KHR: Self = Self::PHYSICAL_DEVICE_ID_PROPERTIES;
+}
+// Extension name and spec version for VK_KHR_external_memory.
+impl KhrExternalMemoryFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_memory\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+// This extension introduces no commands, so the table is empty.
+#[derive(Clone)]
+pub struct KhrExternalMemoryFn {}
+unsafe impl Send for KhrExternalMemoryFn {}
+unsafe impl Sync for KhrExternalMemoryFn {}
+impl KhrExternalMemoryFn {
+    // Nothing to resolve; `_f` is accepted (and ignored) to keep the
+    // loader signature uniform with the other extension tables.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// KHR-suffixed aliases of the equivalent core constants.
+#[doc = "Generated from 'VK_KHR_external_memory'"]
+impl Result {
+    pub const ERROR_INVALID_EXTERNAL_HANDLE_KHR: Self = Self::ERROR_INVALID_EXTERNAL_HANDLE;
+}
+#[doc = "Generated from 'VK_KHR_external_memory'"]
+impl StructureType {
+    pub const EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR: Self =
+        Self::EXTERNAL_MEMORY_BUFFER_CREATE_INFO;
+    pub const EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR: Self = Self::EXTERNAL_MEMORY_IMAGE_CREATE_INFO;
+    pub const EXPORT_MEMORY_ALLOCATE_INFO_KHR: Self = Self::EXPORT_MEMORY_ALLOCATE_INFO;
+}
+// Extension name and spec version for VK_KHR_external_memory_win32.
+impl KhrExternalMemoryWin32Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_memory_win32\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+// Raw Vulkan entry-point signatures for this extension's commands.
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetMemoryWin32HandleKHR = unsafe extern "system" fn(
+    device: Device,
+    p_get_win32_handle_info: *const MemoryGetWin32HandleInfoKHR,
+    p_handle: *mut HANDLE,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetMemoryWin32HandlePropertiesKHR = unsafe extern "system" fn(
+    device: Device,
+    handle_type: ExternalMemoryHandleTypeFlags,
+    handle: HANDLE,
+    p_memory_win32_handle_properties: *mut MemoryWin32HandlePropertiesKHR,
+) -> Result;
+// Function-pointer table holding the extension's resolved commands.
+#[derive(Clone)]
+pub struct KhrExternalMemoryWin32Fn {
+    pub get_memory_win32_handle_khr: PFN_vkGetMemoryWin32HandleKHR,
+    pub get_memory_win32_handle_properties_khr: PFN_vkGetMemoryWin32HandlePropertiesKHR,
+}
+// Plain function pointers only — safe to share across threads.
+unsafe impl Send for KhrExternalMemoryWin32Fn {}
+unsafe impl Sync for KhrExternalMemoryWin32Fn {}
+impl KhrExternalMemoryWin32Fn {
+    // Resolves each command by name through `_f`; null results fall back to
+    // panicking stubs so a missing entry point fails at first call.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            get_memory_win32_handle_khr: unsafe {
+                unsafe extern "system" fn get_memory_win32_handle_khr(
+                    _device: Device,
+                    _p_get_win32_handle_info: *const MemoryGetWin32HandleInfoKHR,
+                    _p_handle: *mut HANDLE,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_memory_win32_handle_khr)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetMemoryWin32HandleKHR\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    get_memory_win32_handle_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_memory_win32_handle_properties_khr: unsafe {
+                unsafe extern "system" fn get_memory_win32_handle_properties_khr(
+                    _device: Device,
+                    _handle_type: ExternalMemoryHandleTypeFlags,
+                    _handle: HANDLE,
+                    _p_memory_win32_handle_properties: *mut MemoryWin32HandlePropertiesKHR,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_memory_win32_handle_properties_khr)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetMemoryWin32HandlePropertiesKHR\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_memory_win32_handle_properties_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+// StructureType values introduced by this extension.
+#[doc = "Generated from 'VK_KHR_external_memory_win32'"]
+impl StructureType {
+    pub const IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_073_000);
+    pub const EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_073_001);
+    pub const MEMORY_WIN32_HANDLE_PROPERTIES_KHR: Self = Self(1_000_073_002);
+    pub const MEMORY_GET_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_073_003);
+}
+// Extension name and spec version for VK_KHR_external_memory_fd.
+impl KhrExternalMemoryFdFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_memory_fd\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+// Raw Vulkan entry-point signatures for this extension's commands.
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetMemoryFdKHR = unsafe extern "system" fn(
+    device: Device,
+    p_get_fd_info: *const MemoryGetFdInfoKHR,
+    p_fd: *mut c_int,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetMemoryFdPropertiesKHR = unsafe extern "system" fn(
+    device: Device,
+    handle_type: ExternalMemoryHandleTypeFlags,
+    fd: c_int,
+    p_memory_fd_properties: *mut MemoryFdPropertiesKHR,
+) -> Result;
+// Function-pointer table holding the extension's resolved commands.
+#[derive(Clone)]
+pub struct KhrExternalMemoryFdFn {
+    pub get_memory_fd_khr: PFN_vkGetMemoryFdKHR,
+    pub get_memory_fd_properties_khr: PFN_vkGetMemoryFdPropertiesKHR,
+}
+// Plain function pointers only — safe to share across threads.
+unsafe impl Send for KhrExternalMemoryFdFn {}
+unsafe impl Sync for KhrExternalMemoryFdFn {}
+impl KhrExternalMemoryFdFn {
+    // Resolves each command by name through `_f`; null results fall back to
+    // panicking stubs so a missing entry point fails at first call.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            get_memory_fd_khr: unsafe {
+                unsafe extern "system" fn get_memory_fd_khr(
+                    _device: Device,
+                    _p_get_fd_info: *const MemoryGetFdInfoKHR,
+                    _p_fd: *mut c_int,
+                ) -> Result {
+                    panic!(concat!("Unable to load ", stringify!(get_memory_fd_khr)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetMemoryFdKHR\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    get_memory_fd_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_memory_fd_properties_khr: unsafe {
+                unsafe extern "system" fn get_memory_fd_properties_khr(
+                    _device: Device,
+                    _handle_type: ExternalMemoryHandleTypeFlags,
+                    _fd: c_int,
+                    _p_memory_fd_properties: *mut MemoryFdPropertiesKHR,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_memory_fd_properties_khr)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetMemoryFdPropertiesKHR\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_memory_fd_properties_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+// StructureType values introduced by this extension.
+#[doc = "Generated from 'VK_KHR_external_memory_fd'"]
+impl StructureType {
+    pub const IMPORT_MEMORY_FD_INFO_KHR: Self = Self(1_000_074_000);
+    pub const MEMORY_FD_PROPERTIES_KHR: Self = Self(1_000_074_001);
+    pub const MEMORY_GET_FD_INFO_KHR: Self = Self(1_000_074_002);
+}
+// Extension name and spec version for VK_KHR_win32_keyed_mutex.
+impl KhrWin32KeyedMutexFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_win32_keyed_mutex\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+// This extension introduces no commands, so the table is empty.
+#[derive(Clone)]
+pub struct KhrWin32KeyedMutexFn {}
+unsafe impl Send for KhrWin32KeyedMutexFn {}
+unsafe impl Sync for KhrWin32KeyedMutexFn {}
+impl KhrWin32KeyedMutexFn {
+    // Nothing to resolve; `_f` is ignored but kept for a uniform signature.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// StructureType value introduced by this extension.
+#[doc = "Generated from 'VK_KHR_win32_keyed_mutex'"]
+impl StructureType {
+    pub const WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR: Self = Self(1_000_075_000);
+}
+// Extension name and spec version for VK_KHR_external_semaphore_capabilities.
+impl KhrExternalSemaphoreCapabilitiesFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                b"VK_KHR_external_semaphore_capabilities\0",
+            )
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+// Raw Vulkan entry-point signature for this extension's single command.
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceExternalSemaphoreProperties = unsafe extern "system" fn(
+    physical_device: PhysicalDevice,
+    p_external_semaphore_info: *const PhysicalDeviceExternalSemaphoreInfo,
+    p_external_semaphore_properties: *mut ExternalSemaphoreProperties,
+);
+// Function-pointer table holding the extension's resolved commands.
+#[derive(Clone)]
+pub struct KhrExternalSemaphoreCapabilitiesFn {
+    pub get_physical_device_external_semaphore_properties_khr:
+        PFN_vkGetPhysicalDeviceExternalSemaphoreProperties,
+}
+// Plain function pointers only — safe to share across threads.
+unsafe impl Send for KhrExternalSemaphoreCapabilitiesFn {}
+unsafe impl Sync for KhrExternalSemaphoreCapabilitiesFn {}
+impl KhrExternalSemaphoreCapabilitiesFn {
+    // Resolves the command through `_f`; a null result falls back to a
+    // panicking stub so a missing entry point fails at first call.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            get_physical_device_external_semaphore_properties_khr: unsafe {
+                unsafe extern "system" fn get_physical_device_external_semaphore_properties_khr(
+                    _physical_device: PhysicalDevice,
+                    _p_external_semaphore_info: *const PhysicalDeviceExternalSemaphoreInfo,
+                    _p_external_semaphore_properties: *mut ExternalSemaphoreProperties,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_physical_device_external_semaphore_properties_khr)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetPhysicalDeviceExternalSemaphorePropertiesKHR\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_physical_device_external_semaphore_properties_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+// KHR-suffixed aliases of the equivalent core constants.
+#[doc = "Generated from 'VK_KHR_external_semaphore_capabilities'"]
+impl ExternalSemaphoreFeatureFlags {
+    pub const EXPORTABLE_KHR: Self = Self::EXPORTABLE;
+    pub const IMPORTABLE_KHR: Self = Self::IMPORTABLE;
+}
+#[doc = "Generated from 'VK_KHR_external_semaphore_capabilities'"]
+impl ExternalSemaphoreHandleTypeFlags {
+    pub const OPAQUE_FD_KHR: Self = Self::OPAQUE_FD;
+    pub const OPAQUE_WIN32_KHR: Self = Self::OPAQUE_WIN32;
+    pub const OPAQUE_WIN32_KMT_KHR: Self = Self::OPAQUE_WIN32_KMT;
+    pub const D3D12_FENCE_KHR: Self = Self::D3D12_FENCE;
+    pub const SYNC_FD_KHR: Self = Self::SYNC_FD;
+}
+#[doc = "Generated from 'VK_KHR_external_semaphore_capabilities'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR: Self =
+        Self::PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO;
+    pub const EXTERNAL_SEMAPHORE_PROPERTIES_KHR: Self = Self::EXTERNAL_SEMAPHORE_PROPERTIES;
+}
+// Extension name and spec version for VK_KHR_external_semaphore.
+impl KhrExternalSemaphoreFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_semaphore\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+// This extension introduces no commands, so the table is empty.
+#[derive(Clone)]
+pub struct KhrExternalSemaphoreFn {}
+unsafe impl Send for KhrExternalSemaphoreFn {}
+unsafe impl Sync for KhrExternalSemaphoreFn {}
+impl KhrExternalSemaphoreFn {
+    // Nothing to resolve; `_f` is ignored but kept for a uniform signature.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// KHR-suffixed aliases of the equivalent core constants.
+#[doc = "Generated from 'VK_KHR_external_semaphore'"]
+impl SemaphoreImportFlags {
+    pub const TEMPORARY_KHR: Self = Self::TEMPORARY;
+}
+#[doc = "Generated from 'VK_KHR_external_semaphore'"]
+impl StructureType {
+    pub const EXPORT_SEMAPHORE_CREATE_INFO_KHR: Self = Self::EXPORT_SEMAPHORE_CREATE_INFO;
+}
+// Extension name and spec version for VK_KHR_external_semaphore_win32.
+impl KhrExternalSemaphoreWin32Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_semaphore_win32\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+// Raw Vulkan entry-point signatures for this extension's commands.
+#[allow(non_camel_case_types)]
+pub type PFN_vkImportSemaphoreWin32HandleKHR = unsafe extern "system" fn(
+    device: Device,
+    p_import_semaphore_win32_handle_info: *const ImportSemaphoreWin32HandleInfoKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetSemaphoreWin32HandleKHR = unsafe extern "system" fn(
+    device: Device,
+    p_get_win32_handle_info: *const SemaphoreGetWin32HandleInfoKHR,
+    p_handle: *mut HANDLE,
+) -> Result;
+// Function-pointer table holding the extension's resolved commands.
+#[derive(Clone)]
+pub struct KhrExternalSemaphoreWin32Fn {
+    pub import_semaphore_win32_handle_khr: PFN_vkImportSemaphoreWin32HandleKHR,
+    pub get_semaphore_win32_handle_khr: PFN_vkGetSemaphoreWin32HandleKHR,
+}
+// Plain function pointers only — safe to share across threads.
+unsafe impl Send for KhrExternalSemaphoreWin32Fn {}
+unsafe impl Sync for KhrExternalSemaphoreWin32Fn {}
+impl KhrExternalSemaphoreWin32Fn {
+    // Resolves each command by name through `_f`; null results fall back to
+    // panicking stubs so a missing entry point fails at first call.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            import_semaphore_win32_handle_khr: unsafe {
+                unsafe extern "system" fn import_semaphore_win32_handle_khr(
+                    _device: Device,
+                    _p_import_semaphore_win32_handle_info: *const ImportSemaphoreWin32HandleInfoKHR,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(import_semaphore_win32_handle_khr)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkImportSemaphoreWin32HandleKHR\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    import_semaphore_win32_handle_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_semaphore_win32_handle_khr: unsafe {
+                unsafe extern "system" fn get_semaphore_win32_handle_khr(
+                    _device: Device,
+                    _p_get_win32_handle_info: *const SemaphoreGetWin32HandleInfoKHR,
+                    _p_handle: *mut HANDLE,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_semaphore_win32_handle_khr)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetSemaphoreWin32HandleKHR\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_semaphore_win32_handle_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+// StructureType values introduced by this extension.
+#[doc = "Generated from 'VK_KHR_external_semaphore_win32'"]
+impl StructureType {
+    pub const IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_078_000);
+    pub const EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_078_001);
+    pub const D3D12_FENCE_SUBMIT_INFO_KHR: Self = Self(1_000_078_002);
+    pub const SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_078_003);
+}
+// Extension name and spec version for VK_KHR_external_semaphore_fd.
+impl KhrExternalSemaphoreFdFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_semaphore_fd\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+// Raw Vulkan entry-point signatures for this extension's commands.
+#[allow(non_camel_case_types)]
+pub type PFN_vkImportSemaphoreFdKHR = unsafe extern "system" fn(
+    device: Device,
+    p_import_semaphore_fd_info: *const ImportSemaphoreFdInfoKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetSemaphoreFdKHR = unsafe extern "system" fn(
+    device: Device,
+    p_get_fd_info: *const SemaphoreGetFdInfoKHR,
+    p_fd: *mut c_int,
+) -> Result;
+// Function-pointer table holding the extension's resolved commands.
+#[derive(Clone)]
+pub struct KhrExternalSemaphoreFdFn {
+    pub import_semaphore_fd_khr: PFN_vkImportSemaphoreFdKHR,
+    pub get_semaphore_fd_khr: PFN_vkGetSemaphoreFdKHR,
+}
+// Plain function pointers only — safe to share across threads.
+unsafe impl Send for KhrExternalSemaphoreFdFn {}
+unsafe impl Sync for KhrExternalSemaphoreFdFn {}
+impl KhrExternalSemaphoreFdFn {
+    // Resolves each command by name through `_f`; null results fall back to
+    // panicking stubs so a missing entry point fails at first call.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            import_semaphore_fd_khr: unsafe {
+                unsafe extern "system" fn import_semaphore_fd_khr(
+                    _device: Device,
+                    _p_import_semaphore_fd_info: *const ImportSemaphoreFdInfoKHR,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(import_semaphore_fd_khr)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkImportSemaphoreFdKHR\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    import_semaphore_fd_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_semaphore_fd_khr: unsafe {
+                unsafe extern "system" fn get_semaphore_fd_khr(
+                    _device: Device,
+                    _p_get_fd_info: *const SemaphoreGetFdInfoKHR,
+                    _p_fd: *mut c_int,
+                ) -> Result {
+                    panic!(concat!("Unable to load ", stringify!(get_semaphore_fd_khr)))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetSemaphoreFdKHR\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    get_semaphore_fd_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+// StructureType values introduced by this extension.
+#[doc = "Generated from 'VK_KHR_external_semaphore_fd'"]
+impl StructureType {
+    pub const IMPORT_SEMAPHORE_FD_INFO_KHR: Self = Self(1_000_079_000);
+    pub const SEMAPHORE_GET_FD_INFO_KHR: Self = Self(1_000_079_001);
+}
+// Extension name and spec version for VK_KHR_push_descriptor.
+impl KhrPushDescriptorFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_push_descriptor\0") }
+    }
+    pub const SPEC_VERSION: u32 = 2u32;
+}
+// Raw Vulkan entry-point signatures for this extension's commands.
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdPushDescriptorSetKHR = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    pipeline_bind_point: PipelineBindPoint,
+    layout: PipelineLayout,
+    set: u32,
+    descriptor_write_count: u32,
+    p_descriptor_writes: *const WriteDescriptorSet,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdPushDescriptorSetWithTemplateKHR = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    descriptor_update_template: DescriptorUpdateTemplate,
+    layout: PipelineLayout,
+    set: u32,
+    p_data: *const c_void,
+);
+// Function-pointer table holding the extension's resolved commands.
+#[derive(Clone)]
+pub struct KhrPushDescriptorFn {
+    pub cmd_push_descriptor_set_khr: PFN_vkCmdPushDescriptorSetKHR,
+    pub cmd_push_descriptor_set_with_template_khr: PFN_vkCmdPushDescriptorSetWithTemplateKHR,
+}
+// Plain function pointers only — safe to share across threads.
+unsafe impl Send for KhrPushDescriptorFn {}
+unsafe impl Sync for KhrPushDescriptorFn {}
+impl KhrPushDescriptorFn {
+    // Resolves each command by name through `_f`; null results fall back to
+    // panicking stubs so a missing entry point fails at first call.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            cmd_push_descriptor_set_khr: unsafe {
+                unsafe extern "system" fn cmd_push_descriptor_set_khr(
+                    _command_buffer: CommandBuffer,
+                    _pipeline_bind_point: PipelineBindPoint,
+                    _layout: PipelineLayout,
+                    _set: u32,
+                    _descriptor_write_count: u32,
+                    _p_descriptor_writes: *const WriteDescriptorSet,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_push_descriptor_set_khr)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdPushDescriptorSetKHR\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_push_descriptor_set_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_push_descriptor_set_with_template_khr: unsafe {
+                unsafe extern "system" fn cmd_push_descriptor_set_with_template_khr(
+                    _command_buffer: CommandBuffer,
+                    _descriptor_update_template: DescriptorUpdateTemplate,
+                    _layout: PipelineLayout,
+                    _set: u32,
+                    _p_data: *const c_void,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_push_descriptor_set_with_template_khr)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdPushDescriptorSetWithTemplateKHR\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_push_descriptor_set_with_template_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+// Flag, enum and StructureType values introduced by this extension.
+#[doc = "Generated from 'VK_KHR_push_descriptor'"]
+impl DescriptorSetLayoutCreateFlags {
+    #[doc = "Descriptors are pushed via flink:vkCmdPushDescriptorSetKHR"]
+    pub const PUSH_DESCRIPTOR_KHR: Self = Self(0b1);
+}
+#[doc = "Generated from 'VK_KHR_push_descriptor'"]
+impl DescriptorUpdateTemplateType {
+    #[doc = "Create descriptor update template for pushed descriptor updates"]
+    pub const PUSH_DESCRIPTORS_KHR: Self = Self(1);
+}
+#[doc = "Generated from 'VK_KHR_push_descriptor'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR: Self = Self(1_000_080_000);
+}
+// Extension name and spec version for VK_EXT_conditional_rendering.
+impl ExtConditionalRenderingFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_conditional_rendering\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 2u32;
+}
+// Raw Vulkan entry-point signatures for this extension's commands.
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBeginConditionalRenderingEXT = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    p_conditional_rendering_begin: *const ConditionalRenderingBeginInfoEXT,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdEndConditionalRenderingEXT =
+    unsafe extern "system" fn(command_buffer: CommandBuffer);
+// Function-pointer table holding the extension's resolved commands.
+#[derive(Clone)]
+pub struct ExtConditionalRenderingFn {
+    pub cmd_begin_conditional_rendering_ext: PFN_vkCmdBeginConditionalRenderingEXT,
+    pub cmd_end_conditional_rendering_ext: PFN_vkCmdEndConditionalRenderingEXT,
+}
+// Plain function pointers only — safe to share across threads.
+unsafe impl Send for ExtConditionalRenderingFn {}
+unsafe impl Sync for ExtConditionalRenderingFn {}
+impl ExtConditionalRenderingFn {
+    // Resolves each command by name through `_f`; null results fall back to
+    // panicking stubs so a missing entry point fails at first call.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            cmd_begin_conditional_rendering_ext: unsafe {
+                unsafe extern "system" fn cmd_begin_conditional_rendering_ext(
+                    _command_buffer: CommandBuffer,
+                    _p_conditional_rendering_begin: *const ConditionalRenderingBeginInfoEXT,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_begin_conditional_rendering_ext)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdBeginConditionalRenderingEXT\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_begin_conditional_rendering_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_end_conditional_rendering_ext: unsafe {
+                unsafe extern "system" fn cmd_end_conditional_rendering_ext(
+                    _command_buffer: CommandBuffer,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_end_conditional_rendering_ext)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdEndConditionalRenderingEXT\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_end_conditional_rendering_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+// Flag and StructureType values introduced by this extension.
+#[doc = "Generated from 'VK_EXT_conditional_rendering'"]
+impl AccessFlags {
+    #[doc = "read access flag for reading conditional rendering predicate"]
+    pub const CONDITIONAL_RENDERING_READ_EXT: Self = Self(0b1_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_conditional_rendering'"]
+impl BufferUsageFlags {
+    #[doc = "Specifies the buffer can be used as predicate in conditional rendering"]
+    pub const CONDITIONAL_RENDERING_EXT: Self = Self(0b10_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_conditional_rendering'"]
+impl PipelineStageFlags {
+    #[doc = "A pipeline stage for conditional rendering predicate fetch"]
+    pub const CONDITIONAL_RENDERING_EXT: Self = Self(0b100_0000_0000_0000_0000)
+;
+    pub const COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT: Self = Self(1_000_081_000);
+    pub const PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: Self = Self(1_000_081_001);
+    pub const CONDITIONAL_RENDERING_BEGIN_INFO_EXT: Self = Self(1_000_081_002);
+}
+// Extension name and spec version for VK_KHR_shader_float16_int8.
+impl KhrShaderFloat16Int8Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_float16_int8\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+// This extension introduces no commands, so the table is empty.
+#[derive(Clone)]
+pub struct KhrShaderFloat16Int8Fn {}
+unsafe impl Send for KhrShaderFloat16Int8Fn {}
+unsafe impl Sync for KhrShaderFloat16Int8Fn {}
+impl KhrShaderFloat16Int8Fn {
+    // Nothing to resolve; `_f` is ignored but kept for a uniform signature.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// Two historical KHR names alias the same core StructureType value.
+#[doc = "Generated from 'VK_KHR_shader_float16_int8'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR: Self =
+        Self::PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES;
+    pub const PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR: Self =
+        Self::PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES;
+}
+// Extension name and spec version for VK_KHR_16bit_storage.
+impl Khr16bitStorageFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_16bit_storage\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+// This extension introduces no commands, so the table is empty.
+#[derive(Clone)]
+pub struct Khr16bitStorageFn {}
+unsafe impl Send for Khr16bitStorageFn {}
+unsafe impl Sync for Khr16bitStorageFn {}
+impl Khr16bitStorageFn {
+    // Nothing to resolve; `_f` is ignored but kept for a uniform signature.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// KHR-suffixed alias of the equivalent core constant.
+#[doc = "Generated from 'VK_KHR_16bit_storage'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR: Self =
+        Self::PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES;
+}
+// Extension name and spec version for VK_KHR_incremental_present.
+impl KhrIncrementalPresentFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_incremental_present\0") }
+    }
+    pub const SPEC_VERSION: u32 = 2u32;
+}
+// This extension introduces no commands, so the table is empty.
+#[derive(Clone)]
+pub struct KhrIncrementalPresentFn {}
+unsafe impl Send for KhrIncrementalPresentFn {}
+unsafe impl Sync for KhrIncrementalPresentFn {}
+impl KhrIncrementalPresentFn {
+    // Nothing to resolve; `_f` is ignored but kept for a uniform signature.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// StructureType value introduced by this extension.
+#[doc = "Generated from 'VK_KHR_incremental_present'"]
+impl StructureType {
+    pub const PRESENT_REGIONS_KHR: Self = Self(1_000_084_000);
+}
+// Extension name and spec version for VK_KHR_descriptor_update_template.
+impl KhrDescriptorUpdateTemplateFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_descriptor_update_template\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+// Raw Vulkan entry-point signatures for this extension's commands.
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateDescriptorUpdateTemplate = unsafe extern "system" fn(
+    device: Device,
+    p_create_info: *const DescriptorUpdateTemplateCreateInfo,
+    p_allocator: *const AllocationCallbacks,
+    p_descriptor_update_template: *mut DescriptorUpdateTemplate,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyDescriptorUpdateTemplate = unsafe extern "system" fn(
+    device: Device,
+    descriptor_update_template: DescriptorUpdateTemplate,
+    p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkUpdateDescriptorSetWithTemplate = unsafe extern "system" fn(
+    device: Device,
+    descriptor_set: DescriptorSet,
+    descriptor_update_template: DescriptorUpdateTemplate,
+    p_data: *const c_void,
+);
+// Function-pointer table; the last field reuses the PFN type declared for
+// VK_KHR_push_descriptor, since this extension also exposes that command.
+#[derive(Clone)]
+pub struct KhrDescriptorUpdateTemplateFn {
+    pub create_descriptor_update_template_khr: PFN_vkCreateDescriptorUpdateTemplate,
+    pub destroy_descriptor_update_template_khr: PFN_vkDestroyDescriptorUpdateTemplate,
+    pub update_descriptor_set_with_template_khr: PFN_vkUpdateDescriptorSetWithTemplate,
+    pub cmd_push_descriptor_set_with_template_khr:
+        crate::vk::PFN_vkCmdPushDescriptorSetWithTemplateKHR,
+}
+// Plain function pointers only — safe to share across threads.
+unsafe impl Send for KhrDescriptorUpdateTemplateFn {}
+unsafe impl Sync for KhrDescriptorUpdateTemplateFn {}
+impl KhrDescriptorUpdateTemplateFn {
+    // Resolves each command by name through `_f`; null results fall back to
+    // panicking stubs so a missing entry point fails at first call.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            create_descriptor_update_template_khr: unsafe {
+                unsafe extern "system" fn create_descriptor_update_template_khr(
+                    _device: Device,
+                    _p_create_info: *const DescriptorUpdateTemplateCreateInfo,
+                    _p_allocator: *const AllocationCallbacks,
+                    _p_descriptor_update_template: *mut DescriptorUpdateTemplate,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(create_descriptor_update_template_khr)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCreateDescriptorUpdateTemplateKHR\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    create_descriptor_update_template_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            destroy_descriptor_update_template_khr: unsafe {
+                unsafe extern "system" fn destroy_descriptor_update_template_khr(
+                    _device: Device,
+                    _descriptor_update_template: DescriptorUpdateTemplate,
+                    _p_allocator: *const AllocationCallbacks,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(destroy_descriptor_update_template_khr)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkDestroyDescriptorUpdateTemplateKHR\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    destroy_descriptor_update_template_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            update_descriptor_set_with_template_khr: unsafe {
+                unsafe extern "system" fn update_descriptor_set_with_template_khr(
+                    _device: Device,
+                    _descriptor_set: DescriptorSet,
+                    _descriptor_update_template: DescriptorUpdateTemplate,
+                    _p_data: *const c_void,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(update_descriptor_set_with_template_khr)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkUpdateDescriptorSetWithTemplateKHR\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    update_descriptor_set_with_template_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_push_descriptor_set_with_template_khr: unsafe {
+                unsafe extern "system" fn cmd_push_descriptor_set_with_template_khr(
+                    _command_buffer: CommandBuffer,
+                    _descriptor_update_template: DescriptorUpdateTemplate,
+                    _layout: PipelineLayout,
+                    _set: u32,
+                    _p_data: *const c_void,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_push_descriptor_set_with_template_khr)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdPushDescriptorSetWithTemplateKHR\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_push_descriptor_set_with_template_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+// KHR-suffixed aliases of the equivalent core constants.
+#[doc = "Generated from 'VK_KHR_descriptor_update_template'"]
+impl DebugReportObjectTypeEXT {
+    pub const DESCRIPTOR_UPDATE_TEMPLATE_KHR: Self = Self::DESCRIPTOR_UPDATE_TEMPLATE;
+}
+#[doc = "Generated from 'VK_KHR_descriptor_update_template'"]
+impl DescriptorUpdateTemplateType {
+    pub const DESCRIPTOR_SET_KHR: Self = Self::DESCRIPTOR_SET;
+}
+#[doc = "Generated from 'VK_KHR_descriptor_update_template'"]
+impl ObjectType {
+    pub const DESCRIPTOR_UPDATE_TEMPLATE_KHR: Self = Self::DESCRIPTOR_UPDATE_TEMPLATE;
+}
+#[doc = "Generated from 'VK_KHR_descriptor_update_template'"]
+impl StructureType {
+    pub const DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR: Self =
+        Self::DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO;
+}
+// Extension name and spec version for VK_NVX_device_generated_commands.
+impl NvxDeviceGeneratedCommandsFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NVX_device_generated_commands\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 3u32;
+}
+// No commands are exposed through this table; it is empty.
+#[derive(Clone)]
+pub struct NvxDeviceGeneratedCommandsFn {}
+unsafe impl Send for NvxDeviceGeneratedCommandsFn {}
+unsafe impl Sync for NvxDeviceGeneratedCommandsFn {}
+impl NvxDeviceGeneratedCommandsFn {
+    // Nothing to resolve; `_f` is ignored but kept for a uniform signature.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// Extension name and spec version for VK_NV_clip_space_w_scaling.
+impl NvClipSpaceWScalingFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_clip_space_w_scaling\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+// Raw Vulkan entry-point signature for this extension's single command.
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetViewportWScalingNV = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    first_viewport: u32,
+    viewport_count: u32,
+    p_viewport_w_scalings: *const ViewportWScalingNV,
+);
+// Function-pointer table holding the extension's resolved commands.
+#[derive(Clone)]
+pub struct NvClipSpaceWScalingFn {
+    pub cmd_set_viewport_w_scaling_nv: PFN_vkCmdSetViewportWScalingNV,
+}
+// Plain function pointers only — safe to share across threads.
+unsafe impl Send for NvClipSpaceWScalingFn {}
+unsafe impl Sync for NvClipSpaceWScalingFn {}
+impl NvClipSpaceWScalingFn {
+    // Resolves the command through `_f`; a null result falls back to a
+    // panicking stub so a missing entry point fails at first call.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            cmd_set_viewport_w_scaling_nv: unsafe {
+                unsafe extern "system" fn cmd_set_viewport_w_scaling_nv(
+                    _command_buffer: CommandBuffer,
+                    _first_viewport: u32,
+                    _viewport_count: u32,
+                    _p_viewport_w_scalings: *const ViewportWScalingNV,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_set_viewport_w_scaling_nv)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdSetViewportWScalingNV\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_viewport_w_scaling_nv
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+// Dynamic-state and StructureType values introduced by this extension.
+#[doc = "Generated from 'VK_NV_clip_space_w_scaling'"]
+impl DynamicState {
+    pub const VIEWPORT_W_SCALING_NV: Self = Self(1_000_087_000);
+}
+#[doc = "Generated from 'VK_NV_clip_space_w_scaling'"]
+impl StructureType {
+    pub const PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV: Self = Self(1_000_087_000);
+}
impl ExtDirectModeDisplayFn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_direct_mode_display\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[allow(non_camel_case_types)]
pub type PFN_vkReleaseDisplayEXT =
    unsafe extern "system" fn(physical_device: PhysicalDevice, display: DisplayKHR) -> Result;
// Function-pointer table for 'VK_EXT_direct_mode_display'.
#[derive(Clone)]
pub struct ExtDirectModeDisplayFn {
    pub release_display_ext: PFN_vkReleaseDisplayEXT,
}
unsafe impl Send for ExtDirectModeDisplayFn {}
unsafe impl Sync for ExtDirectModeDisplayFn {}
impl ExtDirectModeDisplayFn {
    /// Resolves the extension's commands via `_f` (symbol name -> pointer);
    /// a symbol `_f` reports as null is replaced by a stub that panics when called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            release_display_ext: unsafe {
                unsafe extern "system" fn release_display_ext(
                    _physical_device: PhysicalDevice,
                    _display: DisplayKHR,
                ) -> Result {
                    panic!(concat!("Unable to load ", stringify!(release_display_ext)))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkReleaseDisplayEXT\0");
                let val = _f(cname);
                if val.is_null() {
                    release_display_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
impl ExtAcquireXlibDisplayFn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_acquire_xlib_display\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[allow(non_camel_case_types)]
pub type PFN_vkAcquireXlibDisplayEXT = unsafe extern "system" fn(
    physical_device: PhysicalDevice,
    dpy: *mut Display,
    display: DisplayKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetRandROutputDisplayEXT = unsafe extern "system" fn(
    physical_device: PhysicalDevice,
    dpy: *mut Display,
    rr_output: RROutput,
    p_display: *mut DisplayKHR,
) -> Result;
// Function-pointer table for 'VK_EXT_acquire_xlib_display'.
#[derive(Clone)]
pub struct ExtAcquireXlibDisplayFn {
    pub acquire_xlib_display_ext: PFN_vkAcquireXlibDisplayEXT,
    pub get_rand_r_output_display_ext: PFN_vkGetRandROutputDisplayEXT,
}
unsafe impl Send for ExtAcquireXlibDisplayFn {}
unsafe impl Sync for ExtAcquireXlibDisplayFn {}
impl ExtAcquireXlibDisplayFn {
    /// Resolves the extension's commands via `_f` (symbol name -> pointer);
    /// a symbol `_f` reports as null is replaced by a stub that panics when called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            acquire_xlib_display_ext: unsafe {
                unsafe extern "system" fn acquire_xlib_display_ext(
                    _physical_device: PhysicalDevice,
                    _dpy: *mut Display,
                    _display: DisplayKHR,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(acquire_xlib_display_ext)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkAcquireXlibDisplayEXT\0");
                let val = _f(cname);
                if val.is_null() {
                    acquire_xlib_display_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_rand_r_output_display_ext: unsafe {
                unsafe extern "system" fn get_rand_r_output_display_ext(
                    _physical_device: PhysicalDevice,
                    _dpy: *mut Display,
                    _rr_output: RROutput,
                    _p_display: *mut DisplayKHR,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_rand_r_output_display_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetRandROutputDisplayEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_rand_r_output_display_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
impl ExtDisplaySurfaceCounterFn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_display_surface_counter\0")
        }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT = unsafe extern "system" fn(
    physical_device: PhysicalDevice,
    surface: SurfaceKHR,
    p_surface_capabilities: *mut SurfaceCapabilities2EXT,
) -> Result;
// Function-pointer table for 'VK_EXT_display_surface_counter'.
#[derive(Clone)]
pub struct ExtDisplaySurfaceCounterFn {
    pub get_physical_device_surface_capabilities2_ext:
        PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT,
}
unsafe impl Send for ExtDisplaySurfaceCounterFn {}
unsafe impl Sync for ExtDisplaySurfaceCounterFn {}
impl ExtDisplaySurfaceCounterFn {
    /// Resolves the extension's commands via `_f` (symbol name -> pointer);
    /// a symbol `_f` reports as null is replaced by a stub that panics when called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            get_physical_device_surface_capabilities2_ext: unsafe {
                unsafe extern "system" fn get_physical_device_surface_capabilities2_ext(
                    _physical_device: PhysicalDevice,
                    _surface: SurfaceKHR,
                    _p_surface_capabilities: *mut SurfaceCapabilities2EXT,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_physical_device_surface_capabilities2_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetPhysicalDeviceSurfaceCapabilities2EXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_physical_device_surface_capabilities2_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_EXT_display_surface_counter'"]
impl StructureType {
    pub const SURFACE_CAPABILITIES_2_EXT: Self = Self(1_000_090_000);
}
impl ExtDisplayControlFn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_display_control\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[allow(non_camel_case_types)]
pub type PFN_vkDisplayPowerControlEXT = unsafe extern "system" fn(
    device: Device,
    display: DisplayKHR,
    p_display_power_info: *const DisplayPowerInfoEXT,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkRegisterDeviceEventEXT = unsafe extern "system" fn(
    device: Device,
    p_device_event_info: *const DeviceEventInfoEXT,
    p_allocator: *const AllocationCallbacks,
    p_fence: *mut Fence,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkRegisterDisplayEventEXT = unsafe extern "system" fn(
    device: Device,
    display: DisplayKHR,
    p_display_event_info: *const DisplayEventInfoEXT,
    p_allocator: *const AllocationCallbacks,
    p_fence: *mut Fence,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetSwapchainCounterEXT = unsafe extern "system" fn(
    device: Device,
    swapchain: SwapchainKHR,
    counter: SurfaceCounterFlagsEXT,
    p_counter_value: *mut u64,
) -> Result;
// Function-pointer table for 'VK_EXT_display_control'.
#[derive(Clone)]
pub struct ExtDisplayControlFn {
    pub display_power_control_ext: PFN_vkDisplayPowerControlEXT,
    pub register_device_event_ext: PFN_vkRegisterDeviceEventEXT,
    pub register_display_event_ext: PFN_vkRegisterDisplayEventEXT,
    pub get_swapchain_counter_ext: PFN_vkGetSwapchainCounterEXT,
}
unsafe impl Send for ExtDisplayControlFn {}
unsafe impl Sync for ExtDisplayControlFn {}
impl ExtDisplayControlFn {
    /// Resolves the extension's commands via `_f` (symbol name -> pointer);
    /// every symbol `_f` reports as null is replaced by a stub that panics when called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            display_power_control_ext: unsafe {
                unsafe extern "system" fn display_power_control_ext(
                    _device: Device,
                    _display: DisplayKHR,
                    _p_display_power_info: *const DisplayPowerInfoEXT,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(display_power_control_ext)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDisplayPowerControlEXT\0");
                let val = _f(cname);
                if val.is_null() {
                    display_power_control_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            register_device_event_ext: unsafe {
                unsafe extern "system" fn register_device_event_ext(
                    _device: Device,
                    _p_device_event_info: *const DeviceEventInfoEXT,
                    _p_allocator: *const AllocationCallbacks,
                    _p_fence: *mut Fence,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(register_device_event_ext)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkRegisterDeviceEventEXT\0");
                let val = _f(cname);
                if val.is_null() {
                    register_device_event_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            register_display_event_ext: unsafe {
                unsafe extern "system" fn register_display_event_ext(
                    _device: Device,
                    _display: DisplayKHR,
                    _p_display_event_info: *const DisplayEventInfoEXT,
                    _p_allocator: *const AllocationCallbacks,
                    _p_fence: *mut Fence,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(register_display_event_ext)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkRegisterDisplayEventEXT\0");
                let val = _f(cname);
                if val.is_null() {
                    register_display_event_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_swapchain_counter_ext: unsafe {
                unsafe extern "system" fn get_swapchain_counter_ext(
                    _device: Device,
                    _swapchain: SwapchainKHR,
                    _counter: SurfaceCounterFlagsEXT,
                    _p_counter_value: *mut u64,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_swapchain_counter_ext)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetSwapchainCounterEXT\0");
                let val = _f(cname);
                if val.is_null() {
                    get_swapchain_counter_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_EXT_display_control'"]
impl StructureType {
    pub const DISPLAY_POWER_INFO_EXT: Self = Self(1_000_091_000);
    pub const DEVICE_EVENT_INFO_EXT: Self = Self(1_000_091_001);
    pub const DISPLAY_EVENT_INFO_EXT: Self = Self(1_000_091_002);
    pub const SWAPCHAIN_COUNTER_CREATE_INFO_EXT: Self = Self(1_000_091_003);
}
impl GoogleDisplayTimingFn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_display_timing\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetRefreshCycleDurationGOOGLE = unsafe extern "system" fn(
    device: Device,
    swapchain: SwapchainKHR,
    p_display_timing_properties: *mut RefreshCycleDurationGOOGLE,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPastPresentationTimingGOOGLE = unsafe extern "system" fn(
    device: Device,
    swapchain: SwapchainKHR,
    p_presentation_timing_count: *mut u32,
    p_presentation_timings: *mut PastPresentationTimingGOOGLE,
) -> Result;
// Function-pointer table for 'VK_GOOGLE_display_timing'.
#[derive(Clone)]
pub struct GoogleDisplayTimingFn {
    pub get_refresh_cycle_duration_google: PFN_vkGetRefreshCycleDurationGOOGLE,
    pub get_past_presentation_timing_google: PFN_vkGetPastPresentationTimingGOOGLE,
}
unsafe impl Send for GoogleDisplayTimingFn {}
unsafe impl Sync for GoogleDisplayTimingFn {}
impl GoogleDisplayTimingFn {
    /// Resolves the extension's commands via `_f` (symbol name -> pointer);
    /// a symbol `_f` reports as null is replaced by a stub that panics when called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            get_refresh_cycle_duration_google: unsafe {
                unsafe extern "system" fn get_refresh_cycle_duration_google(
                    _device: Device,
                    _swapchain: SwapchainKHR,
                    _p_display_timing_properties: *mut RefreshCycleDurationGOOGLE,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_refresh_cycle_duration_google)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetRefreshCycleDurationGOOGLE\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_refresh_cycle_duration_google
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_past_presentation_timing_google: unsafe {
                unsafe extern "system" fn get_past_presentation_timing_google(
                    _device: Device,
                    _swapchain: SwapchainKHR,
                    _p_presentation_timing_count: *mut u32,
                    _p_presentation_timings: *mut PastPresentationTimingGOOGLE,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_past_presentation_timing_google)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetPastPresentationTimingGOOGLE\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_past_presentation_timing_google
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_GOOGLE_display_timing'"]
impl StructureType {
    pub const PRESENT_TIMES_INFO_GOOGLE: Self = Self(1_000_092_000);
}
/// Marker table for `VK_NV_sample_mask_override_coverage`; the extension
/// defines no commands, so the table holds no function pointers.
#[derive(Clone)]
pub struct NvSampleMaskOverrideCoverageFn {}

// SAFETY: the type carries no data, so sharing it across threads is trivially sound.
unsafe impl Send for NvSampleMaskOverrideCoverageFn {}
unsafe impl Sync for NvSampleMaskOverrideCoverageFn {}

impl NvSampleMaskOverrideCoverageFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                b"VK_NV_sample_mask_override_coverage\0",
            )
        }
    }

    pub const SPEC_VERSION: u32 = 1u32;

    /// No symbols to resolve; `_f` is accepted only for loader-API uniformity.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// Marker table for `VK_NV_geometry_shader_passthrough`; the extension
/// defines no commands, so the table holds no function pointers.
#[derive(Clone)]
pub struct NvGeometryShaderPassthroughFn {}

// SAFETY: the type carries no data, so sharing it across threads is trivially sound.
unsafe impl Send for NvGeometryShaderPassthroughFn {}
unsafe impl Sync for NvGeometryShaderPassthroughFn {}

impl NvGeometryShaderPassthroughFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_geometry_shader_passthrough\0")
        }
    }

    pub const SPEC_VERSION: u32 = 1u32;

    /// No symbols to resolve; `_f` is accepted only for loader-API uniformity.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// Marker table for `VK_NV_viewport_array2`; the extension defines no
/// commands, so the table holds no function pointers.
#[derive(Clone)]
pub struct NvViewportArray2Fn {}

// SAFETY: the type carries no data, so sharing it across threads is trivially sound.
unsafe impl Send for NvViewportArray2Fn {}
unsafe impl Sync for NvViewportArray2Fn {}

impl NvViewportArray2Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_viewport_array2\0") }
    }

    pub const SPEC_VERSION: u32 = 1u32;

    /// No symbols to resolve; `_f` is accepted only for loader-API uniformity.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// Marker table for `VK_NVX_multiview_per_view_attributes`; the extension
/// defines no commands, so the table holds no function pointers.
#[derive(Clone)]
pub struct NvxMultiviewPerViewAttributesFn {}

// SAFETY: the type carries no data, so sharing it across threads is trivially sound.
unsafe impl Send for NvxMultiviewPerViewAttributesFn {}
unsafe impl Sync for NvxMultiviewPerViewAttributesFn {}

impl NvxMultiviewPerViewAttributesFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                b"VK_NVX_multiview_per_view_attributes\0",
            )
        }
    }

    pub const SPEC_VERSION: u32 = 1u32;

    /// No symbols to resolve; `_f` is accepted only for loader-API uniformity.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_NVX_multiview_per_view_attributes'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX: Self =
        Self(1_000_097_000);
}
// Subpass description flag bits contributed by this extension (bits 0 and 1).
#[doc = "Generated from 'VK_NVX_multiview_per_view_attributes'"]
impl SubpassDescriptionFlags {
    pub const PER_VIEW_ATTRIBUTES_NVX: Self = Self(0b1);
    pub const PER_VIEW_POSITION_X_ONLY_NVX: Self = Self(0b10);
}
/// Marker table for `VK_NV_viewport_swizzle`; the extension defines no
/// commands, so the table holds no function pointers.
#[derive(Clone)]
pub struct NvViewportSwizzleFn {}

// SAFETY: the type carries no data, so sharing it across threads is trivially sound.
unsafe impl Send for NvViewportSwizzleFn {}
unsafe impl Sync for NvViewportSwizzleFn {}

impl NvViewportSwizzleFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_viewport_swizzle\0") }
    }

    pub const SPEC_VERSION: u32 = 1u32;

    /// No symbols to resolve; `_f` is accepted only for loader-API uniformity.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// sType value for the pipeline-creation chain struct added by this extension.
#[doc = "Generated from 'VK_NV_viewport_swizzle'"]
impl StructureType {
    pub const PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV: Self = Self(1_000_098_000);
}
impl ExtDiscardRectanglesFn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_discard_rectangles\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetDiscardRectangleEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    first_discard_rectangle: u32,
    discard_rectangle_count: u32,
    p_discard_rectangles: *const Rect2D,
);
// Function-pointer table for 'VK_EXT_discard_rectangles'.
#[derive(Clone)]
pub struct ExtDiscardRectanglesFn {
    pub cmd_set_discard_rectangle_ext: PFN_vkCmdSetDiscardRectangleEXT,
}
unsafe impl Send for ExtDiscardRectanglesFn {}
unsafe impl Sync for ExtDiscardRectanglesFn {}
impl ExtDiscardRectanglesFn {
    /// Resolves the extension's commands via `_f` (symbol name -> pointer);
    /// a symbol `_f` reports as null is replaced by a stub that panics when called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            cmd_set_discard_rectangle_ext: unsafe {
                unsafe extern "system" fn cmd_set_discard_rectangle_ext(
                    _command_buffer: CommandBuffer,
                    _first_discard_rectangle: u32,
                    _discard_rectangle_count: u32,
                    _p_discard_rectangles: *const Rect2D,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_discard_rectangle_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdSetDiscardRectangleEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_discard_rectangle_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_EXT_discard_rectangles'"]
impl DynamicState {
    pub const DISCARD_RECTANGLE_EXT: Self = Self(1_000_099_000);
}
#[doc = "Generated from 'VK_EXT_discard_rectangles'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT: Self = Self(1_000_099_000);
    pub const PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT: Self = Self(1_000_099_001);
}
/// Placeholder table for the reserved extension number `VK_NV_extension_101`;
/// it defines no commands and has spec version 0.
#[derive(Clone)]
pub struct NvExtension101Fn {}

// SAFETY: the type carries no data, so sharing it across threads is trivially sound.
unsafe impl Send for NvExtension101Fn {}
unsafe impl Sync for NvExtension101Fn {}

impl NvExtension101Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_101\0") }
    }

    pub const SPEC_VERSION: u32 = 0u32;

    /// No symbols to resolve; `_f` is accepted only for loader-API uniformity.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// Marker table for `VK_EXT_conservative_rasterization`; the extension
/// defines no commands, so the table holds no function pointers.
#[derive(Clone)]
pub struct ExtConservativeRasterizationFn {}

// SAFETY: the type carries no data, so sharing it across threads is trivially sound.
unsafe impl Send for ExtConservativeRasterizationFn {}
unsafe impl Sync for ExtConservativeRasterizationFn {}

impl ExtConservativeRasterizationFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_conservative_rasterization\0")
        }
    }

    pub const SPEC_VERSION: u32 = 1u32;

    /// No symbols to resolve; `_f` is accepted only for loader-API uniformity.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// sType values for the properties / pipeline-creation chain structs added by
// this extension.
#[doc = "Generated from 'VK_EXT_conservative_rasterization'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT: Self = Self(1_000_101_000);
    pub const PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT: Self = Self(1_000_101_001);
}
/// Marker table for `VK_EXT_depth_clip_enable`; the extension defines no
/// commands, so the table holds no function pointers.
#[derive(Clone)]
pub struct ExtDepthClipEnableFn {}

// SAFETY: the type carries no data, so sharing it across threads is trivially sound.
unsafe impl Send for ExtDepthClipEnableFn {}
unsafe impl Sync for ExtDepthClipEnableFn {}

impl ExtDepthClipEnableFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_depth_clip_enable\0") }
    }

    pub const SPEC_VERSION: u32 = 1u32;

    /// No symbols to resolve; `_f` is accepted only for loader-API uniformity.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// sType values for the feature / pipeline-creation chain structs added by
// this extension.
#[doc = "Generated from 'VK_EXT_depth_clip_enable'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT: Self = Self(1_000_102_000);
    pub const PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT: Self = Self(1_000_102_001);
}
/// Placeholder table for the reserved extension number `VK_NV_extension_104`;
/// it defines no commands and has spec version 0.
#[derive(Clone)]
pub struct NvExtension104Fn {}

// SAFETY: the type carries no data, so sharing it across threads is trivially sound.
unsafe impl Send for NvExtension104Fn {}
unsafe impl Sync for NvExtension104Fn {}

impl NvExtension104Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_104\0") }
    }

    pub const SPEC_VERSION: u32 = 0u32;

    /// No symbols to resolve; `_f` is accepted only for loader-API uniformity.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// Marker table for `VK_EXT_swapchain_colorspace`; the extension defines no
/// commands, so the table holds no function pointers.
#[derive(Clone)]
pub struct ExtSwapchainColorspaceFn {}

// SAFETY: the type carries no data, so sharing it across threads is trivially sound.
unsafe impl Send for ExtSwapchainColorspaceFn {}
unsafe impl Sync for ExtSwapchainColorspaceFn {}

impl ExtSwapchainColorspaceFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_swapchain_colorspace\0") }
    }

    pub const SPEC_VERSION: u32 = 4u32;

    /// No symbols to resolve; `_f` is accepted only for loader-API uniformity.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// Additional color-space values contributed by the extension; offsets 1..=14
// within the extension's enumerant block.
#[doc = "Generated from 'VK_EXT_swapchain_colorspace'"]
impl ColorSpaceKHR {
    pub const DISPLAY_P3_NONLINEAR_EXT: Self = Self(1_000_104_001);
    pub const EXTENDED_SRGB_LINEAR_EXT: Self = Self(1_000_104_002);
    pub const DISPLAY_P3_LINEAR_EXT: Self = Self(1_000_104_003);
    pub const DCI_P3_NONLINEAR_EXT: Self = Self(1_000_104_004);
    pub const BT709_LINEAR_EXT: Self = Self(1_000_104_005);
    pub const BT709_NONLINEAR_EXT: Self = Self(1_000_104_006);
    pub const BT2020_LINEAR_EXT: Self = Self(1_000_104_007);
    pub const HDR10_ST2084_EXT: Self = Self(1_000_104_008);
    pub const DOLBYVISION_EXT: Self = Self(1_000_104_009);
    pub const HDR10_HLG_EXT: Self = Self(1_000_104_010);
    pub const ADOBERGB_LINEAR_EXT: Self = Self(1_000_104_011);
    pub const ADOBERGB_NONLINEAR_EXT: Self = Self(1_000_104_012);
    pub const PASS_THROUGH_EXT: Self = Self(1_000_104_013);
    pub const EXTENDED_SRGB_NONLINEAR_EXT: Self = Self(1_000_104_014);
}
impl ExtHdrMetadataFn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_hdr_metadata\0") }
    }
    pub const SPEC_VERSION: u32 = 2u32;
}
#[allow(non_camel_case_types)]
pub type PFN_vkSetHdrMetadataEXT = unsafe extern "system" fn(
    device: Device,
    swapchain_count: u32,
    p_swapchains: *const SwapchainKHR,
    p_metadata: *const HdrMetadataEXT,
);
// Function-pointer table for 'VK_EXT_hdr_metadata'.
#[derive(Clone)]
pub struct ExtHdrMetadataFn {
    pub set_hdr_metadata_ext: PFN_vkSetHdrMetadataEXT,
}
unsafe impl Send for ExtHdrMetadataFn {}
unsafe impl Sync for ExtHdrMetadataFn {}
impl ExtHdrMetadataFn {
    /// Resolves the extension's commands via `_f` (symbol name -> pointer);
    /// a symbol `_f` reports as null is replaced by a stub that panics when called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            set_hdr_metadata_ext: unsafe {
                unsafe extern "system" fn set_hdr_metadata_ext(
                    _device: Device,
                    _swapchain_count: u32,
                    _p_swapchains: *const SwapchainKHR,
                    _p_metadata: *const HdrMetadataEXT,
                ) {
                    panic!(concat!("Unable to load ", stringify!(set_hdr_metadata_ext)))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkSetHdrMetadataEXT\0");
                let val = _f(cname);
                if val.is_null() {
                    set_hdr_metadata_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_EXT_hdr_metadata'"]
impl StructureType {
    pub const HDR_METADATA_EXT: Self = Self(1_000_105_000);
}
/// Placeholder table for the reserved extension number `VK_IMG_extension_107`;
/// it defines no commands and has spec version 0.
#[derive(Clone)]
pub struct ImgExtension107Fn {}

// SAFETY: the type carries no data, so sharing it across threads is trivially sound.
unsafe impl Send for ImgExtension107Fn {}
unsafe impl Sync for ImgExtension107Fn {}

impl ImgExtension107Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_IMG_extension_107\0") }
    }

    pub const SPEC_VERSION: u32 = 0u32;

    /// No symbols to resolve; `_f` is accepted only for loader-API uniformity.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// Placeholder table for the reserved extension number `VK_IMG_extension_108`;
/// it defines no commands and has spec version 0.
#[derive(Clone)]
pub struct ImgExtension108Fn {}

// SAFETY: the type carries no data, so sharing it across threads is trivially sound.
unsafe impl Send for ImgExtension108Fn {}
unsafe impl Sync for ImgExtension108Fn {}

impl ImgExtension108Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_IMG_extension_108\0") }
    }

    pub const SPEC_VERSION: u32 = 0u32;

    /// No symbols to resolve; `_f` is accepted only for loader-API uniformity.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// Marker table for `VK_KHR_imageless_framebuffer`; the extension defines no
/// commands, so the table holds no function pointers.
#[derive(Clone)]
pub struct KhrImagelessFramebufferFn {}

// SAFETY: the type carries no data, so sharing it across threads is trivially sound.
unsafe impl Send for KhrImagelessFramebufferFn {}
unsafe impl Sync for KhrImagelessFramebufferFn {}

impl KhrImagelessFramebufferFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_imageless_framebuffer\0")
        }
    }

    pub const SPEC_VERSION: u32 = 1u32;

    /// No symbols to resolve; `_f` is accepted only for loader-API uniformity.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// KHR-suffixed aliases pointing at the equivalent core (promoted) values, kept
// so code written against the extension names keeps compiling.
#[doc = "Generated from 'VK_KHR_imageless_framebuffer'"]
impl FramebufferCreateFlags {
    pub const IMAGELESS_KHR: Self = Self::IMAGELESS;
}
#[doc = "Generated from 'VK_KHR_imageless_framebuffer'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR: Self =
        Self::PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES;
    pub const FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR: Self =
        Self::FRAMEBUFFER_ATTACHMENTS_CREATE_INFO;
    pub const FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR: Self = Self::FRAMEBUFFER_ATTACHMENT_IMAGE_INFO;
    pub const RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR: Self = Self::RENDER_PASS_ATTACHMENT_BEGIN_INFO;
}
impl KhrCreateRenderpass2Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_create_renderpass2\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateRenderPass2 = unsafe extern "system" fn(
    device: Device,
    p_create_info: *const RenderPassCreateInfo2,
    p_allocator: *const AllocationCallbacks,
    p_render_pass: *mut RenderPass,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBeginRenderPass2 = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    p_render_pass_begin: *const RenderPassBeginInfo,
    p_subpass_begin_info: *const SubpassBeginInfo,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdNextSubpass2 = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    p_subpass_begin_info: *const SubpassBeginInfo,
    p_subpass_end_info: *const SubpassEndInfo,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdEndRenderPass2 = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    p_subpass_end_info: *const SubpassEndInfo,
);
// Function-pointer table for 'VK_KHR_create_renderpass2'. Note the fields use
// the suffix-less core PFN types but are loaded from the *KHR symbol names.
#[derive(Clone)]
pub struct KhrCreateRenderpass2Fn {
    pub create_render_pass2_khr: PFN_vkCreateRenderPass2,
    pub cmd_begin_render_pass2_khr: PFN_vkCmdBeginRenderPass2,
    pub cmd_next_subpass2_khr: PFN_vkCmdNextSubpass2,
    pub cmd_end_render_pass2_khr: PFN_vkCmdEndRenderPass2,
}
unsafe impl Send for KhrCreateRenderpass2Fn {}
unsafe impl Sync for KhrCreateRenderpass2Fn {}
impl KhrCreateRenderpass2Fn {
    /// Resolves the extension's commands via `_f` (symbol name -> pointer);
    /// every symbol `_f` reports as null is replaced by a stub that panics when called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            create_render_pass2_khr: unsafe {
                unsafe extern "system" fn create_render_pass2_khr(
                    _device: Device,
                    _p_create_info: *const RenderPassCreateInfo2,
                    _p_allocator: *const AllocationCallbacks,
                    _p_render_pass: *mut RenderPass,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(create_render_pass2_khr)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateRenderPass2KHR\0");
                let val = _f(cname);
                if val.is_null() {
                    create_render_pass2_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_begin_render_pass2_khr: unsafe {
                unsafe extern "system" fn cmd_begin_render_pass2_khr(
                    _command_buffer: CommandBuffer,
                    _p_render_pass_begin: *const RenderPassBeginInfo,
                    _p_subpass_begin_info: *const SubpassBeginInfo,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_begin_render_pass2_khr)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginRenderPass2KHR\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_begin_render_pass2_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_next_subpass2_khr: unsafe {
                unsafe extern "system" fn cmd_next_subpass2_khr(
                    _command_buffer: CommandBuffer,
                    _p_subpass_begin_info: *const SubpassBeginInfo,
                    _p_subpass_end_info: *const SubpassEndInfo,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_next_subpass2_khr)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdNextSubpass2KHR\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_next_subpass2_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_end_render_pass2_khr: unsafe {
                unsafe extern "system" fn cmd_end_render_pass2_khr(
                    _command_buffer: CommandBuffer,
                    _p_subpass_end_info: *const SubpassEndInfo,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_end_render_pass2_khr)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdEndRenderPass2KHR\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_end_render_pass2_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
// KHR-suffixed aliases pointing at the equivalent core (promoted) sType values.
#[doc = "Generated from 'VK_KHR_create_renderpass2'"]
impl StructureType {
    pub const ATTACHMENT_DESCRIPTION_2_KHR: Self = Self::ATTACHMENT_DESCRIPTION_2;
    pub const ATTACHMENT_REFERENCE_2_KHR: Self = Self::ATTACHMENT_REFERENCE_2;
    pub const SUBPASS_DESCRIPTION_2_KHR: Self = Self::SUBPASS_DESCRIPTION_2;
    pub const SUBPASS_DEPENDENCY_2_KHR: Self = Self::SUBPASS_DEPENDENCY_2;
    pub const RENDER_PASS_CREATE_INFO_2_KHR: Self = Self::RENDER_PASS_CREATE_INFO_2;
    pub const SUBPASS_BEGIN_INFO_KHR: Self = Self::SUBPASS_BEGIN_INFO;
    pub const SUBPASS_END_INFO_KHR: Self = Self::SUBPASS_END_INFO;
}
/// Placeholder table for the reserved extension number `VK_IMG_extension_111`;
/// it defines no commands and has spec version 0.
#[derive(Clone)]
pub struct ImgExtension111Fn {}

// SAFETY: the type carries no data, so sharing it across threads is trivially sound.
unsafe impl Send for ImgExtension111Fn {}
unsafe impl Sync for ImgExtension111Fn {}

impl ImgExtension111Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_IMG_extension_111\0") }
    }

    pub const SPEC_VERSION: u32 = 0u32;

    /// No symbols to resolve; `_f` is accepted only for loader-API uniformity.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl KhrSharedPresentableImageFn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the literal ends in exactly one NUL and has no interior NULs.
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shared_presentable_image\0")
        }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetSwapchainStatusKHR =
    unsafe extern "system" fn(device: Device, swapchain: SwapchainKHR) -> Result;
// Function-pointer table for 'VK_KHR_shared_presentable_image'.
#[derive(Clone)]
pub struct KhrSharedPresentableImageFn {
    pub get_swapchain_status_khr: PFN_vkGetSwapchainStatusKHR,
}
unsafe impl Send for KhrSharedPresentableImageFn {}
unsafe impl Sync for KhrSharedPresentableImageFn {}
impl KhrSharedPresentableImageFn {
    /// Resolves the extension's commands via `_f` (symbol name -> pointer);
    /// a symbol `_f` reports as null is replaced by a stub that panics when called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            get_swapchain_status_khr: unsafe {
                unsafe extern "system" fn get_swapchain_status_khr(
                    _device: Device,
                    _swapchain: SwapchainKHR,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_swapchain_status_khr)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetSwapchainStatusKHR\0");
                let val = _f(cname);
                if val.is_null() {
                    get_swapchain_status_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_KHR_shared_presentable_image'"]
impl ImageLayout {
    pub const SHARED_PRESENT_KHR: Self = Self(1_000_111_000);
}
// No extra _KHR suffix on these variants — presumably because the enum type
// name already carries the KHR suffix.
#[doc = "Generated from 'VK_KHR_shared_presentable_image'"]
impl PresentModeKHR {
    pub const SHARED_DEMAND_REFRESH: Self = Self(1_000_111_000);
    pub const SHARED_CONTINUOUS_REFRESH: Self = Self(1_000_111_001);
}
#[doc = "Generated from 'VK_KHR_shared_presentable_image'"]
impl StructureType {
    pub const SHARED_PRESENT_SURFACE_CAPABILITIES_KHR: Self = Self(1_000_111_000);
}
+// VK_KHR_external_fence_capabilities: name, spec version, command table, and
+// the promoted-to-core aliases the extension adds.
+impl KhrExternalFenceCapabilitiesFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_fence_capabilities\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceExternalFenceProperties = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_external_fence_info: *const PhysicalDeviceExternalFenceInfo,
+ p_external_fence_properties: *mut ExternalFenceProperties,
+);
+// Table of the extension's instance-level function pointers.
+#[derive(Clone)]
+pub struct KhrExternalFenceCapabilitiesFn {
+ pub get_physical_device_external_fence_properties_khr:
+ PFN_vkGetPhysicalDeviceExternalFenceProperties,
+}
+// SAFETY: holds only plain function pointers; thread-safe to share.
+unsafe impl Send for KhrExternalFenceCapabilitiesFn {}
+unsafe impl Sync for KhrExternalFenceCapabilitiesFn {}
+impl KhrExternalFenceCapabilitiesFn {
+ // Resolves each command by name via the caller-supplied loader `_f`.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_physical_device_external_fence_properties_khr: unsafe {
+ // Panicking stub used when the loader returns null, so calls
+ // to an unloaded function fail loudly rather than via a null
+ // pointer jump.
+ unsafe extern "system" fn get_physical_device_external_fence_properties_khr(
+ _physical_device: PhysicalDevice,
+ _p_external_fence_info: *const PhysicalDeviceExternalFenceInfo,
+ _p_external_fence_properties: *mut ExternalFenceProperties,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_external_fence_properties_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceExternalFencePropertiesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_external_fence_properties_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_external_fence_capabilities'"]
+impl ExternalFenceFeatureFlags {
+ pub const EXPORTABLE_KHR: Self = Self::EXPORTABLE;
+ pub const IMPORTABLE_KHR: Self = Self::IMPORTABLE;
+}
+#[doc = "Generated from 'VK_KHR_external_fence_capabilities'"]
+impl ExternalFenceHandleTypeFlags {
+ pub const OPAQUE_FD_KHR: Self = Self::OPAQUE_FD;
+ pub const OPAQUE_WIN32_KHR: Self = Self::OPAQUE_WIN32;
+ pub const OPAQUE_WIN32_KMT_KHR: Self = Self::OPAQUE_WIN32_KMT;
+ pub const SYNC_FD_KHR: Self = Self::SYNC_FD;
+}
+#[doc = "Generated from 'VK_KHR_external_fence_capabilities'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR: Self =
+ Self::PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO;
+ pub const EXTERNAL_FENCE_PROPERTIES_KHR: Self = Self::EXTERNAL_FENCE_PROPERTIES;
+}
+// VK_KHR_external_fence: defines no commands, so the table is empty; only
+// the name, spec version, and promoted-enum aliases are emitted.
+impl KhrExternalFenceFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_fence\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrExternalFenceFn {}
+// SAFETY: unit struct; trivially thread-safe.
+unsafe impl Send for KhrExternalFenceFn {}
+unsafe impl Sync for KhrExternalFenceFn {}
+impl KhrExternalFenceFn {
+ // Loader closure unused: no entry points to resolve.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_external_fence'"]
+impl FenceImportFlags {
+ pub const TEMPORARY_KHR: Self = Self::TEMPORARY;
+}
+#[doc = "Generated from 'VK_KHR_external_fence'"]
+impl StructureType {
+ pub const EXPORT_FENCE_CREATE_INFO_KHR: Self = Self::EXPORT_FENCE_CREATE_INFO;
+}
+// VK_KHR_external_fence_win32: import/export of fence payloads as Win32
+// handles. Name, spec version, command table, and new StructureType values.
+impl KhrExternalFenceWin32Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_fence_win32\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkImportFenceWin32HandleKHR = unsafe extern "system" fn(
+ device: Device,
+ p_import_fence_win32_handle_info: *const ImportFenceWin32HandleInfoKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetFenceWin32HandleKHR = unsafe extern "system" fn(
+ device: Device,
+ p_get_win32_handle_info: *const FenceGetWin32HandleInfoKHR,
+ p_handle: *mut HANDLE,
+) -> Result;
+// Table of the extension's device-level function pointers.
+#[derive(Clone)]
+pub struct KhrExternalFenceWin32Fn {
+ pub import_fence_win32_handle_khr: PFN_vkImportFenceWin32HandleKHR,
+ pub get_fence_win32_handle_khr: PFN_vkGetFenceWin32HandleKHR,
+}
+// SAFETY: holds only plain function pointers; thread-safe to share.
+unsafe impl Send for KhrExternalFenceWin32Fn {}
+unsafe impl Sync for KhrExternalFenceWin32Fn {}
+impl KhrExternalFenceWin32Fn {
+ // Resolves each command by name via the caller-supplied loader `_f`.
+ // Fields that fail to resolve receive a panicking stub so misuse fails
+ // with a clear message instead of a null-pointer call.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ import_fence_win32_handle_khr: unsafe {
+ unsafe extern "system" fn import_fence_win32_handle_khr(
+ _device: Device,
+ _p_import_fence_win32_handle_info: *const ImportFenceWin32HandleInfoKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(import_fence_win32_handle_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkImportFenceWin32HandleKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ import_fence_win32_handle_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_fence_win32_handle_khr: unsafe {
+ unsafe extern "system" fn get_fence_win32_handle_khr(
+ _device: Device,
+ _p_get_win32_handle_info: *const FenceGetWin32HandleInfoKHR,
+ _p_handle: *mut HANDLE,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_fence_win32_handle_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetFenceWin32HandleKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_fence_win32_handle_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_external_fence_win32'"]
+impl StructureType {
+ pub const IMPORT_FENCE_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_114_000);
+ pub const EXPORT_FENCE_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_114_001);
+ pub const FENCE_GET_WIN32_HANDLE_INFO_KHR: Self = Self(1_000_114_002);
+}
+// VK_KHR_external_fence_fd: import/export of fence payloads as POSIX file
+// descriptors. Name, spec version, command table, and new StructureTypes.
+impl KhrExternalFenceFdFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_external_fence_fd\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkImportFenceFdKHR = unsafe extern "system" fn(
+ device: Device,
+ p_import_fence_fd_info: *const ImportFenceFdInfoKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetFenceFdKHR = unsafe extern "system" fn(
+ device: Device,
+ p_get_fd_info: *const FenceGetFdInfoKHR,
+ p_fd: *mut c_int,
+) -> Result;
+// Table of the extension's device-level function pointers.
+#[derive(Clone)]
+pub struct KhrExternalFenceFdFn {
+ pub import_fence_fd_khr: PFN_vkImportFenceFdKHR,
+ pub get_fence_fd_khr: PFN_vkGetFenceFdKHR,
+}
+// SAFETY: holds only plain function pointers; thread-safe to share.
+unsafe impl Send for KhrExternalFenceFdFn {}
+unsafe impl Sync for KhrExternalFenceFdFn {}
+impl KhrExternalFenceFdFn {
+ // Resolves each command by name; unresolved entries get a panicking stub
+ // so a call to a missing function fails loudly, not via a null pointer.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ import_fence_fd_khr: unsafe {
+ unsafe extern "system" fn import_fence_fd_khr(
+ _device: Device,
+ _p_import_fence_fd_info: *const ImportFenceFdInfoKHR,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(import_fence_fd_khr)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkImportFenceFdKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ import_fence_fd_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_fence_fd_khr: unsafe {
+ unsafe extern "system" fn get_fence_fd_khr(
+ _device: Device,
+ _p_get_fd_info: *const FenceGetFdInfoKHR,
+ _p_fd: *mut c_int,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(get_fence_fd_khr)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetFenceFdKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_fence_fd_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_external_fence_fd'"]
+impl StructureType {
+ pub const IMPORT_FENCE_FD_INFO_KHR: Self = Self(1_000_115_000);
+ pub const FENCE_GET_FD_INFO_KHR: Self = Self(1_000_115_001);
+}
+// VK_KHR_performance_query: performance counter enumeration and profiling
+// lock commands. Name, spec version, command table, and new enum values.
+impl KhrPerformanceQueryFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_performance_query\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR =
+ unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ queue_family_index: u32,
+ p_counter_count: *mut u32,
+ p_counters: *mut PerformanceCounterKHR,
+ p_counter_descriptions: *mut PerformanceCounterDescriptionKHR,
+ ) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR =
+ unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_performance_query_create_info: *const QueryPoolPerformanceCreateInfoKHR,
+ p_num_passes: *mut u32,
+ );
+#[allow(non_camel_case_types)]
+pub type PFN_vkAcquireProfilingLockKHR =
+ unsafe extern "system" fn(device: Device, p_info: *const AcquireProfilingLockInfoKHR) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkReleaseProfilingLockKHR = unsafe extern "system" fn(device: Device);
+// Table of the extension's function pointers.
+#[derive(Clone)]
+pub struct KhrPerformanceQueryFn {
+ pub enumerate_physical_device_queue_family_performance_query_counters_khr:
+ PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR,
+ pub get_physical_device_queue_family_performance_query_passes_khr:
+ PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR,
+ pub acquire_profiling_lock_khr: PFN_vkAcquireProfilingLockKHR,
+ pub release_profiling_lock_khr: PFN_vkReleaseProfilingLockKHR,
+}
+// SAFETY: holds only plain function pointers; thread-safe to share.
+unsafe impl Send for KhrPerformanceQueryFn {}
+unsafe impl Sync for KhrPerformanceQueryFn {}
+impl KhrPerformanceQueryFn {
+ // Resolves each command by name; unresolved entries get a panicking stub
+ // so a call to a missing function fails loudly, not via a null pointer.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ enumerate_physical_device_queue_family_performance_query_counters_khr: unsafe {
+ unsafe extern "system" fn enumerate_physical_device_queue_family_performance_query_counters_khr(
+ _physical_device: PhysicalDevice,
+ _queue_family_index: u32,
+ _p_counter_count: *mut u32,
+ _p_counters: *mut PerformanceCounterKHR,
+ _p_counter_descriptions: *mut PerformanceCounterDescriptionKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(
+ enumerate_physical_device_queue_family_performance_query_counters_khr
+ )
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ enumerate_physical_device_queue_family_performance_query_counters_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_queue_family_performance_query_passes_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_queue_family_performance_query_passes_khr(
+ _physical_device: PhysicalDevice,
+ _p_performance_query_create_info: *const QueryPoolPerformanceCreateInfoKHR,
+ _p_num_passes: *mut u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_queue_family_performance_query_passes_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_queue_family_performance_query_passes_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ acquire_profiling_lock_khr: unsafe {
+ unsafe extern "system" fn acquire_profiling_lock_khr(
+ _device: Device,
+ _p_info: *const AcquireProfilingLockInfoKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(acquire_profiling_lock_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkAcquireProfilingLockKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ acquire_profiling_lock_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ release_profiling_lock_khr: unsafe {
+ unsafe extern "system" fn release_profiling_lock_khr(_device: Device) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(release_profiling_lock_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkReleaseProfilingLockKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ release_profiling_lock_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_performance_query'"]
+impl QueryType {
+ pub const PERFORMANCE_QUERY_KHR: Self = Self(1_000_116_000);
+}
+#[doc = "Generated from 'VK_KHR_performance_query'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR: Self = Self(1_000_116_000);
+ pub const PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR: Self = Self(1_000_116_001);
+ pub const QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR: Self = Self(1_000_116_002);
+ pub const PERFORMANCE_QUERY_SUBMIT_INFO_KHR: Self = Self(1_000_116_003);
+ pub const ACQUIRE_PROFILING_LOCK_INFO_KHR: Self = Self(1_000_116_004);
+ pub const PERFORMANCE_COUNTER_KHR: Self = Self(1_000_116_005);
+ pub const PERFORMANCE_COUNTER_DESCRIPTION_KHR: Self = Self(1_000_116_006);
+}
+// VK_KHR_maintenance2: defines no commands (empty table); only the name,
+// spec version, and promoted-to-core enum aliases are emitted.
+impl KhrMaintenance2Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_maintenance2\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrMaintenance2Fn {}
+// SAFETY: unit struct; trivially thread-safe.
+unsafe impl Send for KhrMaintenance2Fn {}
+unsafe impl Sync for KhrMaintenance2Fn {}
+impl KhrMaintenance2Fn {
+ // Loader closure unused: no entry points to resolve.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_maintenance2'"]
+impl ImageCreateFlags {
+ pub const BLOCK_TEXEL_VIEW_COMPATIBLE_KHR: Self = Self::BLOCK_TEXEL_VIEW_COMPATIBLE;
+ pub const EXTENDED_USAGE_KHR: Self = Self::EXTENDED_USAGE;
+}
+#[doc = "Generated from 'VK_KHR_maintenance2'"]
+impl ImageLayout {
+ pub const DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR: Self =
+ Self::DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL;
+ pub const DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR: Self =
+ Self::DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL;
+}
+#[doc = "Generated from 'VK_KHR_maintenance2'"]
+impl PointClippingBehavior {
+ pub const ALL_CLIP_PLANES_KHR: Self = Self::ALL_CLIP_PLANES;
+ pub const USER_CLIP_PLANES_ONLY_KHR: Self = Self::USER_CLIP_PLANES_ONLY;
+}
+#[doc = "Generated from 'VK_KHR_maintenance2'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR: Self =
+ Self::PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES;
+ pub const RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR: Self =
+ Self::RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO;
+ pub const IMAGE_VIEW_USAGE_CREATE_INFO_KHR: Self = Self::IMAGE_VIEW_USAGE_CREATE_INFO;
+ pub const PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR: Self =
+ Self::PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO;
+}
+#[doc = "Generated from 'VK_KHR_maintenance2'"]
+impl TessellationDomainOrigin {
+ pub const UPPER_LEFT_KHR: Self = Self::UPPER_LEFT;
+ pub const LOWER_LEFT_KHR: Self = Self::LOWER_LEFT;
+}
+// Placeholder for the reserved extension number VK_KHR_extension_119: no
+// commands, no enums; SPEC_VERSION 0 marks it as a reserved slot.
+impl KhrExtension119Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_119\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct KhrExtension119Fn {}
+// SAFETY: unit struct; trivially thread-safe.
+unsafe impl Send for KhrExtension119Fn {}
+unsafe impl Sync for KhrExtension119Fn {}
+impl KhrExtension119Fn {
+ // Loader closure unused: no entry points to resolve.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+// VK_KHR_get_surface_capabilities2: extensible surface capability/format
+// queries. Name, spec version, command table, and new StructureTypes.
+impl KhrGetSurfaceCapabilities2Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_get_surface_capabilities2\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
+ p_surface_capabilities: *mut SurfaceCapabilities2KHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceSurfaceFormats2KHR = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
+ p_surface_format_count: *mut u32,
+ p_surface_formats: *mut SurfaceFormat2KHR,
+) -> Result;
+// Table of the extension's instance-level function pointers.
+#[derive(Clone)]
+pub struct KhrGetSurfaceCapabilities2Fn {
+ pub get_physical_device_surface_capabilities2_khr:
+ PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR,
+ pub get_physical_device_surface_formats2_khr: PFN_vkGetPhysicalDeviceSurfaceFormats2KHR,
+}
+// SAFETY: holds only plain function pointers; thread-safe to share.
+unsafe impl Send for KhrGetSurfaceCapabilities2Fn {}
+unsafe impl Sync for KhrGetSurfaceCapabilities2Fn {}
+impl KhrGetSurfaceCapabilities2Fn {
+ // Resolves each command by name; unresolved entries get a panicking stub
+ // so a call to a missing function fails loudly, not via a null pointer.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_physical_device_surface_capabilities2_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_surface_capabilities2_khr(
+ _physical_device: PhysicalDevice,
+ _p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
+ _p_surface_capabilities: *mut SurfaceCapabilities2KHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_surface_capabilities2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceSurfaceCapabilities2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_surface_capabilities2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_surface_formats2_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_surface_formats2_khr(
+ _physical_device: PhysicalDevice,
+ _p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
+ _p_surface_format_count: *mut u32,
+ _p_surface_formats: *mut SurfaceFormat2KHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_surface_formats2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceSurfaceFormats2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_surface_formats2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_get_surface_capabilities2'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_SURFACE_INFO_2_KHR: Self = Self(1_000_119_000);
+ pub const SURFACE_CAPABILITIES_2_KHR: Self = Self(1_000_119_001);
+ pub const SURFACE_FORMAT_2_KHR: Self = Self(1_000_119_002);
+}
+// VK_KHR_variable_pointers: defines no commands (empty table); only name,
+// spec version, and promoted StructureType aliases are emitted.
+impl KhrVariablePointersFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_variable_pointers\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrVariablePointersFn {}
+// SAFETY: unit struct; trivially thread-safe.
+unsafe impl Send for KhrVariablePointersFn {}
+unsafe impl Sync for KhrVariablePointersFn {}
+impl KhrVariablePointersFn {
+ // Loader closure unused: no entry points to resolve.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_variable_pointers'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR: Self =
+ Self::PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES;
+ // Older singular-spelled alias kept for source compatibility; it chains
+ // to the plural-spelled alias above.
+ pub const PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR: Self =
+ Self::PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR;
+}
+// VK_KHR_get_display_properties2: extensible display/plane/mode property
+// queries. Name, spec version, command table, and new StructureTypes.
+impl KhrGetDisplayProperties2Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_get_display_properties2\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceDisplayProperties2KHR = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_property_count: *mut u32,
+ p_properties: *mut DisplayProperties2KHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_property_count: *mut u32,
+ p_properties: *mut DisplayPlaneProperties2KHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDisplayModeProperties2KHR = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ display: DisplayKHR,
+ p_property_count: *mut u32,
+ p_properties: *mut DisplayModeProperties2KHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDisplayPlaneCapabilities2KHR = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_display_plane_info: *const DisplayPlaneInfo2KHR,
+ p_capabilities: *mut DisplayPlaneCapabilities2KHR,
+) -> Result;
+// Table of the extension's instance-level function pointers.
+#[derive(Clone)]
+pub struct KhrGetDisplayProperties2Fn {
+ pub get_physical_device_display_properties2_khr: PFN_vkGetPhysicalDeviceDisplayProperties2KHR,
+ pub get_physical_device_display_plane_properties2_khr:
+ PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR,
+ pub get_display_mode_properties2_khr: PFN_vkGetDisplayModeProperties2KHR,
+ pub get_display_plane_capabilities2_khr: PFN_vkGetDisplayPlaneCapabilities2KHR,
+}
+// SAFETY: holds only plain function pointers; thread-safe to share.
+unsafe impl Send for KhrGetDisplayProperties2Fn {}
+unsafe impl Sync for KhrGetDisplayProperties2Fn {}
+impl KhrGetDisplayProperties2Fn {
+ // Resolves each command by name; unresolved entries get a panicking stub
+ // so a call to a missing function fails loudly, not via a null pointer.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_physical_device_display_properties2_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_display_properties2_khr(
+ _physical_device: PhysicalDevice,
+ _p_property_count: *mut u32,
+ _p_properties: *mut DisplayProperties2KHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_display_properties2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceDisplayProperties2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_display_properties2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_display_plane_properties2_khr: unsafe {
+ unsafe extern "system" fn get_physical_device_display_plane_properties2_khr(
+ _physical_device: PhysicalDevice,
+ _p_property_count: *mut u32,
+ _p_properties: *mut DisplayPlaneProperties2KHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_display_plane_properties2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceDisplayPlaneProperties2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_display_plane_properties2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_display_mode_properties2_khr: unsafe {
+ unsafe extern "system" fn get_display_mode_properties2_khr(
+ _physical_device: PhysicalDevice,
+ _display: DisplayKHR,
+ _p_property_count: *mut u32,
+ _p_properties: *mut DisplayModeProperties2KHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_display_mode_properties2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDisplayModeProperties2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_display_mode_properties2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_display_plane_capabilities2_khr: unsafe {
+ unsafe extern "system" fn get_display_plane_capabilities2_khr(
+ _physical_device: PhysicalDevice,
+ _p_display_plane_info: *const DisplayPlaneInfo2KHR,
+ _p_capabilities: *mut DisplayPlaneCapabilities2KHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_display_plane_capabilities2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDisplayPlaneCapabilities2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_display_plane_capabilities2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_get_display_properties2'"]
+impl StructureType {
+ pub const DISPLAY_PROPERTIES_2_KHR: Self = Self(1_000_121_000);
+ pub const DISPLAY_PLANE_PROPERTIES_2_KHR: Self = Self(1_000_121_001);
+ pub const DISPLAY_MODE_PROPERTIES_2_KHR: Self = Self(1_000_121_002);
+ pub const DISPLAY_PLANE_INFO_2_KHR: Self = Self(1_000_121_003);
+ pub const DISPLAY_PLANE_CAPABILITIES_2_KHR: Self = Self(1_000_121_004);
+}
+// VK_MVK_ios_surface (MoltenVK iOS surface creation). Name, spec version,
+// command table, and the new StructureType value.
+impl MvkIosSurfaceFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_MVK_ios_surface\0") }
+ }
+ pub const SPEC_VERSION: u32 = 3u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateIOSSurfaceMVK = unsafe extern "system" fn(
+ instance: Instance,
+ p_create_info: *const IOSSurfaceCreateInfoMVK,
+ p_allocator: *const AllocationCallbacks,
+ p_surface: *mut SurfaceKHR,
+) -> Result;
+// Table of the extension's instance-level function pointers.
+#[derive(Clone)]
+pub struct MvkIosSurfaceFn {
+ pub create_ios_surface_mvk: PFN_vkCreateIOSSurfaceMVK,
+}
+// SAFETY: holds only plain function pointers; thread-safe to share.
+unsafe impl Send for MvkIosSurfaceFn {}
+unsafe impl Sync for MvkIosSurfaceFn {}
+impl MvkIosSurfaceFn {
+ // Resolves the command by name; on failure installs a panicking stub so
+ // a call to the missing function fails loudly, not via a null pointer.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_ios_surface_mvk: unsafe {
+ unsafe extern "system" fn create_ios_surface_mvk(
+ _instance: Instance,
+ _p_create_info: *const IOSSurfaceCreateInfoMVK,
+ _p_allocator: *const AllocationCallbacks,
+ _p_surface: *mut SurfaceKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_ios_surface_mvk)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateIOSSurfaceMVK\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_ios_surface_mvk
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_MVK_ios_surface'"]
+impl StructureType {
+ pub const IOS_SURFACE_CREATE_INFO_MVK: Self = Self(1_000_122_000);
+}
+// VK_MVK_macos_surface (MoltenVK macOS surface creation). Name, spec
+// version, command table, and the new StructureType value.
+impl MvkMacosSurfaceFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_MVK_macos_surface\0") }
+ }
+ pub const SPEC_VERSION: u32 = 3u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateMacOSSurfaceMVK = unsafe extern "system" fn(
+ instance: Instance,
+ p_create_info: *const MacOSSurfaceCreateInfoMVK,
+ p_allocator: *const AllocationCallbacks,
+ p_surface: *mut SurfaceKHR,
+) -> Result;
+// Table of the extension's instance-level function pointers.
+#[derive(Clone)]
+pub struct MvkMacosSurfaceFn {
+ pub create_mac_os_surface_mvk: PFN_vkCreateMacOSSurfaceMVK,
+}
+// SAFETY: holds only plain function pointers; thread-safe to share.
+unsafe impl Send for MvkMacosSurfaceFn {}
+unsafe impl Sync for MvkMacosSurfaceFn {}
+impl MvkMacosSurfaceFn {
+ // Resolves the command by name; on failure installs a panicking stub so
+ // a call to the missing function fails loudly, not via a null pointer.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_mac_os_surface_mvk: unsafe {
+ unsafe extern "system" fn create_mac_os_surface_mvk(
+ _instance: Instance,
+ _p_create_info: *const MacOSSurfaceCreateInfoMVK,
+ _p_allocator: *const AllocationCallbacks,
+ _p_surface: *mut SurfaceKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_mac_os_surface_mvk)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateMacOSSurfaceMVK\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_mac_os_surface_mvk
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_MVK_macos_surface'"]
+impl StructureType {
+ pub const MACOS_SURFACE_CREATE_INFO_MVK: Self = Self(1_000_123_000);
+}
+// VK_MVK_moltenvk: registered name only (SPEC_VERSION 0, no commands in
+// this table).
+impl MvkMoltenvkFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_MVK_moltenvk\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct MvkMoltenvkFn {}
+// SAFETY: unit struct; trivially thread-safe.
+unsafe impl Send for MvkMoltenvkFn {}
+unsafe impl Sync for MvkMoltenvkFn {}
+impl MvkMoltenvkFn {
+ // Loader closure unused: no entry points to resolve.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+// VK_EXT_external_memory_dma_buf: defines no commands (empty table); adds
+// only the DMA_BUF_EXT external-memory handle-type bit.
+impl ExtExternalMemoryDmaBufFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_external_memory_dma_buf\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtExternalMemoryDmaBufFn {}
+// SAFETY: unit struct; trivially thread-safe.
+unsafe impl Send for ExtExternalMemoryDmaBufFn {}
+unsafe impl Sync for ExtExternalMemoryDmaBufFn {}
+impl ExtExternalMemoryDmaBufFn {
+ // Loader closure unused: no entry points to resolve.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_external_memory_dma_buf'"]
+impl ExternalMemoryHandleTypeFlags {
+ // Bit 9 of the flag set, written in binary to make the single-bit nature
+ // explicit.
+ pub const DMA_BUF_EXT: Self = Self(0b10_0000_0000);
+}
+// VK_EXT_queue_family_foreign: defines no commands; name and spec version
+// only (its queue-family constant is defined elsewhere in the bindings).
+impl ExtQueueFamilyForeignFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_queue_family_foreign\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtQueueFamilyForeignFn {}
+// SAFETY: unit struct; trivially thread-safe.
+unsafe impl Send for ExtQueueFamilyForeignFn {}
+unsafe impl Sync for ExtQueueFamilyForeignFn {}
+impl ExtQueueFamilyForeignFn {
+ // Loader closure unused: no entry points to resolve.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+// VK_KHR_dedicated_allocation: defines no commands (empty table); only the
+// name, spec version, and promoted StructureType aliases are emitted.
+impl KhrDedicatedAllocationFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: NUL-terminated literal with no interior NULs.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_dedicated_allocation\0") }
+ }
+ pub const SPEC_VERSION: u32 = 3u32;
+}
+#[derive(Clone)]
+pub struct KhrDedicatedAllocationFn {}
+// SAFETY: unit struct; trivially thread-safe.
+unsafe impl Send for KhrDedicatedAllocationFn {}
+unsafe impl Sync for KhrDedicatedAllocationFn {}
+impl KhrDedicatedAllocationFn {
+ // Loader closure unused: no entry points to resolve.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_dedicated_allocation'"]
+impl StructureType {
+ pub const MEMORY_DEDICATED_REQUIREMENTS_KHR: Self = Self::MEMORY_DEDICATED_REQUIREMENTS;
+ pub const MEMORY_DEDICATED_ALLOCATE_INFO_KHR: Self = Self::MEMORY_DEDICATED_ALLOCATE_INFO;
+}
+impl ExtDebugUtilsFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_debug_utils\0") }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkSetDebugUtilsObjectNameEXT = unsafe extern "system" fn(
+ device: Device,
+ p_name_info: *const DebugUtilsObjectNameInfoEXT,
+) -> Result;
// Raw function-pointer prototypes for the VK_EXT_debug_utils commands,
// mirroring the C declarations from vulkan_core.h. Parameter names follow the
// Vulkan convention (`p_*` for pointer arguments); commands that can fail
// return `Result` (VkResult), the rest return nothing.
#[allow(non_camel_case_types)]
pub type PFN_vkSetDebugUtilsObjectTagEXT = unsafe extern "system" fn(
    device: Device,
    p_tag_info: *const DebugUtilsObjectTagInfoEXT,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkQueueBeginDebugUtilsLabelEXT =
    unsafe extern "system" fn(queue: Queue, p_label_info: *const DebugUtilsLabelEXT);
#[allow(non_camel_case_types)]
pub type PFN_vkQueueEndDebugUtilsLabelEXT = unsafe extern "system" fn(queue: Queue);
#[allow(non_camel_case_types)]
pub type PFN_vkQueueInsertDebugUtilsLabelEXT =
    unsafe extern "system" fn(queue: Queue, p_label_info: *const DebugUtilsLabelEXT);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBeginDebugUtilsLabelEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    p_label_info: *const DebugUtilsLabelEXT,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdEndDebugUtilsLabelEXT = unsafe extern "system" fn(command_buffer: CommandBuffer);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdInsertDebugUtilsLabelEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    p_label_info: *const DebugUtilsLabelEXT,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCreateDebugUtilsMessengerEXT = unsafe extern "system" fn(
    instance: Instance,
    p_create_info: *const DebugUtilsMessengerCreateInfoEXT,
    p_allocator: *const AllocationCallbacks,
    p_messenger: *mut DebugUtilsMessengerEXT,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyDebugUtilsMessengerEXT = unsafe extern "system" fn(
    instance: Instance,
    messenger: DebugUtilsMessengerEXT,
    p_allocator: *const AllocationCallbacks,
);
#[allow(non_camel_case_types)]
pub type PFN_vkSubmitDebugUtilsMessageEXT = unsafe extern "system" fn(
    instance: Instance,
    message_severity: DebugUtilsMessageSeverityFlagsEXT,
    message_types: DebugUtilsMessageTypeFlagsEXT,
    p_callback_data: *const DebugUtilsMessengerCallbackDataEXT,
);
// Function-pointer table for VK_EXT_debug_utils. Each field holds either the
// entry point resolved by `load`, or a panicking fallback stub when the
// loader could not supply the symbol.
#[derive(Clone)]
pub struct ExtDebugUtilsFn {
    pub set_debug_utils_object_name_ext: PFN_vkSetDebugUtilsObjectNameEXT,
    pub set_debug_utils_object_tag_ext: PFN_vkSetDebugUtilsObjectTagEXT,
    pub queue_begin_debug_utils_label_ext: PFN_vkQueueBeginDebugUtilsLabelEXT,
    pub queue_end_debug_utils_label_ext: PFN_vkQueueEndDebugUtilsLabelEXT,
    pub queue_insert_debug_utils_label_ext: PFN_vkQueueInsertDebugUtilsLabelEXT,
    pub cmd_begin_debug_utils_label_ext: PFN_vkCmdBeginDebugUtilsLabelEXT,
    pub cmd_end_debug_utils_label_ext: PFN_vkCmdEndDebugUtilsLabelEXT,
    pub cmd_insert_debug_utils_label_ext: PFN_vkCmdInsertDebugUtilsLabelEXT,
    pub create_debug_utils_messenger_ext: PFN_vkCreateDebugUtilsMessengerEXT,
    pub destroy_debug_utils_messenger_ext: PFN_vkDestroyDebugUtilsMessengerEXT,
    pub submit_debug_utils_message_ext: PFN_vkSubmitDebugUtilsMessageEXT,
}
// SAFETY: the table contains only plain `extern "system"` function pointers
// (no interior mutability or thread-affine state), so it can be sent and
// shared across threads.
unsafe impl Send for ExtDebugUtilsFn {}
unsafe impl Sync for ExtDebugUtilsFn {}
impl ExtDebugUtilsFn {
    /// Resolves all VK_EXT_debug_utils entry points through the loader
    /// callback `_f` (a `vkGet*ProcAddr`-style lookup by C-string name).
    ///
    /// For every command: the loader is queried by its Vulkan symbol name;
    /// if it returns NULL, a local stub that panics with the missing symbol
    /// name is installed instead, so calling an unavailable entry point
    /// fails loudly rather than jumping through a null pointer.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            set_debug_utils_object_name_ext: unsafe {
                // Panicking fallback used when the symbol is absent.
                unsafe extern "system" fn set_debug_utils_object_name_ext(
                    _device: Device,
                    _p_name_info: *const DebugUtilsObjectNameInfoEXT,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(set_debug_utils_object_name_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkSetDebugUtilsObjectNameEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    set_debug_utils_object_name_ext
                } else {
                    // Reinterpret the non-null loader result as the typed
                    // function pointer for this exact symbol.
                    ::std::mem::transmute(val)
                }
            },
            set_debug_utils_object_tag_ext: unsafe {
                unsafe extern "system" fn set_debug_utils_object_tag_ext(
                    _device: Device,
                    _p_tag_info: *const DebugUtilsObjectTagInfoEXT,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(set_debug_utils_object_tag_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkSetDebugUtilsObjectTagEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    set_debug_utils_object_tag_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            queue_begin_debug_utils_label_ext: unsafe {
                unsafe extern "system" fn queue_begin_debug_utils_label_ext(
                    _queue: Queue,
                    _p_label_info: *const DebugUtilsLabelEXT,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(queue_begin_debug_utils_label_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkQueueBeginDebugUtilsLabelEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    queue_begin_debug_utils_label_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            queue_end_debug_utils_label_ext: unsafe {
                unsafe extern "system" fn queue_end_debug_utils_label_ext(_queue: Queue) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(queue_end_debug_utils_label_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkQueueEndDebugUtilsLabelEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    queue_end_debug_utils_label_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            queue_insert_debug_utils_label_ext: unsafe {
                unsafe extern "system" fn queue_insert_debug_utils_label_ext(
                    _queue: Queue,
                    _p_label_info: *const DebugUtilsLabelEXT,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(queue_insert_debug_utils_label_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkQueueInsertDebugUtilsLabelEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    queue_insert_debug_utils_label_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_begin_debug_utils_label_ext: unsafe {
                unsafe extern "system" fn cmd_begin_debug_utils_label_ext(
                    _command_buffer: CommandBuffer,
                    _p_label_info: *const DebugUtilsLabelEXT,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_begin_debug_utils_label_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdBeginDebugUtilsLabelEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_begin_debug_utils_label_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_end_debug_utils_label_ext: unsafe {
                unsafe extern "system" fn cmd_end_debug_utils_label_ext(
                    _command_buffer: CommandBuffer,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_end_debug_utils_label_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdEndDebugUtilsLabelEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_end_debug_utils_label_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_insert_debug_utils_label_ext: unsafe {
                unsafe extern "system" fn cmd_insert_debug_utils_label_ext(
                    _command_buffer: CommandBuffer,
                    _p_label_info: *const DebugUtilsLabelEXT,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_insert_debug_utils_label_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdInsertDebugUtilsLabelEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_insert_debug_utils_label_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            create_debug_utils_messenger_ext: unsafe {
                unsafe extern "system" fn create_debug_utils_messenger_ext(
                    _instance: Instance,
                    _p_create_info: *const DebugUtilsMessengerCreateInfoEXT,
                    _p_allocator: *const AllocationCallbacks,
                    _p_messenger: *mut DebugUtilsMessengerEXT,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(create_debug_utils_messenger_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCreateDebugUtilsMessengerEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    create_debug_utils_messenger_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            destroy_debug_utils_messenger_ext: unsafe {
                unsafe extern "system" fn destroy_debug_utils_messenger_ext(
                    _instance: Instance,
                    _messenger: DebugUtilsMessengerEXT,
                    _p_allocator: *const AllocationCallbacks,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(destroy_debug_utils_messenger_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkDestroyDebugUtilsMessengerEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    destroy_debug_utils_messenger_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            submit_debug_utils_message_ext: unsafe {
                unsafe extern "system" fn submit_debug_utils_message_ext(
                    _instance: Instance,
                    _message_severity: DebugUtilsMessageSeverityFlagsEXT,
                    _message_types: DebugUtilsMessageTypeFlagsEXT,
                    _p_callback_data: *const DebugUtilsMessengerCallbackDataEXT,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(submit_debug_utils_message_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkSubmitDebugUtilsMessageEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    submit_debug_utils_message_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
// Enum values contributed by VK_EXT_debug_utils. The numeric values follow
// the Vulkan registry scheme for extension enums: 1_000_000_000 +
// (extension_number - 1) * 1000 + offset (extension number 129 here).
#[doc = "Generated from 'VK_EXT_debug_utils'"]
impl ObjectType {
    pub const DEBUG_UTILS_MESSENGER_EXT: Self = Self(1_000_128_000);
}
#[doc = "Generated from 'VK_EXT_debug_utils'"]
impl StructureType {
    pub const DEBUG_UTILS_OBJECT_NAME_INFO_EXT: Self = Self(1_000_128_000);
    pub const DEBUG_UTILS_OBJECT_TAG_INFO_EXT: Self = Self(1_000_128_001);
    pub const DEBUG_UTILS_LABEL_EXT: Self = Self(1_000_128_002);
    pub const DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT: Self = Self(1_000_128_003);
    pub const DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT: Self = Self(1_000_128_004);
}
impl AndroidExternalMemoryAndroidHardwareBufferFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                b"VK_ANDROID_external_memory_android_hardware_buffer\0",
            )
        }
    }
    // Revision of this extension the bindings were generated against.
    pub const SPEC_VERSION: u32 = 5u32;
}
// Raw prototypes for the two commands of
// VK_ANDROID_external_memory_android_hardware_buffer.
#[allow(non_camel_case_types)]
pub type PFN_vkGetAndroidHardwareBufferPropertiesANDROID = unsafe extern "system" fn(
    device: Device,
    buffer: *const AHardwareBuffer,
    p_properties: *mut AndroidHardwareBufferPropertiesANDROID,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetMemoryAndroidHardwareBufferANDROID = unsafe extern "system" fn(
    device: Device,
    p_info: *const MemoryGetAndroidHardwareBufferInfoANDROID,
    p_buffer: *mut *mut AHardwareBuffer,
) -> Result;
// Function-pointer table; populated by `load` below.
#[derive(Clone)]
pub struct AndroidExternalMemoryAndroidHardwareBufferFn {
    pub get_android_hardware_buffer_properties_android:
        PFN_vkGetAndroidHardwareBufferPropertiesANDROID,
    pub get_memory_android_hardware_buffer_android: PFN_vkGetMemoryAndroidHardwareBufferANDROID,
}
// SAFETY: only plain function pointers are stored; no interior mutability.
unsafe impl Send for AndroidExternalMemoryAndroidHardwareBufferFn {}
unsafe impl Sync for AndroidExternalMemoryAndroidHardwareBufferFn {}
+impl AndroidExternalMemoryAndroidHardwareBufferFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_android_hardware_buffer_properties_android: unsafe {
+ unsafe extern "system" fn get_android_hardware_buffer_properties_android(
+ _device: Device,
+ _buffer: *const AHardwareBuffer,
+ _p_properties: *mut AndroidHardwareBufferPropertiesANDROID,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_android_hardware_buffer_properties_android)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetAndroidHardwareBufferPropertiesANDROID\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_android_hardware_buffer_properties_android
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_memory_android_hardware_buffer_android: unsafe {
+ unsafe extern "system" fn get_memory_android_hardware_buffer_android(
+ _device: Device,
+ _p_info: *const MemoryGetAndroidHardwareBufferInfoANDROID,
+ _p_buffer: *mut *mut AHardwareBuffer,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_memory_android_hardware_buffer_android)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetMemoryAndroidHardwareBufferANDROID\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_memory_android_hardware_buffer_android
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
// Enum values contributed by VK_ANDROID_external_memory_android_hardware_buffer
// (extension number 130, hence the 1_000_129_xxx structure-type values).
#[doc = "Generated from 'VK_ANDROID_external_memory_android_hardware_buffer'"]
impl ExternalMemoryHandleTypeFlags {
    pub const ANDROID_HARDWARE_BUFFER_ANDROID: Self = Self(0b100_0000_0000);
}
#[doc = "Generated from 'VK_ANDROID_external_memory_android_hardware_buffer'"]
impl StructureType {
    pub const ANDROID_HARDWARE_BUFFER_USAGE_ANDROID: Self = Self(1_000_129_000);
    pub const ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID: Self = Self(1_000_129_001);
    pub const ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID: Self = Self(1_000_129_002);
    pub const IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID: Self = Self(1_000_129_003);
    pub const MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID: Self = Self(1_000_129_004);
    pub const EXTERNAL_FORMAT_ANDROID: Self = Self(1_000_129_005);
    pub const ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID: Self = Self(1_000_129_006);
}
impl ExtSamplerFilterMinmaxFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_sampler_filter_minmax\0")
        }
    }
    pub const SPEC_VERSION: u32 = 2u32;
}
// VK_EXT_sampler_filter_minmax introduces no commands, so the table is empty.
#[derive(Clone)]
pub struct ExtSamplerFilterMinmaxFn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for ExtSamplerFilterMinmaxFn {}
unsafe impl Sync for ExtSamplerFilterMinmaxFn {}
impl ExtSamplerFilterMinmaxFn {
    /// No entry points to resolve; `_f` is accepted for interface
    /// uniformity with the other extension tables and never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// EXT-suffixed constants below are aliases of the corresponding core
// constants (the values this extension defined before it was folded into
// core Vulkan).
#[doc = "Generated from 'VK_EXT_sampler_filter_minmax'"]
impl FormatFeatureFlags {
    pub const SAMPLED_IMAGE_FILTER_MINMAX_EXT: Self = Self::SAMPLED_IMAGE_FILTER_MINMAX;
}
#[doc = "Generated from 'VK_EXT_sampler_filter_minmax'"]
impl SamplerReductionMode {
    pub const WEIGHTED_AVERAGE_EXT: Self = Self::WEIGHTED_AVERAGE;
    pub const MIN_EXT: Self = Self::MIN;
    pub const MAX_EXT: Self = Self::MAX;
}
#[doc = "Generated from 'VK_EXT_sampler_filter_minmax'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT: Self =
        Self::PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES;
    pub const SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT: Self =
        Self::SAMPLER_REDUCTION_MODE_CREATE_INFO;
}
// VK_KHR_storage_buffer_storage_class: a pure SPIR-V capability extension —
// no commands, no enum values; only the name/version and an empty table.
impl KhrStorageBufferStorageClassFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                b"VK_KHR_storage_buffer_storage_class\0",
            )
        }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[derive(Clone)]
pub struct KhrStorageBufferStorageClassFn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for KhrStorageBufferStorageClassFn {}
unsafe impl Sync for KhrStorageBufferStorageClassFn {}
impl KhrStorageBufferStorageClassFn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// VK_AMD_gpu_shader_int16: likewise a commands-free shader extension.
impl AmdGpuShaderInt16Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_gpu_shader_int16\0") }
    }
    pub const SPEC_VERSION: u32 = 2u32;
}
#[derive(Clone)]
pub struct AmdGpuShaderInt16Fn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for AmdGpuShaderInt16Fn {}
unsafe impl Sync for AmdGpuShaderInt16Fn {}
impl AmdGpuShaderInt16Fn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// VK_AMD_extension_134/135/136: reserved (unpublished) AMD extension
// numbers; the registry keeps placeholders so numbering stays stable.
impl AmdExtension134Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_134\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
#[derive(Clone)]
pub struct AmdExtension134Fn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for AmdExtension134Fn {}
unsafe impl Sync for AmdExtension134Fn {}
impl AmdExtension134Fn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl AmdExtension135Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_135\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
#[derive(Clone)]
pub struct AmdExtension135Fn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for AmdExtension135Fn {}
unsafe impl Sync for AmdExtension135Fn {}
impl AmdExtension135Fn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// Flag bit (bit 25) reserved by this placeholder extension.
#[doc = "Generated from 'VK_AMD_extension_135'"]
impl BufferUsageFlags {
    pub const RESERVED_25_AMD: Self = Self(0b10_0000_0000_0000_0000_0000_0000);
}
impl AmdExtension136Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_136\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
#[derive(Clone)]
pub struct AmdExtension136Fn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for AmdExtension136Fn {}
unsafe impl Sync for AmdExtension136Fn {}
impl AmdExtension136Fn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// VK_AMD_mixed_attachment_samples: no commands; name/version only.
impl AmdMixedAttachmentSamplesFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_mixed_attachment_samples\0")
        }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[derive(Clone)]
pub struct AmdMixedAttachmentSamplesFn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for AmdMixedAttachmentSamplesFn {}
unsafe impl Sync for AmdMixedAttachmentSamplesFn {}
impl AmdMixedAttachmentSamplesFn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// VK_AMD_shader_fragment_mask: no commands; name/version only.
impl AmdShaderFragmentMaskFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_fragment_mask\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[derive(Clone)]
pub struct AmdShaderFragmentMaskFn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for AmdShaderFragmentMaskFn {}
unsafe impl Sync for AmdShaderFragmentMaskFn {}
impl AmdShaderFragmentMaskFn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl ExtInlineUniformBlockFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_inline_uniform_block\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
// VK_EXT_inline_uniform_block adds no commands, only structures/enum values.
#[derive(Clone)]
pub struct ExtInlineUniformBlockFn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for ExtInlineUniformBlockFn {}
unsafe impl Sync for ExtInlineUniformBlockFn {}
impl ExtInlineUniformBlockFn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// EXT-suffixed constants alias the corresponding core constants.
#[doc = "Generated from 'VK_EXT_inline_uniform_block'"]
impl DescriptorType {
    pub const INLINE_UNIFORM_BLOCK_EXT: Self = Self::INLINE_UNIFORM_BLOCK;
}
#[doc = "Generated from 'VK_EXT_inline_uniform_block'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT: Self =
        Self::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES;
    pub const PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT: Self =
        Self::PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES;
    pub const WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT: Self =
        Self::WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK;
    pub const DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT: Self =
        Self::DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO;
}
// VK_AMD_extension_140: reserved placeholder extension number.
impl AmdExtension140Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_140\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
#[derive(Clone)]
pub struct AmdExtension140Fn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for AmdExtension140Fn {}
unsafe impl Sync for AmdExtension140Fn {}
impl AmdExtension140Fn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// VK_EXT_shader_stencil_export: no commands; name/version only.
impl ExtShaderStencilExportFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_stencil_export\0")
        }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[derive(Clone)]
pub struct ExtShaderStencilExportFn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for ExtShaderStencilExportFn {}
unsafe impl Sync for ExtShaderStencilExportFn {}
impl ExtShaderStencilExportFn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// VK_AMD_extension_142/143: reserved placeholder extension numbers.
impl AmdExtension142Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_142\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
#[derive(Clone)]
pub struct AmdExtension142Fn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for AmdExtension142Fn {}
unsafe impl Sync for AmdExtension142Fn {}
impl AmdExtension142Fn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl AmdExtension143Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_143\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
#[derive(Clone)]
pub struct AmdExtension143Fn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for AmdExtension143Fn {}
unsafe impl Sync for AmdExtension143Fn {}
impl AmdExtension143Fn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl ExtSampleLocationsFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_sample_locations\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
// Raw prototypes for the two VK_EXT_sample_locations commands.
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetSampleLocationsEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    p_sample_locations_info: *const SampleLocationsInfoEXT,
);
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT = unsafe extern "system" fn(
    physical_device: PhysicalDevice,
    samples: SampleCountFlags,
    p_multisample_properties: *mut MultisamplePropertiesEXT,
);
// Function-pointer table; populated by `load` below.
#[derive(Clone)]
pub struct ExtSampleLocationsFn {
    pub cmd_set_sample_locations_ext: PFN_vkCmdSetSampleLocationsEXT,
    pub get_physical_device_multisample_properties_ext:
        PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT,
}
// SAFETY: only plain function pointers are stored; no interior mutability.
unsafe impl Send for ExtSampleLocationsFn {}
unsafe impl Sync for ExtSampleLocationsFn {}
+impl ExtSampleLocationsFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_set_sample_locations_ext: unsafe {
+ unsafe extern "system" fn cmd_set_sample_locations_ext(
+ _command_buffer: CommandBuffer,
+ _p_sample_locations_info: *const SampleLocationsInfoEXT,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_sample_locations_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetSampleLocationsEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_sample_locations_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_multisample_properties_ext: unsafe {
+ unsafe extern "system" fn get_physical_device_multisample_properties_ext(
+ _physical_device: PhysicalDevice,
+ _samples: SampleCountFlags,
+ _p_multisample_properties: *mut MultisamplePropertiesEXT,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_multisample_properties_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceMultisamplePropertiesEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_multisample_properties_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
// Enum values contributed by VK_EXT_sample_locations (extension number 144).
#[doc = "Generated from 'VK_EXT_sample_locations'"]
impl DynamicState {
    pub const SAMPLE_LOCATIONS_EXT: Self = Self(1_000_143_000);
}
// Flag bit 12 of VkImageCreateFlags.
#[doc = "Generated from 'VK_EXT_sample_locations'"]
impl ImageCreateFlags {
    pub const SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_EXT: Self = Self(0b1_0000_0000_0000);
}
#[doc = "Generated from 'VK_EXT_sample_locations'"]
impl StructureType {
    pub const SAMPLE_LOCATIONS_INFO_EXT: Self = Self(1_000_143_000);
    pub const RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT: Self = Self(1_000_143_001);
    pub const PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT: Self = Self(1_000_143_002);
    pub const PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT: Self = Self(1_000_143_003);
    pub const MULTISAMPLE_PROPERTIES_EXT: Self = Self(1_000_143_004);
}
// VK_KHR_relaxed_block_layout: no commands; name/version only.
impl KhrRelaxedBlockLayoutFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_relaxed_block_layout\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[derive(Clone)]
pub struct KhrRelaxedBlockLayoutFn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for KhrRelaxedBlockLayoutFn {}
unsafe impl Sync for KhrRelaxedBlockLayoutFn {}
impl KhrRelaxedBlockLayoutFn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl KhrGetMemoryRequirements2Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_get_memory_requirements2\0")
        }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
// Prototypes use the un-suffixed core names (the extension aliases the
// promoted core commands); the KHR-suffixed symbols are loaded below.
#[allow(non_camel_case_types)]
pub type PFN_vkGetImageMemoryRequirements2 = unsafe extern "system" fn(
    device: Device,
    p_info: *const ImageMemoryRequirementsInfo2,
    p_memory_requirements: *mut MemoryRequirements2,
);
#[allow(non_camel_case_types)]
pub type PFN_vkGetBufferMemoryRequirements2 = unsafe extern "system" fn(
    device: Device,
    p_info: *const BufferMemoryRequirementsInfo2,
    p_memory_requirements: *mut MemoryRequirements2,
);
#[allow(non_camel_case_types)]
pub type PFN_vkGetImageSparseMemoryRequirements2 = unsafe extern "system" fn(
    device: Device,
    p_info: *const ImageSparseMemoryRequirementsInfo2,
    p_sparse_memory_requirement_count: *mut u32,
    p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2,
);
// Function-pointer table; populated by `load` below.
#[derive(Clone)]
pub struct KhrGetMemoryRequirements2Fn {
    pub get_image_memory_requirements2_khr: PFN_vkGetImageMemoryRequirements2,
    pub get_buffer_memory_requirements2_khr: PFN_vkGetBufferMemoryRequirements2,
    pub get_image_sparse_memory_requirements2_khr: PFN_vkGetImageSparseMemoryRequirements2,
}
// SAFETY: only plain function pointers are stored; no interior mutability.
unsafe impl Send for KhrGetMemoryRequirements2Fn {}
unsafe impl Sync for KhrGetMemoryRequirements2Fn {}
impl KhrGetMemoryRequirements2Fn {
    /// Resolves the VK_KHR_get_memory_requirements2 entry points (the
    /// KHR-suffixed symbols) through the loader callback `_f`. Symbols the
    /// loader cannot supply are replaced by stubs that panic with the
    /// missing symbol's name on first use.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            get_image_memory_requirements2_khr: unsafe {
                // Panicking fallback used when the symbol is absent.
                unsafe extern "system" fn get_image_memory_requirements2_khr(
                    _device: Device,
                    _p_info: *const ImageMemoryRequirementsInfo2,
                    _p_memory_requirements: *mut MemoryRequirements2,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_image_memory_requirements2_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetImageMemoryRequirements2KHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_image_memory_requirements2_khr
                } else {
                    // Reinterpret the non-null result as the typed pointer.
                    ::std::mem::transmute(val)
                }
            },
            get_buffer_memory_requirements2_khr: unsafe {
                unsafe extern "system" fn get_buffer_memory_requirements2_khr(
                    _device: Device,
                    _p_info: *const BufferMemoryRequirementsInfo2,
                    _p_memory_requirements: *mut MemoryRequirements2,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_buffer_memory_requirements2_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetBufferMemoryRequirements2KHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_buffer_memory_requirements2_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_image_sparse_memory_requirements2_khr: unsafe {
                unsafe extern "system" fn get_image_sparse_memory_requirements2_khr(
                    _device: Device,
                    _p_info: *const ImageSparseMemoryRequirementsInfo2,
                    _p_sparse_memory_requirement_count: *mut u32,
                    _p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_image_sparse_memory_requirements2_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetImageSparseMemoryRequirements2KHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_image_sparse_memory_requirements2_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
// KHR-suffixed StructureType aliases of the corresponding core constants.
#[doc = "Generated from 'VK_KHR_get_memory_requirements2'"]
impl StructureType {
    pub const BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR: Self = Self::BUFFER_MEMORY_REQUIREMENTS_INFO_2;
    pub const IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR: Self = Self::IMAGE_MEMORY_REQUIREMENTS_INFO_2;
    pub const IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR: Self =
        Self::IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2;
    pub const MEMORY_REQUIREMENTS_2_KHR: Self = Self::MEMORY_REQUIREMENTS_2;
    pub const SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR: Self =
        Self::SPARSE_IMAGE_MEMORY_REQUIREMENTS_2;
}
// VK_KHR_image_format_list: no commands; name/version plus one structure type.
impl KhrImageFormatListFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_image_format_list\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[derive(Clone)]
pub struct KhrImageFormatListFn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for KhrImageFormatListFn {}
unsafe impl Sync for KhrImageFormatListFn {}
impl KhrImageFormatListFn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_KHR_image_format_list'"]
impl StructureType {
    pub const IMAGE_FORMAT_LIST_CREATE_INFO_KHR: Self = Self::IMAGE_FORMAT_LIST_CREATE_INFO;
}
impl ExtBlendOperationAdvancedFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_blend_operation_advanced\0")
        }
    }
    pub const SPEC_VERSION: u32 = 2u32;
}
// VK_EXT_blend_operation_advanced adds no commands, only enum values below.
#[derive(Clone)]
pub struct ExtBlendOperationAdvancedFn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for ExtBlendOperationAdvancedFn {}
unsafe impl Sync for ExtBlendOperationAdvancedFn {}
impl ExtBlendOperationAdvancedFn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// Enum values contributed by VK_EXT_blend_operation_advanced
// (extension number 149).
#[doc = "Generated from 'VK_EXT_blend_operation_advanced'"]
impl AccessFlags {
    // Flag bit 19 of VkAccessFlags.
    pub const COLOR_ATTACHMENT_READ_NONCOHERENT_EXT: Self = Self(0b1000_0000_0000_0000_0000);
}
// Advanced blend operations; values are 1_000_148_000 + registry offset.
#[doc = "Generated from 'VK_EXT_blend_operation_advanced'"]
impl BlendOp {
    pub const ZERO_EXT: Self = Self(1_000_148_000);
    pub const SRC_EXT: Self = Self(1_000_148_001);
    pub const DST_EXT: Self = Self(1_000_148_002);
    pub const SRC_OVER_EXT: Self = Self(1_000_148_003);
    pub const DST_OVER_EXT: Self = Self(1_000_148_004);
    pub const SRC_IN_EXT: Self = Self(1_000_148_005);
    pub const DST_IN_EXT: Self = Self(1_000_148_006);
    pub const SRC_OUT_EXT: Self = Self(1_000_148_007);
    pub const DST_OUT_EXT: Self = Self(1_000_148_008);
    pub const SRC_ATOP_EXT: Self = Self(1_000_148_009);
    pub const DST_ATOP_EXT: Self = Self(1_000_148_010);
    pub const XOR_EXT: Self = Self(1_000_148_011);
    pub const MULTIPLY_EXT: Self = Self(1_000_148_012);
    pub const SCREEN_EXT: Self = Self(1_000_148_013);
    pub const OVERLAY_EXT: Self = Self(1_000_148_014);
    pub const DARKEN_EXT: Self = Self(1_000_148_015);
    pub const LIGHTEN_EXT: Self = Self(1_000_148_016);
    pub const COLORDODGE_EXT: Self = Self(1_000_148_017);
    pub const COLORBURN_EXT: Self = Self(1_000_148_018);
    pub const HARDLIGHT_EXT: Self = Self(1_000_148_019);
    pub const SOFTLIGHT_EXT: Self = Self(1_000_148_020);
    pub const DIFFERENCE_EXT: Self = Self(1_000_148_021);
    pub const EXCLUSION_EXT: Self = Self(1_000_148_022);
    pub const INVERT_EXT: Self = Self(1_000_148_023);
    pub const INVERT_RGB_EXT: Self = Self(1_000_148_024);
    pub const LINEARDODGE_EXT: Self = Self(1_000_148_025);
    pub const LINEARBURN_EXT: Self = Self(1_000_148_026);
    pub const VIVIDLIGHT_EXT: Self = Self(1_000_148_027);
    pub const LINEARLIGHT_EXT: Self = Self(1_000_148_028);
    pub const PINLIGHT_EXT: Self = Self(1_000_148_029);
    pub const HARDMIX_EXT: Self = Self(1_000_148_030);
    pub const HSL_HUE_EXT: Self = Self(1_000_148_031);
    pub const HSL_SATURATION_EXT: Self = Self(1_000_148_032);
    pub const HSL_COLOR_EXT: Self = Self(1_000_148_033);
    pub const HSL_LUMINOSITY_EXT: Self = Self(1_000_148_034);
    pub const PLUS_EXT: Self = Self(1_000_148_035);
    pub const PLUS_CLAMPED_EXT: Self = Self(1_000_148_036);
    pub const PLUS_CLAMPED_ALPHA_EXT: Self = Self(1_000_148_037);
    pub const PLUS_DARKER_EXT: Self = Self(1_000_148_038);
    pub const MINUS_EXT: Self = Self(1_000_148_039);
    pub const MINUS_CLAMPED_EXT: Self = Self(1_000_148_040);
    pub const CONTRAST_EXT: Self = Self(1_000_148_041);
    pub const INVERT_OVG_EXT: Self = Self(1_000_148_042);
    pub const RED_EXT: Self = Self(1_000_148_043);
    pub const GREEN_EXT: Self = Self(1_000_148_044);
    pub const BLUE_EXT: Self = Self(1_000_148_045);
}
#[doc = "Generated from 'VK_EXT_blend_operation_advanced'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT: Self = Self(1_000_148_000);
    pub const PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT: Self = Self(1_000_148_001);
    pub const PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT: Self = Self(1_000_148_002);
}
impl NvFragmentCoverageToColorFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_fragment_coverage_to_color\0")
        }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
// VK_NV_fragment_coverage_to_color adds no commands, only one structure type.
#[derive(Clone)]
pub struct NvFragmentCoverageToColorFn {}
// SAFETY: zero-sized type with no state.
unsafe impl Send for NvFragmentCoverageToColorFn {}
unsafe impl Sync for NvFragmentCoverageToColorFn {}
impl NvFragmentCoverageToColorFn {
    /// No entry points to resolve; `_f` is never called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_NV_fragment_coverage_to_color'"]
impl StructureType {
    pub const PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV: Self = Self(1_000_149_000);
}
+impl KhrAccelerationStructureFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_acceleration_structure\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 13u32;
+}
// Raw `vk*` function-pointer type aliases for the VK_KHR_acceleration_structure
// commands. Each alias is named after the corresponding `vk*` entry point and is
// the type stored in the `KhrAccelerationStructureFn` table below.
#[allow(non_camel_case_types)]
pub type PFN_vkCreateAccelerationStructureKHR = unsafe extern "system" fn(
    device: Device,
    p_create_info: *const AccelerationStructureCreateInfoKHR,
    p_allocator: *const AllocationCallbacks,
    p_acceleration_structure: *mut AccelerationStructureKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyAccelerationStructureKHR = unsafe extern "system" fn(
    device: Device,
    acceleration_structure: AccelerationStructureKHR,
    p_allocator: *const AllocationCallbacks,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBuildAccelerationStructuresKHR = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    info_count: u32,
    p_infos: *const AccelerationStructureBuildGeometryInfoKHR,
    pp_build_range_infos: *const *const AccelerationStructureBuildRangeInfoKHR,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBuildAccelerationStructuresIndirectKHR = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    info_count: u32,
    p_infos: *const AccelerationStructureBuildGeometryInfoKHR,
    p_indirect_device_addresses: *const DeviceAddress,
    p_indirect_strides: *const u32,
    pp_max_primitive_counts: *const *const u32,
);
#[allow(non_camel_case_types)]
pub type PFN_vkBuildAccelerationStructuresKHR = unsafe extern "system" fn(
    device: Device,
    deferred_operation: DeferredOperationKHR,
    info_count: u32,
    p_infos: *const AccelerationStructureBuildGeometryInfoKHR,
    pp_build_range_infos: *const *const AccelerationStructureBuildRangeInfoKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCopyAccelerationStructureKHR = unsafe extern "system" fn(
    device: Device,
    deferred_operation: DeferredOperationKHR,
    p_info: *const CopyAccelerationStructureInfoKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCopyAccelerationStructureToMemoryKHR = unsafe extern "system" fn(
    device: Device,
    deferred_operation: DeferredOperationKHR,
    p_info: *const CopyAccelerationStructureToMemoryInfoKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCopyMemoryToAccelerationStructureKHR = unsafe extern "system" fn(
    device: Device,
    deferred_operation: DeferredOperationKHR,
    p_info: *const CopyMemoryToAccelerationStructureInfoKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkWriteAccelerationStructuresPropertiesKHR = unsafe extern "system" fn(
    device: Device,
    acceleration_structure_count: u32,
    p_acceleration_structures: *const AccelerationStructureKHR,
    query_type: QueryType,
    data_size: usize,
    p_data: *mut c_void,
    stride: usize,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdCopyAccelerationStructureKHR = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    p_info: *const CopyAccelerationStructureInfoKHR,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdCopyAccelerationStructureToMemoryKHR = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    p_info: *const CopyAccelerationStructureToMemoryInfoKHR,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdCopyMemoryToAccelerationStructureKHR = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    p_info: *const CopyMemoryToAccelerationStructureInfoKHR,
);
#[allow(non_camel_case_types)]
pub type PFN_vkGetAccelerationStructureDeviceAddressKHR =
    unsafe extern "system" fn(
        device: Device,
        p_info: *const AccelerationStructureDeviceAddressInfoKHR,
    ) -> DeviceAddress;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdWriteAccelerationStructuresPropertiesKHR = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    acceleration_structure_count: u32,
    p_acceleration_structures: *const AccelerationStructureKHR,
    query_type: QueryType,
    query_pool: QueryPool,
    first_query: u32,
);
#[allow(non_camel_case_types)]
pub type PFN_vkGetDeviceAccelerationStructureCompatibilityKHR = unsafe extern "system" fn(
    device: Device,
    p_version_info: *const AccelerationStructureVersionInfoKHR,
    p_compatibility: *mut AccelerationStructureCompatibilityKHR,
);
#[allow(non_camel_case_types)]
pub type PFN_vkGetAccelerationStructureBuildSizesKHR = unsafe extern "system" fn(
    device: Device,
    build_type: AccelerationStructureBuildTypeKHR,
    p_build_info: *const AccelerationStructureBuildGeometryInfoKHR,
    p_max_primitive_counts: *const u32,
    p_size_info: *mut AccelerationStructureBuildSizesInfoKHR,
);
// Table of loaded VK_KHR_acceleration_structure commands: one public
// function-pointer field per `vk*` entry point, populated by `load` below.
#[derive(Clone)]
pub struct KhrAccelerationStructureFn {
    pub create_acceleration_structure_khr: PFN_vkCreateAccelerationStructureKHR,
    pub destroy_acceleration_structure_khr: PFN_vkDestroyAccelerationStructureKHR,
    pub cmd_build_acceleration_structures_khr: PFN_vkCmdBuildAccelerationStructuresKHR,
    pub cmd_build_acceleration_structures_indirect_khr:
        PFN_vkCmdBuildAccelerationStructuresIndirectKHR,
    pub build_acceleration_structures_khr: PFN_vkBuildAccelerationStructuresKHR,
    pub copy_acceleration_structure_khr: PFN_vkCopyAccelerationStructureKHR,
    pub copy_acceleration_structure_to_memory_khr: PFN_vkCopyAccelerationStructureToMemoryKHR,
    pub copy_memory_to_acceleration_structure_khr: PFN_vkCopyMemoryToAccelerationStructureKHR,
    pub write_acceleration_structures_properties_khr:
        PFN_vkWriteAccelerationStructuresPropertiesKHR,
    pub cmd_copy_acceleration_structure_khr: PFN_vkCmdCopyAccelerationStructureKHR,
    pub cmd_copy_acceleration_structure_to_memory_khr:
        PFN_vkCmdCopyAccelerationStructureToMemoryKHR,
    pub cmd_copy_memory_to_acceleration_structure_khr:
        PFN_vkCmdCopyMemoryToAccelerationStructureKHR,
    pub get_acceleration_structure_device_address_khr:
        PFN_vkGetAccelerationStructureDeviceAddressKHR,
    pub cmd_write_acceleration_structures_properties_khr:
        PFN_vkCmdWriteAccelerationStructuresPropertiesKHR,
    pub get_device_acceleration_structure_compatibility_khr:
        PFN_vkGetDeviceAccelerationStructureCompatibilityKHR,
    pub get_acceleration_structure_build_sizes_khr: PFN_vkGetAccelerationStructureBuildSizesKHR,
}
// NOTE(review): these generated marker impls assert that the table of bare
// function pointers may be shared/sent across threads; the struct holds no
// other state.
unsafe impl Send for KhrAccelerationStructureFn {}
unsafe impl Sync for KhrAccelerationStructureFn {}
impl KhrAccelerationStructureFn {
    /// Resolves every `VK_KHR_acceleration_structure` command through the
    /// caller-supplied loader `_f` (given the command's C name as a `CStr`).
    ///
    /// For each command, when `_f` returns NULL the field is filled with a
    /// local stub of the correct signature that panics if ever invoked — so a
    /// missing symbol fails at call time, not at load time. Otherwise the
    /// returned pointer is transmuted to the typed function pointer.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            create_acceleration_structure_khr: unsafe {
                // Panicking fallback stub installed when symbol lookup fails.
                // The same pattern repeats for every field below.
                unsafe extern "system" fn create_acceleration_structure_khr(
                    _device: Device,
                    _p_create_info: *const AccelerationStructureCreateInfoKHR,
                    _p_allocator: *const AllocationCallbacks,
                    _p_acceleration_structure: *mut AccelerationStructureKHR,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(create_acceleration_structure_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCreateAccelerationStructureKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    create_acceleration_structure_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            destroy_acceleration_structure_khr: unsafe {
                unsafe extern "system" fn destroy_acceleration_structure_khr(
                    _device: Device,
                    _acceleration_structure: AccelerationStructureKHR,
                    _p_allocator: *const AllocationCallbacks,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(destroy_acceleration_structure_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkDestroyAccelerationStructureKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    destroy_acceleration_structure_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_build_acceleration_structures_khr: unsafe {
                unsafe extern "system" fn cmd_build_acceleration_structures_khr(
                    _command_buffer: CommandBuffer,
                    _info_count: u32,
                    _p_infos: *const AccelerationStructureBuildGeometryInfoKHR,
                    _pp_build_range_infos: *const *const AccelerationStructureBuildRangeInfoKHR,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_build_acceleration_structures_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdBuildAccelerationStructuresKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_build_acceleration_structures_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_build_acceleration_structures_indirect_khr: unsafe {
                unsafe extern "system" fn cmd_build_acceleration_structures_indirect_khr(
                    _command_buffer: CommandBuffer,
                    _info_count: u32,
                    _p_infos: *const AccelerationStructureBuildGeometryInfoKHR,
                    _p_indirect_device_addresses: *const DeviceAddress,
                    _p_indirect_strides: *const u32,
                    _pp_max_primitive_counts: *const *const u32,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_build_acceleration_structures_indirect_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdBuildAccelerationStructuresIndirectKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_build_acceleration_structures_indirect_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            build_acceleration_structures_khr: unsafe {
                unsafe extern "system" fn build_acceleration_structures_khr(
                    _device: Device,
                    _deferred_operation: DeferredOperationKHR,
                    _info_count: u32,
                    _p_infos: *const AccelerationStructureBuildGeometryInfoKHR,
                    _pp_build_range_infos: *const *const AccelerationStructureBuildRangeInfoKHR,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(build_acceleration_structures_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkBuildAccelerationStructuresKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    build_acceleration_structures_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            copy_acceleration_structure_khr: unsafe {
                unsafe extern "system" fn copy_acceleration_structure_khr(
                    _device: Device,
                    _deferred_operation: DeferredOperationKHR,
                    _p_info: *const CopyAccelerationStructureInfoKHR,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(copy_acceleration_structure_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCopyAccelerationStructureKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    copy_acceleration_structure_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            copy_acceleration_structure_to_memory_khr: unsafe {
                unsafe extern "system" fn copy_acceleration_structure_to_memory_khr(
                    _device: Device,
                    _deferred_operation: DeferredOperationKHR,
                    _p_info: *const CopyAccelerationStructureToMemoryInfoKHR,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(copy_acceleration_structure_to_memory_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCopyAccelerationStructureToMemoryKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    copy_acceleration_structure_to_memory_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            copy_memory_to_acceleration_structure_khr: unsafe {
                unsafe extern "system" fn copy_memory_to_acceleration_structure_khr(
                    _device: Device,
                    _deferred_operation: DeferredOperationKHR,
                    _p_info: *const CopyMemoryToAccelerationStructureInfoKHR,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(copy_memory_to_acceleration_structure_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCopyMemoryToAccelerationStructureKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    copy_memory_to_acceleration_structure_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            write_acceleration_structures_properties_khr: unsafe {
                unsafe extern "system" fn write_acceleration_structures_properties_khr(
                    _device: Device,
                    _acceleration_structure_count: u32,
                    _p_acceleration_structures: *const AccelerationStructureKHR,
                    _query_type: QueryType,
                    _data_size: usize,
                    _p_data: *mut c_void,
                    _stride: usize,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(write_acceleration_structures_properties_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkWriteAccelerationStructuresPropertiesKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    write_acceleration_structures_properties_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_copy_acceleration_structure_khr: unsafe {
                unsafe extern "system" fn cmd_copy_acceleration_structure_khr(
                    _command_buffer: CommandBuffer,
                    _p_info: *const CopyAccelerationStructureInfoKHR,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_copy_acceleration_structure_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdCopyAccelerationStructureKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_copy_acceleration_structure_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_copy_acceleration_structure_to_memory_khr: unsafe {
                unsafe extern "system" fn cmd_copy_acceleration_structure_to_memory_khr(
                    _command_buffer: CommandBuffer,
                    _p_info: *const CopyAccelerationStructureToMemoryInfoKHR,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_copy_acceleration_structure_to_memory_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdCopyAccelerationStructureToMemoryKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_copy_acceleration_structure_to_memory_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_copy_memory_to_acceleration_structure_khr: unsafe {
                unsafe extern "system" fn cmd_copy_memory_to_acceleration_structure_khr(
                    _command_buffer: CommandBuffer,
                    _p_info: *const CopyMemoryToAccelerationStructureInfoKHR,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_copy_memory_to_acceleration_structure_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdCopyMemoryToAccelerationStructureKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_copy_memory_to_acceleration_structure_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_acceleration_structure_device_address_khr: unsafe {
                unsafe extern "system" fn get_acceleration_structure_device_address_khr(
                    _device: Device,
                    _p_info: *const AccelerationStructureDeviceAddressInfoKHR,
                ) -> DeviceAddress {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_acceleration_structure_device_address_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetAccelerationStructureDeviceAddressKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_acceleration_structure_device_address_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_write_acceleration_structures_properties_khr: unsafe {
                unsafe extern "system" fn cmd_write_acceleration_structures_properties_khr(
                    _command_buffer: CommandBuffer,
                    _acceleration_structure_count: u32,
                    _p_acceleration_structures: *const AccelerationStructureKHR,
                    _query_type: QueryType,
                    _query_pool: QueryPool,
                    _first_query: u32,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_write_acceleration_structures_properties_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdWriteAccelerationStructuresPropertiesKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_write_acceleration_structures_properties_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_device_acceleration_structure_compatibility_khr: unsafe {
                unsafe extern "system" fn get_device_acceleration_structure_compatibility_khr(
                    _device: Device,
                    _p_version_info: *const AccelerationStructureVersionInfoKHR,
                    _p_compatibility: *mut AccelerationStructureCompatibilityKHR,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_device_acceleration_structure_compatibility_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetDeviceAccelerationStructureCompatibilityKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_device_acceleration_structure_compatibility_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_acceleration_structure_build_sizes_khr: unsafe {
                unsafe extern "system" fn get_acceleration_structure_build_sizes_khr(
                    _device: Device,
                    _build_type: AccelerationStructureBuildTypeKHR,
                    _p_build_info: *const AccelerationStructureBuildGeometryInfoKHR,
                    _p_max_primitive_counts: *const u32,
                    _p_size_info: *mut AccelerationStructureBuildSizesInfoKHR,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_acceleration_structure_build_sizes_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetAccelerationStructureBuildSizesKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_acceleration_structure_build_sizes_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
// Enum variants and bitflag bits contributed to core types by
// VK_KHR_acceleration_structure.
#[doc = "Generated from 'VK_KHR_acceleration_structure'"]
impl AccessFlags {
    pub const ACCELERATION_STRUCTURE_READ_KHR: Self = Self(0b10_0000_0000_0000_0000_0000);
    pub const ACCELERATION_STRUCTURE_WRITE_KHR: Self = Self(0b100_0000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_acceleration_structure'"]
impl BufferUsageFlags {
    pub const ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_KHR: Self =
        Self(0b1000_0000_0000_0000_0000);
    pub const ACCELERATION_STRUCTURE_STORAGE_KHR: Self = Self(0b1_0000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_acceleration_structure'"]
impl DebugReportObjectTypeEXT {
    pub const ACCELERATION_STRUCTURE_KHR: Self = Self(1_000_150_000);
}
#[doc = "Generated from 'VK_KHR_acceleration_structure'"]
impl DescriptorType {
    pub const ACCELERATION_STRUCTURE_KHR: Self = Self(1_000_150_000);
}
#[doc = "Generated from 'VK_KHR_acceleration_structure'"]
impl FormatFeatureFlags {
    pub const ACCELERATION_STRUCTURE_VERTEX_BUFFER_KHR: Self =
        Self(0b10_0000_0000_0000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_acceleration_structure'"]
impl FormatFeatureFlags2 {
    pub const ACCELERATION_STRUCTURE_VERTEX_BUFFER_KHR: Self =
        Self(0b10_0000_0000_0000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_acceleration_structure'"]
impl IndexType {
    pub const NONE_KHR: Self = Self(1_000_165_000);
}
#[doc = "Generated from 'VK_KHR_acceleration_structure'"]
impl ObjectType {
    pub const ACCELERATION_STRUCTURE_KHR: Self = Self(1_000_150_000);
}
#[doc = "Generated from 'VK_KHR_acceleration_structure'"]
impl PipelineStageFlags {
    pub const ACCELERATION_STRUCTURE_BUILD_KHR: Self = Self(0b10_0000_0000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_acceleration_structure'"]
impl QueryType {
    pub const ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR: Self = Self(1_000_150_000);
    pub const ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR: Self = Self(1_000_150_001);
}
#[doc = "Generated from 'VK_KHR_acceleration_structure'"]
impl StructureType {
    pub const WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR: Self = Self(1_000_150_007);
    pub const ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR: Self = Self(1_000_150_000);
    pub const ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR: Self = Self(1_000_150_002);
    pub const ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR: Self = Self(1_000_150_003);
    pub const ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR: Self = Self(1_000_150_004);
    pub const ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR: Self = Self(1_000_150_005);
    pub const ACCELERATION_STRUCTURE_GEOMETRY_KHR: Self = Self(1_000_150_006);
    pub const ACCELERATION_STRUCTURE_VERSION_INFO_KHR: Self = Self(1_000_150_009);
    pub const COPY_ACCELERATION_STRUCTURE_INFO_KHR: Self = Self(1_000_150_010);
    pub const COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR: Self = Self(1_000_150_011);
    pub const COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR: Self = Self(1_000_150_012);
    pub const PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR: Self = Self(1_000_150_013);
    pub const PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR: Self = Self(1_000_150_014);
    pub const ACCELERATION_STRUCTURE_CREATE_INFO_KHR: Self = Self(1_000_150_017);
    pub const ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR: Self = Self(1_000_150_020);
}
+impl KhrRayTracingPipelineFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_ray_tracing_pipeline\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
// Raw `vk*` function-pointer type aliases for the VK_KHR_ray_tracing_pipeline
// commands, stored in the `KhrRayTracingPipelineFn` table below.
#[allow(non_camel_case_types)]
pub type PFN_vkCmdTraceRaysKHR = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    p_raygen_shader_binding_table: *const StridedDeviceAddressRegionKHR,
    p_miss_shader_binding_table: *const StridedDeviceAddressRegionKHR,
    p_hit_shader_binding_table: *const StridedDeviceAddressRegionKHR,
    p_callable_shader_binding_table: *const StridedDeviceAddressRegionKHR,
    width: u32,
    height: u32,
    depth: u32,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCreateRayTracingPipelinesKHR = unsafe extern "system" fn(
    device: Device,
    deferred_operation: DeferredOperationKHR,
    pipeline_cache: PipelineCache,
    create_info_count: u32,
    p_create_infos: *const RayTracingPipelineCreateInfoKHR,
    p_allocator: *const AllocationCallbacks,
    p_pipelines: *mut Pipeline,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetRayTracingShaderGroupHandlesKHR = unsafe extern "system" fn(
    device: Device,
    pipeline: Pipeline,
    first_group: u32,
    group_count: u32,
    data_size: usize,
    p_data: *mut c_void,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR =
    unsafe extern "system" fn(
        device: Device,
        pipeline: Pipeline,
        first_group: u32,
        group_count: u32,
        data_size: usize,
        p_data: *mut c_void,
    ) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdTraceRaysIndirectKHR = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    p_raygen_shader_binding_table: *const StridedDeviceAddressRegionKHR,
    p_miss_shader_binding_table: *const StridedDeviceAddressRegionKHR,
    p_hit_shader_binding_table: *const StridedDeviceAddressRegionKHR,
    p_callable_shader_binding_table: *const StridedDeviceAddressRegionKHR,
    indirect_device_address: DeviceAddress,
);
#[allow(non_camel_case_types)]
pub type PFN_vkGetRayTracingShaderGroupStackSizeKHR = unsafe extern "system" fn(
    device: Device,
    pipeline: Pipeline,
    group: u32,
    group_shader: ShaderGroupShaderKHR,
) -> DeviceSize;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetRayTracingPipelineStackSizeKHR =
    unsafe extern "system" fn(command_buffer: CommandBuffer, pipeline_stack_size: u32);
// Table of loaded VK_KHR_ray_tracing_pipeline commands: one public
// function-pointer field per `vk*` entry point, populated by `load` below.
#[derive(Clone)]
pub struct KhrRayTracingPipelineFn {
    pub cmd_trace_rays_khr: PFN_vkCmdTraceRaysKHR,
    pub create_ray_tracing_pipelines_khr: PFN_vkCreateRayTracingPipelinesKHR,
    pub get_ray_tracing_shader_group_handles_khr: PFN_vkGetRayTracingShaderGroupHandlesKHR,
    pub get_ray_tracing_capture_replay_shader_group_handles_khr:
        PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR,
    pub cmd_trace_rays_indirect_khr: PFN_vkCmdTraceRaysIndirectKHR,
    pub get_ray_tracing_shader_group_stack_size_khr: PFN_vkGetRayTracingShaderGroupStackSizeKHR,
    pub cmd_set_ray_tracing_pipeline_stack_size_khr: PFN_vkCmdSetRayTracingPipelineStackSizeKHR,
}
// NOTE(review): these generated marker impls assert that the table of bare
// function pointers may be shared/sent across threads; the struct holds no
// other state.
unsafe impl Send for KhrRayTracingPipelineFn {}
unsafe impl Sync for KhrRayTracingPipelineFn {}
impl KhrRayTracingPipelineFn {
    /// Resolves every `VK_KHR_ray_tracing_pipeline` command through the
    /// caller-supplied loader `_f` (given the command's C name as a `CStr`).
    ///
    /// For each command, when `_f` returns NULL the field is filled with a
    /// local stub of the correct signature that panics if ever invoked — so a
    /// missing symbol fails at call time, not at load time. Otherwise the
    /// returned pointer is transmuted to the typed function pointer.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            cmd_trace_rays_khr: unsafe {
                // Panicking fallback stub installed when symbol lookup fails.
                // The same pattern repeats for every field below.
                unsafe extern "system" fn cmd_trace_rays_khr(
                    _command_buffer: CommandBuffer,
                    _p_raygen_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                    _p_miss_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                    _p_hit_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                    _p_callable_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                    _width: u32,
                    _height: u32,
                    _depth: u32,
                ) {
                    panic!(concat!("Unable to load ", stringify!(cmd_trace_rays_khr)))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdTraceRaysKHR\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_trace_rays_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            create_ray_tracing_pipelines_khr: unsafe {
                unsafe extern "system" fn create_ray_tracing_pipelines_khr(
                    _device: Device,
                    _deferred_operation: DeferredOperationKHR,
                    _pipeline_cache: PipelineCache,
                    _create_info_count: u32,
                    _p_create_infos: *const RayTracingPipelineCreateInfoKHR,
                    _p_allocator: *const AllocationCallbacks,
                    _p_pipelines: *mut Pipeline,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(create_ray_tracing_pipelines_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCreateRayTracingPipelinesKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    create_ray_tracing_pipelines_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_ray_tracing_shader_group_handles_khr: unsafe {
                unsafe extern "system" fn get_ray_tracing_shader_group_handles_khr(
                    _device: Device,
                    _pipeline: Pipeline,
                    _first_group: u32,
                    _group_count: u32,
                    _data_size: usize,
                    _p_data: *mut c_void,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_ray_tracing_shader_group_handles_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetRayTracingShaderGroupHandlesKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_ray_tracing_shader_group_handles_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_ray_tracing_capture_replay_shader_group_handles_khr: unsafe {
                unsafe extern "system" fn get_ray_tracing_capture_replay_shader_group_handles_khr(
                    _device: Device,
                    _pipeline: Pipeline,
                    _first_group: u32,
                    _group_count: u32,
                    _data_size: usize,
                    _p_data: *mut c_void,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_ray_tracing_capture_replay_shader_group_handles_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetRayTracingCaptureReplayShaderGroupHandlesKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_ray_tracing_capture_replay_shader_group_handles_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_trace_rays_indirect_khr: unsafe {
                unsafe extern "system" fn cmd_trace_rays_indirect_khr(
                    _command_buffer: CommandBuffer,
                    _p_raygen_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                    _p_miss_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                    _p_hit_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                    _p_callable_shader_binding_table: *const StridedDeviceAddressRegionKHR,
                    _indirect_device_address: DeviceAddress,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_trace_rays_indirect_khr)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdTraceRaysIndirectKHR\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_trace_rays_indirect_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_ray_tracing_shader_group_stack_size_khr: unsafe {
                unsafe extern "system" fn get_ray_tracing_shader_group_stack_size_khr(
                    _device: Device,
                    _pipeline: Pipeline,
                    _group: u32,
                    _group_shader: ShaderGroupShaderKHR,
                ) -> DeviceSize {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_ray_tracing_shader_group_stack_size_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetRayTracingShaderGroupStackSizeKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_ray_tracing_shader_group_stack_size_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_set_ray_tracing_pipeline_stack_size_khr: unsafe {
                unsafe extern "system" fn cmd_set_ray_tracing_pipeline_stack_size_khr(
                    _command_buffer: CommandBuffer,
                    _pipeline_stack_size: u32,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_ray_tracing_pipeline_stack_size_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdSetRayTracingPipelineStackSizeKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_ray_tracing_pipeline_stack_size_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
// Enum variants and bitflag bits contributed to core types by
// VK_KHR_ray_tracing_pipeline.
#[doc = "Generated from 'VK_KHR_ray_tracing_pipeline'"]
impl BufferUsageFlags {
    pub const SHADER_BINDING_TABLE_KHR: Self = Self(0b100_0000_0000);
}
#[doc = "Generated from 'VK_KHR_ray_tracing_pipeline'"]
impl DynamicState {
    pub const RAY_TRACING_PIPELINE_STACK_SIZE_KHR: Self = Self(1_000_347_000);
}
#[doc = "Generated from 'VK_KHR_ray_tracing_pipeline'"]
impl PipelineBindPoint {
    pub const RAY_TRACING_KHR: Self = Self(1_000_165_000);
}
#[doc = "Generated from 'VK_KHR_ray_tracing_pipeline'"]
impl PipelineCreateFlags {
    pub const RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_KHR: Self = Self(0b100_0000_0000_0000);
    pub const RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_KHR: Self = Self(0b1000_0000_0000_0000);
    pub const RAY_TRACING_NO_NULL_MISS_SHADERS_KHR: Self = Self(0b1_0000_0000_0000_0000);
    pub const RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_KHR: Self = Self(0b10_0000_0000_0000_0000);
    pub const RAY_TRACING_SKIP_TRIANGLES_KHR: Self = Self(0b1_0000_0000_0000);
    pub const RAY_TRACING_SKIP_AABBS_KHR: Self = Self(0b10_0000_0000_0000);
    pub const RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_KHR: Self =
        Self(0b1000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_ray_tracing_pipeline'"]
impl PipelineStageFlags {
    pub const RAY_TRACING_SHADER_KHR: Self = Self(0b10_0000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_ray_tracing_pipeline'"]
impl ShaderStageFlags {
    pub const RAYGEN_KHR: Self = Self(0b1_0000_0000);
    pub const ANY_HIT_KHR: Self = Self(0b10_0000_0000);
    pub const CLOSEST_HIT_KHR: Self = Self(0b100_0000_0000);
    pub const MISS_KHR: Self = Self(0b1000_0000_0000);
    pub const INTERSECTION_KHR: Self = Self(0b1_0000_0000_0000);
    pub const CALLABLE_KHR: Self = Self(0b10_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_ray_tracing_pipeline'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR: Self = Self(1_000_347_000);
    pub const PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR: Self = Self(1_000_347_001);
    pub const RAY_TRACING_PIPELINE_CREATE_INFO_KHR: Self = Self(1_000_150_015);
    pub const RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR: Self = Self(1_000_150_016);
    pub const RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR: Self = Self(1_000_150_018);
}
/// Loader table for `VK_KHR_ray_query`.
#[derive(Clone)]
pub struct KhrRayQueryFn {}
unsafe impl Send for KhrRayQueryFn {}
unsafe impl Sync for KhrRayQueryFn {}
impl KhrRayQueryFn {
    /// Canonical extension name, as passed to device creation.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_ray_query\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
    /// This extension defines no commands, so the loader callback is never invoked.
    pub fn load<F: FnMut(&::std::ffi::CStr) -> *const c_void>(_f: F) -> Self {
        Self {}
    }
}
+#[doc = "Generated from 'VK_KHR_ray_query'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR: Self = Self(1_000_348_013);
+}
/// Loader table for `VK_NV_extension_152`.
#[derive(Clone)]
pub struct NvExtension152Fn {}
unsafe impl Send for NvExtension152Fn {}
unsafe impl Sync for NvExtension152Fn {}
impl NvExtension152Fn {
    /// Canonical extension name.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_152\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// This extension defines no commands, so the loader callback is never invoked.
    pub fn load<F: FnMut(&::std::ffi::CStr) -> *const c_void>(_f: F) -> Self {
        Self {}
    }
}
/// Loader table for `VK_NV_framebuffer_mixed_samples`.
#[derive(Clone)]
pub struct NvFramebufferMixedSamplesFn {}
unsafe impl Send for NvFramebufferMixedSamplesFn {}
unsafe impl Sync for NvFramebufferMixedSamplesFn {}
impl NvFramebufferMixedSamplesFn {
    /// Canonical extension name, as passed to device creation.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_framebuffer_mixed_samples\0")
        }
    }
    pub const SPEC_VERSION: u32 = 1u32;
    /// This extension defines no commands, so the loader callback is never invoked.
    pub fn load<F: FnMut(&::std::ffi::CStr) -> *const c_void>(_f: F) -> Self {
        Self {}
    }
}
+#[doc = "Generated from 'VK_NV_framebuffer_mixed_samples'"]
+impl StructureType {
+ pub const PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV: Self = Self(1_000_152_000);
+}
+// VK_NV_fill_rectangle: extension metadata (no device commands).
+impl NvFillRectangleFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_fill_rectangle\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+// Empty function table: the extension only adds the polygon mode below.
+#[derive(Clone)]
+pub struct NvFillRectangleFn {}
+// SAFETY: stateless empty struct; thread sharing is trivially sound.
+unsafe impl Send for NvFillRectangleFn {}
+unsafe impl Sync for NvFillRectangleFn {}
+impl NvFillRectangleFn {
+ // Loader kept for signature uniformity; `_f` is never invoked.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_NV_fill_rectangle'"]
+// New PolygonMode value allocated to this extension (extension number 154).
+impl PolygonMode {
+ pub const FILL_RECTANGLE_NV: Self = Self(1_000_153_000);
+}
+// VK_NV_shader_sm_builtins: extension metadata (no device commands).
+impl NvShaderSmBuiltinsFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_shader_sm_builtins\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+// Empty function table: only feature/property structure types are added below.
+#[derive(Clone)]
+pub struct NvShaderSmBuiltinsFn {}
+// SAFETY: stateless empty struct; thread sharing is trivially sound.
+unsafe impl Send for NvShaderSmBuiltinsFn {}
+unsafe impl Sync for NvShaderSmBuiltinsFn {}
+impl NvShaderSmBuiltinsFn {
+ // Loader kept for signature uniformity; `_f` is never invoked.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_NV_shader_sm_builtins'"]
+// Enumerants allocated to this extension (extension number 155).
+impl StructureType {
+ pub const PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV: Self = Self(1_000_154_000);
+ pub const PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV: Self = Self(1_000_154_001);
+}
+// VK_EXT_post_depth_coverage: extension metadata (a SPIR-V-only extension —
+// no commands and no new enums on the Vulkan API side).
+impl ExtPostDepthCoverageFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_post_depth_coverage\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+// Empty function table.
+#[derive(Clone)]
+pub struct ExtPostDepthCoverageFn {}
+// SAFETY: stateless empty struct; thread sharing is trivially sound.
+unsafe impl Send for ExtPostDepthCoverageFn {}
+unsafe impl Sync for ExtPostDepthCoverageFn {}
+impl ExtPostDepthCoverageFn {
+ // Loader kept for signature uniformity; `_f` is never invoked.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+// VK_KHR_sampler_ycbcr_conversion: metadata, typed C function-pointer aliases,
+// and the dynamically loaded function table for the extension's two commands.
+impl KhrSamplerYcbcrConversionFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_sampler_ycbcr_conversion\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 14u32;
+}
+// Typed alias mirroring the C prototype of vkCreateSamplerYcbcrConversion.
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateSamplerYcbcrConversion = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const SamplerYcbcrConversionCreateInfo,
+ p_allocator: *const AllocationCallbacks,
+ p_ycbcr_conversion: *mut SamplerYcbcrConversion,
+) -> Result;
+// Typed alias mirroring the C prototype of vkDestroySamplerYcbcrConversion.
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroySamplerYcbcrConversion = unsafe extern "system" fn(
+ device: Device,
+ ycbcr_conversion: SamplerYcbcrConversion,
+ p_allocator: *const AllocationCallbacks,
+);
+#[derive(Clone)]
+pub struct KhrSamplerYcbcrConversionFn {
+ pub create_sampler_ycbcr_conversion_khr: PFN_vkCreateSamplerYcbcrConversion,
+ pub destroy_sampler_ycbcr_conversion_khr: PFN_vkDestroySamplerYcbcrConversion,
+}
+// SAFETY: the table holds only plain function pointers, which carry no
+// thread-affine state.
+unsafe impl Send for KhrSamplerYcbcrConversionFn {}
+unsafe impl Sync for KhrSamplerYcbcrConversionFn {}
+impl KhrSamplerYcbcrConversionFn {
+ // Resolves each command through the loader closure `_f`. Per command: a
+ // panicking placeholder with the exact vk* signature is defined; if `_f`
+ // returns null the placeholder is installed (so a later call fails loudly),
+ // otherwise the raw pointer is transmuted to the typed PFN. The transmute
+ // is sound only because the PFN alias above matches the C prototype.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_sampler_ycbcr_conversion_khr: unsafe {
+ unsafe extern "system" fn create_sampler_ycbcr_conversion_khr(
+ _device: Device,
+ _p_create_info: *const SamplerYcbcrConversionCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_ycbcr_conversion: *mut SamplerYcbcrConversion,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_sampler_ycbcr_conversion_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreateSamplerYcbcrConversionKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_sampler_ycbcr_conversion_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_sampler_ycbcr_conversion_khr: unsafe {
+ unsafe extern "system" fn destroy_sampler_ycbcr_conversion_khr(
+ _device: Device,
+ _ycbcr_conversion: SamplerYcbcrConversion,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_sampler_ycbcr_conversion_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkDestroySamplerYcbcrConversionKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_sampler_ycbcr_conversion_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"]
+// The following impl blocks re-expose, under their extension-suffixed *_KHR
+// names, values that also exist unsuffixed (the extension's enums were
+// promoted to core — each KHR constant is an alias of the core value).
+impl ChromaLocation {
+ pub const COSITED_EVEN_KHR: Self = Self::COSITED_EVEN;
+ pub const MIDPOINT_KHR: Self = Self::MIDPOINT;
+}
+#[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"]
+impl DebugReportObjectTypeEXT {
+ pub const SAMPLER_YCBCR_CONVERSION_KHR: Self = Self::SAMPLER_YCBCR_CONVERSION;
+}
+#[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"]
+// Multi-planar / subsampled format aliases introduced by the extension.
+impl Format {
+ pub const G8B8G8R8_422_UNORM_KHR: Self = Self::G8B8G8R8_422_UNORM;
+ pub const B8G8R8G8_422_UNORM_KHR: Self = Self::B8G8R8G8_422_UNORM;
+ pub const G8_B8_R8_3PLANE_420_UNORM_KHR: Self = Self::G8_B8_R8_3PLANE_420_UNORM;
+ pub const G8_B8R8_2PLANE_420_UNORM_KHR: Self = Self::G8_B8R8_2PLANE_420_UNORM;
+ pub const G8_B8_R8_3PLANE_422_UNORM_KHR: Self = Self::G8_B8_R8_3PLANE_422_UNORM;
+ pub const G8_B8R8_2PLANE_422_UNORM_KHR: Self = Self::G8_B8R8_2PLANE_422_UNORM;
+ pub const G8_B8_R8_3PLANE_444_UNORM_KHR: Self = Self::G8_B8_R8_3PLANE_444_UNORM;
+ pub const R10X6_UNORM_PACK16_KHR: Self = Self::R10X6_UNORM_PACK16;
+ pub const R10X6G10X6_UNORM_2PACK16_KHR: Self = Self::R10X6G10X6_UNORM_2PACK16;
+ pub const R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR: Self =
+ Self::R10X6G10X6B10X6A10X6_UNORM_4PACK16;
+ pub const G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR: Self =
+ Self::G10X6B10X6G10X6R10X6_422_UNORM_4PACK16;
+ pub const B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR: Self =
+ Self::B10X6G10X6R10X6G10X6_422_UNORM_4PACK16;
+ pub const G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR: Self =
+ Self::G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16;
+ pub const G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR: Self =
+ Self::G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16;
+ pub const G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR: Self =
+ Self::G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16;
+ pub const G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR: Self =
+ Self::G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16;
+ pub const G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR: Self =
+ Self::G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16;
+ pub const R12X4_UNORM_PACK16_KHR: Self = Self::R12X4_UNORM_PACK16;
+ pub const R12X4G12X4_UNORM_2PACK16_KHR: Self = Self::R12X4G12X4_UNORM_2PACK16;
+ pub const R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR: Self =
+ Self::R12X4G12X4B12X4A12X4_UNORM_4PACK16;
+ pub const G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR: Self =
+ Self::G12X4B12X4G12X4R12X4_422_UNORM_4PACK16;
+ pub const B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR: Self =
+ Self::B12X4G12X4R12X4G12X4_422_UNORM_4PACK16;
+ pub const G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR: Self =
+ Self::G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16;
+ pub const G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR: Self =
+ Self::G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16;
+ pub const G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR: Self =
+ Self::G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16;
+ pub const G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR: Self =
+ Self::G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16;
+ pub const G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR: Self =
+ Self::G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16;
+ pub const G16B16G16R16_422_UNORM_KHR: Self = Self::G16B16G16R16_422_UNORM;
+ pub const B16G16R16G16_422_UNORM_KHR: Self = Self::B16G16R16G16_422_UNORM;
+ pub const G16_B16_R16_3PLANE_420_UNORM_KHR: Self = Self::G16_B16_R16_3PLANE_420_UNORM;
+ pub const G16_B16R16_2PLANE_420_UNORM_KHR: Self = Self::G16_B16R16_2PLANE_420_UNORM;
+ pub const G16_B16_R16_3PLANE_422_UNORM_KHR: Self = Self::G16_B16_R16_3PLANE_422_UNORM;
+ pub const G16_B16R16_2PLANE_422_UNORM_KHR: Self = Self::G16_B16R16_2PLANE_422_UNORM;
+ pub const G16_B16_R16_3PLANE_444_UNORM_KHR: Self = Self::G16_B16_R16_3PLANE_444_UNORM;
+}
+#[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"]
+impl FormatFeatureFlags {
+ pub const MIDPOINT_CHROMA_SAMPLES_KHR: Self = Self::MIDPOINT_CHROMA_SAMPLES;
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_KHR: Self =
+ Self::SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER;
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_KHR: Self =
+ Self::SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER;
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_KHR: Self =
+ Self::SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT;
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_KHR: Self =
+ Self::SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE;
+ pub const DISJOINT_KHR: Self = Self::DISJOINT;
+ pub const COSITED_CHROMA_SAMPLES_KHR: Self = Self::COSITED_CHROMA_SAMPLES;
+}
+#[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"]
+impl ImageAspectFlags {
+ pub const PLANE_0_KHR: Self = Self::PLANE_0;
+ pub const PLANE_1_KHR: Self = Self::PLANE_1;
+ pub const PLANE_2_KHR: Self = Self::PLANE_2;
+}
+#[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"]
+impl ImageCreateFlags {
+ pub const DISJOINT_KHR: Self = Self::DISJOINT;
+}
+#[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"]
+impl ObjectType {
+ pub const SAMPLER_YCBCR_CONVERSION_KHR: Self = Self::SAMPLER_YCBCR_CONVERSION;
+}
+#[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"]
+impl SamplerYcbcrModelConversion {
+ pub const RGB_IDENTITY_KHR: Self = Self::RGB_IDENTITY;
+ pub const YCBCR_IDENTITY_KHR: Self = Self::YCBCR_IDENTITY;
+ pub const YCBCR_709_KHR: Self = Self::YCBCR_709;
+ pub const YCBCR_601_KHR: Self = Self::YCBCR_601;
+ pub const YCBCR_2020_KHR: Self = Self::YCBCR_2020;
+}
+#[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"]
+impl SamplerYcbcrRange {
+ pub const ITU_FULL_KHR: Self = Self::ITU_FULL;
+ pub const ITU_NARROW_KHR: Self = Self::ITU_NARROW;
+}
+#[doc = "Generated from 'VK_KHR_sampler_ycbcr_conversion'"]
+impl StructureType {
+ pub const SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR: Self =
+ Self::SAMPLER_YCBCR_CONVERSION_CREATE_INFO;
+ pub const SAMPLER_YCBCR_CONVERSION_INFO_KHR: Self = Self::SAMPLER_YCBCR_CONVERSION_INFO;
+ pub const BIND_IMAGE_PLANE_MEMORY_INFO_KHR: Self = Self::BIND_IMAGE_PLANE_MEMORY_INFO;
+ pub const IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR: Self =
+ Self::IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO;
+ pub const PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR: Self =
+ Self::PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;
+ pub const SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR: Self =
+ Self::SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES;
+}
+// VK_KHR_bind_memory2: metadata, typed PFN aliases, and the loaded table for
+// the batched buffer/image memory-binding commands.
+impl KhrBindMemory2Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_bind_memory2\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+// Typed alias mirroring the C prototype of vkBindBufferMemory2.
+#[allow(non_camel_case_types)]
+pub type PFN_vkBindBufferMemory2 = unsafe extern "system" fn(
+ device: Device,
+ bind_info_count: u32,
+ p_bind_infos: *const BindBufferMemoryInfo,
+) -> Result;
+// Typed alias mirroring the C prototype of vkBindImageMemory2.
+#[allow(non_camel_case_types)]
+pub type PFN_vkBindImageMemory2 = unsafe extern "system" fn(
+ device: Device,
+ bind_info_count: u32,
+ p_bind_infos: *const BindImageMemoryInfo,
+) -> Result;
+#[derive(Clone)]
+pub struct KhrBindMemory2Fn {
+ pub bind_buffer_memory2_khr: PFN_vkBindBufferMemory2,
+ pub bind_image_memory2_khr: PFN_vkBindImageMemory2,
+}
+// SAFETY: the table holds only plain function pointers.
+unsafe impl Send for KhrBindMemory2Fn {}
+unsafe impl Sync for KhrBindMemory2Fn {}
+impl KhrBindMemory2Fn {
+ // Standard generated loader: per command, install either the resolved
+ // pointer (transmuted to the matching PFN type) or a panicking stub when
+ // the loader closure `_f` returns null.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ bind_buffer_memory2_khr: unsafe {
+ unsafe extern "system" fn bind_buffer_memory2_khr(
+ _device: Device,
+ _bind_info_count: u32,
+ _p_bind_infos: *const BindBufferMemoryInfo,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(bind_buffer_memory2_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkBindBufferMemory2KHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ bind_buffer_memory2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ bind_image_memory2_khr: unsafe {
+ unsafe extern "system" fn bind_image_memory2_khr(
+ _device: Device,
+ _bind_info_count: u32,
+ _p_bind_infos: *const BindImageMemoryInfo,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(bind_image_memory2_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkBindImageMemory2KHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ bind_image_memory2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_bind_memory2'"]
+// Extension-suffixed aliases of core values (the extension was promoted).
+impl ImageCreateFlags {
+ pub const ALIAS_KHR: Self = Self::ALIAS;
+}
+#[doc = "Generated from 'VK_KHR_bind_memory2'"]
+impl StructureType {
+ pub const BIND_BUFFER_MEMORY_INFO_KHR: Self = Self::BIND_BUFFER_MEMORY_INFO;
+ pub const BIND_IMAGE_MEMORY_INFO_KHR: Self = Self::BIND_IMAGE_MEMORY_INFO;
+}
+// VK_EXT_image_drm_format_modifier: metadata, PFN alias, and loaded table for
+// the single query command, plus the extension's new enumerant values.
+impl ExtImageDrmFormatModifierFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_image_drm_format_modifier\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+// Typed alias mirroring the C prototype of vkGetImageDrmFormatModifierPropertiesEXT.
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetImageDrmFormatModifierPropertiesEXT = unsafe extern "system" fn(
+ device: Device,
+ image: Image,
+ p_properties: *mut ImageDrmFormatModifierPropertiesEXT,
+) -> Result;
+#[derive(Clone)]
+pub struct ExtImageDrmFormatModifierFn {
+ pub get_image_drm_format_modifier_properties_ext: PFN_vkGetImageDrmFormatModifierPropertiesEXT,
+}
+// SAFETY: the table holds only a plain function pointer.
+unsafe impl Send for ExtImageDrmFormatModifierFn {}
+unsafe impl Sync for ExtImageDrmFormatModifierFn {}
+impl ExtImageDrmFormatModifierFn {
+ // Standard generated loader: resolved pointer, or panicking stub on null.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_image_drm_format_modifier_properties_ext: unsafe {
+ unsafe extern "system" fn get_image_drm_format_modifier_properties_ext(
+ _device: Device,
+ _image: Image,
+ _p_properties: *mut ImageDrmFormatModifierPropertiesEXT,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_image_drm_format_modifier_properties_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetImageDrmFormatModifierPropertiesEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_image_drm_format_modifier_properties_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_image_drm_format_modifier'"]
+// New flag bits (bits 7..10) added to the ImageAspectFlags bitmask.
+impl ImageAspectFlags {
+ pub const MEMORY_PLANE_0_EXT: Self = Self(0b1000_0000);
+ pub const MEMORY_PLANE_1_EXT: Self = Self(0b1_0000_0000);
+ pub const MEMORY_PLANE_2_EXT: Self = Self(0b10_0000_0000);
+ pub const MEMORY_PLANE_3_EXT: Self = Self(0b100_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_image_drm_format_modifier'"]
+impl ImageTiling {
+ pub const DRM_FORMAT_MODIFIER_EXT: Self = Self(1_000_158_000);
+}
+#[doc = "Generated from 'VK_EXT_image_drm_format_modifier'"]
+// Error codes use the negated extension-enumerant value.
+impl Result {
+ pub const ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT: Self = Self(-1_000_158_000);
+}
+#[doc = "Generated from 'VK_EXT_image_drm_format_modifier'"]
+// Note: offset 001 is absent from this list (unused/reserved in the registry).
+impl StructureType {
+ pub const DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT: Self = Self(1_000_158_000);
+ pub const PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT: Self = Self(1_000_158_002);
+ pub const IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT: Self = Self(1_000_158_003);
+ pub const IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT: Self = Self(1_000_158_004);
+ pub const IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT: Self = Self(1_000_158_005);
+ pub const DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT: Self = Self(1_000_158_006);
+}
+// VK_EXT_extension_160: a reserved/placeholder extension number (SPEC_VERSION
+// 0, no commands, no enums).
+impl ExtExtension160Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_160\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+// Empty function table: the extension defines no commands.
+#[derive(Clone)]
+pub struct ExtExtension160Fn {}
+// SAFETY: stateless empty struct; thread sharing is trivially sound.
+unsafe impl Send for ExtExtension160Fn {}
+unsafe impl Sync for ExtExtension160Fn {}
+impl ExtExtension160Fn {
+ // Loader kept for signature uniformity; `_f` is never invoked.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+// VK_EXT_validation_cache: metadata, PFN aliases, and the loaded table for
+// the four validation-cache commands (create/destroy/merge/get-data).
+impl ExtValidationCacheFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_validation_cache\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+// Typed aliases mirroring the C prototypes of the four vk*ValidationCache* commands.
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateValidationCacheEXT = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const ValidationCacheCreateInfoEXT,
+ p_allocator: *const AllocationCallbacks,
+ p_validation_cache: *mut ValidationCacheEXT,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyValidationCacheEXT = unsafe extern "system" fn(
+ device: Device,
+ validation_cache: ValidationCacheEXT,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkMergeValidationCachesEXT = unsafe extern "system" fn(
+ device: Device,
+ dst_cache: ValidationCacheEXT,
+ src_cache_count: u32,
+ p_src_caches: *const ValidationCacheEXT,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetValidationCacheDataEXT = unsafe extern "system" fn(
+ device: Device,
+ validation_cache: ValidationCacheEXT,
+ p_data_size: *mut usize,
+ p_data: *mut c_void,
+) -> Result;
+#[derive(Clone)]
+pub struct ExtValidationCacheFn {
+ pub create_validation_cache_ext: PFN_vkCreateValidationCacheEXT,
+ pub destroy_validation_cache_ext: PFN_vkDestroyValidationCacheEXT,
+ pub merge_validation_caches_ext: PFN_vkMergeValidationCachesEXT,
+ pub get_validation_cache_data_ext: PFN_vkGetValidationCacheDataEXT,
+}
+// SAFETY: the table holds only plain function pointers.
+unsafe impl Send for ExtValidationCacheFn {}
+unsafe impl Sync for ExtValidationCacheFn {}
+impl ExtValidationCacheFn {
+ // Standard generated loader: per command, install either the resolved
+ // pointer (transmuted to the matching PFN type) or a panicking stub when
+ // the loader closure `_f` returns null, so a missing entry point fails
+ // loudly at call time rather than at load time.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_validation_cache_ext: unsafe {
+ unsafe extern "system" fn create_validation_cache_ext(
+ _device: Device,
+ _p_create_info: *const ValidationCacheCreateInfoEXT,
+ _p_allocator: *const AllocationCallbacks,
+ _p_validation_cache: *mut ValidationCacheEXT,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_validation_cache_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreateValidationCacheEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_validation_cache_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_validation_cache_ext: unsafe {
+ unsafe extern "system" fn destroy_validation_cache_ext(
+ _device: Device,
+ _validation_cache: ValidationCacheEXT,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_validation_cache_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkDestroyValidationCacheEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_validation_cache_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ merge_validation_caches_ext: unsafe {
+ unsafe extern "system" fn merge_validation_caches_ext(
+ _device: Device,
+ _dst_cache: ValidationCacheEXT,
+ _src_cache_count: u32,
+ _p_src_caches: *const ValidationCacheEXT,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(merge_validation_caches_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkMergeValidationCachesEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ merge_validation_caches_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_validation_cache_data_ext: unsafe {
+ unsafe extern "system" fn get_validation_cache_data_ext(
+ _device: Device,
+ _validation_cache: ValidationCacheEXT,
+ _p_data_size: *mut usize,
+ _p_data: *mut c_void,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_validation_cache_data_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetValidationCacheDataEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_validation_cache_data_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_validation_cache'"]
+// New enumerant values allocated to this extension (extension number 161).
+impl ObjectType {
+ pub const VALIDATION_CACHE_EXT: Self = Self(1_000_160_000);
+}
+#[doc = "Generated from 'VK_EXT_validation_cache'"]
+impl StructureType {
+ pub const VALIDATION_CACHE_CREATE_INFO_EXT: Self = Self(1_000_160_000);
+ pub const SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT: Self = Self(1_000_160_001);
+}
+// VK_EXT_descriptor_indexing: extension metadata (no commands; its enums were
+// promoted to core, so the constants below are unsuffixed-core aliases).
+impl ExtDescriptorIndexingFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_descriptor_indexing\0") }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+// Empty function table.
+#[derive(Clone)]
+pub struct ExtDescriptorIndexingFn {}
+// SAFETY: stateless empty struct; thread sharing is trivially sound.
+unsafe impl Send for ExtDescriptorIndexingFn {}
+unsafe impl Sync for ExtDescriptorIndexingFn {}
+impl ExtDescriptorIndexingFn {
+ // Loader kept for signature uniformity; `_f` is never invoked.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_descriptor_indexing'"]
+impl DescriptorBindingFlags {
+ pub const UPDATE_AFTER_BIND_EXT: Self = Self::UPDATE_AFTER_BIND;
+ pub const UPDATE_UNUSED_WHILE_PENDING_EXT: Self = Self::UPDATE_UNUSED_WHILE_PENDING;
+ pub const PARTIALLY_BOUND_EXT: Self = Self::PARTIALLY_BOUND;
+ pub const VARIABLE_DESCRIPTOR_COUNT_EXT: Self = Self::VARIABLE_DESCRIPTOR_COUNT;
+}
+#[doc = "Generated from 'VK_EXT_descriptor_indexing'"]
+impl DescriptorPoolCreateFlags {
+ pub const UPDATE_AFTER_BIND_EXT: Self = Self::UPDATE_AFTER_BIND;
+}
+#[doc = "Generated from 'VK_EXT_descriptor_indexing'"]
+impl DescriptorSetLayoutCreateFlags {
+ pub const UPDATE_AFTER_BIND_POOL_EXT: Self = Self::UPDATE_AFTER_BIND_POOL;
+}
+#[doc = "Generated from 'VK_EXT_descriptor_indexing'"]
+impl Result {
+ pub const ERROR_FRAGMENTATION_EXT: Self = Self::ERROR_FRAGMENTATION;
+}
+#[doc = "Generated from 'VK_EXT_descriptor_indexing'"]
+impl StructureType {
+ pub const DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT: Self =
+ Self::DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO;
+ pub const PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT: Self =
+ Self::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES;
+ pub const PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT: Self =
+ Self::PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES;
+ pub const DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT: Self =
+ Self::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO;
+ pub const DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT: Self =
+ Self::DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT;
+}
+// VK_EXT_shader_viewport_index_layer: extension metadata (a SPIR-V capability
+// extension — no commands and no new enums on the Vulkan API side).
+impl ExtShaderViewportIndexLayerFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_viewport_index_layer\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+// Empty function table.
+#[derive(Clone)]
+pub struct ExtShaderViewportIndexLayerFn {}
+// SAFETY: stateless empty struct; thread sharing is trivially sound.
+unsafe impl Send for ExtShaderViewportIndexLayerFn {}
+unsafe impl Sync for ExtShaderViewportIndexLayerFn {}
+impl ExtShaderViewportIndexLayerFn {
+ // Loader kept for signature uniformity; `_f` is never invoked.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+// VK_KHR_portability_subset: extension metadata (no commands; contributes only
+// the feature/property structure types below).
+impl KhrPortabilitySubsetFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_portability_subset\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+// Empty function table.
+#[derive(Clone)]
+pub struct KhrPortabilitySubsetFn {}
+// SAFETY: stateless empty struct; thread sharing is trivially sound.
+unsafe impl Send for KhrPortabilitySubsetFn {}
+unsafe impl Sync for KhrPortabilitySubsetFn {}
+impl KhrPortabilitySubsetFn {
+ // Loader kept for signature uniformity; `_f` is never invoked.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_portability_subset'"]
+// Enumerants allocated to this extension (extension number 164).
+impl StructureType {
+ pub const PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR: Self = Self(1_000_163_000);
+ pub const PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR: Self = Self(1_000_163_001);
+}
+// VK_NV_shading_rate_image: metadata, PFN aliases, and the loaded table for
+// the three command-buffer-level shading-rate commands.
+impl NvShadingRateImageFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_shading_rate_image\0") }
+ }
+ pub const SPEC_VERSION: u32 = 3u32;
+}
+// Typed aliases mirroring the C prototypes of the three vkCmd* commands.
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBindShadingRateImageNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ image_view: ImageView,
+ image_layout: ImageLayout,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetViewportShadingRatePaletteNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ first_viewport: u32,
+ viewport_count: u32,
+ p_shading_rate_palettes: *const ShadingRatePaletteNV,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetCoarseSampleOrderNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ sample_order_type: CoarseSampleOrderTypeNV,
+ custom_sample_order_count: u32,
+ p_custom_sample_orders: *const CoarseSampleOrderCustomNV,
+);
+#[derive(Clone)]
+pub struct NvShadingRateImageFn {
+ pub cmd_bind_shading_rate_image_nv: PFN_vkCmdBindShadingRateImageNV,
+ pub cmd_set_viewport_shading_rate_palette_nv: PFN_vkCmdSetViewportShadingRatePaletteNV,
+ pub cmd_set_coarse_sample_order_nv: PFN_vkCmdSetCoarseSampleOrderNV,
+}
+// SAFETY: the table holds only plain function pointers.
+unsafe impl Send for NvShadingRateImageFn {}
+unsafe impl Sync for NvShadingRateImageFn {}
+impl NvShadingRateImageFn {
+ // Standard generated loader: per command, install either the resolved
+ // pointer (transmuted to the matching PFN type) or a panicking stub when
+ // the loader closure `_f` returns null.
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_bind_shading_rate_image_nv: unsafe {
+ unsafe extern "system" fn cmd_bind_shading_rate_image_nv(
+ _command_buffer: CommandBuffer,
+ _image_view: ImageView,
+ _image_layout: ImageLayout,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_bind_shading_rate_image_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdBindShadingRateImageNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_bind_shading_rate_image_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_viewport_shading_rate_palette_nv: unsafe {
+ unsafe extern "system" fn cmd_set_viewport_shading_rate_palette_nv(
+ _command_buffer: CommandBuffer,
+ _first_viewport: u32,
+ _viewport_count: u32,
+ _p_shading_rate_palettes: *const ShadingRatePaletteNV,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_viewport_shading_rate_palette_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetViewportShadingRatePaletteNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_viewport_shading_rate_palette_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_coarse_sample_order_nv: unsafe {
+ unsafe extern "system" fn cmd_set_coarse_sample_order_nv(
+ _command_buffer: CommandBuffer,
+ _sample_order_type: CoarseSampleOrderTypeNV,
+ _custom_sample_order_count: u32,
+ _p_custom_sample_orders: *const CoarseSampleOrderCustomNV,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_coarse_sample_order_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetCoarseSampleOrderNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_coarse_sample_order_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_NV_shading_rate_image'"]
+// NV-suffixed aliases mapping onto the KHR fragment-shading-rate values.
+impl AccessFlags {
+ pub const SHADING_RATE_IMAGE_READ_NV: Self = Self::FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR;
+}
+#[doc = "Generated from 'VK_NV_shading_rate_image'"]
+impl DynamicState {
+ pub const VIEWPORT_SHADING_RATE_PALETTE_NV: Self = Self(1_000_164_004);
+ pub const VIEWPORT_COARSE_SAMPLE_ORDER_NV: Self = Self(1_000_164_006);
+}
+#[doc = "Generated from 'VK_NV_shading_rate_image'"]
+impl ImageLayout {
+ pub const SHADING_RATE_OPTIMAL_NV: Self = Self::FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR;
+}
+#[doc = "Generated from 'VK_NV_shading_rate_image'"]
+impl ImageUsageFlags {
+ pub const SHADING_RATE_IMAGE_NV: Self = Self::FRAGMENT_SHADING_RATE_ATTACHMENT_KHR;
+}
+#[doc = "Generated from 'VK_NV_shading_rate_image'"]
+impl PipelineStageFlags {
+ pub const SHADING_RATE_IMAGE_NV: Self = Self::FRAGMENT_SHADING_RATE_ATTACHMENT_KHR;
+}
+#[doc = "Generated from 'VK_NV_shading_rate_image'"]
+// Offsets 003/004 gaps: not all per-extension offsets are used by the registry.
+impl StructureType {
+ pub const PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV: Self = Self(1_000_164_000);
+ pub const PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV: Self = Self(1_000_164_001);
+ pub const PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV: Self = Self(1_000_164_002);
+ pub const PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV: Self =
+ Self(1_000_164_005);
+}
+// VK_NV_ray_tracing: metadata and typed PFN aliases for the extension's
+// commands; the function table's `load` impl continues beyond this hunk.
+impl NvRayTracingFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ // SAFETY: the byte literal is NUL-terminated and has no interior NUL.
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_ray_tracing\0") }
+ }
+ pub const SPEC_VERSION: u32 = 3u32;
+}
+// Typed aliases mirroring the C prototypes of the NV ray-tracing commands.
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateAccelerationStructureNV = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const AccelerationStructureCreateInfoNV,
+ p_allocator: *const AllocationCallbacks,
+ p_acceleration_structure: *mut AccelerationStructureNV,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyAccelerationStructureNV = unsafe extern "system" fn(
+ device: Device,
+ acceleration_structure: AccelerationStructureNV,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetAccelerationStructureMemoryRequirementsNV = unsafe extern "system" fn(
+ device: Device,
+ p_info: *const AccelerationStructureMemoryRequirementsInfoNV,
+ p_memory_requirements: *mut MemoryRequirements2KHR,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkBindAccelerationStructureMemoryNV = unsafe extern "system" fn(
+ device: Device,
+ bind_info_count: u32,
+ p_bind_infos: *const BindAccelerationStructureMemoryInfoNV,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBuildAccelerationStructureNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ p_info: *const AccelerationStructureInfoNV,
+ instance_data: Buffer,
+ instance_offset: DeviceSize,
+ update: Bool32,
+ dst: AccelerationStructureNV,
+ src: AccelerationStructureNV,
+ scratch: Buffer,
+ scratch_offset: DeviceSize,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyAccelerationStructureNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ dst: AccelerationStructureNV,
+ src: AccelerationStructureNV,
+ mode: CopyAccelerationStructureModeKHR,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdTraceRaysNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ raygen_shader_binding_table_buffer: Buffer,
+ raygen_shader_binding_offset: DeviceSize,
+ miss_shader_binding_table_buffer: Buffer,
+ miss_shader_binding_offset: DeviceSize,
+ miss_shader_binding_stride: DeviceSize,
+ hit_shader_binding_table_buffer: Buffer,
+ hit_shader_binding_offset: DeviceSize,
+ hit_shader_binding_stride: DeviceSize,
+ callable_shader_binding_table_buffer: Buffer,
+ callable_shader_binding_offset: DeviceSize,
+ callable_shader_binding_stride: DeviceSize,
+ width: u32,
+ height: u32,
+ depth: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateRayTracingPipelinesNV = unsafe extern "system" fn(
+ device: Device,
+ pipeline_cache: PipelineCache,
+ create_info_count: u32,
+ p_create_infos: *const RayTracingPipelineCreateInfoNV,
+ p_allocator: *const AllocationCallbacks,
+ p_pipelines: *mut Pipeline,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetAccelerationStructureHandleNV = unsafe extern "system" fn(
+ device: Device,
+ acceleration_structure: AccelerationStructureNV,
+ data_size: usize,
+ p_data: *mut c_void,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdWriteAccelerationStructuresPropertiesNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ acceleration_structure_count: u32,
+ p_acceleration_structures: *const AccelerationStructureNV,
+ query_type: QueryType,
+ query_pool: QueryPool,
+ first_query: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCompileDeferredNV =
+ unsafe extern "system" fn(device: Device, pipeline: Pipeline, shader: u32) -> Result;
+#[derive(Clone)]
+pub struct NvRayTracingFn {
+ pub create_acceleration_structure_nv: PFN_vkCreateAccelerationStructureNV,
+ pub destroy_acceleration_structure_nv: PFN_vkDestroyAccelerationStructureNV,
+ pub get_acceleration_structure_memory_requirements_nv:
+ PFN_vkGetAccelerationStructureMemoryRequirementsNV,
+ pub bind_acceleration_structure_memory_nv: PFN_vkBindAccelerationStructureMemoryNV,
+ pub cmd_build_acceleration_structure_nv: PFN_vkCmdBuildAccelerationStructureNV,
+ pub cmd_copy_acceleration_structure_nv: PFN_vkCmdCopyAccelerationStructureNV,
+ pub cmd_trace_rays_nv: PFN_vkCmdTraceRaysNV,
+ pub create_ray_tracing_pipelines_nv: PFN_vkCreateRayTracingPipelinesNV,
+ // Reuses the KHR PFN type: the NV and KHR commands share a prototype.
+ pub get_ray_tracing_shader_group_handles_nv:
+ crate::vk::PFN_vkGetRayTracingShaderGroupHandlesKHR,
+ pub get_acceleration_structure_handle_nv: PFN_vkGetAccelerationStructureHandleNV,
+ pub cmd_write_acceleration_structures_properties_nv:
+ PFN_vkCmdWriteAccelerationStructuresPropertiesNV,
+ pub compile_deferred_nv: PFN_vkCompileDeferredNV,
+}
+// SAFETY: the table holds only plain function pointers.
+unsafe impl Send for NvRayTracingFn {}
+unsafe impl Sync for NvRayTracingFn {}
+impl NvRayTracingFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_acceleration_structure_nv: unsafe {
+ unsafe extern "system" fn create_acceleration_structure_nv(
+ _device: Device,
+ _p_create_info: *const AccelerationStructureCreateInfoNV,
+ _p_allocator: *const AllocationCallbacks,
+ _p_acceleration_structure: *mut AccelerationStructureNV,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_acceleration_structure_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreateAccelerationStructureNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_acceleration_structure_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_acceleration_structure_nv: unsafe {
+ unsafe extern "system" fn destroy_acceleration_structure_nv(
+ _device: Device,
+ _acceleration_structure: AccelerationStructureNV,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_acceleration_structure_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkDestroyAccelerationStructureNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_acceleration_structure_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_acceleration_structure_memory_requirements_nv: unsafe {
+ unsafe extern "system" fn get_acceleration_structure_memory_requirements_nv(
+ _device: Device,
+ _p_info: *const AccelerationStructureMemoryRequirementsInfoNV,
+ _p_memory_requirements: *mut MemoryRequirements2KHR,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_acceleration_structure_memory_requirements_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetAccelerationStructureMemoryRequirementsNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_acceleration_structure_memory_requirements_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ bind_acceleration_structure_memory_nv: unsafe {
+ unsafe extern "system" fn bind_acceleration_structure_memory_nv(
+ _device: Device,
+ _bind_info_count: u32,
+ _p_bind_infos: *const BindAccelerationStructureMemoryInfoNV,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(bind_acceleration_structure_memory_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkBindAccelerationStructureMemoryNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ bind_acceleration_structure_memory_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_build_acceleration_structure_nv: unsafe {
+ unsafe extern "system" fn cmd_build_acceleration_structure_nv(
+ _command_buffer: CommandBuffer,
+ _p_info: *const AccelerationStructureInfoNV,
+ _instance_data: Buffer,
+ _instance_offset: DeviceSize,
+ _update: Bool32,
+ _dst: AccelerationStructureNV,
+ _src: AccelerationStructureNV,
+ _scratch: Buffer,
+ _scratch_offset: DeviceSize,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_build_acceleration_structure_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdBuildAccelerationStructureNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_build_acceleration_structure_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_copy_acceleration_structure_nv: unsafe {
+ unsafe extern "system" fn cmd_copy_acceleration_structure_nv(
+ _command_buffer: CommandBuffer,
+ _dst: AccelerationStructureNV,
+ _src: AccelerationStructureNV,
+ _mode: CopyAccelerationStructureModeKHR,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_copy_acceleration_structure_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdCopyAccelerationStructureNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_copy_acceleration_structure_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_trace_rays_nv: unsafe {
+ unsafe extern "system" fn cmd_trace_rays_nv(
+ _command_buffer: CommandBuffer,
+ _raygen_shader_binding_table_buffer: Buffer,
+ _raygen_shader_binding_offset: DeviceSize,
+ _miss_shader_binding_table_buffer: Buffer,
+ _miss_shader_binding_offset: DeviceSize,
+ _miss_shader_binding_stride: DeviceSize,
+ _hit_shader_binding_table_buffer: Buffer,
+ _hit_shader_binding_offset: DeviceSize,
+ _hit_shader_binding_stride: DeviceSize,
+ _callable_shader_binding_table_buffer: Buffer,
+ _callable_shader_binding_offset: DeviceSize,
+ _callable_shader_binding_stride: DeviceSize,
+ _width: u32,
+ _height: u32,
+ _depth: u32,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_trace_rays_nv)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdTraceRaysNV\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_trace_rays_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_ray_tracing_pipelines_nv: unsafe {
+ unsafe extern "system" fn create_ray_tracing_pipelines_nv(
+ _device: Device,
+ _pipeline_cache: PipelineCache,
+ _create_info_count: u32,
+ _p_create_infos: *const RayTracingPipelineCreateInfoNV,
+ _p_allocator: *const AllocationCallbacks,
+ _p_pipelines: *mut Pipeline,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_ray_tracing_pipelines_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreateRayTracingPipelinesNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_ray_tracing_pipelines_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_ray_tracing_shader_group_handles_nv: unsafe {
+ unsafe extern "system" fn get_ray_tracing_shader_group_handles_nv(
+ _device: Device,
+ _pipeline: Pipeline,
+ _first_group: u32,
+ _group_count: u32,
+ _data_size: usize,
+ _p_data: *mut c_void,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_ray_tracing_shader_group_handles_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetRayTracingShaderGroupHandlesNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_ray_tracing_shader_group_handles_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_acceleration_structure_handle_nv: unsafe {
+ unsafe extern "system" fn get_acceleration_structure_handle_nv(
+ _device: Device,
+ _acceleration_structure: AccelerationStructureNV,
+ _data_size: usize,
+ _p_data: *mut c_void,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_acceleration_structure_handle_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetAccelerationStructureHandleNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_acceleration_structure_handle_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_write_acceleration_structures_properties_nv: unsafe {
+ unsafe extern "system" fn cmd_write_acceleration_structures_properties_nv(
+ _command_buffer: CommandBuffer,
+ _acceleration_structure_count: u32,
+ _p_acceleration_structures: *const AccelerationStructureNV,
+ _query_type: QueryType,
+ _query_pool: QueryPool,
+ _first_query: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_write_acceleration_structures_properties_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdWriteAccelerationStructuresPropertiesNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_write_acceleration_structures_properties_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ compile_deferred_nv: unsafe {
+ unsafe extern "system" fn compile_deferred_nv(
+ _device: Device,
+ _pipeline: Pipeline,
+ _shader: u32,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(compile_deferred_nv)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCompileDeferredNV\0");
+ let val = _f(cname);
+ if val.is_null() {
+ compile_deferred_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl AccelerationStructureTypeKHR {
+ pub const TOP_LEVEL_NV: Self = Self::TOP_LEVEL;
+ pub const BOTTOM_LEVEL_NV: Self = Self::BOTTOM_LEVEL;
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl AccessFlags {
+ pub const ACCELERATION_STRUCTURE_READ_NV: Self = Self::ACCELERATION_STRUCTURE_READ_KHR;
+ pub const ACCELERATION_STRUCTURE_WRITE_NV: Self = Self::ACCELERATION_STRUCTURE_WRITE_KHR;
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl BufferUsageFlags {
+ pub const RAY_TRACING_NV: Self = Self::SHADER_BINDING_TABLE_KHR;
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl BuildAccelerationStructureFlagsKHR {
+ pub const ALLOW_UPDATE_NV: Self = Self::ALLOW_UPDATE;
+ pub const ALLOW_COMPACTION_NV: Self = Self::ALLOW_COMPACTION;
+ pub const PREFER_FAST_TRACE_NV: Self = Self::PREFER_FAST_TRACE;
+ pub const PREFER_FAST_BUILD_NV: Self = Self::PREFER_FAST_BUILD;
+ pub const LOW_MEMORY_NV: Self = Self::LOW_MEMORY;
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl CopyAccelerationStructureModeKHR {
+ pub const CLONE_NV: Self = Self::CLONE;
+ pub const COMPACT_NV: Self = Self::COMPACT;
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl DebugReportObjectTypeEXT {
+ pub const ACCELERATION_STRUCTURE_NV: Self = Self(1_000_165_000);
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl DescriptorType {
+ pub const ACCELERATION_STRUCTURE_NV: Self = Self(1_000_165_000);
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl GeometryFlagsKHR {
+ pub const OPAQUE_NV: Self = Self::OPAQUE;
+ pub const NO_DUPLICATE_ANY_HIT_INVOCATION_NV: Self = Self::NO_DUPLICATE_ANY_HIT_INVOCATION;
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl GeometryInstanceFlagsKHR {
+ pub const TRIANGLE_CULL_DISABLE_NV: Self = Self::TRIANGLE_FACING_CULL_DISABLE;
+ pub const TRIANGLE_FRONT_COUNTERCLOCKWISE_NV: Self = Self::TRIANGLE_FRONT_COUNTERCLOCKWISE;
+ pub const FORCE_OPAQUE_NV: Self = Self::FORCE_OPAQUE;
+ pub const FORCE_NO_OPAQUE_NV: Self = Self::FORCE_NO_OPAQUE;
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl GeometryTypeKHR {
+ pub const TRIANGLES_NV: Self = Self::TRIANGLES;
+ pub const AABBS_NV: Self = Self::AABBS;
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl IndexType {
+ pub const NONE_NV: Self = Self::NONE_KHR;
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl ObjectType {
+ pub const ACCELERATION_STRUCTURE_NV: Self = Self(1_000_165_000);
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl PipelineBindPoint {
+ pub const RAY_TRACING_NV: Self = Self::RAY_TRACING_KHR;
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl PipelineCreateFlags {
+ pub const DEFER_COMPILE_NV: Self = Self(0b10_0000);
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl PipelineStageFlags {
+ pub const RAY_TRACING_SHADER_NV: Self = Self::RAY_TRACING_SHADER_KHR;
+ pub const ACCELERATION_STRUCTURE_BUILD_NV: Self = Self::ACCELERATION_STRUCTURE_BUILD_KHR;
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl QueryType {
+ pub const ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV: Self = Self(1_000_165_000);
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl RayTracingShaderGroupTypeKHR {
+ pub const GENERAL_NV: Self = Self::GENERAL;
+ pub const TRIANGLES_HIT_GROUP_NV: Self = Self::TRIANGLES_HIT_GROUP;
+ pub const PROCEDURAL_HIT_GROUP_NV: Self = Self::PROCEDURAL_HIT_GROUP;
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl ShaderStageFlags {
+ pub const RAYGEN_NV: Self = Self::RAYGEN_KHR;
+ pub const ANY_HIT_NV: Self = Self::ANY_HIT_KHR;
+ pub const CLOSEST_HIT_NV: Self = Self::CLOSEST_HIT_KHR;
+ pub const MISS_NV: Self = Self::MISS_KHR;
+ pub const INTERSECTION_NV: Self = Self::INTERSECTION_KHR;
+ pub const CALLABLE_NV: Self = Self::CALLABLE_KHR;
+}
+#[doc = "Generated from 'VK_NV_ray_tracing'"]
+impl StructureType {
+ pub const RAY_TRACING_PIPELINE_CREATE_INFO_NV: Self = Self(1_000_165_000);
+ pub const ACCELERATION_STRUCTURE_CREATE_INFO_NV: Self = Self(1_000_165_001);
+ pub const GEOMETRY_NV: Self = Self(1_000_165_003);
+ pub const GEOMETRY_TRIANGLES_NV: Self = Self(1_000_165_004);
+ pub const GEOMETRY_AABB_NV: Self = Self(1_000_165_005);
+ pub const BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV: Self = Self(1_000_165_006);
+ pub const WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV: Self = Self(1_000_165_007);
+ pub const ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV: Self = Self(1_000_165_008);
+ pub const PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV: Self = Self(1_000_165_009);
+ pub const RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV: Self = Self(1_000_165_011);
+ pub const ACCELERATION_STRUCTURE_INFO_NV: Self = Self(1_000_165_012);
+}
+impl NvRepresentativeFragmentTestFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_representative_fragment_test\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct NvRepresentativeFragmentTestFn {}
+unsafe impl Send for NvRepresentativeFragmentTestFn {}
+unsafe impl Sync for NvRepresentativeFragmentTestFn {}
+impl NvRepresentativeFragmentTestFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_NV_representative_fragment_test'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV: Self = Self(1_000_166_000);
+ pub const PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV: Self =
+ Self(1_000_166_001);
+}
+impl NvExtension168Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_168\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension168Fn {}
+unsafe impl Send for NvExtension168Fn {}
+unsafe impl Sync for NvExtension168Fn {}
+impl NvExtension168Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl KhrMaintenance3Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_maintenance3\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDescriptorSetLayoutSupport = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const DescriptorSetLayoutCreateInfo,
+ p_support: *mut DescriptorSetLayoutSupport,
+);
+#[derive(Clone)]
+pub struct KhrMaintenance3Fn {
+ pub get_descriptor_set_layout_support_khr: PFN_vkGetDescriptorSetLayoutSupport,
+}
+unsafe impl Send for KhrMaintenance3Fn {}
+unsafe impl Sync for KhrMaintenance3Fn {}
+impl KhrMaintenance3Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_descriptor_set_layout_support_khr: unsafe {
+ unsafe extern "system" fn get_descriptor_set_layout_support_khr(
+ _device: Device,
+ _p_create_info: *const DescriptorSetLayoutCreateInfo,
+ _p_support: *mut DescriptorSetLayoutSupport,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_descriptor_set_layout_support_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDescriptorSetLayoutSupportKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_descriptor_set_layout_support_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_maintenance3'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR: Self =
+ Self::PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES;
+ pub const DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR: Self = Self::DESCRIPTOR_SET_LAYOUT_SUPPORT;
+}
+impl KhrDrawIndirectCountFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_draw_indirect_count\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrDrawIndirectCountFn {
+ pub cmd_draw_indirect_count_khr: crate::vk::PFN_vkCmdDrawIndirectCount,
+ pub cmd_draw_indexed_indirect_count_khr: crate::vk::PFN_vkCmdDrawIndexedIndirectCount,
+}
+unsafe impl Send for KhrDrawIndirectCountFn {}
+unsafe impl Sync for KhrDrawIndirectCountFn {}
+impl KhrDrawIndirectCountFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_draw_indirect_count_khr: unsafe {
+ unsafe extern "system" fn cmd_draw_indirect_count_khr(
+ _command_buffer: CommandBuffer,
+ _buffer: Buffer,
+ _offset: DeviceSize,
+ _count_buffer: Buffer,
+ _count_buffer_offset: DeviceSize,
+ _max_draw_count: u32,
+ _stride: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_draw_indirect_count_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndirectCountKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_draw_indirect_count_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_draw_indexed_indirect_count_khr: unsafe {
+ unsafe extern "system" fn cmd_draw_indexed_indirect_count_khr(
+ _command_buffer: CommandBuffer,
+ _buffer: Buffer,
+ _offset: DeviceSize,
+ _count_buffer: Buffer,
+ _count_buffer_offset: DeviceSize,
+ _max_draw_count: u32,
+ _stride: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_draw_indexed_indirect_count_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdDrawIndexedIndirectCountKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_draw_indexed_indirect_count_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+impl ExtFilterCubicFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_filter_cubic\0") }
+ }
+ pub const SPEC_VERSION: u32 = 3u32;
+}
+#[derive(Clone)]
+pub struct ExtFilterCubicFn {}
+unsafe impl Send for ExtFilterCubicFn {}
+unsafe impl Sync for ExtFilterCubicFn {}
+impl ExtFilterCubicFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_filter_cubic'"]
+impl Filter {
+ pub const CUBIC_EXT: Self = Self(1_000_015_000);
+}
+#[doc = "Generated from 'VK_EXT_filter_cubic'"]
+impl FormatFeatureFlags {
+ pub const SAMPLED_IMAGE_FILTER_CUBIC_EXT: Self = Self(0b10_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_filter_cubic'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT: Self = Self(1_000_170_000);
+ pub const FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT: Self = Self(1_000_170_001);
+}
+impl QcomRenderPassShaderResolveFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_render_pass_shader_resolve\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 4u32;
+}
+#[derive(Clone)]
+pub struct QcomRenderPassShaderResolveFn {}
+unsafe impl Send for QcomRenderPassShaderResolveFn {}
+unsafe impl Sync for QcomRenderPassShaderResolveFn {}
+impl QcomRenderPassShaderResolveFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_QCOM_render_pass_shader_resolve'"]
+impl SubpassDescriptionFlags {
+ pub const FRAGMENT_REGION_QCOM: Self = Self(0b100);
+ pub const SHADER_RESOLVE_QCOM: Self = Self(0b1000);
+}
+impl QcomExtension173Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_extension_173\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct QcomExtension173Fn {}
+unsafe impl Send for QcomExtension173Fn {}
+unsafe impl Sync for QcomExtension173Fn {}
+impl QcomExtension173Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_QCOM_extension_173'"]
+impl BufferUsageFlags {
+ pub const RESERVED_18_QCOM: Self = Self(0b100_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_QCOM_extension_173'"]
+impl ImageUsageFlags {
+ pub const RESERVED_16_QCOM: Self = Self(0b1_0000_0000_0000_0000);
+ pub const RESERVED_17_QCOM: Self = Self(0b10_0000_0000_0000_0000);
+}
+impl QcomExtension174Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_extension_174\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct QcomExtension174Fn {}
+unsafe impl Send for QcomExtension174Fn {}
+unsafe impl Sync for QcomExtension174Fn {}
+impl QcomExtension174Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtGlobalPriorityFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_global_priority\0") }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct ExtGlobalPriorityFn {}
+unsafe impl Send for ExtGlobalPriorityFn {}
+unsafe impl Sync for ExtGlobalPriorityFn {}
+impl ExtGlobalPriorityFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_global_priority'"]
+impl Result {
+ pub const ERROR_NOT_PERMITTED_EXT: Self = Self::ERROR_NOT_PERMITTED_KHR;
+}
+#[doc = "Generated from 'VK_EXT_global_priority'"]
+impl StructureType {
+ pub const DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT: Self =
+ Self::DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR;
+}
+impl KhrShaderSubgroupExtendedTypesFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_KHR_shader_subgroup_extended_types\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrShaderSubgroupExtendedTypesFn {}
+unsafe impl Send for KhrShaderSubgroupExtendedTypesFn {}
+unsafe impl Sync for KhrShaderSubgroupExtendedTypesFn {}
+impl KhrShaderSubgroupExtendedTypesFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_shader_subgroup_extended_types'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR: Self =
+ Self::PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES;
+}
+impl ExtExtension177Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_177\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension177Fn {}
+unsafe impl Send for ExtExtension177Fn {}
+unsafe impl Sync for ExtExtension177Fn {}
+impl ExtExtension177Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl Khr8bitStorageFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_8bit_storage\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct Khr8bitStorageFn {}
+unsafe impl Send for Khr8bitStorageFn {}
+unsafe impl Sync for Khr8bitStorageFn {}
+impl Khr8bitStorageFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_8bit_storage'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR: Self =
+ Self::PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES;
+}
+impl ExtExternalMemoryHostFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_external_memory_host\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetMemoryHostPointerPropertiesEXT = unsafe extern "system" fn(
+ device: Device,
+ handle_type: ExternalMemoryHandleTypeFlags,
+ p_host_pointer: *const c_void,
+ p_memory_host_pointer_properties: *mut MemoryHostPointerPropertiesEXT,
+) -> Result;
+#[derive(Clone)]
+pub struct ExtExternalMemoryHostFn {
+ pub get_memory_host_pointer_properties_ext: PFN_vkGetMemoryHostPointerPropertiesEXT,
+}
+unsafe impl Send for ExtExternalMemoryHostFn {}
+unsafe impl Sync for ExtExternalMemoryHostFn {}
+impl ExtExternalMemoryHostFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_memory_host_pointer_properties_ext: unsafe {
+ unsafe extern "system" fn get_memory_host_pointer_properties_ext(
+ _device: Device,
+ _handle_type: ExternalMemoryHandleTypeFlags,
+ _p_host_pointer: *const c_void,
+ _p_memory_host_pointer_properties: *mut MemoryHostPointerPropertiesEXT,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_memory_host_pointer_properties_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetMemoryHostPointerPropertiesEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_memory_host_pointer_properties_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_external_memory_host'"]
+impl ExternalMemoryHandleTypeFlags {
+ pub const HOST_ALLOCATION_EXT: Self = Self(0b1000_0000);
+ pub const HOST_MAPPED_FOREIGN_MEMORY_EXT: Self = Self(0b1_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_external_memory_host'"]
+impl StructureType {
+ pub const IMPORT_MEMORY_HOST_POINTER_INFO_EXT: Self = Self(1_000_178_000);
+ pub const MEMORY_HOST_POINTER_PROPERTIES_EXT: Self = Self(1_000_178_001);
+ pub const PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT: Self = Self(1_000_178_002);
+}
+impl AmdBufferMarkerFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_buffer_marker\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdWriteBufferMarkerAMD = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ pipeline_stage: PipelineStageFlags,
+ dst_buffer: Buffer,
+ dst_offset: DeviceSize,
+ marker: u32,
+);
+#[derive(Clone)]
+pub struct AmdBufferMarkerFn {
+ pub cmd_write_buffer_marker_amd: PFN_vkCmdWriteBufferMarkerAMD,
+}
+unsafe impl Send for AmdBufferMarkerFn {}
+unsafe impl Sync for AmdBufferMarkerFn {}
+impl AmdBufferMarkerFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_write_buffer_marker_amd: unsafe {
+ unsafe extern "system" fn cmd_write_buffer_marker_amd(
+ _command_buffer: CommandBuffer,
+ _pipeline_stage: PipelineStageFlags,
+ _dst_buffer: Buffer,
+ _dst_offset: DeviceSize,
+ _marker: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_write_buffer_marker_amd)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdWriteBufferMarkerAMD\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_write_buffer_marker_amd
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+impl KhrShaderAtomicInt64Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_atomic_int64\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrShaderAtomicInt64Fn {}
+unsafe impl Send for KhrShaderAtomicInt64Fn {}
+unsafe impl Sync for KhrShaderAtomicInt64Fn {}
+impl KhrShaderAtomicInt64Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_shader_atomic_int64'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR: Self =
+ Self::PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES;
+}
+impl KhrShaderClockFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_clock\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrShaderClockFn {}
+unsafe impl Send for KhrShaderClockFn {}
+unsafe impl Sync for KhrShaderClockFn {}
+impl KhrShaderClockFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_shader_clock'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR: Self = Self(1_000_181_000);
+}
+impl AmdExtension183Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_183\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension183Fn {}
+unsafe impl Send for AmdExtension183Fn {}
+unsafe impl Sync for AmdExtension183Fn {}
+impl AmdExtension183Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdPipelineCompilerControlFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_pipeline_compiler_control\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct AmdPipelineCompilerControlFn {}
+unsafe impl Send for AmdPipelineCompilerControlFn {}
+unsafe impl Sync for AmdPipelineCompilerControlFn {}
+impl AmdPipelineCompilerControlFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_AMD_pipeline_compiler_control'"]
+impl StructureType {
+ pub const PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD: Self = Self(1_000_183_000);
+}
+// VK_EXT_calibrated_timestamps: dispatch table for the two extension commands.
+// `load` resolves each entry point by name via `_f`; an unresolved symbol is
+// replaced by a panicking stub so a missing extension fails loudly at call time.
+impl ExtCalibratedTimestampsFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_calibrated_timestamps\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 2u32;
+}
+// Raw C function-pointer typedefs mirroring the Vulkan prototypes.
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = unsafe extern "system" fn(
+    physical_device: PhysicalDevice,
+    p_time_domain_count: *mut u32,
+    p_time_domains: *mut TimeDomainEXT,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetCalibratedTimestampsEXT = unsafe extern "system" fn(
+    device: Device,
+    timestamp_count: u32,
+    p_timestamp_infos: *const CalibratedTimestampInfoEXT,
+    p_timestamps: *mut u64,
+    p_max_deviation: *mut u64,
+) -> Result;
+#[derive(Clone)]
+pub struct ExtCalibratedTimestampsFn {
+    pub get_physical_device_calibrateable_time_domains_ext:
+        PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT,
+    pub get_calibrated_timestamps_ext: PFN_vkGetCalibratedTimestampsEXT,
+}
+// SAFETY: holds only plain function pointers; no interior mutability.
+unsafe impl Send for ExtCalibratedTimestampsFn {}
+unsafe impl Sync for ExtCalibratedTimestampsFn {}
+impl ExtCalibratedTimestampsFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            get_physical_device_calibrateable_time_domains_ext: unsafe {
+                // Panicking fallback used when the driver does not export the symbol.
+                unsafe extern "system" fn get_physical_device_calibrateable_time_domains_ext(
+                    _physical_device: PhysicalDevice,
+                    _p_time_domain_count: *mut u32,
+                    _p_time_domains: *mut TimeDomainEXT,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_physical_device_calibrateable_time_domains_ext)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetPhysicalDeviceCalibrateableTimeDomainsEXT\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_physical_device_calibrateable_time_domains_ext
+                } else {
+                    // Reinterpret the loader's `*const c_void` as the matching PFN type.
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_calibrated_timestamps_ext: unsafe {
+                unsafe extern "system" fn get_calibrated_timestamps_ext(
+                    _device: Device,
+                    _timestamp_count: u32,
+                    _p_timestamp_infos: *const CalibratedTimestampInfoEXT,
+                    _p_timestamps: *mut u64,
+                    _p_max_deviation: *mut u64,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_calibrated_timestamps_ext)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetCalibratedTimestampsEXT\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_calibrated_timestamps_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+#[doc = "Generated from 'VK_EXT_calibrated_timestamps'"]
+impl StructureType {
+    pub const CALIBRATED_TIMESTAMP_INFO_EXT: Self = Self(1_000_184_000);
+}
+// VK_AMD_shader_core_properties: commandless extension; contributes only the
+// physical-device properties StructureType value.
+impl AmdShaderCorePropertiesFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_core_properties\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct AmdShaderCorePropertiesFn {}
+unsafe impl Send for AmdShaderCorePropertiesFn {}
+unsafe impl Sync for AmdShaderCorePropertiesFn {}
+impl AmdShaderCorePropertiesFn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_AMD_shader_core_properties'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD: Self = Self(1_000_185_000);
+}
+// VK_AMD_extension_187: reserved/placeholder registry slot (SPEC_VERSION 0),
+// exposes no commands or types.
+impl AmdExtension187Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_187\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension187Fn {}
+unsafe impl Send for AmdExtension187Fn {}
+unsafe impl Sync for AmdExtension187Fn {}
+impl AmdExtension187Fn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// VK_KHR_video_decode_h265: commandless extension; contributes H.265 decode
+// StructureType values and a video-codec-operation flag bit.
+impl KhrVideoDecodeH265Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_video_decode_h265\0") }
+    }
+    pub const SPEC_VERSION: u32 = 7u32;
+}
+#[derive(Clone)]
+pub struct KhrVideoDecodeH265Fn {}
+unsafe impl Send for KhrVideoDecodeH265Fn {}
+unsafe impl Sync for KhrVideoDecodeH265Fn {}
+impl KhrVideoDecodeH265Fn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_KHR_video_decode_h265'"]
+impl StructureType {
+    pub const VIDEO_DECODE_H265_CAPABILITIES_KHR: Self = Self(1_000_187_000);
+    pub const VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR: Self = Self(1_000_187_001);
+    pub const VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR: Self = Self(1_000_187_002);
+    pub const VIDEO_DECODE_H265_PROFILE_INFO_KHR: Self = Self(1_000_187_003);
+    pub const VIDEO_DECODE_H265_PICTURE_INFO_KHR: Self = Self(1_000_187_004);
+    pub const VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR: Self = Self(1_000_187_005);
+}
+#[doc = "Generated from 'VK_KHR_video_decode_h265'"]
+impl VideoCodecOperationFlagsKHR {
+    pub const DECODE_H265: Self = Self(0b10);
+}
+// VK_KHR_global_priority: commandless extension; contributes a Result error
+// code and StructureType values. Note the mixed numeric bases below are
+// registry-accurate: the create-info value reuses the older EXT extension's
+// block (174) while the query feature/properties use block 388.
+impl KhrGlobalPriorityFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_global_priority\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrGlobalPriorityFn {}
+unsafe impl Send for KhrGlobalPriorityFn {}
+unsafe impl Sync for KhrGlobalPriorityFn {}
+impl KhrGlobalPriorityFn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_KHR_global_priority'"]
+impl Result {
+    pub const ERROR_NOT_PERMITTED_KHR: Self = Self(-1_000_174_001);
+}
+#[doc = "Generated from 'VK_KHR_global_priority'"]
+impl StructureType {
+    pub const DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR: Self = Self(1_000_174_000);
+    pub const PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR: Self = Self(1_000_388_000);
+    pub const QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR: Self = Self(1_000_388_001);
+}
+// VK_AMD_memory_overallocation_behavior: commandless extension; contributes
+// only the device-create-chain StructureType value.
+impl AmdMemoryOverallocationBehaviorFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                b"VK_AMD_memory_overallocation_behavior\0",
+            )
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct AmdMemoryOverallocationBehaviorFn {}
+unsafe impl Send for AmdMemoryOverallocationBehaviorFn {}
+unsafe impl Sync for AmdMemoryOverallocationBehaviorFn {}
+impl AmdMemoryOverallocationBehaviorFn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_AMD_memory_overallocation_behavior'"]
+impl StructureType {
+    pub const DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD: Self = Self(1_000_189_000);
+}
+// VK_EXT_vertex_attribute_divisor: commandless extension; contributes the
+// divisor feature/properties/pipeline-state StructureType values.
+impl ExtVertexAttributeDivisorFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_vertex_attribute_divisor\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 3u32;
+}
+#[derive(Clone)]
+pub struct ExtVertexAttributeDivisorFn {}
+unsafe impl Send for ExtVertexAttributeDivisorFn {}
+unsafe impl Sync for ExtVertexAttributeDivisorFn {}
+impl ExtVertexAttributeDivisorFn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_EXT_vertex_attribute_divisor'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT: Self = Self(1_000_190_000);
+    pub const PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT: Self = Self(1_000_190_001);
+    pub const PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT: Self = Self(1_000_190_002);
+}
+// VK_GGP_frame_token: commandless extension (Google Games Platform);
+// contributes only the present-frame-token StructureType value.
+impl GgpFrameTokenFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GGP_frame_token\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct GgpFrameTokenFn {}
+unsafe impl Send for GgpFrameTokenFn {}
+unsafe impl Sync for GgpFrameTokenFn {}
+impl GgpFrameTokenFn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_GGP_frame_token'"]
+impl StructureType {
+    pub const PRESENT_FRAME_TOKEN_GGP: Self = Self(1_000_191_000);
+}
+// VK_EXT_pipeline_creation_feedback: commandless extension, promoted to core;
+// its KHR-suffixed StructureType alias forwards to the core value.
+impl ExtPipelineCreationFeedbackFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_pipeline_creation_feedback\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtPipelineCreationFeedbackFn {}
+unsafe impl Send for ExtPipelineCreationFeedbackFn {}
+unsafe impl Sync for ExtPipelineCreationFeedbackFn {}
+impl ExtPipelineCreationFeedbackFn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_EXT_pipeline_creation_feedback'"]
+impl StructureType {
+    pub const PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT: Self =
+        Self::PIPELINE_CREATION_FEEDBACK_CREATE_INFO;
+}
+// VK_GOOGLE_extension_194: reserved/placeholder registry slot (SPEC_VERSION 0).
+impl GoogleExtension194Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_extension_194\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct GoogleExtension194Fn {}
+unsafe impl Send for GoogleExtension194Fn {}
+unsafe impl Sync for GoogleExtension194Fn {}
+impl GoogleExtension194Fn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// VK_GOOGLE_extension_195: reserved/placeholder registry slot (SPEC_VERSION 0).
+impl GoogleExtension195Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_extension_195\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct GoogleExtension195Fn {}
+unsafe impl Send for GoogleExtension195Fn {}
+unsafe impl Sync for GoogleExtension195Fn {}
+impl GoogleExtension195Fn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// VK_GOOGLE_extension_196: placeholder slot that nevertheless reserves a
+// pipeline-cache-create flag bit.
+impl GoogleExtension196Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_extension_196\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct GoogleExtension196Fn {}
+unsafe impl Send for GoogleExtension196Fn {}
+unsafe impl Sync for GoogleExtension196Fn {}
+impl GoogleExtension196Fn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_GOOGLE_extension_196'"]
+impl PipelineCacheCreateFlags {
+    pub const RESERVED_1_EXT: Self = Self(0b10);
+}
+// VK_KHR_driver_properties: commandless extension, promoted to core; the
+// KHR-suffixed DriverId and StructureType constants alias the core values.
+impl KhrDriverPropertiesFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_driver_properties\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrDriverPropertiesFn {}
+unsafe impl Send for KhrDriverPropertiesFn {}
+unsafe impl Sync for KhrDriverPropertiesFn {}
+impl KhrDriverPropertiesFn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_KHR_driver_properties'"]
+impl DriverId {
+    pub const AMD_PROPRIETARY_KHR: Self = Self::AMD_PROPRIETARY;
+    pub const AMD_OPEN_SOURCE_KHR: Self = Self::AMD_OPEN_SOURCE;
+    pub const MESA_RADV_KHR: Self = Self::MESA_RADV;
+    pub const NVIDIA_PROPRIETARY_KHR: Self = Self::NVIDIA_PROPRIETARY;
+    pub const INTEL_PROPRIETARY_WINDOWS_KHR: Self = Self::INTEL_PROPRIETARY_WINDOWS;
+    pub const INTEL_OPEN_SOURCE_MESA_KHR: Self = Self::INTEL_OPEN_SOURCE_MESA;
+    pub const IMAGINATION_PROPRIETARY_KHR: Self = Self::IMAGINATION_PROPRIETARY;
+    pub const QUALCOMM_PROPRIETARY_KHR: Self = Self::QUALCOMM_PROPRIETARY;
+    pub const ARM_PROPRIETARY_KHR: Self = Self::ARM_PROPRIETARY;
+    pub const GOOGLE_SWIFTSHADER_KHR: Self = Self::GOOGLE_SWIFTSHADER;
+    pub const GGP_PROPRIETARY_KHR: Self = Self::GGP_PROPRIETARY;
+    pub const BROADCOM_PROPRIETARY_KHR: Self = Self::BROADCOM_PROPRIETARY;
+}
+#[doc = "Generated from 'VK_KHR_driver_properties'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR: Self = Self::PHYSICAL_DEVICE_DRIVER_PROPERTIES;
+}
+// VK_KHR_shader_float_controls: commandless extension, promoted to core;
+// KHR-suffixed constants alias the core values.
+impl KhrShaderFloatControlsFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_float_controls\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 4u32;
+}
+#[derive(Clone)]
+pub struct KhrShaderFloatControlsFn {}
+unsafe impl Send for KhrShaderFloatControlsFn {}
+unsafe impl Sync for KhrShaderFloatControlsFn {}
+impl KhrShaderFloatControlsFn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_KHR_shader_float_controls'"]
+impl ShaderFloatControlsIndependence {
+    pub const TYPE_32_ONLY_KHR: Self = Self::TYPE_32_ONLY;
+    pub const ALL_KHR: Self = Self::ALL;
+    pub const NONE_KHR: Self = Self::NONE;
+}
+#[doc = "Generated from 'VK_KHR_shader_float_controls'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR: Self =
+        Self::PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES;
+}
+// VK_NV_shader_subgroup_partitioned: commandless extension; contributes only
+// the PARTITIONED subgroup-feature flag bit.
+impl NvShaderSubgroupPartitionedFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_shader_subgroup_partitioned\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct NvShaderSubgroupPartitionedFn {}
+unsafe impl Send for NvShaderSubgroupPartitionedFn {}
+unsafe impl Sync for NvShaderSubgroupPartitionedFn {}
+impl NvShaderSubgroupPartitionedFn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_NV_shader_subgroup_partitioned'"]
+impl SubgroupFeatureFlags {
+    pub const PARTITIONED_NV: Self = Self(0b1_0000_0000);
+}
+// VK_KHR_depth_stencil_resolve: commandless extension, promoted to core;
+// KHR-suffixed resolve-mode flags and StructureTypes alias the core values.
+impl KhrDepthStencilResolveFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_depth_stencil_resolve\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrDepthStencilResolveFn {}
+unsafe impl Send for KhrDepthStencilResolveFn {}
+unsafe impl Sync for KhrDepthStencilResolveFn {}
+impl KhrDepthStencilResolveFn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_KHR_depth_stencil_resolve'"]
+impl ResolveModeFlags {
+    pub const NONE_KHR: Self = Self::NONE;
+    pub const SAMPLE_ZERO_KHR: Self = Self::SAMPLE_ZERO;
+    pub const AVERAGE_KHR: Self = Self::AVERAGE;
+    pub const MIN_KHR: Self = Self::MIN;
+    pub const MAX_KHR: Self = Self::MAX;
+}
+#[doc = "Generated from 'VK_KHR_depth_stencil_resolve'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR: Self =
+        Self::PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES;
+    pub const SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR: Self =
+        Self::SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE;
+}
+// VK_KHR_swapchain_mutable_format: commandless extension; contributes only
+// the MUTABLE_FORMAT swapchain-create flag bit.
+impl KhrSwapchainMutableFormatFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_swapchain_mutable_format\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrSwapchainMutableFormatFn {}
+unsafe impl Send for KhrSwapchainMutableFormatFn {}
+unsafe impl Sync for KhrSwapchainMutableFormatFn {}
+impl KhrSwapchainMutableFormatFn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_KHR_swapchain_mutable_format'"]
+impl SwapchainCreateFlagsKHR {
+    pub const MUTABLE_FORMAT: Self = Self(0b100);
+}
+// VK_NV_compute_shader_derivatives: commandless extension; contributes only
+// the features StructureType value.
+impl NvComputeShaderDerivativesFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_compute_shader_derivatives\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct NvComputeShaderDerivativesFn {}
+unsafe impl Send for NvComputeShaderDerivativesFn {}
+unsafe impl Sync for NvComputeShaderDerivativesFn {}
+impl NvComputeShaderDerivativesFn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_NV_compute_shader_derivatives'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV: Self = Self(1_000_201_000);
+}
+// VK_NV_mesh_shader: dispatch table for the three mesh-shader draw commands.
+// `load` resolves each entry point by name via `_f`; an unresolved symbol is
+// replaced by a panicking stub so a missing extension fails loudly at call time.
+impl NvMeshShaderFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_mesh_shader\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+// Raw C function-pointer typedefs mirroring the Vulkan prototypes.
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDrawMeshTasksNV =
+    unsafe extern "system" fn(command_buffer: CommandBuffer, task_count: u32, first_task: u32);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDrawMeshTasksIndirectNV = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    buffer: Buffer,
+    offset: DeviceSize,
+    draw_count: u32,
+    stride: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDrawMeshTasksIndirectCountNV = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    buffer: Buffer,
+    offset: DeviceSize,
+    count_buffer: Buffer,
+    count_buffer_offset: DeviceSize,
+    max_draw_count: u32,
+    stride: u32,
+);
+#[derive(Clone)]
+pub struct NvMeshShaderFn {
+    pub cmd_draw_mesh_tasks_nv: PFN_vkCmdDrawMeshTasksNV,
+    pub cmd_draw_mesh_tasks_indirect_nv: PFN_vkCmdDrawMeshTasksIndirectNV,
+    pub cmd_draw_mesh_tasks_indirect_count_nv: PFN_vkCmdDrawMeshTasksIndirectCountNV,
+}
+// SAFETY: holds only plain function pointers; no interior mutability.
+unsafe impl Send for NvMeshShaderFn {}
+unsafe impl Sync for NvMeshShaderFn {}
+impl NvMeshShaderFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            cmd_draw_mesh_tasks_nv: unsafe {
+                // Panicking fallback used when the driver does not export the symbol.
+                unsafe extern "system" fn cmd_draw_mesh_tasks_nv(
+                    _command_buffer: CommandBuffer,
+                    _task_count: u32,
+                    _first_task: u32,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_draw_mesh_tasks_nv)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawMeshTasksNV\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_draw_mesh_tasks_nv
+                } else {
+                    // Reinterpret the loader's `*const c_void` as the matching PFN type.
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_draw_mesh_tasks_indirect_nv: unsafe {
+                unsafe extern "system" fn cmd_draw_mesh_tasks_indirect_nv(
+                    _command_buffer: CommandBuffer,
+                    _buffer: Buffer,
+                    _offset: DeviceSize,
+                    _draw_count: u32,
+                    _stride: u32,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_draw_mesh_tasks_indirect_nv)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdDrawMeshTasksIndirectNV\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_draw_mesh_tasks_indirect_nv
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_draw_mesh_tasks_indirect_count_nv: unsafe {
+                unsafe extern "system" fn cmd_draw_mesh_tasks_indirect_count_nv(
+                    _command_buffer: CommandBuffer,
+                    _buffer: Buffer,
+                    _offset: DeviceSize,
+                    _count_buffer: Buffer,
+                    _count_buffer_offset: DeviceSize,
+                    _max_draw_count: u32,
+                    _stride: u32,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_draw_mesh_tasks_indirect_count_nv)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdDrawMeshTasksIndirectCountNV\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_draw_mesh_tasks_indirect_count_nv
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+// NV-suffixed flag/enum aliases forwarding to the EXT mesh-shader values.
+#[doc = "Generated from 'VK_NV_mesh_shader'"]
+impl PipelineStageFlags {
+    pub const TASK_SHADER_NV: Self = Self::TASK_SHADER_EXT;
+    pub const MESH_SHADER_NV: Self = Self::MESH_SHADER_EXT;
+}
+#[doc = "Generated from 'VK_NV_mesh_shader'"]
+impl ShaderStageFlags {
+    pub const TASK_NV: Self = Self::TASK_EXT;
+    pub const MESH_NV: Self = Self::MESH_EXT;
+}
+#[doc = "Generated from 'VK_NV_mesh_shader'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV: Self = Self(1_000_202_000);
+    pub const PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV: Self = Self(1_000_202_001);
+}
+// VK_NV_fragment_shader_barycentric: commandless extension; its StructureType
+// aliases the KHR barycentric features value.
+impl NvFragmentShaderBarycentricFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_fragment_shader_barycentric\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct NvFragmentShaderBarycentricFn {}
+unsafe impl Send for NvFragmentShaderBarycentricFn {}
+unsafe impl Sync for NvFragmentShaderBarycentricFn {}
+impl NvFragmentShaderBarycentricFn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_NV_fragment_shader_barycentric'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV: Self =
+        Self::PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR;
+}
+// VK_NV_shader_image_footprint: commandless extension; contributes only the
+// features StructureType value.
+impl NvShaderImageFootprintFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_shader_image_footprint\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct NvShaderImageFootprintFn {}
+unsafe impl Send for NvShaderImageFootprintFn {}
+unsafe impl Sync for NvShaderImageFootprintFn {}
+impl NvShaderImageFootprintFn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_NV_shader_image_footprint'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV: Self = Self(1_000_204_000);
+}
+// VK_NV_scissor_exclusive: dispatch table for vkCmdSetExclusiveScissorNV.
+// `load` resolves the entry point by name via `_f`; an unresolved symbol is
+// replaced by a panicking stub so a missing extension fails loudly at call time.
+impl NvScissorExclusiveFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_scissor_exclusive\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+// Raw C function-pointer typedef mirroring the Vulkan prototype.
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetExclusiveScissorNV = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    first_exclusive_scissor: u32,
+    exclusive_scissor_count: u32,
+    p_exclusive_scissors: *const Rect2D,
+);
+#[derive(Clone)]
+pub struct NvScissorExclusiveFn {
+    pub cmd_set_exclusive_scissor_nv: PFN_vkCmdSetExclusiveScissorNV,
+}
+// SAFETY: holds only plain function pointers; no interior mutability.
+unsafe impl Send for NvScissorExclusiveFn {}
+unsafe impl Sync for NvScissorExclusiveFn {}
+impl NvScissorExclusiveFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            cmd_set_exclusive_scissor_nv: unsafe {
+                // Panicking fallback used when the driver does not export the symbol.
+                unsafe extern "system" fn cmd_set_exclusive_scissor_nv(
+                    _command_buffer: CommandBuffer,
+                    _first_exclusive_scissor: u32,
+                    _exclusive_scissor_count: u32,
+                    _p_exclusive_scissors: *const Rect2D,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_set_exclusive_scissor_nv)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdSetExclusiveScissorNV\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_exclusive_scissor_nv
+                } else {
+                    // Reinterpret the loader's `*const c_void` as the matching PFN type.
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+#[doc = "Generated from 'VK_NV_scissor_exclusive'"]
+impl DynamicState {
+    pub const EXCLUSIVE_SCISSOR_NV: Self = Self(1_000_205_001);
+}
+#[doc = "Generated from 'VK_NV_scissor_exclusive'"]
+impl StructureType {
+    pub const PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV: Self = Self(1_000_205_000);
+    pub const PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV: Self = Self(1_000_205_002);
+}
+// VK_NV_device_diagnostic_checkpoints: dispatch table for the two checkpoint
+// commands. `load` resolves each entry point by name via `_f`; an unresolved
+// symbol is replaced by a panicking stub so a missing extension fails loudly.
+impl NvDeviceDiagnosticCheckpointsFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                b"VK_NV_device_diagnostic_checkpoints\0",
+            )
+        }
+    }
+    pub const SPEC_VERSION: u32 = 2u32;
+}
+// Raw C function-pointer typedefs mirroring the Vulkan prototypes.
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetCheckpointNV =
+    unsafe extern "system" fn(command_buffer: CommandBuffer, p_checkpoint_marker: *const c_void);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetQueueCheckpointDataNV = unsafe extern "system" fn(
+    queue: Queue,
+    p_checkpoint_data_count: *mut u32,
+    p_checkpoint_data: *mut CheckpointDataNV,
+);
+#[derive(Clone)]
+pub struct NvDeviceDiagnosticCheckpointsFn {
+    pub cmd_set_checkpoint_nv: PFN_vkCmdSetCheckpointNV,
+    pub get_queue_checkpoint_data_nv: PFN_vkGetQueueCheckpointDataNV,
+}
+// SAFETY: holds only plain function pointers; no interior mutability.
+unsafe impl Send for NvDeviceDiagnosticCheckpointsFn {}
+unsafe impl Sync for NvDeviceDiagnosticCheckpointsFn {}
+impl NvDeviceDiagnosticCheckpointsFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            cmd_set_checkpoint_nv: unsafe {
+                // Panicking fallback used when the driver does not export the symbol.
+                unsafe extern "system" fn cmd_set_checkpoint_nv(
+                    _command_buffer: CommandBuffer,
+                    _p_checkpoint_marker: *const c_void,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_set_checkpoint_nv)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetCheckpointNV\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_checkpoint_nv
+                } else {
+                    // Reinterpret the loader's `*const c_void` as the matching PFN type.
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_queue_checkpoint_data_nv: unsafe {
+                unsafe extern "system" fn get_queue_checkpoint_data_nv(
+                    _queue: Queue,
+                    _p_checkpoint_data_count: *mut u32,
+                    _p_checkpoint_data: *mut CheckpointDataNV,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_queue_checkpoint_data_nv)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetQueueCheckpointDataNV\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_queue_checkpoint_data_nv
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+#[doc = "Generated from 'VK_NV_device_diagnostic_checkpoints'"]
+impl StructureType {
+    pub const CHECKPOINT_DATA_NV: Self = Self(1_000_206_000);
+    pub const QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV: Self = Self(1_000_206_001);
+}
+// VK_KHR_timeline_semaphore (promoted to core 1.2): dispatch table for the
+// three semaphore commands. The fields use the core (unsuffixed) PFN types but
+// `load` resolves the KHR-suffixed symbol names, matching the extension.
+impl KhrTimelineSemaphoreFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_timeline_semaphore\0") }
+    }
+    pub const SPEC_VERSION: u32 = 2u32;
+}
+// Raw C function-pointer typedefs mirroring the (core) Vulkan prototypes.
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetSemaphoreCounterValue =
+    unsafe extern "system" fn(device: Device, semaphore: Semaphore, p_value: *mut u64) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkWaitSemaphores = unsafe extern "system" fn(
+    device: Device,
+    p_wait_info: *const SemaphoreWaitInfo,
+    timeout: u64,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkSignalSemaphore =
+    unsafe extern "system" fn(device: Device, p_signal_info: *const SemaphoreSignalInfo) -> Result;
+#[derive(Clone)]
+pub struct KhrTimelineSemaphoreFn {
+    pub get_semaphore_counter_value_khr: PFN_vkGetSemaphoreCounterValue,
+    pub wait_semaphores_khr: PFN_vkWaitSemaphores,
+    pub signal_semaphore_khr: PFN_vkSignalSemaphore,
+}
+// SAFETY: holds only plain function pointers; no interior mutability.
+unsafe impl Send for KhrTimelineSemaphoreFn {}
+unsafe impl Sync for KhrTimelineSemaphoreFn {}
+impl KhrTimelineSemaphoreFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            get_semaphore_counter_value_khr: unsafe {
+                // Panicking fallback used when the driver does not export the symbol.
+                unsafe extern "system" fn get_semaphore_counter_value_khr(
+                    _device: Device,
+                    _semaphore: Semaphore,
+                    _p_value: *mut u64,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_semaphore_counter_value_khr)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetSemaphoreCounterValueKHR\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_semaphore_counter_value_khr
+                } else {
+                    // Reinterpret the loader's `*const c_void` as the matching PFN type.
+                    ::std::mem::transmute(val)
+                }
+            },
+            wait_semaphores_khr: unsafe {
+                unsafe extern "system" fn wait_semaphores_khr(
+                    _device: Device,
+                    _p_wait_info: *const SemaphoreWaitInfo,
+                    _timeout: u64,
+                ) -> Result {
+                    panic!(concat!("Unable to load ", stringify!(wait_semaphores_khr)))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkWaitSemaphoresKHR\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    wait_semaphores_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            signal_semaphore_khr: unsafe {
+                unsafe extern "system" fn signal_semaphore_khr(
+                    _device: Device,
+                    _p_signal_info: *const SemaphoreSignalInfo,
+                ) -> Result {
+                    panic!(concat!("Unable to load ", stringify!(signal_semaphore_khr)))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkSignalSemaphoreKHR\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    signal_semaphore_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+// KHR-suffixed enum/flag aliases forwarding to the promoted core values.
+#[doc = "Generated from 'VK_KHR_timeline_semaphore'"]
+impl SemaphoreType {
+    pub const BINARY_KHR: Self = Self::BINARY;
+    pub const TIMELINE_KHR: Self = Self::TIMELINE;
+}
+#[doc = "Generated from 'VK_KHR_timeline_semaphore'"]
+impl SemaphoreWaitFlags {
+    pub const ANY_KHR: Self = Self::ANY;
+}
+#[doc = "Generated from 'VK_KHR_timeline_semaphore'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR: Self =
+        Self::PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES;
+    pub const PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR: Self =
+        Self::PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES;
+    pub const SEMAPHORE_TYPE_CREATE_INFO_KHR: Self = Self::SEMAPHORE_TYPE_CREATE_INFO;
+    pub const TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR: Self = Self::TIMELINE_SEMAPHORE_SUBMIT_INFO;
+    pub const SEMAPHORE_WAIT_INFO_KHR: Self = Self::SEMAPHORE_WAIT_INFO;
+    pub const SEMAPHORE_SIGNAL_INFO_KHR: Self = Self::SEMAPHORE_SIGNAL_INFO;
+}
+// VK_KHR_extension_209: reserved/placeholder registry slot (SPEC_VERSION 0).
+impl KhrExtension209Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_209\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct KhrExtension209Fn {}
+unsafe impl Send for KhrExtension209Fn {}
+unsafe impl Sync for KhrExtension209Fn {}
+impl KhrExtension209Fn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// VK_INTEL_shader_integer_functions2: commandless extension; contributes only
+// the features StructureType value.
+impl IntelShaderIntegerFunctions2Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_INTEL_shader_integer_functions2\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct IntelShaderIntegerFunctions2Fn {}
+unsafe impl Send for IntelShaderIntegerFunctions2Fn {}
+unsafe impl Sync for IntelShaderIntegerFunctions2Fn {}
+impl IntelShaderIntegerFunctions2Fn {
+    // No commands to resolve; the loader callback is accepted but unused.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_INTEL_shader_integer_functions2'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL: Self = Self(1_000_209_000);
+}
+// VK_INTEL_performance_query: name/version plus the raw C function-pointer
+// typedefs and dispatch-table struct for the nine extension commands.
+// (The corresponding `load` implementation follows this declaration group.)
+impl IntelPerformanceQueryFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated and has no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_INTEL_performance_query\0") }
+    }
+    pub const SPEC_VERSION: u32 = 2u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkInitializePerformanceApiINTEL = unsafe extern "system" fn(
+    device: Device,
+    p_initialize_info: *const InitializePerformanceApiInfoINTEL,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkUninitializePerformanceApiINTEL = unsafe extern "system" fn(device: Device);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetPerformanceMarkerINTEL = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    p_marker_info: *const PerformanceMarkerInfoINTEL,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetPerformanceStreamMarkerINTEL = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    p_marker_info: *const PerformanceStreamMarkerInfoINTEL,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetPerformanceOverrideINTEL = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    p_override_info: *const PerformanceOverrideInfoINTEL,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkAcquirePerformanceConfigurationINTEL = unsafe extern "system" fn(
+    device: Device,
+    p_acquire_info: *const PerformanceConfigurationAcquireInfoINTEL,
+    p_configuration: *mut PerformanceConfigurationINTEL,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkReleasePerformanceConfigurationINTEL = unsafe extern "system" fn(
+    device: Device,
+    configuration: PerformanceConfigurationINTEL,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkQueueSetPerformanceConfigurationINTEL =
+    unsafe extern "system" fn(queue: Queue, configuration: PerformanceConfigurationINTEL) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPerformanceParameterINTEL = unsafe extern "system" fn(
+    device: Device,
+    parameter: PerformanceParameterTypeINTEL,
+    p_value: *mut PerformanceValueINTEL,
+) -> Result;
+#[derive(Clone)]
+pub struct IntelPerformanceQueryFn {
+    pub initialize_performance_api_intel: PFN_vkInitializePerformanceApiINTEL,
+    pub uninitialize_performance_api_intel: PFN_vkUninitializePerformanceApiINTEL,
+    pub cmd_set_performance_marker_intel: PFN_vkCmdSetPerformanceMarkerINTEL,
+    pub cmd_set_performance_stream_marker_intel: PFN_vkCmdSetPerformanceStreamMarkerINTEL,
+    pub cmd_set_performance_override_intel: PFN_vkCmdSetPerformanceOverrideINTEL,
+    pub acquire_performance_configuration_intel: PFN_vkAcquirePerformanceConfigurationINTEL,
+    pub release_performance_configuration_intel: PFN_vkReleasePerformanceConfigurationINTEL,
+    pub queue_set_performance_configuration_intel: PFN_vkQueueSetPerformanceConfigurationINTEL,
+    pub get_performance_parameter_intel: PFN_vkGetPerformanceParameterINTEL,
+}
+// SAFETY: holds only plain function pointers; no interior mutability.
+unsafe impl Send for IntelPerformanceQueryFn {}
+unsafe impl Sync for IntelPerformanceQueryFn {}
+impl IntelPerformanceQueryFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ initialize_performance_api_intel: unsafe {
+ unsafe extern "system" fn initialize_performance_api_intel(
+ _device: Device,
+ _p_initialize_info: *const InitializePerformanceApiInfoINTEL,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(initialize_performance_api_intel)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkInitializePerformanceApiINTEL\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ initialize_performance_api_intel
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ uninitialize_performance_api_intel: unsafe {
+ unsafe extern "system" fn uninitialize_performance_api_intel(_device: Device) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(uninitialize_performance_api_intel)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkUninitializePerformanceApiINTEL\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ uninitialize_performance_api_intel
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_performance_marker_intel: unsafe {
+ unsafe extern "system" fn cmd_set_performance_marker_intel(
+ _command_buffer: CommandBuffer,
+ _p_marker_info: *const PerformanceMarkerInfoINTEL,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_performance_marker_intel)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetPerformanceMarkerINTEL\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_performance_marker_intel
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_performance_stream_marker_intel: unsafe {
+ unsafe extern "system" fn cmd_set_performance_stream_marker_intel(
+ _command_buffer: CommandBuffer,
+ _p_marker_info: *const PerformanceStreamMarkerInfoINTEL,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_performance_stream_marker_intel)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetPerformanceStreamMarkerINTEL\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_performance_stream_marker_intel
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_performance_override_intel: unsafe {
+ unsafe extern "system" fn cmd_set_performance_override_intel(
+ _command_buffer: CommandBuffer,
+ _p_override_info: *const PerformanceOverrideInfoINTEL,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_performance_override_intel)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetPerformanceOverrideINTEL\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_performance_override_intel
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ acquire_performance_configuration_intel: unsafe {
+ unsafe extern "system" fn acquire_performance_configuration_intel(
+ _device: Device,
+ _p_acquire_info: *const PerformanceConfigurationAcquireInfoINTEL,
+ _p_configuration: *mut PerformanceConfigurationINTEL,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(acquire_performance_configuration_intel)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkAcquirePerformanceConfigurationINTEL\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ acquire_performance_configuration_intel
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ release_performance_configuration_intel: unsafe {
+ unsafe extern "system" fn release_performance_configuration_intel(
+ _device: Device,
+ _configuration: PerformanceConfigurationINTEL,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(release_performance_configuration_intel)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkReleasePerformanceConfigurationINTEL\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ release_performance_configuration_intel
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ queue_set_performance_configuration_intel: unsafe {
+ unsafe extern "system" fn queue_set_performance_configuration_intel(
+ _queue: Queue,
+ _configuration: PerformanceConfigurationINTEL,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(queue_set_performance_configuration_intel)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkQueueSetPerformanceConfigurationINTEL\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ queue_set_performance_configuration_intel
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_performance_parameter_intel: unsafe {
+ unsafe extern "system" fn get_performance_parameter_intel(
+ _device: Device,
+ _parameter: PerformanceParameterTypeINTEL,
+ _p_value: *mut PerformanceValueINTEL,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_performance_parameter_intel)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPerformanceParameterINTEL\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_performance_parameter_intel
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_INTEL_performance_query'"]
+impl ObjectType {
+ pub const PERFORMANCE_CONFIGURATION_INTEL: Self = Self(1_000_210_000);
+}
+#[doc = "Generated from 'VK_INTEL_performance_query'"]
+impl QueryType {
+ pub const PERFORMANCE_QUERY_INTEL: Self = Self(1_000_210_000);
+}
+#[doc = "Generated from 'VK_INTEL_performance_query'"]
+impl StructureType {
+ pub const QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL: Self = Self(1_000_210_000);
+ pub const INITIALIZE_PERFORMANCE_API_INFO_INTEL: Self = Self(1_000_210_001);
+ pub const PERFORMANCE_MARKER_INFO_INTEL: Self = Self(1_000_210_002);
+ pub const PERFORMANCE_STREAM_MARKER_INFO_INTEL: Self = Self(1_000_210_003);
+ pub const PERFORMANCE_OVERRIDE_INFO_INTEL: Self = Self(1_000_210_004);
+ pub const PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL: Self = Self(1_000_210_005);
+}
+// VK_KHR_vulkan_memory_model (promoted to core in Vulkan 1.2): no commands,
+// only a feature struct; the _KHR StructureType aliases the core value.
+impl KhrVulkanMemoryModelFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_vulkan_memory_model\0") }
+    }
+    pub const SPEC_VERSION: u32 = 3u32;
+}
+#[derive(Clone)]
+pub struct KhrVulkanMemoryModelFn {}
+unsafe impl Send for KhrVulkanMemoryModelFn {}
+unsafe impl Sync for KhrVulkanMemoryModelFn {}
+impl KhrVulkanMemoryModelFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_KHR_vulkan_memory_model'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR: Self =
+        Self::PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES;
+}
+// VK_EXT_pci_bus_info: property-only extension (PCI domain/bus/device/function
+// of the physical device); no commands.
+impl ExtPciBusInfoFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_pci_bus_info\0") }
+    }
+    pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct ExtPciBusInfoFn {}
+unsafe impl Send for ExtPciBusInfoFn {}
+unsafe impl Sync for ExtPciBusInfoFn {}
+impl ExtPciBusInfoFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_EXT_pci_bus_info'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT: Self = Self(1_000_212_000);
+}
+// VK_AMD_display_native_hdr: adds vkSetLocalDimmingAMD plus a color space and
+// two structure types. Same loader pattern as the other tables: resolve via
+// `_f`, fall back to a panicking stub when the pointer comes back null.
+impl AmdDisplayNativeHdrFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_display_native_hdr\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkSetLocalDimmingAMD = unsafe extern "system" fn(
+    device: Device,
+    swap_chain: SwapchainKHR,
+    local_dimming_enable: Bool32,
+);
+#[derive(Clone)]
+pub struct AmdDisplayNativeHdrFn {
+    pub set_local_dimming_amd: PFN_vkSetLocalDimmingAMD,
+}
+unsafe impl Send for AmdDisplayNativeHdrFn {}
+unsafe impl Sync for AmdDisplayNativeHdrFn {}
+impl AmdDisplayNativeHdrFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            set_local_dimming_amd: unsafe {
+                unsafe extern "system" fn set_local_dimming_amd(
+                    _device: Device,
+                    _swap_chain: SwapchainKHR,
+                    _local_dimming_enable: Bool32,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(set_local_dimming_amd)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkSetLocalDimmingAMD\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    set_local_dimming_amd
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+#[doc = "Generated from 'VK_AMD_display_native_hdr'"]
+impl ColorSpaceKHR {
+    pub const DISPLAY_NATIVE_AMD: Self = Self(1_000_213_000);
+}
+#[doc = "Generated from 'VK_AMD_display_native_hdr'"]
+impl StructureType {
+    pub const DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD: Self = Self(1_000_213_000);
+    pub const SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD: Self = Self(1_000_213_001);
+}
+// VK_FUCHSIA_imagepipe_surface: instance-level surface creation for Fuchsia
+// image pipes; one command (vkCreateImagePipeSurfaceFUCHSIA).
+impl FuchsiaImagepipeSurfaceFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_FUCHSIA_imagepipe_surface\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateImagePipeSurfaceFUCHSIA = unsafe extern "system" fn(
+    instance: Instance,
+    p_create_info: *const ImagePipeSurfaceCreateInfoFUCHSIA,
+    p_allocator: *const AllocationCallbacks,
+    p_surface: *mut SurfaceKHR,
+) -> Result;
+#[derive(Clone)]
+pub struct FuchsiaImagepipeSurfaceFn {
+    pub create_image_pipe_surface_fuchsia: PFN_vkCreateImagePipeSurfaceFUCHSIA,
+}
+unsafe impl Send for FuchsiaImagepipeSurfaceFn {}
+unsafe impl Sync for FuchsiaImagepipeSurfaceFn {}
+impl FuchsiaImagepipeSurfaceFn {
+    // Resolve the single command; substitute a panicking stub on null.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            create_image_pipe_surface_fuchsia: unsafe {
+                unsafe extern "system" fn create_image_pipe_surface_fuchsia(
+                    _instance: Instance,
+                    _p_create_info: *const ImagePipeSurfaceCreateInfoFUCHSIA,
+                    _p_allocator: *const AllocationCallbacks,
+                    _p_surface: *mut SurfaceKHR,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(create_image_pipe_surface_fuchsia)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCreateImagePipeSurfaceFUCHSIA\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    create_image_pipe_surface_fuchsia
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+#[doc = "Generated from 'VK_FUCHSIA_imagepipe_surface'"]
+impl StructureType {
+    pub const IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA: Self = Self(1_000_214_000);
+}
+// VK_KHR_shader_terminate_invocation (promoted to core in 1.3): feature
+// struct only, no commands; the _KHR StructureType aliases the core value.
+impl KhrShaderTerminateInvocationFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_terminate_invocation\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrShaderTerminateInvocationFn {}
+unsafe impl Send for KhrShaderTerminateInvocationFn {}
+unsafe impl Sync for KhrShaderTerminateInvocationFn {}
+impl KhrShaderTerminateInvocationFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_KHR_shader_terminate_invocation'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR: Self =
+        Self::PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES;
+}
+// Reserved registry placeholder (extension number 217); SPEC_VERSION 0 means
+// no published revision.
+impl GoogleExtension217Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_extension_217\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct GoogleExtension217Fn {}
+unsafe impl Send for GoogleExtension217Fn {}
+unsafe impl Sync for GoogleExtension217Fn {}
+impl GoogleExtension217Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// VK_EXT_metal_surface: instance-level surface creation over a CAMetalLayer;
+// one command (vkCreateMetalSurfaceEXT).
+impl ExtMetalSurfaceFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_metal_surface\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateMetalSurfaceEXT = unsafe extern "system" fn(
+    instance: Instance,
+    p_create_info: *const MetalSurfaceCreateInfoEXT,
+    p_allocator: *const AllocationCallbacks,
+    p_surface: *mut SurfaceKHR,
+) -> Result;
+#[derive(Clone)]
+pub struct ExtMetalSurfaceFn {
+    pub create_metal_surface_ext: PFN_vkCreateMetalSurfaceEXT,
+}
+unsafe impl Send for ExtMetalSurfaceFn {}
+unsafe impl Sync for ExtMetalSurfaceFn {}
+impl ExtMetalSurfaceFn {
+    // Resolve the single command; substitute a panicking stub on null.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            create_metal_surface_ext: unsafe {
+                unsafe extern "system" fn create_metal_surface_ext(
+                    _instance: Instance,
+                    _p_create_info: *const MetalSurfaceCreateInfoEXT,
+                    _p_allocator: *const AllocationCallbacks,
+                    _p_surface: *mut SurfaceKHR,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(create_metal_surface_ext)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateMetalSurfaceEXT\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    create_metal_surface_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+#[doc = "Generated from 'VK_EXT_metal_surface'"]
+impl StructureType {
+    pub const METAL_SURFACE_CREATE_INFO_EXT: Self = Self(1_000_217_000);
+}
+// VK_EXT_fragment_density_map: no commands; contributes flag bits across many
+// bitmask types plus an image layout and three structure types. The binary
+// literals below are single bit positions within each flags type.
+impl ExtFragmentDensityMapFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_fragment_density_map\0") }
+    }
+    pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct ExtFragmentDensityMapFn {}
+unsafe impl Send for ExtFragmentDensityMapFn {}
+unsafe impl Sync for ExtFragmentDensityMapFn {}
+impl ExtFragmentDensityMapFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_EXT_fragment_density_map'"]
+impl AccessFlags {
+    pub const FRAGMENT_DENSITY_MAP_READ_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_fragment_density_map'"]
+impl FormatFeatureFlags {
+    pub const FRAGMENT_DENSITY_MAP_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_fragment_density_map'"]
+impl FormatFeatureFlags2 {
+    pub const FRAGMENT_DENSITY_MAP_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_fragment_density_map'"]
+impl ImageCreateFlags {
+    pub const SUBSAMPLED_EXT: Self = Self(0b100_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_fragment_density_map'"]
+impl ImageLayout {
+    pub const FRAGMENT_DENSITY_MAP_OPTIMAL_EXT: Self = Self(1_000_218_000);
+}
+#[doc = "Generated from 'VK_EXT_fragment_density_map'"]
+impl ImageUsageFlags {
+    pub const FRAGMENT_DENSITY_MAP_EXT: Self = Self(0b10_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_fragment_density_map'"]
+impl ImageViewCreateFlags {
+    pub const FRAGMENT_DENSITY_MAP_DYNAMIC_EXT: Self = Self(0b1);
+}
+#[doc = "Generated from 'VK_EXT_fragment_density_map'"]
+impl PipelineStageFlags {
+    pub const FRAGMENT_DENSITY_PROCESS_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_fragment_density_map'"]
+impl SamplerCreateFlags {
+    pub const SUBSAMPLED_EXT: Self = Self(0b1);
+    pub const SUBSAMPLED_COARSE_RECONSTRUCTION_EXT: Self = Self(0b10);
+}
+#[doc = "Generated from 'VK_EXT_fragment_density_map'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT: Self = Self(1_000_218_000);
+    pub const PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT: Self = Self(1_000_218_001);
+    pub const RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT: Self = Self(1_000_218_002);
+}
+// Reserved registry placeholder (extension number 220).
+impl ExtExtension220Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_220\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension220Fn {}
+unsafe impl Send for ExtExtension220Fn {}
+unsafe impl Sync for ExtExtension220Fn {}
+impl ExtExtension220Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// Reserved registry placeholder (extension number 221); it still claims one
+// RenderPassCreateFlags bit.
+impl KhrExtension221Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_221\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct KhrExtension221Fn {}
+unsafe impl Send for KhrExtension221Fn {}
+unsafe impl Sync for KhrExtension221Fn {}
+impl KhrExtension221Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_KHR_extension_221'"]
+impl RenderPassCreateFlags {
+    pub const RESERVED_0_KHR: Self = Self(0b1);
+}
+// VK_EXT_scalar_block_layout (promoted to core in 1.2): feature struct only;
+// the _EXT StructureType aliases the core value.
+impl ExtScalarBlockLayoutFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_scalar_block_layout\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtScalarBlockLayoutFn {}
+unsafe impl Send for ExtScalarBlockLayoutFn {}
+unsafe impl Sync for ExtScalarBlockLayoutFn {}
+impl ExtScalarBlockLayoutFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_EXT_scalar_block_layout'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT: Self =
+        Self::PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES;
+}
+// Reserved registry placeholder (extension number 223).
+impl ExtExtension223Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_223\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension223Fn {}
+unsafe impl Send for ExtExtension223Fn {}
+unsafe impl Sync for ExtExtension223Fn {}
+impl ExtExtension223Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// VK_GOOGLE_hlsl_functionality1: SPIR-V-only extension (no commands, no
+// structs) — presence of the extension name is its entire API.
+impl GoogleHlslFunctionality1Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_hlsl_functionality1\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct GoogleHlslFunctionality1Fn {}
+unsafe impl Send for GoogleHlslFunctionality1Fn {}
+unsafe impl Sync for GoogleHlslFunctionality1Fn {}
+impl GoogleHlslFunctionality1Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// VK_GOOGLE_decorate_string: likewise SPIR-V-only; empty function table.
+impl GoogleDecorateStringFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_decorate_string\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct GoogleDecorateStringFn {}
+unsafe impl Send for GoogleDecorateStringFn {}
+unsafe impl Sync for GoogleDecorateStringFn {}
+impl GoogleDecorateStringFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// VK_EXT_subgroup_size_control (promoted to core in 1.3): no commands; the
+// _EXT flag bits and StructureType values alias their core equivalents.
+impl ExtSubgroupSizeControlFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_subgroup_size_control\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct ExtSubgroupSizeControlFn {}
+unsafe impl Send for ExtSubgroupSizeControlFn {}
+unsafe impl Sync for ExtSubgroupSizeControlFn {}
+impl ExtSubgroupSizeControlFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_EXT_subgroup_size_control'"]
+impl PipelineShaderStageCreateFlags {
+    pub const ALLOW_VARYING_SUBGROUP_SIZE_EXT: Self = Self::ALLOW_VARYING_SUBGROUP_SIZE;
+    pub const REQUIRE_FULL_SUBGROUPS_EXT: Self = Self::REQUIRE_FULL_SUBGROUPS;
+}
+#[doc = "Generated from 'VK_EXT_subgroup_size_control'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT: Self =
+        Self::PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES;
+    pub const PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT: Self =
+        Self::PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO;
+    pub const PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT: Self =
+        Self::PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES;
+}
+// VK_KHR_fragment_shading_rate: two commands (one physical-device query, one
+// command-buffer state setter) plus flag bits, a dynamic state, an image
+// layout, and several structure types.
+impl KhrFragmentShadingRateFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_fragment_shading_rate\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 2u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR = unsafe extern "system" fn(
+    physical_device: PhysicalDevice,
+    p_fragment_shading_rate_count: *mut u32,
+    p_fragment_shading_rates: *mut PhysicalDeviceFragmentShadingRateKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetFragmentShadingRateKHR = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    p_fragment_size: *const Extent2D,
+    combiner_ops: *const [FragmentShadingRateCombinerOpKHR; 2],
+);
+#[derive(Clone)]
+pub struct KhrFragmentShadingRateFn {
+    pub get_physical_device_fragment_shading_rates_khr:
+        PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR,
+    pub cmd_set_fragment_shading_rate_khr: PFN_vkCmdSetFragmentShadingRateKHR,
+}
+unsafe impl Send for KhrFragmentShadingRateFn {}
+unsafe impl Sync for KhrFragmentShadingRateFn {}
+impl KhrFragmentShadingRateFn {
+    // Resolve each command via `_f`; install a panicking stub when the loader
+    // returns null so a missing extension fails with the command's name.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            get_physical_device_fragment_shading_rates_khr: unsafe {
+                unsafe extern "system" fn get_physical_device_fragment_shading_rates_khr(
+                    _physical_device: PhysicalDevice,
+                    _p_fragment_shading_rate_count: *mut u32,
+                    _p_fragment_shading_rates: *mut PhysicalDeviceFragmentShadingRateKHR,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_physical_device_fragment_shading_rates_khr)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetPhysicalDeviceFragmentShadingRatesKHR\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_physical_device_fragment_shading_rates_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_fragment_shading_rate_khr: unsafe {
+                unsafe extern "system" fn cmd_set_fragment_shading_rate_khr(
+                    _command_buffer: CommandBuffer,
+                    _p_fragment_size: *const Extent2D,
+                    _combiner_ops: *const [FragmentShadingRateCombinerOpKHR; 2],
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_set_fragment_shading_rate_khr)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdSetFragmentShadingRateKHR\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_fragment_shading_rate_khr
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+#[doc = "Generated from 'VK_KHR_fragment_shading_rate'"]
+impl AccessFlags {
+    pub const FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR: Self =
+        Self(0b1000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_KHR_fragment_shading_rate'"]
+impl DynamicState {
+    pub const FRAGMENT_SHADING_RATE_KHR: Self = Self(1_000_226_000);
+}
+#[doc = "Generated from 'VK_KHR_fragment_shading_rate'"]
+impl FormatFeatureFlags {
+    pub const FRAGMENT_SHADING_RATE_ATTACHMENT_KHR: Self =
+        Self(0b100_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_KHR_fragment_shading_rate'"]
+impl FormatFeatureFlags2 {
+    pub const FRAGMENT_SHADING_RATE_ATTACHMENT_KHR: Self =
+        Self(0b100_0000_0000_0000_0000_0000_0000_0000);
+}
+// NOTE: this layout value comes from extension 165's range (shared with
+// VK_NV_shading_rate_image per the registry aliasing), not 226's.
+#[doc = "Generated from 'VK_KHR_fragment_shading_rate'"]
+impl ImageLayout {
+    pub const FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR: Self = Self(1_000_164_003);
+}
+#[doc = "Generated from 'VK_KHR_fragment_shading_rate'"]
+impl ImageUsageFlags {
+    pub const FRAGMENT_SHADING_RATE_ATTACHMENT_KHR: Self = Self(0b1_0000_0000);
+}
+#[doc = "Generated from 'VK_KHR_fragment_shading_rate'"]
+impl PipelineStageFlags {
+    pub const FRAGMENT_SHADING_RATE_ATTACHMENT_KHR: Self = Self(0b100_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_KHR_fragment_shading_rate'"]
+impl StructureType {
+    pub const FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR: Self = Self(1_000_226_000);
+    pub const PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR: Self = Self(1_000_226_001);
+    pub const PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR: Self = Self(1_000_226_002);
+    pub const PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR: Self = Self(1_000_226_003);
+    pub const PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR: Self = Self(1_000_226_004);
+}
+// VK_AMD_shader_core_properties2: property-only extension; no commands.
+impl AmdShaderCoreProperties2Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_shader_core_properties2\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct AmdShaderCoreProperties2Fn {}
+unsafe impl Send for AmdShaderCoreProperties2Fn {}
+unsafe impl Sync for AmdShaderCoreProperties2Fn {}
+impl AmdShaderCoreProperties2Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_AMD_shader_core_properties2'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD: Self = Self(1_000_227_000);
+}
+// Reserved registry placeholder (extension number 229).
+impl AmdExtension229Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_229\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension229Fn {}
+unsafe impl Send for AmdExtension229Fn {}
+unsafe impl Sync for AmdExtension229Fn {}
+impl AmdExtension229Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// VK_AMD_device_coherent_memory: no commands; adds two memory property bits
+// (bit 6 and bit 7) and one feature structure type.
+impl AmdDeviceCoherentMemoryFn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_device_coherent_memory\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct AmdDeviceCoherentMemoryFn {}
+unsafe impl Send for AmdDeviceCoherentMemoryFn {}
+unsafe impl Sync for AmdDeviceCoherentMemoryFn {}
+impl AmdDeviceCoherentMemoryFn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_AMD_device_coherent_memory'"]
+impl MemoryPropertyFlags {
+    pub const DEVICE_COHERENT_AMD: Self = Self(0b100_0000);
+    pub const DEVICE_UNCACHED_AMD: Self = Self(0b1000_0000);
+}
+#[doc = "Generated from 'VK_AMD_device_coherent_memory'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD: Self = Self(1_000_229_000);
+}
+// Reserved registry placeholders for AMD extension numbers 231-234; all are
+// identical empty stubs emitted so the table of extensions stays dense.
+impl AmdExtension231Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_231\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension231Fn {}
+unsafe impl Send for AmdExtension231Fn {}
+unsafe impl Sync for AmdExtension231Fn {}
+impl AmdExtension231Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl AmdExtension232Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_232\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension232Fn {}
+unsafe impl Send for AmdExtension232Fn {}
+unsafe impl Sync for AmdExtension232Fn {}
+impl AmdExtension232Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl AmdExtension233Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_233\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension233Fn {}
+unsafe impl Send for AmdExtension233Fn {}
+unsafe impl Sync for AmdExtension233Fn {}
+impl AmdExtension233Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl AmdExtension234Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_234\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension234Fn {}
+unsafe impl Send for AmdExtension234Fn {}
+unsafe impl Sync for AmdExtension234Fn {}
+impl AmdExtension234Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// VK_EXT_shader_image_atomic_int64: feature-struct-only extension.
+impl ExtShaderImageAtomicInt64Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_image_atomic_int64\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtShaderImageAtomicInt64Fn {}
+unsafe impl Send for ExtShaderImageAtomicInt64Fn {}
+unsafe impl Sync for ExtShaderImageAtomicInt64Fn {}
+impl ExtShaderImageAtomicInt64Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_EXT_shader_image_atomic_int64'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT: Self = Self(1_000_234_000);
+}
+// Reserved registry placeholder (extension number 236).
+impl AmdExtension236Fn {
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_236\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension236Fn {}
+unsafe impl Send for AmdExtension236Fn {}
+unsafe impl Sync for AmdExtension236Fn {}
+impl AmdExtension236Fn {
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl KhrSpirv14Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_spirv_1_4\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrSpirv14Fn {}
+unsafe impl Send for KhrSpirv14Fn {}
+unsafe impl Sync for KhrSpirv14Fn {}
+impl KhrSpirv14Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtMemoryBudgetFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_memory_budget\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtMemoryBudgetFn {}
+unsafe impl Send for ExtMemoryBudgetFn {}
+unsafe impl Sync for ExtMemoryBudgetFn {}
+impl ExtMemoryBudgetFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_memory_budget'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT: Self = Self(1_000_237_000);
+}
+impl ExtMemoryPriorityFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_memory_priority\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtMemoryPriorityFn {}
+unsafe impl Send for ExtMemoryPriorityFn {}
+unsafe impl Sync for ExtMemoryPriorityFn {}
+impl ExtMemoryPriorityFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_memory_priority'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT: Self = Self(1_000_238_000);
+ pub const MEMORY_PRIORITY_ALLOCATE_INFO_EXT: Self = Self(1_000_238_001);
+}
+impl KhrSurfaceProtectedCapabilitiesFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_KHR_surface_protected_capabilities\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrSurfaceProtectedCapabilitiesFn {}
+unsafe impl Send for KhrSurfaceProtectedCapabilitiesFn {}
+unsafe impl Sync for KhrSurfaceProtectedCapabilitiesFn {}
+impl KhrSurfaceProtectedCapabilitiesFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_surface_protected_capabilities'"]
+impl StructureType {
+ pub const SURFACE_PROTECTED_CAPABILITIES_KHR: Self = Self(1_000_239_000);
+}
+impl NvDedicatedAllocationImageAliasingFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_NV_dedicated_allocation_image_aliasing\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct NvDedicatedAllocationImageAliasingFn {}
+unsafe impl Send for NvDedicatedAllocationImageAliasingFn {}
+unsafe impl Sync for NvDedicatedAllocationImageAliasingFn {}
+impl NvDedicatedAllocationImageAliasingFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_NV_dedicated_allocation_image_aliasing'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV: Self =
+ Self(1_000_240_000);
+}
+impl KhrSeparateDepthStencilLayoutsFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_KHR_separate_depth_stencil_layouts\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrSeparateDepthStencilLayoutsFn {}
+unsafe impl Send for KhrSeparateDepthStencilLayoutsFn {}
+unsafe impl Sync for KhrSeparateDepthStencilLayoutsFn {}
+impl KhrSeparateDepthStencilLayoutsFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_separate_depth_stencil_layouts'"]
+impl ImageLayout {
+ pub const DEPTH_ATTACHMENT_OPTIMAL_KHR: Self = Self::DEPTH_ATTACHMENT_OPTIMAL;
+ pub const DEPTH_READ_ONLY_OPTIMAL_KHR: Self = Self::DEPTH_READ_ONLY_OPTIMAL;
+ pub const STENCIL_ATTACHMENT_OPTIMAL_KHR: Self = Self::STENCIL_ATTACHMENT_OPTIMAL;
+ pub const STENCIL_READ_ONLY_OPTIMAL_KHR: Self = Self::STENCIL_READ_ONLY_OPTIMAL;
+}
+#[doc = "Generated from 'VK_KHR_separate_depth_stencil_layouts'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR: Self =
+ Self::PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES;
+ pub const ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR: Self =
+ Self::ATTACHMENT_REFERENCE_STENCIL_LAYOUT;
+ pub const ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR: Self =
+ Self::ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT;
+}
+impl IntelExtension243Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_INTEL_extension_243\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct IntelExtension243Fn {}
+unsafe impl Send for IntelExtension243Fn {}
+unsafe impl Sync for IntelExtension243Fn {}
+impl IntelExtension243Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_INTEL_extension_243'"]
+impl AccessFlags2 {
+ pub const RESERVED_46_EXT: Self =
+ Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+}
+impl MesaExtension244Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_MESA_extension_244\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct MesaExtension244Fn {}
+unsafe impl Send for MesaExtension244Fn {}
+unsafe impl Sync for MesaExtension244Fn {}
+impl MesaExtension244Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtBufferDeviceAddressFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_buffer_device_address\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetBufferDeviceAddress = unsafe extern "system" fn(
+ device: Device,
+ p_info: *const BufferDeviceAddressInfo,
+) -> DeviceAddress;
+#[derive(Clone)]
+pub struct ExtBufferDeviceAddressFn {
+ pub get_buffer_device_address_ext: PFN_vkGetBufferDeviceAddress,
+}
+unsafe impl Send for ExtBufferDeviceAddressFn {}
+unsafe impl Sync for ExtBufferDeviceAddressFn {}
+impl ExtBufferDeviceAddressFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_buffer_device_address_ext: unsafe {
+ unsafe extern "system" fn get_buffer_device_address_ext(
+ _device: Device,
+ _p_info: *const BufferDeviceAddressInfo,
+ ) -> DeviceAddress {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_buffer_device_address_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetBufferDeviceAddressEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_buffer_device_address_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_buffer_device_address'"]
+impl BufferCreateFlags {
+ pub const DEVICE_ADDRESS_CAPTURE_REPLAY_EXT: Self = Self::DEVICE_ADDRESS_CAPTURE_REPLAY;
+}
+#[doc = "Generated from 'VK_EXT_buffer_device_address'"]
+impl BufferUsageFlags {
+ pub const SHADER_DEVICE_ADDRESS_EXT: Self = Self::SHADER_DEVICE_ADDRESS;
+}
+#[doc = "Generated from 'VK_EXT_buffer_device_address'"]
+impl Result {
+ pub const ERROR_INVALID_DEVICE_ADDRESS_EXT: Self = Self::ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS;
+}
+#[doc = "Generated from 'VK_EXT_buffer_device_address'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT: Self = Self(1_000_244_000);
+ pub const PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT: Self =
+ Self::PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT;
+ pub const BUFFER_DEVICE_ADDRESS_INFO_EXT: Self = Self::BUFFER_DEVICE_ADDRESS_INFO;
+ pub const BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT: Self = Self(1_000_244_002);
+}
+impl ExtToolingInfoFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_tooling_info\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceToolProperties = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_tool_count: *mut u32,
+ p_tool_properties: *mut PhysicalDeviceToolProperties,
+) -> Result;
+#[derive(Clone)]
+pub struct ExtToolingInfoFn {
+ pub get_physical_device_tool_properties_ext: PFN_vkGetPhysicalDeviceToolProperties,
+}
+unsafe impl Send for ExtToolingInfoFn {}
+unsafe impl Sync for ExtToolingInfoFn {}
+impl ExtToolingInfoFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_physical_device_tool_properties_ext: unsafe {
+ unsafe extern "system" fn get_physical_device_tool_properties_ext(
+ _physical_device: PhysicalDevice,
+ _p_tool_count: *mut u32,
+ _p_tool_properties: *mut PhysicalDeviceToolProperties,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_tool_properties_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceToolPropertiesEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_tool_properties_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_tooling_info'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT: Self = Self::PHYSICAL_DEVICE_TOOL_PROPERTIES;
+}
+#[doc = "Generated from 'VK_EXT_tooling_info'"]
+impl ToolPurposeFlags {
+ pub const DEBUG_REPORTING_EXT: Self = Self(0b10_0000);
+ pub const DEBUG_MARKERS_EXT: Self = Self(0b100_0000);
+}
+impl ExtSeparateStencilUsageFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_separate_stencil_usage\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtSeparateStencilUsageFn {}
+unsafe impl Send for ExtSeparateStencilUsageFn {}
+unsafe impl Sync for ExtSeparateStencilUsageFn {}
+impl ExtSeparateStencilUsageFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_separate_stencil_usage'"]
+impl StructureType {
+ pub const IMAGE_STENCIL_USAGE_CREATE_INFO_EXT: Self = Self::IMAGE_STENCIL_USAGE_CREATE_INFO;
+}
+impl ExtValidationFeaturesFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_validation_features\0") }
+ }
+ pub const SPEC_VERSION: u32 = 5u32;
+}
+#[derive(Clone)]
+pub struct ExtValidationFeaturesFn {}
+unsafe impl Send for ExtValidationFeaturesFn {}
+unsafe impl Sync for ExtValidationFeaturesFn {}
+impl ExtValidationFeaturesFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_validation_features'"]
+impl StructureType {
+ pub const VALIDATION_FEATURES_EXT: Self = Self(1_000_247_000);
+}
+impl KhrPresentWaitFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_present_wait\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkWaitForPresentKHR = unsafe extern "system" fn(
+ device: Device,
+ swapchain: SwapchainKHR,
+ present_id: u64,
+ timeout: u64,
+) -> Result;
+#[derive(Clone)]
+pub struct KhrPresentWaitFn {
+ pub wait_for_present_khr: PFN_vkWaitForPresentKHR,
+}
+unsafe impl Send for KhrPresentWaitFn {}
+unsafe impl Sync for KhrPresentWaitFn {}
+impl KhrPresentWaitFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ wait_for_present_khr: unsafe {
+ unsafe extern "system" fn wait_for_present_khr(
+ _device: Device,
+ _swapchain: SwapchainKHR,
+ _present_id: u64,
+ _timeout: u64,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(wait_for_present_khr)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkWaitForPresentKHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ wait_for_present_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_present_wait'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR: Self = Self(1_000_248_000);
+}
+impl NvCooperativeMatrixFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_cooperative_matrix\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_property_count: *mut u32,
+ p_properties: *mut CooperativeMatrixPropertiesNV,
+)
+ -> Result;
+#[derive(Clone)]
+pub struct NvCooperativeMatrixFn {
+ pub get_physical_device_cooperative_matrix_properties_nv:
+ PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV,
+}
+unsafe impl Send for NvCooperativeMatrixFn {}
+unsafe impl Sync for NvCooperativeMatrixFn {}
+impl NvCooperativeMatrixFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_physical_device_cooperative_matrix_properties_nv: unsafe {
+ unsafe extern "system" fn get_physical_device_cooperative_matrix_properties_nv(
+ _physical_device: PhysicalDevice,
+ _p_property_count: *mut u32,
+ _p_properties: *mut CooperativeMatrixPropertiesNV,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_cooperative_matrix_properties_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceCooperativeMatrixPropertiesNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_cooperative_matrix_properties_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_NV_cooperative_matrix'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV: Self = Self(1_000_249_000);
+ pub const COOPERATIVE_MATRIX_PROPERTIES_NV: Self = Self(1_000_249_001);
+ pub const PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV: Self = Self(1_000_249_002);
+}
+impl NvCoverageReductionModeFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_coverage_reduction_mode\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV =
+ unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_combination_count: *mut u32,
+ p_combinations: *mut FramebufferMixedSamplesCombinationNV,
+ ) -> Result;
+#[derive(Clone)]
+pub struct NvCoverageReductionModeFn {
+ pub get_physical_device_supported_framebuffer_mixed_samples_combinations_nv:
+ PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV,
+}
+unsafe impl Send for NvCoverageReductionModeFn {}
+unsafe impl Sync for NvCoverageReductionModeFn {}
+impl NvCoverageReductionModeFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_physical_device_supported_framebuffer_mixed_samples_combinations_nv: unsafe {
+ unsafe extern "system" fn get_physical_device_supported_framebuffer_mixed_samples_combinations_nv(
+ _physical_device: PhysicalDevice,
+ _p_combination_count: *mut u32,
+ _p_combinations: *mut FramebufferMixedSamplesCombinationNV,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(
+ get_physical_device_supported_framebuffer_mixed_samples_combinations_nv
+ )
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_supported_framebuffer_mixed_samples_combinations_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_NV_coverage_reduction_mode'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV: Self = Self(1_000_250_000);
+ pub const PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV: Self = Self(1_000_250_001);
+ pub const FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV: Self = Self(1_000_250_002);
+}
+impl ExtFragmentShaderInterlockFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_fragment_shader_interlock\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtFragmentShaderInterlockFn {}
+unsafe impl Send for ExtFragmentShaderInterlockFn {}
+unsafe impl Sync for ExtFragmentShaderInterlockFn {}
+impl ExtFragmentShaderInterlockFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_fragment_shader_interlock'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT: Self = Self(1_000_251_000);
+}
+impl ExtYcbcrImageArraysFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_ycbcr_image_arrays\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtYcbcrImageArraysFn {}
+unsafe impl Send for ExtYcbcrImageArraysFn {}
+unsafe impl Sync for ExtYcbcrImageArraysFn {}
+impl ExtYcbcrImageArraysFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_ycbcr_image_arrays'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT: Self = Self(1_000_252_000);
+}
+impl KhrUniformBufferStandardLayoutFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_KHR_uniform_buffer_standard_layout\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrUniformBufferStandardLayoutFn {}
+unsafe impl Send for KhrUniformBufferStandardLayoutFn {}
+unsafe impl Sync for KhrUniformBufferStandardLayoutFn {}
+impl KhrUniformBufferStandardLayoutFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_uniform_buffer_standard_layout'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR: Self =
+ Self::PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES;
+}
+impl ExtProvokingVertexFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_provoking_vertex\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtProvokingVertexFn {}
+unsafe impl Send for ExtProvokingVertexFn {}
+unsafe impl Sync for ExtProvokingVertexFn {}
+impl ExtProvokingVertexFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_provoking_vertex'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT: Self = Self(1_000_254_000);
+ pub const PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT: Self =
+ Self(1_000_254_001);
+ pub const PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT: Self = Self(1_000_254_002);
+}
+impl ExtFullScreenExclusiveFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_full_screen_exclusive\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 4u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
+ p_present_mode_count: *mut u32,
+ p_present_modes: *mut PresentModeKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkAcquireFullScreenExclusiveModeEXT =
+ unsafe extern "system" fn(device: Device, swapchain: SwapchainKHR) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkReleaseFullScreenExclusiveModeEXT =
+ unsafe extern "system" fn(device: Device, swapchain: SwapchainKHR) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDeviceGroupSurfacePresentModes2EXT = unsafe extern "system" fn(
+ device: Device,
+ p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
+ p_modes: *mut DeviceGroupPresentModeFlagsKHR,
+) -> Result;
+#[derive(Clone)]
+pub struct ExtFullScreenExclusiveFn {
+ pub get_physical_device_surface_present_modes2_ext:
+ PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT,
+ pub acquire_full_screen_exclusive_mode_ext: PFN_vkAcquireFullScreenExclusiveModeEXT,
+ pub release_full_screen_exclusive_mode_ext: PFN_vkReleaseFullScreenExclusiveModeEXT,
+ pub get_device_group_surface_present_modes2_ext: PFN_vkGetDeviceGroupSurfacePresentModes2EXT,
+}
+unsafe impl Send for ExtFullScreenExclusiveFn {}
+unsafe impl Sync for ExtFullScreenExclusiveFn {}
+impl ExtFullScreenExclusiveFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_physical_device_surface_present_modes2_ext: unsafe {
+ unsafe extern "system" fn get_physical_device_surface_present_modes2_ext(
+ _physical_device: PhysicalDevice,
+ _p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
+ _p_present_mode_count: *mut u32,
+ _p_present_modes: *mut PresentModeKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_surface_present_modes2_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceSurfacePresentModes2EXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_surface_present_modes2_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ acquire_full_screen_exclusive_mode_ext: unsafe {
+ unsafe extern "system" fn acquire_full_screen_exclusive_mode_ext(
+ _device: Device,
+ _swapchain: SwapchainKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(acquire_full_screen_exclusive_mode_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkAcquireFullScreenExclusiveModeEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ acquire_full_screen_exclusive_mode_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ release_full_screen_exclusive_mode_ext: unsafe {
+ unsafe extern "system" fn release_full_screen_exclusive_mode_ext(
+ _device: Device,
+ _swapchain: SwapchainKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(release_full_screen_exclusive_mode_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkReleaseFullScreenExclusiveModeEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ release_full_screen_exclusive_mode_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_device_group_surface_present_modes2_ext: unsafe {
+ unsafe extern "system" fn get_device_group_surface_present_modes2_ext(
+ _device: Device,
+ _p_surface_info: *const PhysicalDeviceSurfaceInfo2KHR,
+ _p_modes: *mut DeviceGroupPresentModeFlagsKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_device_group_surface_present_modes2_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDeviceGroupSurfacePresentModes2EXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_group_surface_present_modes2_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_full_screen_exclusive'"]
+impl Result {
+ pub const ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT: Self = Self(-1_000_255_000);
+}
+#[doc = "Generated from 'VK_EXT_full_screen_exclusive'"]
+impl StructureType {
+ pub const SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT: Self = Self(1_000_255_000);
+ pub const SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT: Self = Self(1_000_255_002);
+ pub const SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT: Self = Self(1_000_255_001);
+}
+impl ExtHeadlessSurfaceFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_headless_surface\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateHeadlessSurfaceEXT = unsafe extern "system" fn(
+ instance: Instance,
+ p_create_info: *const HeadlessSurfaceCreateInfoEXT,
+ p_allocator: *const AllocationCallbacks,
+ p_surface: *mut SurfaceKHR,
+) -> Result;
+#[derive(Clone)]
+pub struct ExtHeadlessSurfaceFn {
+ pub create_headless_surface_ext: PFN_vkCreateHeadlessSurfaceEXT,
+}
+unsafe impl Send for ExtHeadlessSurfaceFn {}
+unsafe impl Sync for ExtHeadlessSurfaceFn {}
+impl ExtHeadlessSurfaceFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_headless_surface_ext: unsafe {
+ unsafe extern "system" fn create_headless_surface_ext(
+ _instance: Instance,
+ _p_create_info: *const HeadlessSurfaceCreateInfoEXT,
+ _p_allocator: *const AllocationCallbacks,
+ _p_surface: *mut SurfaceKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_headless_surface_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreateHeadlessSurfaceEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_headless_surface_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_headless_surface'"]
+impl StructureType {
+ pub const HEADLESS_SURFACE_CREATE_INFO_EXT: Self = Self(1_000_256_000);
+}
+impl KhrBufferDeviceAddressFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_buffer_device_address\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetBufferOpaqueCaptureAddress =
+ unsafe extern "system" fn(device: Device, p_info: *const BufferDeviceAddressInfo) -> u64;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDeviceMemoryOpaqueCaptureAddress = unsafe extern "system" fn(
+ device: Device,
+ p_info: *const DeviceMemoryOpaqueCaptureAddressInfo,
+) -> u64;
+#[derive(Clone)]
+pub struct KhrBufferDeviceAddressFn {
+ pub get_buffer_device_address_khr: crate::vk::PFN_vkGetBufferDeviceAddress,
+ pub get_buffer_opaque_capture_address_khr: PFN_vkGetBufferOpaqueCaptureAddress,
+ pub get_device_memory_opaque_capture_address_khr: PFN_vkGetDeviceMemoryOpaqueCaptureAddress,
+}
+unsafe impl Send for KhrBufferDeviceAddressFn {}
+unsafe impl Sync for KhrBufferDeviceAddressFn {}
+impl KhrBufferDeviceAddressFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_buffer_device_address_khr: unsafe {
+ unsafe extern "system" fn get_buffer_device_address_khr(
+ _device: Device,
+ _p_info: *const BufferDeviceAddressInfo,
+ ) -> DeviceAddress {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_buffer_device_address_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetBufferDeviceAddressKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_buffer_device_address_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_buffer_opaque_capture_address_khr: unsafe {
+ unsafe extern "system" fn get_buffer_opaque_capture_address_khr(
+ _device: Device,
+ _p_info: *const BufferDeviceAddressInfo,
+ ) -> u64 {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_buffer_opaque_capture_address_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetBufferOpaqueCaptureAddressKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_buffer_opaque_capture_address_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_device_memory_opaque_capture_address_khr: unsafe {
+ unsafe extern "system" fn get_device_memory_opaque_capture_address_khr(
+ _device: Device,
+ _p_info: *const DeviceMemoryOpaqueCaptureAddressInfo,
+ ) -> u64 {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_device_memory_opaque_capture_address_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDeviceMemoryOpaqueCaptureAddressKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_memory_opaque_capture_address_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_buffer_device_address'"]
+impl BufferCreateFlags {
+ pub const DEVICE_ADDRESS_CAPTURE_REPLAY_KHR: Self = Self::DEVICE_ADDRESS_CAPTURE_REPLAY;
+}
+#[doc = "Generated from 'VK_KHR_buffer_device_address'"]
+impl BufferUsageFlags {
+ pub const SHADER_DEVICE_ADDRESS_KHR: Self = Self::SHADER_DEVICE_ADDRESS;
+}
+#[doc = "Generated from 'VK_KHR_buffer_device_address'"]
+impl MemoryAllocateFlags {
+ pub const DEVICE_ADDRESS_KHR: Self = Self::DEVICE_ADDRESS;
+ pub const DEVICE_ADDRESS_CAPTURE_REPLAY_KHR: Self = Self::DEVICE_ADDRESS_CAPTURE_REPLAY;
+}
+#[doc = "Generated from 'VK_KHR_buffer_device_address'"]
+impl Result {
+ pub const ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR: Self =
+ Self::ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS;
+}
+#[doc = "Generated from 'VK_KHR_buffer_device_address'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR: Self =
+ Self::PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES;
+ pub const BUFFER_DEVICE_ADDRESS_INFO_KHR: Self = Self::BUFFER_DEVICE_ADDRESS_INFO;
+ pub const BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR: Self =
+ Self::BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO;
+ pub const MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR: Self =
+ Self::MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO;
+ pub const DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR: Self =
+ Self::DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO;
+}
+impl ExtExtension259Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_259\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension259Fn {}
+unsafe impl Send for ExtExtension259Fn {}
+unsafe impl Sync for ExtExtension259Fn {}
+impl ExtExtension259Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_extension_259'"]
+impl FormatFeatureFlags2 {
+ pub const RESERVED_44_EXT: Self =
+ Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const RESERVED_45_EXT: Self =
+ Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_extension_259'"]
+impl ImageCreateFlags {
+ pub const RESERVED_19_EXT: Self = Self(0b1000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_extension_259'"]
+impl QueueFlags {
+ pub const RESERVED_9_EXT: Self = Self(0b10_0000_0000);
+}
// VK_EXT_line_rasterization: one device-level command, vkCmdSetLineStippleEXT.
impl ExtLineRasterizationFn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_line_rasterization\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetLineStippleEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    line_stipple_factor: u32,
    line_stipple_pattern: u16,
);
#[derive(Clone)]
pub struct ExtLineRasterizationFn {
    pub cmd_set_line_stipple_ext: PFN_vkCmdSetLineStippleEXT,
}
unsafe impl Send for ExtLineRasterizationFn {}
unsafe impl Sync for ExtLineRasterizationFn {}
impl ExtLineRasterizationFn {
    // Resolves the entry point by name via `_f`. When the loader returns null
    // (extension absent) a panicking stub is installed instead, so calling an
    // unloaded function fails loudly rather than dereferencing null.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            cmd_set_line_stipple_ext: unsafe {
                unsafe extern "system" fn cmd_set_line_stipple_ext(
                    _command_buffer: CommandBuffer,
                    _line_stipple_factor: u32,
                    _line_stipple_pattern: u16,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_line_stipple_ext)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetLineStippleEXT\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_line_stipple_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_EXT_line_rasterization'"]
impl DynamicState {
    pub const LINE_STIPPLE_EXT: Self = Self(1_000_259_000);
}
#[doc = "Generated from 'VK_EXT_line_rasterization'"]
// Extension-number-based enum offsets (extension 260 -> base 1_000_259_000).
impl StructureType {
    pub const PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT: Self = Self(1_000_259_000);
    pub const PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT: Self = Self(1_000_259_001);
    pub const PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT: Self = Self(1_000_259_002);
}
// VK_EXT_shader_atomic_float: a features-only extension (no commands), hence
// an empty function table.
impl ExtShaderAtomicFloatFn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_atomic_float\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[derive(Clone)]
pub struct ExtShaderAtomicFloatFn {}
unsafe impl Send for ExtShaderAtomicFloatFn {}
unsafe impl Sync for ExtShaderAtomicFloatFn {}
impl ExtShaderAtomicFloatFn {
    // No entry points to resolve.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_EXT_shader_atomic_float'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT: Self = Self(1_000_260_000);
}
// VK_EXT_host_query_reset: promoted to core Vulkan 1.2. The EXT entry point
// shares the signature of the promoted vkResetQueryPool, so the unsuffixed
// PFN type is reused while the loader still queries "vkResetQueryPoolEXT".
impl ExtHostQueryResetFn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_host_query_reset\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[allow(non_camel_case_types)]
pub type PFN_vkResetQueryPool = unsafe extern "system" fn(
    device: Device,
    query_pool: QueryPool,
    first_query: u32,
    query_count: u32,
);
#[derive(Clone)]
pub struct ExtHostQueryResetFn {
    pub reset_query_pool_ext: PFN_vkResetQueryPool,
}
unsafe impl Send for ExtHostQueryResetFn {}
unsafe impl Sync for ExtHostQueryResetFn {}
impl ExtHostQueryResetFn {
    // Null loader result -> panicking stub, so misuse fails loudly.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            reset_query_pool_ext: unsafe {
                unsafe extern "system" fn reset_query_pool_ext(
                    _device: Device,
                    _query_pool: QueryPool,
                    _first_query: u32,
                    _query_count: u32,
                ) {
                    panic!(concat!("Unable to load ", stringify!(reset_query_pool_ext)))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkResetQueryPoolEXT\0");
                let val = _f(cname);
                if val.is_null() {
                    reset_query_pool_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_EXT_host_query_reset'"]
// Alias of the promoted core structure type value.
impl StructureType {
    pub const PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT: Self =
        Self::PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES;
}
// VK_GGP_extension_263: reserved registry placeholder; no commands, empty table.
impl GgpExtension263Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GGP_extension_263\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
#[derive(Clone)]
pub struct GgpExtension263Fn {}
unsafe impl Send for GgpExtension263Fn {}
unsafe impl Sync for GgpExtension263Fn {}
impl GgpExtension263Fn {
    // No entry points to resolve.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// VK_BRCM_extension_264: reserved registry placeholder; no commands, empty table.
impl BrcmExtension264Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_BRCM_extension_264\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
#[derive(Clone)]
pub struct BrcmExtension264Fn {}
unsafe impl Send for BrcmExtension264Fn {}
unsafe impl Sync for BrcmExtension264Fn {}
impl BrcmExtension264Fn {
    // No entry points to resolve.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// VK_BRCM_extension_265: reserved registry placeholder; no commands, empty table.
impl BrcmExtension265Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_BRCM_extension_265\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
#[derive(Clone)]
pub struct BrcmExtension265Fn {}
unsafe impl Send for BrcmExtension265Fn {}
unsafe impl Sync for BrcmExtension265Fn {}
impl BrcmExtension265Fn {
    // No entry points to resolve.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// VK_EXT_index_type_uint8: features-only extension (adds an IndexType value,
// no commands), hence an empty function table.
impl ExtIndexTypeUint8Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_index_type_uint8\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[derive(Clone)]
pub struct ExtIndexTypeUint8Fn {}
unsafe impl Send for ExtIndexTypeUint8Fn {}
unsafe impl Sync for ExtIndexTypeUint8Fn {}
impl ExtIndexTypeUint8Fn {
    // No entry points to resolve.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_EXT_index_type_uint8'"]
impl IndexType {
    pub const UINT8_EXT: Self = Self(1_000_265_000);
}
#[doc = "Generated from 'VK_EXT_index_type_uint8'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT: Self = Self(1_000_265_000);
}
// VK_EXT_extension_267: reserved registry placeholder; no commands, empty table.
impl ExtExtension267Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_267\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
#[derive(Clone)]
pub struct ExtExtension267Fn {}
unsafe impl Send for ExtExtension267Fn {}
unsafe impl Sync for ExtExtension267Fn {}
impl ExtExtension267Fn {
    // No entry points to resolve.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// VK_EXT_extended_dynamic_state: twelve vkCmdSet*/vkCmdBind* commands, all
// promoted to core Vulkan 1.3 — the PFN type aliases therefore use the
// unsuffixed (promoted) names while `load` still resolves the *EXT symbols.
impl ExtExtendedDynamicStateFn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extended_dynamic_state\0")
        }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetCullMode =
    unsafe extern "system" fn(command_buffer: CommandBuffer, cull_mode: CullModeFlags);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetFrontFace =
    unsafe extern "system" fn(command_buffer: CommandBuffer, front_face: FrontFace);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetPrimitiveTopology =
    unsafe extern "system" fn(command_buffer: CommandBuffer, primitive_topology: PrimitiveTopology);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetViewportWithCount = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    viewport_count: u32,
    p_viewports: *const Viewport,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetScissorWithCount = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    scissor_count: u32,
    p_scissors: *const Rect2D,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBindVertexBuffers2 = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    first_binding: u32,
    binding_count: u32,
    p_buffers: *const Buffer,
    p_offsets: *const DeviceSize,
    p_sizes: *const DeviceSize,
    p_strides: *const DeviceSize,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetDepthTestEnable =
    unsafe extern "system" fn(command_buffer: CommandBuffer, depth_test_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetDepthWriteEnable =
    unsafe extern "system" fn(command_buffer: CommandBuffer, depth_write_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetDepthCompareOp =
    unsafe extern "system" fn(command_buffer: CommandBuffer, depth_compare_op: CompareOp);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetDepthBoundsTestEnable =
    unsafe extern "system" fn(command_buffer: CommandBuffer, depth_bounds_test_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetStencilTestEnable =
    unsafe extern "system" fn(command_buffer: CommandBuffer, stencil_test_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetStencilOp = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    face_mask: StencilFaceFlags,
    fail_op: StencilOp,
    pass_op: StencilOp,
    depth_fail_op: StencilOp,
    compare_op: CompareOp,
);
#[derive(Clone)]
pub struct ExtExtendedDynamicStateFn {
    pub cmd_set_cull_mode_ext: PFN_vkCmdSetCullMode,
    pub cmd_set_front_face_ext: PFN_vkCmdSetFrontFace,
    pub cmd_set_primitive_topology_ext: PFN_vkCmdSetPrimitiveTopology,
    pub cmd_set_viewport_with_count_ext: PFN_vkCmdSetViewportWithCount,
    pub cmd_set_scissor_with_count_ext: PFN_vkCmdSetScissorWithCount,
    pub cmd_bind_vertex_buffers2_ext: PFN_vkCmdBindVertexBuffers2,
    pub cmd_set_depth_test_enable_ext: PFN_vkCmdSetDepthTestEnable,
    pub cmd_set_depth_write_enable_ext: PFN_vkCmdSetDepthWriteEnable,
    pub cmd_set_depth_compare_op_ext: PFN_vkCmdSetDepthCompareOp,
    pub cmd_set_depth_bounds_test_enable_ext: PFN_vkCmdSetDepthBoundsTestEnable,
    pub cmd_set_stencil_test_enable_ext: PFN_vkCmdSetStencilTestEnable,
    pub cmd_set_stencil_op_ext: PFN_vkCmdSetStencilOp,
}
unsafe impl Send for ExtExtendedDynamicStateFn {}
unsafe impl Sync for ExtExtendedDynamicStateFn {}
impl ExtExtendedDynamicStateFn {
    // Each field is resolved by symbol name via `_f`; a panicking stub with a
    // matching signature is installed whenever the loader returns null, so a
    // call through an unloaded pointer aborts with a clear message instead of
    // invoking undefined behavior.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            cmd_set_cull_mode_ext: unsafe {
                unsafe extern "system" fn cmd_set_cull_mode_ext(
                    _command_buffer: CommandBuffer,
                    _cull_mode: CullModeFlags,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_cull_mode_ext)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetCullModeEXT\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_cull_mode_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_set_front_face_ext: unsafe {
                unsafe extern "system" fn cmd_set_front_face_ext(
                    _command_buffer: CommandBuffer,
                    _front_face: FrontFace,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_front_face_ext)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetFrontFaceEXT\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_front_face_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_set_primitive_topology_ext: unsafe {
                unsafe extern "system" fn cmd_set_primitive_topology_ext(
                    _command_buffer: CommandBuffer,
                    _primitive_topology: PrimitiveTopology,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_primitive_topology_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdSetPrimitiveTopologyEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_primitive_topology_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_set_viewport_with_count_ext: unsafe {
                unsafe extern "system" fn cmd_set_viewport_with_count_ext(
                    _command_buffer: CommandBuffer,
                    _viewport_count: u32,
                    _p_viewports: *const Viewport,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_viewport_with_count_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdSetViewportWithCountEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_viewport_with_count_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_set_scissor_with_count_ext: unsafe {
                unsafe extern "system" fn cmd_set_scissor_with_count_ext(
                    _command_buffer: CommandBuffer,
                    _scissor_count: u32,
                    _p_scissors: *const Rect2D,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_scissor_with_count_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdSetScissorWithCountEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_scissor_with_count_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_bind_vertex_buffers2_ext: unsafe {
                unsafe extern "system" fn cmd_bind_vertex_buffers2_ext(
                    _command_buffer: CommandBuffer,
                    _first_binding: u32,
                    _binding_count: u32,
                    _p_buffers: *const Buffer,
                    _p_offsets: *const DeviceSize,
                    _p_sizes: *const DeviceSize,
                    _p_strides: *const DeviceSize,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_bind_vertex_buffers2_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdBindVertexBuffers2EXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_bind_vertex_buffers2_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_set_depth_test_enable_ext: unsafe {
                unsafe extern "system" fn cmd_set_depth_test_enable_ext(
                    _command_buffer: CommandBuffer,
                    _depth_test_enable: Bool32,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_depth_test_enable_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdSetDepthTestEnableEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_depth_test_enable_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_set_depth_write_enable_ext: unsafe {
                unsafe extern "system" fn cmd_set_depth_write_enable_ext(
                    _command_buffer: CommandBuffer,
                    _depth_write_enable: Bool32,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_depth_write_enable_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdSetDepthWriteEnableEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_depth_write_enable_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_set_depth_compare_op_ext: unsafe {
                unsafe extern "system" fn cmd_set_depth_compare_op_ext(
                    _command_buffer: CommandBuffer,
                    _depth_compare_op: CompareOp,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_depth_compare_op_ext)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthCompareOpEXT\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_depth_compare_op_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_set_depth_bounds_test_enable_ext: unsafe {
                unsafe extern "system" fn cmd_set_depth_bounds_test_enable_ext(
                    _command_buffer: CommandBuffer,
                    _depth_bounds_test_enable: Bool32,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_depth_bounds_test_enable_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdSetDepthBoundsTestEnableEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_depth_bounds_test_enable_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_set_stencil_test_enable_ext: unsafe {
                unsafe extern "system" fn cmd_set_stencil_test_enable_ext(
                    _command_buffer: CommandBuffer,
                    _stencil_test_enable: Bool32,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_stencil_test_enable_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdSetStencilTestEnableEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_stencil_test_enable_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_set_stencil_op_ext: unsafe {
                unsafe extern "system" fn cmd_set_stencil_op_ext(
                    _command_buffer: CommandBuffer,
                    _face_mask: StencilFaceFlags,
                    _fail_op: StencilOp,
                    _pass_op: StencilOp,
                    _depth_fail_op: StencilOp,
                    _compare_op: CompareOp,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_stencil_op_ext)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetStencilOpEXT\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_stencil_op_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_EXT_extended_dynamic_state'"]
// Promoted dynamic-state values: each EXT name aliases its core equivalent.
impl DynamicState {
    pub const CULL_MODE_EXT: Self = Self::CULL_MODE;
    pub const FRONT_FACE_EXT: Self = Self::FRONT_FACE;
    pub const PRIMITIVE_TOPOLOGY_EXT: Self = Self::PRIMITIVE_TOPOLOGY;
    pub const VIEWPORT_WITH_COUNT_EXT: Self = Self::VIEWPORT_WITH_COUNT;
    pub const SCISSOR_WITH_COUNT_EXT: Self = Self::SCISSOR_WITH_COUNT;
    pub const VERTEX_INPUT_BINDING_STRIDE_EXT: Self = Self::VERTEX_INPUT_BINDING_STRIDE;
    pub const DEPTH_TEST_ENABLE_EXT: Self = Self::DEPTH_TEST_ENABLE;
    pub const DEPTH_WRITE_ENABLE_EXT: Self = Self::DEPTH_WRITE_ENABLE;
    pub const DEPTH_COMPARE_OP_EXT: Self = Self::DEPTH_COMPARE_OP;
    pub const DEPTH_BOUNDS_TEST_ENABLE_EXT: Self = Self::DEPTH_BOUNDS_TEST_ENABLE;
    pub const STENCIL_TEST_ENABLE_EXT: Self = Self::STENCIL_TEST_ENABLE;
    pub const STENCIL_OP_EXT: Self = Self::STENCIL_OP;
}
#[doc = "Generated from 'VK_EXT_extended_dynamic_state'"]
impl StructureType {
    #[doc = "Not promoted to 1.3"]
    pub const PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT: Self = Self(1_000_267_000);
}
// VK_KHR_deferred_host_operations: create/destroy/join deferred-operation
// objects plus concurrency and result queries.
impl KhrDeferredHostOperationsFn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_deferred_host_operations\0")
        }
    }
    pub const SPEC_VERSION: u32 = 4u32;
}
#[allow(non_camel_case_types)]
pub type PFN_vkCreateDeferredOperationKHR = unsafe extern "system" fn(
    device: Device,
    p_allocator: *const AllocationCallbacks,
    p_deferred_operation: *mut DeferredOperationKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyDeferredOperationKHR = unsafe extern "system" fn(
    device: Device,
    operation: DeferredOperationKHR,
    p_allocator: *const AllocationCallbacks,
);
#[allow(non_camel_case_types)]
pub type PFN_vkGetDeferredOperationMaxConcurrencyKHR =
    unsafe extern "system" fn(device: Device, operation: DeferredOperationKHR) -> u32;
#[allow(non_camel_case_types)]
pub type PFN_vkGetDeferredOperationResultKHR =
    unsafe extern "system" fn(device: Device, operation: DeferredOperationKHR) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDeferredOperationJoinKHR =
    unsafe extern "system" fn(device: Device, operation: DeferredOperationKHR) -> Result;
#[derive(Clone)]
pub struct KhrDeferredHostOperationsFn {
    pub create_deferred_operation_khr: PFN_vkCreateDeferredOperationKHR,
    pub destroy_deferred_operation_khr: PFN_vkDestroyDeferredOperationKHR,
    pub get_deferred_operation_max_concurrency_khr: PFN_vkGetDeferredOperationMaxConcurrencyKHR,
    pub get_deferred_operation_result_khr: PFN_vkGetDeferredOperationResultKHR,
    pub deferred_operation_join_khr: PFN_vkDeferredOperationJoinKHR,
}
unsafe impl Send for KhrDeferredHostOperationsFn {}
unsafe impl Sync for KhrDeferredHostOperationsFn {}
impl KhrDeferredHostOperationsFn {
    // Resolves each entry point by name; null results are replaced with
    // panicking stubs so unloaded functions fail loudly when called.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            create_deferred_operation_khr: unsafe {
                unsafe extern "system" fn create_deferred_operation_khr(
                    _device: Device,
                    _p_allocator: *const AllocationCallbacks,
                    _p_deferred_operation: *mut DeferredOperationKHR,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(create_deferred_operation_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCreateDeferredOperationKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    create_deferred_operation_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            destroy_deferred_operation_khr: unsafe {
                unsafe extern "system" fn destroy_deferred_operation_khr(
                    _device: Device,
                    _operation: DeferredOperationKHR,
                    _p_allocator: *const AllocationCallbacks,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(destroy_deferred_operation_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkDestroyDeferredOperationKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    destroy_deferred_operation_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_deferred_operation_max_concurrency_khr: unsafe {
                unsafe extern "system" fn get_deferred_operation_max_concurrency_khr(
                    _device: Device,
                    _operation: DeferredOperationKHR,
                ) -> u32 {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_deferred_operation_max_concurrency_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetDeferredOperationMaxConcurrencyKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_deferred_operation_max_concurrency_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_deferred_operation_result_khr: unsafe {
                unsafe extern "system" fn get_deferred_operation_result_khr(
                    _device: Device,
                    _operation: DeferredOperationKHR,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_deferred_operation_result_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetDeferredOperationResultKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_deferred_operation_result_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            deferred_operation_join_khr: unsafe {
                unsafe extern "system" fn deferred_operation_join_khr(
                    _device: Device,
                    _operation: DeferredOperationKHR,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(deferred_operation_join_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkDeferredOperationJoinKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    deferred_operation_join_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_KHR_deferred_host_operations'"]
impl ObjectType {
    pub const DEFERRED_OPERATION_KHR: Self = Self(1_000_268_000);
}
#[doc = "Generated from 'VK_KHR_deferred_host_operations'"]
// Non-error status codes returned by join/result queries on deferred ops.
impl Result {
    pub const THREAD_IDLE_KHR: Self = Self(1_000_268_000);
    pub const THREAD_DONE_KHR: Self = Self(1_000_268_001);
    pub const OPERATION_DEFERRED_KHR: Self = Self(1_000_268_002);
    pub const OPERATION_NOT_DEFERRED_KHR: Self = Self(1_000_268_003);
}
// VK_KHR_pipeline_executable_properties: introspection queries that enumerate
// a pipeline's executables, their statistics, and internal representations.
// All three follow the standard Vulkan two-call count/fill pattern.
impl KhrPipelineExecutablePropertiesFn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                b"VK_KHR_pipeline_executable_properties\0",
            )
        }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[allow(non_camel_case_types)]
pub type PFN_vkGetPipelineExecutablePropertiesKHR = unsafe extern "system" fn(
    device: Device,
    p_pipeline_info: *const PipelineInfoKHR,
    p_executable_count: *mut u32,
    p_properties: *mut PipelineExecutablePropertiesKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPipelineExecutableStatisticsKHR = unsafe extern "system" fn(
    device: Device,
    p_executable_info: *const PipelineExecutableInfoKHR,
    p_statistic_count: *mut u32,
    p_statistics: *mut PipelineExecutableStatisticKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPipelineExecutableInternalRepresentationsKHR =
    unsafe extern "system" fn(
        device: Device,
        p_executable_info: *const PipelineExecutableInfoKHR,
        p_internal_representation_count: *mut u32,
        p_internal_representations: *mut PipelineExecutableInternalRepresentationKHR,
    ) -> Result;
#[derive(Clone)]
pub struct KhrPipelineExecutablePropertiesFn {
    pub get_pipeline_executable_properties_khr: PFN_vkGetPipelineExecutablePropertiesKHR,
    pub get_pipeline_executable_statistics_khr: PFN_vkGetPipelineExecutableStatisticsKHR,
    pub get_pipeline_executable_internal_representations_khr:
        PFN_vkGetPipelineExecutableInternalRepresentationsKHR,
}
unsafe impl Send for KhrPipelineExecutablePropertiesFn {}
unsafe impl Sync for KhrPipelineExecutablePropertiesFn {}
impl KhrPipelineExecutablePropertiesFn {
    // Null loader results are replaced with panicking stubs of matching
    // signature, making misuse of an unloaded table an explicit panic.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            get_pipeline_executable_properties_khr: unsafe {
                unsafe extern "system" fn get_pipeline_executable_properties_khr(
                    _device: Device,
                    _p_pipeline_info: *const PipelineInfoKHR,
                    _p_executable_count: *mut u32,
                    _p_properties: *mut PipelineExecutablePropertiesKHR,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_pipeline_executable_properties_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetPipelineExecutablePropertiesKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_pipeline_executable_properties_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_pipeline_executable_statistics_khr: unsafe {
                unsafe extern "system" fn get_pipeline_executable_statistics_khr(
                    _device: Device,
                    _p_executable_info: *const PipelineExecutableInfoKHR,
                    _p_statistic_count: *mut u32,
                    _p_statistics: *mut PipelineExecutableStatisticKHR,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_pipeline_executable_statistics_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetPipelineExecutableStatisticsKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_pipeline_executable_statistics_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_pipeline_executable_internal_representations_khr: unsafe {
                unsafe extern "system" fn get_pipeline_executable_internal_representations_khr(
                    _device: Device,
                    _p_executable_info: *const PipelineExecutableInfoKHR,
                    _p_internal_representation_count: *mut u32,
                    _p_internal_representations: *mut PipelineExecutableInternalRepresentationKHR,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_pipeline_executable_internal_representations_khr)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetPipelineExecutableInternalRepresentationsKHR\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_pipeline_executable_internal_representations_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_KHR_pipeline_executable_properties'"]
// Pipeline-create flags requesting capture of statistics (bit 6) and internal
// representations (bit 7) for later introspection.
impl PipelineCreateFlags {
    pub const CAPTURE_STATISTICS_KHR: Self = Self(0b100_0000);
    pub const CAPTURE_INTERNAL_REPRESENTATIONS_KHR: Self = Self(0b1000_0000);
}
#[doc = "Generated from 'VK_KHR_pipeline_executable_properties'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR: Self =
        Self(1_000_269_000);
    pub const PIPELINE_INFO_KHR: Self = Self(1_000_269_001);
    pub const PIPELINE_EXECUTABLE_PROPERTIES_KHR: Self = Self(1_000_269_002);
    pub const PIPELINE_EXECUTABLE_INFO_KHR: Self = Self(1_000_269_003);
    pub const PIPELINE_EXECUTABLE_STATISTIC_KHR: Self = Self(1_000_269_004);
    pub const PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR: Self = Self(1_000_269_005);
}
// VK_INTEL_extension_271: reserved registry placeholder; defines no commands,
// only a reserved image-usage bit.
impl IntelExtension271Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_INTEL_extension_271\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
#[derive(Clone)]
pub struct IntelExtension271Fn {}
unsafe impl Send for IntelExtension271Fn {}
unsafe impl Sync for IntelExtension271Fn {}
impl IntelExtension271Fn {
    // No entry points to resolve.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_INTEL_extension_271'"]
// Reserved image-usage flag (bit 22).
impl ImageUsageFlags {
    pub const RESERVED_22_EXT: Self = Self(0b100_0000_0000_0000_0000_0000);
}
// VK_INTEL_extension_272: reserved registry placeholder; no commands, empty table.
impl IntelExtension272Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_INTEL_extension_272\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
#[derive(Clone)]
pub struct IntelExtension272Fn {}
unsafe impl Send for IntelExtension272Fn {}
unsafe impl Sync for IntelExtension272Fn {}
impl IntelExtension272Fn {
    // No entry points to resolve.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// VK_INTEL_extension_273: reserved registry placeholder; no commands, empty table.
impl IntelExtension273Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_INTEL_extension_273\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
#[derive(Clone)]
pub struct IntelExtension273Fn {}
unsafe impl Send for IntelExtension273Fn {}
unsafe impl Sync for IntelExtension273Fn {}
impl IntelExtension273Fn {
    // No entry points to resolve.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
// VK_EXT_shader_atomic_float2: features-only extension (no commands), hence an
// empty function table.
impl ExtShaderAtomicFloat2Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_atomic_float2\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[derive(Clone)]
pub struct ExtShaderAtomicFloat2Fn {}
unsafe impl Send for ExtShaderAtomicFloat2Fn {}
unsafe impl Sync for ExtShaderAtomicFloat2Fn {}
impl ExtShaderAtomicFloat2Fn {
    // No entry points to resolve.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_EXT_shader_atomic_float2'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT: Self = Self(1_000_273_000);
}
// VK_EXT_surface_maintenance1: adds only query structures (no commands), hence
// an empty function table.
impl ExtSurfaceMaintenance1Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_surface_maintenance1\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[derive(Clone)]
pub struct ExtSurfaceMaintenance1Fn {}
unsafe impl Send for ExtSurfaceMaintenance1Fn {}
unsafe impl Sync for ExtSurfaceMaintenance1Fn {}
impl ExtSurfaceMaintenance1Fn {
    // No entry points to resolve.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_EXT_surface_maintenance1'"]
impl StructureType {
    pub const SURFACE_PRESENT_MODE_EXT: Self = Self(1_000_274_000);
    pub const SURFACE_PRESENT_SCALING_CAPABILITIES_EXT: Self = Self(1_000_274_001);
    pub const SURFACE_PRESENT_MODE_COMPATIBILITY_EXT: Self = Self(1_000_274_002);
}
// VK_EXT_swapchain_maintenance1: one device-level command,
// vkReleaseSwapchainImagesEXT, plus present-fence/mode structures.
impl ExtSwapchainMaintenance1Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_swapchain_maintenance1\0")
        }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
#[allow(non_camel_case_types)]
pub type PFN_vkReleaseSwapchainImagesEXT = unsafe extern "system" fn(
    device: Device,
    p_release_info: *const ReleaseSwapchainImagesInfoEXT,
) -> Result;
#[derive(Clone)]
pub struct ExtSwapchainMaintenance1Fn {
    pub release_swapchain_images_ext: PFN_vkReleaseSwapchainImagesEXT,
}
unsafe impl Send for ExtSwapchainMaintenance1Fn {}
unsafe impl Sync for ExtSwapchainMaintenance1Fn {}
impl ExtSwapchainMaintenance1Fn {
    // Null loader result -> panicking stub, so misuse fails loudly.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            release_swapchain_images_ext: unsafe {
                unsafe extern "system" fn release_swapchain_images_ext(
                    _device: Device,
                    _p_release_info: *const ReleaseSwapchainImagesInfoEXT,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(release_swapchain_images_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkReleaseSwapchainImagesEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    release_swapchain_images_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_EXT_swapchain_maintenance1'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT: Self = Self(1_000_275_000);
    pub const SWAPCHAIN_PRESENT_FENCE_INFO_EXT: Self = Self(1_000_275_001);
    pub const SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT: Self = Self(1_000_275_002);
    pub const SWAPCHAIN_PRESENT_MODE_INFO_EXT: Self = Self(1_000_275_003);
    pub const SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT: Self = Self(1_000_275_004);
    pub const RELEASE_SWAPCHAIN_IMAGES_INFO_EXT: Self = Self(1_000_275_005);
}
#[doc = "Generated from 'VK_EXT_swapchain_maintenance1'"]
// Swapchain-create flag (bit 3) enabling deferred image memory allocation.
impl SwapchainCreateFlagsKHR {
    pub const DEFERRED_MEMORY_ALLOCATION_EXT: Self = Self(0b1000);
}
+impl ExtShaderDemoteToHelperInvocationFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_EXT_shader_demote_to_helper_invocation\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtShaderDemoteToHelperInvocationFn {}
+unsafe impl Send for ExtShaderDemoteToHelperInvocationFn {}
+unsafe impl Sync for ExtShaderDemoteToHelperInvocationFn {}
+impl ExtShaderDemoteToHelperInvocationFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_shader_demote_to_helper_invocation'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT: Self =
+ Self::PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES;
+}
+impl NvDeviceGeneratedCommandsFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_device_generated_commands\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 3u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetGeneratedCommandsMemoryRequirementsNV = unsafe extern "system" fn(
+ device: Device,
+ p_info: *const GeneratedCommandsMemoryRequirementsInfoNV,
+ p_memory_requirements: *mut MemoryRequirements2,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdPreprocessGeneratedCommandsNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ p_generated_commands_info: *const GeneratedCommandsInfoNV,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdExecuteGeneratedCommandsNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ is_preprocessed: Bool32,
+ p_generated_commands_info: *const GeneratedCommandsInfoNV,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBindPipelineShaderGroupNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ pipeline_bind_point: PipelineBindPoint,
+ pipeline: Pipeline,
+ group_index: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateIndirectCommandsLayoutNV = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const IndirectCommandsLayoutCreateInfoNV,
+ p_allocator: *const AllocationCallbacks,
+ p_indirect_commands_layout: *mut IndirectCommandsLayoutNV,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyIndirectCommandsLayoutNV = unsafe extern "system" fn(
+ device: Device,
+ indirect_commands_layout: IndirectCommandsLayoutNV,
+ p_allocator: *const AllocationCallbacks,
+);
+#[derive(Clone)]
+pub struct NvDeviceGeneratedCommandsFn {
+ pub get_generated_commands_memory_requirements_nv:
+ PFN_vkGetGeneratedCommandsMemoryRequirementsNV,
+ pub cmd_preprocess_generated_commands_nv: PFN_vkCmdPreprocessGeneratedCommandsNV,
+ pub cmd_execute_generated_commands_nv: PFN_vkCmdExecuteGeneratedCommandsNV,
+ pub cmd_bind_pipeline_shader_group_nv: PFN_vkCmdBindPipelineShaderGroupNV,
+ pub create_indirect_commands_layout_nv: PFN_vkCreateIndirectCommandsLayoutNV,
+ pub destroy_indirect_commands_layout_nv: PFN_vkDestroyIndirectCommandsLayoutNV,
+}
+unsafe impl Send for NvDeviceGeneratedCommandsFn {}
+unsafe impl Sync for NvDeviceGeneratedCommandsFn {}
+impl NvDeviceGeneratedCommandsFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_generated_commands_memory_requirements_nv: unsafe {
+ unsafe extern "system" fn get_generated_commands_memory_requirements_nv(
+ _device: Device,
+ _p_info: *const GeneratedCommandsMemoryRequirementsInfoNV,
+ _p_memory_requirements: *mut MemoryRequirements2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_generated_commands_memory_requirements_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetGeneratedCommandsMemoryRequirementsNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_generated_commands_memory_requirements_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_preprocess_generated_commands_nv: unsafe {
+ unsafe extern "system" fn cmd_preprocess_generated_commands_nv(
+ _command_buffer: CommandBuffer,
+ _p_generated_commands_info: *const GeneratedCommandsInfoNV,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_preprocess_generated_commands_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdPreprocessGeneratedCommandsNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_preprocess_generated_commands_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_execute_generated_commands_nv: unsafe {
+ unsafe extern "system" fn cmd_execute_generated_commands_nv(
+ _command_buffer: CommandBuffer,
+ _is_preprocessed: Bool32,
+ _p_generated_commands_info: *const GeneratedCommandsInfoNV,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_execute_generated_commands_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdExecuteGeneratedCommandsNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_execute_generated_commands_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_bind_pipeline_shader_group_nv: unsafe {
+ unsafe extern "system" fn cmd_bind_pipeline_shader_group_nv(
+ _command_buffer: CommandBuffer,
+ _pipeline_bind_point: PipelineBindPoint,
+ _pipeline: Pipeline,
+ _group_index: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_bind_pipeline_shader_group_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdBindPipelineShaderGroupNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_bind_pipeline_shader_group_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_indirect_commands_layout_nv: unsafe {
+ unsafe extern "system" fn create_indirect_commands_layout_nv(
+ _device: Device,
+ _p_create_info: *const IndirectCommandsLayoutCreateInfoNV,
+ _p_allocator: *const AllocationCallbacks,
+ _p_indirect_commands_layout: *mut IndirectCommandsLayoutNV,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_indirect_commands_layout_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreateIndirectCommandsLayoutNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_indirect_commands_layout_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_indirect_commands_layout_nv: unsafe {
+ unsafe extern "system" fn destroy_indirect_commands_layout_nv(
+ _device: Device,
+ _indirect_commands_layout: IndirectCommandsLayoutNV,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_indirect_commands_layout_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkDestroyIndirectCommandsLayoutNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_indirect_commands_layout_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_NV_device_generated_commands'"]
+impl AccessFlags {
+ pub const COMMAND_PREPROCESS_READ_NV: Self = Self(0b10_0000_0000_0000_0000);
+ pub const COMMAND_PREPROCESS_WRITE_NV: Self = Self(0b100_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_NV_device_generated_commands'"]
+impl ObjectType {
+ pub const INDIRECT_COMMANDS_LAYOUT_NV: Self = Self(1_000_277_000);
+}
+#[doc = "Generated from 'VK_NV_device_generated_commands'"]
+impl PipelineCreateFlags {
+ pub const INDIRECT_BINDABLE_NV: Self = Self(0b100_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_NV_device_generated_commands'"]
+impl PipelineStageFlags {
+ pub const COMMAND_PREPROCESS_NV: Self = Self(0b10_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_NV_device_generated_commands'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV: Self = Self(1_000_277_000);
+ pub const GRAPHICS_SHADER_GROUP_CREATE_INFO_NV: Self = Self(1_000_277_001);
+ pub const GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV: Self = Self(1_000_277_002);
+ pub const INDIRECT_COMMANDS_LAYOUT_TOKEN_NV: Self = Self(1_000_277_003);
+ pub const INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV: Self = Self(1_000_277_004);
+ pub const GENERATED_COMMANDS_INFO_NV: Self = Self(1_000_277_005);
+ pub const GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV: Self = Self(1_000_277_006);
+ pub const PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV: Self = Self(1_000_277_007);
+}
+impl NvInheritedViewportScissorFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_inherited_viewport_scissor\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct NvInheritedViewportScissorFn {}
+unsafe impl Send for NvInheritedViewportScissorFn {}
+unsafe impl Sync for NvInheritedViewportScissorFn {}
+impl NvInheritedViewportScissorFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_NV_inherited_viewport_scissor'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV: Self = Self(1_000_278_000);
+ pub const COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV: Self = Self(1_000_278_001);
+}
+impl KhrExtension280Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_280\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct KhrExtension280Fn {}
+unsafe impl Send for KhrExtension280Fn {}
+unsafe impl Sync for KhrExtension280Fn {}
+impl KhrExtension280Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl KhrShaderIntegerDotProductFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_integer_dot_product\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrShaderIntegerDotProductFn {}
+unsafe impl Send for KhrShaderIntegerDotProductFn {}
+unsafe impl Sync for KhrShaderIntegerDotProductFn {}
+impl KhrShaderIntegerDotProductFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_shader_integer_dot_product'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR: Self =
+ Self::PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES;
+ pub const PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR: Self =
+ Self::PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES;
+}
+impl ExtTexelBufferAlignmentFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_texel_buffer_alignment\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtTexelBufferAlignmentFn {}
+unsafe impl Send for ExtTexelBufferAlignmentFn {}
+unsafe impl Sync for ExtTexelBufferAlignmentFn {}
+impl ExtTexelBufferAlignmentFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_texel_buffer_alignment'"]
+impl StructureType {
+ #[doc = "Not promoted to 1.3"]
+ pub const PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT: Self = Self(1_000_281_000);
+ pub const PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT: Self =
+ Self::PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES;
+}
+impl QcomRenderPassTransformFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_render_pass_transform\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 3u32;
+}
+#[derive(Clone)]
+pub struct QcomRenderPassTransformFn {}
+unsafe impl Send for QcomRenderPassTransformFn {}
+unsafe impl Sync for QcomRenderPassTransformFn {}
+impl QcomRenderPassTransformFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_QCOM_render_pass_transform'"]
+impl RenderPassCreateFlags {
+ pub const TRANSFORM_QCOM: Self = Self(0b10);
+}
+#[doc = "Generated from 'VK_QCOM_render_pass_transform'"]
+impl StructureType {
+ pub const COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM: Self =
+ Self(1_000_282_000);
+ pub const RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM: Self = Self(1_000_282_001);
+}
+impl ExtExtension284Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_284\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension284Fn {}
+unsafe impl Send for ExtExtension284Fn {}
+unsafe impl Sync for ExtExtension284Fn {}
+impl ExtExtension284Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtDeviceMemoryReportFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_device_memory_report\0") }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct ExtDeviceMemoryReportFn {}
+unsafe impl Send for ExtDeviceMemoryReportFn {}
+unsafe impl Sync for ExtDeviceMemoryReportFn {}
+impl ExtDeviceMemoryReportFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_device_memory_report'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT: Self = Self(1_000_284_000);
+ pub const DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT: Self = Self(1_000_284_001);
+ pub const DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT: Self = Self(1_000_284_002);
+}
+impl ExtAcquireDrmDisplayFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_acquire_drm_display\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkAcquireDrmDisplayEXT = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ drm_fd: i32,
+ display: DisplayKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDrmDisplayEXT = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ drm_fd: i32,
+ connector_id: u32,
+ display: *mut DisplayKHR,
+) -> Result;
+#[derive(Clone)]
+pub struct ExtAcquireDrmDisplayFn {
+ pub acquire_drm_display_ext: PFN_vkAcquireDrmDisplayEXT,
+ pub get_drm_display_ext: PFN_vkGetDrmDisplayEXT,
+}
+unsafe impl Send for ExtAcquireDrmDisplayFn {}
+unsafe impl Sync for ExtAcquireDrmDisplayFn {}
+impl ExtAcquireDrmDisplayFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ acquire_drm_display_ext: unsafe {
+ unsafe extern "system" fn acquire_drm_display_ext(
+ _physical_device: PhysicalDevice,
+ _drm_fd: i32,
+ _display: DisplayKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(acquire_drm_display_ext)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkAcquireDrmDisplayEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ acquire_drm_display_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_drm_display_ext: unsafe {
+ unsafe extern "system" fn get_drm_display_ext(
+ _physical_device: PhysicalDevice,
+ _drm_fd: i32,
+ _connector_id: u32,
+ _display: *mut DisplayKHR,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(get_drm_display_ext)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetDrmDisplayEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_drm_display_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+impl ExtRobustness2Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_robustness2\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtRobustness2Fn {}
+unsafe impl Send for ExtRobustness2Fn {}
+unsafe impl Sync for ExtRobustness2Fn {}
+impl ExtRobustness2Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_robustness2'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT: Self = Self(1_000_286_000);
+ pub const PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT: Self = Self(1_000_286_001);
+}
+impl ExtCustomBorderColorFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_custom_border_color\0") }
+ }
+ pub const SPEC_VERSION: u32 = 12u32;
+}
+#[derive(Clone)]
+pub struct ExtCustomBorderColorFn {}
+unsafe impl Send for ExtCustomBorderColorFn {}
+unsafe impl Sync for ExtCustomBorderColorFn {}
+impl ExtCustomBorderColorFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_custom_border_color'"]
+impl BorderColor {
+ pub const FLOAT_CUSTOM_EXT: Self = Self(1_000_287_003);
+ pub const INT_CUSTOM_EXT: Self = Self(1_000_287_004);
+}
+#[doc = "Generated from 'VK_EXT_custom_border_color'"]
+impl StructureType {
+ pub const SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT: Self = Self(1_000_287_000);
+ pub const PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT: Self = Self(1_000_287_001);
+ pub const PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: Self = Self(1_000_287_002);
+}
+impl ExtExtension289Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_289\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension289Fn {}
+unsafe impl Send for ExtExtension289Fn {}
+unsafe impl Sync for ExtExtension289Fn {}
+impl ExtExtension289Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_extension_289'"]
+impl Format {
+ pub const ASTC_3X3X3_UNORM_BLOCK_EXT: Self = Self(1_000_288_000);
+ pub const ASTC_3X3X3_SRGB_BLOCK_EXT: Self = Self(1_000_288_001);
+ pub const ASTC_3X3X3_SFLOAT_BLOCK_EXT: Self = Self(1_000_288_002);
+ pub const ASTC_4X3X3_UNORM_BLOCK_EXT: Self = Self(1_000_288_003);
+ pub const ASTC_4X3X3_SRGB_BLOCK_EXT: Self = Self(1_000_288_004);
+ pub const ASTC_4X3X3_SFLOAT_BLOCK_EXT: Self = Self(1_000_288_005);
+ pub const ASTC_4X4X3_UNORM_BLOCK_EXT: Self = Self(1_000_288_006);
+ pub const ASTC_4X4X3_SRGB_BLOCK_EXT: Self = Self(1_000_288_007);
+ pub const ASTC_4X4X3_SFLOAT_BLOCK_EXT: Self = Self(1_000_288_008);
+ pub const ASTC_4X4X4_UNORM_BLOCK_EXT: Self = Self(1_000_288_009);
+ pub const ASTC_4X4X4_SRGB_BLOCK_EXT: Self = Self(1_000_288_010);
+ pub const ASTC_4X4X4_SFLOAT_BLOCK_EXT: Self = Self(1_000_288_011);
+ pub const ASTC_5X4X4_UNORM_BLOCK_EXT: Self = Self(1_000_288_012);
+ pub const ASTC_5X4X4_SRGB_BLOCK_EXT: Self = Self(1_000_288_013);
+ pub const ASTC_5X4X4_SFLOAT_BLOCK_EXT: Self = Self(1_000_288_014);
+ pub const ASTC_5X5X4_UNORM_BLOCK_EXT: Self = Self(1_000_288_015);
+ pub const ASTC_5X5X4_SRGB_BLOCK_EXT: Self = Self(1_000_288_016);
+ pub const ASTC_5X5X4_SFLOAT_BLOCK_EXT: Self = Self(1_000_288_017);
+ pub const ASTC_5X5X5_UNORM_BLOCK_EXT: Self = Self(1_000_288_018);
+ pub const ASTC_5X5X5_SRGB_BLOCK_EXT: Self = Self(1_000_288_019);
+ pub const ASTC_5X5X5_SFLOAT_BLOCK_EXT: Self = Self(1_000_288_020);
+ pub const ASTC_6X5X5_UNORM_BLOCK_EXT: Self = Self(1_000_288_021);
+ pub const ASTC_6X5X5_SRGB_BLOCK_EXT: Self = Self(1_000_288_022);
+ pub const ASTC_6X5X5_SFLOAT_BLOCK_EXT: Self = Self(1_000_288_023);
+ pub const ASTC_6X6X5_UNORM_BLOCK_EXT: Self = Self(1_000_288_024);
+ pub const ASTC_6X6X5_SRGB_BLOCK_EXT: Self = Self(1_000_288_025);
+ pub const ASTC_6X6X5_SFLOAT_BLOCK_EXT: Self = Self(1_000_288_026);
+ pub const ASTC_6X6X6_UNORM_BLOCK_EXT: Self = Self(1_000_288_027);
+ pub const ASTC_6X6X6_SRGB_BLOCK_EXT: Self = Self(1_000_288_028);
+ pub const ASTC_6X6X6_SFLOAT_BLOCK_EXT: Self = Self(1_000_288_029);
+}
+impl GoogleUserTypeFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_user_type\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct GoogleUserTypeFn {}
+unsafe impl Send for GoogleUserTypeFn {}
+unsafe impl Sync for GoogleUserTypeFn {}
+impl GoogleUserTypeFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl KhrPipelineLibraryFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_pipeline_library\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrPipelineLibraryFn {}
+unsafe impl Send for KhrPipelineLibraryFn {}
+unsafe impl Sync for KhrPipelineLibraryFn {}
+impl KhrPipelineLibraryFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_pipeline_library'"]
+impl PipelineCreateFlags {
+ pub const LIBRARY_KHR: Self = Self(0b1000_0000_0000);
+}
+#[doc = "Generated from 'VK_KHR_pipeline_library'"]
+impl StructureType {
+ pub const PIPELINE_LIBRARY_CREATE_INFO_KHR: Self = Self(1_000_290_000);
+}
+impl NvExtension292Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_292\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension292Fn {}
+unsafe impl Send for NvExtension292Fn {}
+unsafe impl Sync for NvExtension292Fn {}
+impl NvExtension292Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl NvPresentBarrierFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_present_barrier\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct NvPresentBarrierFn {}
+unsafe impl Send for NvPresentBarrierFn {}
+unsafe impl Sync for NvPresentBarrierFn {}
+impl NvPresentBarrierFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_NV_present_barrier'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV: Self = Self(1_000_292_000);
+ pub const SURFACE_CAPABILITIES_PRESENT_BARRIER_NV: Self = Self(1_000_292_001);
+ pub const SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV: Self = Self(1_000_292_002);
+}
+impl KhrShaderNonSemanticInfoFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_shader_non_semantic_info\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrShaderNonSemanticInfoFn {}
+unsafe impl Send for KhrShaderNonSemanticInfoFn {}
+unsafe impl Sync for KhrShaderNonSemanticInfoFn {}
+impl KhrShaderNonSemanticInfoFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl KhrPresentIdFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_present_id\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrPresentIdFn {}
+unsafe impl Send for KhrPresentIdFn {}
+unsafe impl Sync for KhrPresentIdFn {}
+impl KhrPresentIdFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_present_id'"]
+impl StructureType {
+ pub const PRESENT_ID_KHR: Self = Self(1_000_294_000);
+ pub const PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR: Self = Self(1_000_294_001);
+}
+impl ExtPrivateDataFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_private_data\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreatePrivateDataSlot = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const PrivateDataSlotCreateInfo,
+ p_allocator: *const AllocationCallbacks,
+ p_private_data_slot: *mut PrivateDataSlot,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyPrivateDataSlot = unsafe extern "system" fn(
+ device: Device,
+ private_data_slot: PrivateDataSlot,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkSetPrivateData = unsafe extern "system" fn(
+ device: Device,
+ object_type: ObjectType,
+ object_handle: u64,
+ private_data_slot: PrivateDataSlot,
+ data: u64,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPrivateData = unsafe extern "system" fn(
+ device: Device,
+ object_type: ObjectType,
+ object_handle: u64,
+ private_data_slot: PrivateDataSlot,
+ p_data: *mut u64,
+);
+#[derive(Clone)]
+pub struct ExtPrivateDataFn {
+ pub create_private_data_slot_ext: PFN_vkCreatePrivateDataSlot,
+ pub destroy_private_data_slot_ext: PFN_vkDestroyPrivateDataSlot,
+ pub set_private_data_ext: PFN_vkSetPrivateData,
+ pub get_private_data_ext: PFN_vkGetPrivateData,
+}
+unsafe impl Send for ExtPrivateDataFn {}
+unsafe impl Sync for ExtPrivateDataFn {}
+impl ExtPrivateDataFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_private_data_slot_ext: unsafe {
+ unsafe extern "system" fn create_private_data_slot_ext(
+ _device: Device,
+ _p_create_info: *const PrivateDataSlotCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_private_data_slot: *mut PrivateDataSlot,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_private_data_slot_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreatePrivateDataSlotEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_private_data_slot_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_private_data_slot_ext: unsafe {
+ unsafe extern "system" fn destroy_private_data_slot_ext(
+ _device: Device,
+ _private_data_slot: PrivateDataSlot,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_private_data_slot_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkDestroyPrivateDataSlotEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_private_data_slot_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ set_private_data_ext: unsafe {
+ unsafe extern "system" fn set_private_data_ext(
+ _device: Device,
+ _object_type: ObjectType,
+ _object_handle: u64,
+ _private_data_slot: PrivateDataSlot,
+ _data: u64,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(set_private_data_ext)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkSetPrivateDataEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ set_private_data_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_private_data_ext: unsafe {
+ unsafe extern "system" fn get_private_data_ext(
+ _device: Device,
+ _object_type: ObjectType,
+ _object_handle: u64,
+ _private_data_slot: PrivateDataSlot,
+ _p_data: *mut u64,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(get_private_data_ext)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetPrivateDataEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_private_data_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_private_data'"]
+impl ObjectType {
+ pub const PRIVATE_DATA_SLOT_EXT: Self = Self::PRIVATE_DATA_SLOT;
+}
+#[doc = "Generated from 'VK_EXT_private_data'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT: Self =
+ Self::PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES;
+ pub const DEVICE_PRIVATE_DATA_CREATE_INFO_EXT: Self = Self::DEVICE_PRIVATE_DATA_CREATE_INFO;
+ pub const PRIVATE_DATA_SLOT_CREATE_INFO_EXT: Self = Self::PRIVATE_DATA_SLOT_CREATE_INFO;
+}
+impl KhrExtension297Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_297\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct KhrExtension297Fn {}
+unsafe impl Send for KhrExtension297Fn {}
+unsafe impl Sync for KhrExtension297Fn {}
+impl KhrExtension297Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_extension_297'"]
+impl PipelineShaderStageCreateFlags {
+ pub const RESERVED_3_KHR: Self = Self(0b1000);
+}
+impl ExtPipelineCreationCacheControlFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_EXT_pipeline_creation_cache_control\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 3u32;
+}
+#[derive(Clone)]
+pub struct ExtPipelineCreationCacheControlFn {}
+unsafe impl Send for ExtPipelineCreationCacheControlFn {}
+unsafe impl Sync for ExtPipelineCreationCacheControlFn {}
+impl ExtPipelineCreationCacheControlFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_pipeline_creation_cache_control'"]
+impl PipelineCacheCreateFlags {
+ pub const EXTERNALLY_SYNCHRONIZED_EXT: Self = Self::EXTERNALLY_SYNCHRONIZED;
+}
+#[doc = "Generated from 'VK_EXT_pipeline_creation_cache_control'"]
+impl PipelineCreateFlags {
+ pub const FAIL_ON_PIPELINE_COMPILE_REQUIRED_EXT: Self = Self::FAIL_ON_PIPELINE_COMPILE_REQUIRED;
+ pub const EARLY_RETURN_ON_FAILURE_EXT: Self = Self::EARLY_RETURN_ON_FAILURE;
+}
+#[doc = "Generated from 'VK_EXT_pipeline_creation_cache_control'"]
+impl Result {
+ pub const PIPELINE_COMPILE_REQUIRED_EXT: Self = Self::PIPELINE_COMPILE_REQUIRED;
+ pub const ERROR_PIPELINE_COMPILE_REQUIRED_EXT: Self = Self::PIPELINE_COMPILE_REQUIRED;
+}
+#[doc = "Generated from 'VK_EXT_pipeline_creation_cache_control'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT: Self =
+ Self::PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES;
+}
+impl KhrExtension299Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_299\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct KhrExtension299Fn {}
+unsafe impl Send for KhrExtension299Fn {}
+unsafe impl Sync for KhrExtension299Fn {}
+impl KhrExtension299Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_extension_299'"]
+impl MemoryHeapFlags {
+ pub const RESERVED_2_KHR: Self = Self(0b100);
+}
+#[doc = "Generated from 'VK_KHR_extension_299'"]
+impl PipelineCacheCreateFlags {
+ pub const RESERVED_1_KHR: Self = Self::RESERVED_1_EXT;
+ pub const RESERVED_2_KHR: Self = Self(0b100);
+}
impl KhrVideoEncodeQueueFn {
    // Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_video_encode_queue\0") }
    }
    pub const SPEC_VERSION: u32 = 7u32;
}
// Raw function-pointer signature for vkCmdEncodeVideoKHR, matching the C API.
#[allow(non_camel_case_types)]
pub type PFN_vkCmdEncodeVideoKHR = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    p_encode_info: *const VideoEncodeInfoKHR,
);
// Function table holding the single command introduced by
// VK_KHR_video_encode_queue.
#[derive(Clone)]
pub struct KhrVideoEncodeQueueFn {
    pub cmd_encode_video_khr: PFN_vkCmdEncodeVideoKHR,
}
unsafe impl Send for KhrVideoEncodeQueueFn {}
unsafe impl Sync for KhrVideoEncodeQueueFn {}
impl KhrVideoEncodeQueueFn {
    // Resolves the extension's entry point through `_f` (a
    // vkGet*ProcAddr-style lookup). If the lookup returns null, a panicking
    // placeholder is installed instead so calling an unloaded function fails
    // loudly rather than jumping through a null pointer.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            cmd_encode_video_khr: unsafe {
                unsafe extern "system" fn cmd_encode_video_khr(
                    _command_buffer: CommandBuffer,
                    _p_encode_info: *const VideoEncodeInfoKHR,
                ) {
                    panic!(concat!("Unable to load ", stringify!(cmd_encode_video_khr)))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdEncodeVideoKHR\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_encode_video_khr
                } else {
                    // Reinterpret the raw pointer as the typed fn pointer.
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_KHR_video_encode_queue'"]
impl AccessFlags2 {
    // 64-bit access-mask bits reserved for video-encode reads/writes.
    pub const VIDEO_ENCODE_READ_KHR: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000);
    pub const VIDEO_ENCODE_WRITE_KHR: Self =
        Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_video_encode_queue'"]
impl BufferUsageFlags {
    pub const VIDEO_ENCODE_DST_KHR: Self = Self(0b1000_0000_0000_0000);
    pub const VIDEO_ENCODE_SRC_KHR: Self = Self(0b1_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_video_encode_queue'"]
impl FormatFeatureFlags {
    pub const VIDEO_ENCODE_INPUT_KHR: Self = Self(0b1000_0000_0000_0000_0000_0000_0000);
    pub const VIDEO_ENCODE_DPB_KHR: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_video_encode_queue'"]
impl FormatFeatureFlags2 {
    // Same bit positions as the 32-bit FormatFeatureFlags counterparts above.
    pub const VIDEO_ENCODE_INPUT_KHR: Self = Self(0b1000_0000_0000_0000_0000_0000_0000);
    pub const VIDEO_ENCODE_DPB_KHR: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_video_encode_queue'"]
impl ImageLayout {
    // Enum values allocated from the extension's number block (1000299xxx).
    pub const VIDEO_ENCODE_DST_KHR: Self = Self(1_000_299_000);
    pub const VIDEO_ENCODE_SRC_KHR: Self = Self(1_000_299_001);
    pub const VIDEO_ENCODE_DPB_KHR: Self = Self(1_000_299_002);
}
#[doc = "Generated from 'VK_KHR_video_encode_queue'"]
impl ImageUsageFlags {
    pub const VIDEO_ENCODE_DST_KHR: Self = Self(0b10_0000_0000_0000);
    pub const VIDEO_ENCODE_SRC_KHR: Self = Self(0b100_0000_0000_0000);
    pub const VIDEO_ENCODE_DPB_KHR: Self = Self(0b1000_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_video_encode_queue'"]
impl PipelineStageFlags2 {
    pub const VIDEO_ENCODE_KHR: Self = Self(0b1000_0000_0000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_video_encode_queue'"]
impl QueryType {
    // NOTE(review): name looks truncated relative to the Vulkan constant
    // VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR — presumably the
    // generator's "BIT" stripping; verify against upstream ash before renaming,
    // since this identifier is public API.
    pub const VIDEO_ENCODESTREAM_BUFFER_RANGE_KHR: Self = Self(1_000_299_000);
}
#[doc = "Generated from 'VK_KHR_video_encode_queue'"]
impl QueueFlags {
    pub const VIDEO_ENCODE_KHR: Self = Self(0b100_0000);
}
#[doc = "Generated from 'VK_KHR_video_encode_queue'"]
impl StructureType {
    pub const VIDEO_ENCODE_INFO_KHR: Self = Self(1_000_299_000);
    pub const VIDEO_ENCODE_RATE_CONTROL_INFO_KHR: Self = Self(1_000_299_001);
    pub const VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR: Self = Self(1_000_299_002);
    pub const VIDEO_ENCODE_CAPABILITIES_KHR: Self = Self(1_000_299_003);
    pub const VIDEO_ENCODE_USAGE_INFO_KHR: Self = Self(1_000_299_004);
}
#[doc = "Generated from 'VK_KHR_video_encode_queue'"]
impl VideoCodingControlFlagsKHR {
    pub const ENCODE_RATE_CONTROL: Self = Self(0b10);
    pub const ENCODE_RATE_CONTROL_LAYER: Self = Self(0b100);
}
/// Function table for `VK_NV_device_diagnostics_config`.
/// The extension adds structures only (no commands), so the table is empty.
#[derive(Clone)]
pub struct NvDeviceDiagnosticsConfigFn {}
unsafe impl Send for NvDeviceDiagnosticsConfigFn {}
unsafe impl Sync for NvDeviceDiagnosticsConfigFn {}
impl NvDeviceDiagnosticsConfigFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated.
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_device_diagnostics_config\0")
        }
    }
    pub const SPEC_VERSION: u32 = 2u32;
    /// Nothing to resolve: the loader closure is accepted but never invoked.
    pub fn load<F>(_get_proc_addr: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_NV_device_diagnostics_config'"]
impl StructureType {
    // sType values allocated from the extension's number block (1000300xxx).
    pub const PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV: Self = Self(1_000_300_000);
    pub const DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV: Self = Self(1_000_300_001);
}
/// Function table for `VK_QCOM_render_pass_store_ops`.
/// The extension only adds an enum alias (see `AttachmentStoreOp::NONE_QCOM`),
/// so no function pointers are stored here.
#[derive(Clone)]
pub struct QcomRenderPassStoreOpsFn {}
unsafe impl Send for QcomRenderPassStoreOpsFn {}
unsafe impl Sync for QcomRenderPassStoreOpsFn {}
impl QcomRenderPassStoreOpsFn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated.
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_render_pass_store_ops\0")
        }
    }
    pub const SPEC_VERSION: u32 = 2u32;
    /// Nothing to resolve: the loader closure is accepted but never invoked.
    pub fn load<F>(_get_proc_addr: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_QCOM_render_pass_store_ops'"]
impl AttachmentStoreOp {
    // The QCOM-suffixed name aliases the core NONE store op.
    pub const NONE_QCOM: Self = Self::NONE;
}
/// Stub table for the reserved placeholder extension `VK_QCOM_extension_303`
/// (no commands defined).
#[derive(Clone)]
pub struct QcomExtension303Fn {}
unsafe impl Send for QcomExtension303Fn {}
unsafe impl Sync for QcomExtension303Fn {}
impl QcomExtension303Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_extension_303\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// Nothing to resolve; the loader closure is never invoked.
    pub fn load<F>(_get_proc_addr: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// Stub table for the reserved placeholder extension `VK_QCOM_extension_304`
/// (no commands defined).
#[derive(Clone)]
pub struct QcomExtension304Fn {}
unsafe impl Send for QcomExtension304Fn {}
unsafe impl Sync for QcomExtension304Fn {}
impl QcomExtension304Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_extension_304\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// Nothing to resolve; the loader closure is never invoked.
    pub fn load<F>(_get_proc_addr: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// Stub table for the reserved placeholder extension `VK_QCOM_extension_305`
/// (no commands defined).
#[derive(Clone)]
pub struct QcomExtension305Fn {}
unsafe impl Send for QcomExtension305Fn {}
unsafe impl Sync for QcomExtension305Fn {}
impl QcomExtension305Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_extension_305\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// Nothing to resolve; the loader closure is never invoked.
    pub fn load<F>(_get_proc_addr: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// Stub table for the reserved placeholder extension `VK_QCOM_extension_306`
/// (no commands defined).
#[derive(Clone)]
pub struct QcomExtension306Fn {}
unsafe impl Send for QcomExtension306Fn {}
unsafe impl Sync for QcomExtension306Fn {}
impl QcomExtension306Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_extension_306\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// Nothing to resolve; the loader closure is never invoked.
    pub fn load<F>(_get_proc_addr: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// Stub table for the reserved placeholder extension `VK_QCOM_extension_307`
/// (no commands defined).
#[derive(Clone)]
pub struct QcomExtension307Fn {}
unsafe impl Send for QcomExtension307Fn {}
unsafe impl Sync for QcomExtension307Fn {}
impl QcomExtension307Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_extension_307\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// Nothing to resolve; the loader closure is never invoked.
    pub fn load<F>(_get_proc_addr: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// Stub table for the reserved placeholder extension `VK_NV_extension_308`
/// (no commands defined).
#[derive(Clone)]
pub struct NvExtension308Fn {}
unsafe impl Send for NvExtension308Fn {}
unsafe impl Sync for NvExtension308Fn {}
impl NvExtension308Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_308\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// Nothing to resolve; the loader closure is never invoked.
    pub fn load<F>(_get_proc_addr: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// Stub table for the reserved placeholder extension `VK_KHR_extension_309`
/// (no commands defined).
#[derive(Clone)]
pub struct KhrExtension309Fn {}
unsafe impl Send for KhrExtension309Fn {}
unsafe impl Sync for KhrExtension309Fn {}
impl KhrExtension309Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_309\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// Nothing to resolve; the loader closure is never invoked.
    pub fn load<F>(_get_proc_addr: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// Stub table for the reserved placeholder extension `VK_QCOM_extension_310`
/// (no commands defined).
#[derive(Clone)]
pub struct QcomExtension310Fn {}
unsafe impl Send for QcomExtension310Fn {}
unsafe impl Sync for QcomExtension310Fn {}
impl QcomExtension310Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_extension_310\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// Nothing to resolve; the loader closure is never invoked.
    pub fn load<F>(_get_proc_addr: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_QCOM_extension_310'"]
impl StructureType {
    // Reserved sType value from the placeholder extension's number block.
    pub const RESERVED_QCOM: Self = Self(1_000_309_000);
}
/// Stub table for the reserved placeholder extension `VK_NV_extension_311`
/// (no commands defined).
#[derive(Clone)]
pub struct NvExtension311Fn {}
unsafe impl Send for NvExtension311Fn {}
unsafe impl Sync for NvExtension311Fn {}
impl NvExtension311Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_311\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// Nothing to resolve; the loader closure is never invoked.
    pub fn load<F>(_get_proc_addr: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl ExtMetalObjectsFn {
    // Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_metal_objects\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
// Raw function-pointer signature for vkExportMetalObjectsEXT.
#[allow(non_camel_case_types)]
pub type PFN_vkExportMetalObjectsEXT =
    unsafe extern "system" fn(device: Device, p_metal_objects_info: *mut ExportMetalObjectsInfoEXT);
// Function table holding the single command of VK_EXT_metal_objects.
#[derive(Clone)]
pub struct ExtMetalObjectsFn {
    pub export_metal_objects_ext: PFN_vkExportMetalObjectsEXT,
}
unsafe impl Send for ExtMetalObjectsFn {}
unsafe impl Sync for ExtMetalObjectsFn {}
impl ExtMetalObjectsFn {
    // Resolves the entry point through `_f`; installs a panicking fallback
    // when the lookup returns null so calls to an unloaded function fail
    // loudly instead of dereferencing null.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            export_metal_objects_ext: unsafe {
                unsafe extern "system" fn export_metal_objects_ext(
                    _device: Device,
                    _p_metal_objects_info: *mut ExportMetalObjectsInfoEXT,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(export_metal_objects_ext)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkExportMetalObjectsEXT\0");
                let val = _f(cname);
                if val.is_null() {
                    export_metal_objects_ext
                } else {
                    // Reinterpret the raw pointer as the typed fn pointer.
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_EXT_metal_objects'"]
impl StructureType {
    // sType values for the export/import info structs, allocated sequentially
    // from the extension's number block (1000311xxx).
    pub const EXPORT_METAL_OBJECT_CREATE_INFO_EXT: Self = Self(1_000_311_000);
    pub const EXPORT_METAL_OBJECTS_INFO_EXT: Self = Self(1_000_311_001);
    pub const EXPORT_METAL_DEVICE_INFO_EXT: Self = Self(1_000_311_002);
    pub const EXPORT_METAL_COMMAND_QUEUE_INFO_EXT: Self = Self(1_000_311_003);
    pub const EXPORT_METAL_BUFFER_INFO_EXT: Self = Self(1_000_311_004);
    pub const IMPORT_METAL_BUFFER_INFO_EXT: Self = Self(1_000_311_005);
    pub const EXPORT_METAL_TEXTURE_INFO_EXT: Self = Self(1_000_311_006);
    pub const IMPORT_METAL_TEXTURE_INFO_EXT: Self = Self(1_000_311_007);
    pub const EXPORT_METAL_IO_SURFACE_INFO_EXT: Self = Self(1_000_311_008);
    pub const IMPORT_METAL_IO_SURFACE_INFO_EXT: Self = Self(1_000_311_009);
    pub const EXPORT_METAL_SHARED_EVENT_INFO_EXT: Self = Self(1_000_311_010);
    pub const IMPORT_METAL_SHARED_EVENT_INFO_EXT: Self = Self(1_000_311_011);
}
/// Stub table for the reserved placeholder extension `VK_EXT_extension_313`
/// (no commands defined).
#[derive(Clone)]
pub struct ExtExtension313Fn {}
unsafe impl Send for ExtExtension313Fn {}
unsafe impl Sync for ExtExtension313Fn {}
impl ExtExtension313Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_313\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// Nothing to resolve; the loader closure is never invoked.
    pub fn load<F>(_get_proc_addr: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// Stub table for the reserved placeholder extension `VK_AMD_extension_314`
/// (no commands defined).
#[derive(Clone)]
pub struct AmdExtension314Fn {}
unsafe impl Send for AmdExtension314Fn {}
unsafe impl Sync for AmdExtension314Fn {}
impl AmdExtension314Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_314\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// Nothing to resolve; the loader closure is never invoked.
    pub fn load<F>(_get_proc_addr: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl KhrSynchronization2Fn {
    // Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_synchronization2\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
// Raw function-pointer signatures for the synchronization2 commands,
// matching the C API (the core, unsuffixed names — the KHR entry points
// share these signatures).
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetEvent2 = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    event: Event,
    p_dependency_info: *const DependencyInfo,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdResetEvent2 = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    event: Event,
    stage_mask: PipelineStageFlags2,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdWaitEvents2 = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    event_count: u32,
    p_events: *const Event,
    p_dependency_infos: *const DependencyInfo,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdPipelineBarrier2 = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    p_dependency_info: *const DependencyInfo,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdWriteTimestamp2 = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    stage: PipelineStageFlags2,
    query_pool: QueryPool,
    query: u32,
);
#[allow(non_camel_case_types)]
pub type PFN_vkQueueSubmit2 = unsafe extern "system" fn(
    queue: Queue,
    submit_count: u32,
    p_submits: *const SubmitInfo2,
    fence: Fence,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdWriteBufferMarker2AMD = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    stage: PipelineStageFlags2,
    dst_buffer: Buffer,
    dst_offset: DeviceSize,
    marker: u32,
);
#[allow(non_camel_case_types)]
pub type PFN_vkGetQueueCheckpointData2NV = unsafe extern "system" fn(
    queue: Queue,
    p_checkpoint_data_count: *mut u32,
    p_checkpoint_data: *mut CheckpointData2NV,
);
// Function table for VK_KHR_synchronization2, including the interaction
// commands contributed by AMD buffer markers and NV checkpoints.
#[derive(Clone)]
pub struct KhrSynchronization2Fn {
    pub cmd_set_event2_khr: PFN_vkCmdSetEvent2,
    pub cmd_reset_event2_khr: PFN_vkCmdResetEvent2,
    pub cmd_wait_events2_khr: PFN_vkCmdWaitEvents2,
    pub cmd_pipeline_barrier2_khr: PFN_vkCmdPipelineBarrier2,
    pub cmd_write_timestamp2_khr: PFN_vkCmdWriteTimestamp2,
    pub queue_submit2_khr: PFN_vkQueueSubmit2,
    pub cmd_write_buffer_marker2_amd: PFN_vkCmdWriteBufferMarker2AMD,
    pub get_queue_checkpoint_data2_nv: PFN_vkGetQueueCheckpointData2NV,
}
unsafe impl Send for KhrSynchronization2Fn {}
unsafe impl Sync for KhrSynchronization2Fn {}
impl KhrSynchronization2Fn {
    // Resolves each entry point through `_f` (a vkGet*ProcAddr-style lookup).
    // For every command, a panicking placeholder is installed when the lookup
    // returns null, so calling an unloaded function fails loudly instead of
    // jumping through a null pointer.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            cmd_set_event2_khr: unsafe {
                unsafe extern "system" fn cmd_set_event2_khr(
                    _command_buffer: CommandBuffer,
                    _event: Event,
                    _p_dependency_info: *const DependencyInfo,
                ) {
                    panic!(concat!("Unable to load ", stringify!(cmd_set_event2_khr)))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetEvent2KHR\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_event2_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_reset_event2_khr: unsafe {
                unsafe extern "system" fn cmd_reset_event2_khr(
                    _command_buffer: CommandBuffer,
                    _event: Event,
                    _stage_mask: PipelineStageFlags2,
                ) {
                    panic!(concat!("Unable to load ", stringify!(cmd_reset_event2_khr)))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdResetEvent2KHR\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_reset_event2_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_wait_events2_khr: unsafe {
                unsafe extern "system" fn cmd_wait_events2_khr(
                    _command_buffer: CommandBuffer,
                    _event_count: u32,
                    _p_events: *const Event,
                    _p_dependency_infos: *const DependencyInfo,
                ) {
                    panic!(concat!("Unable to load ", stringify!(cmd_wait_events2_khr)))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdWaitEvents2KHR\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_wait_events2_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_pipeline_barrier2_khr: unsafe {
                unsafe extern "system" fn cmd_pipeline_barrier2_khr(
                    _command_buffer: CommandBuffer,
                    _p_dependency_info: *const DependencyInfo,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_pipeline_barrier2_khr)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdPipelineBarrier2KHR\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_pipeline_barrier2_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_write_timestamp2_khr: unsafe {
                unsafe extern "system" fn cmd_write_timestamp2_khr(
                    _command_buffer: CommandBuffer,
                    _stage: PipelineStageFlags2,
                    _query_pool: QueryPool,
                    _query: u32,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_write_timestamp2_khr)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdWriteTimestamp2KHR\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_write_timestamp2_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            queue_submit2_khr: unsafe {
                unsafe extern "system" fn queue_submit2_khr(
                    _queue: Queue,
                    _submit_count: u32,
                    _p_submits: *const SubmitInfo2,
                    _fence: Fence,
                ) -> Result {
                    panic!(concat!("Unable to load ", stringify!(queue_submit2_khr)))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkQueueSubmit2KHR\0");
                let val = _f(cname);
                if val.is_null() {
                    queue_submit2_khr
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_write_buffer_marker2_amd: unsafe {
                unsafe extern "system" fn cmd_write_buffer_marker2_amd(
                    _command_buffer: CommandBuffer,
                    _stage: PipelineStageFlags2,
                    _dst_buffer: Buffer,
                    _dst_offset: DeviceSize,
                    _marker: u32,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_write_buffer_marker2_amd)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdWriteBufferMarker2AMD\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_write_buffer_marker2_amd
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_queue_checkpoint_data2_nv: unsafe {
                unsafe extern "system" fn get_queue_checkpoint_data2_nv(
                    _queue: Queue,
                    _p_checkpoint_data_count: *mut u32,
                    _p_checkpoint_data: *mut CheckpointData2NV,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_queue_checkpoint_data2_nv)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetQueueCheckpointData2NV\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_queue_checkpoint_data2_nv
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_KHR_synchronization2'"]
impl AccessFlags {
    // Promoted-to-core alias for the KHR-suffixed name.
    pub const NONE_KHR: Self = Self::NONE;
}
#[doc = "Generated from 'VK_KHR_synchronization2'"]
impl AccessFlags2 {
    // Cross-extension access bits re-expressed in the 64-bit AccessFlags2
    // space; raw values mirror the Vulkan registry allocations.
    pub const TRANSFORM_FEEDBACK_WRITE_EXT: Self = Self(0b10_0000_0000_0000_0000_0000_0000);
    pub const TRANSFORM_FEEDBACK_COUNTER_READ_EXT: Self = Self(0b100_0000_0000_0000_0000_0000_0000);
    pub const TRANSFORM_FEEDBACK_COUNTER_WRITE_EXT: Self =
        Self(0b1000_0000_0000_0000_0000_0000_0000);
    #[doc = "read access flag for reading conditional rendering predicate"]
    pub const CONDITIONAL_RENDERING_READ_EXT: Self = Self(0b1_0000_0000_0000_0000_0000);
    pub const COMMAND_PREPROCESS_READ_NV: Self = Self(0b10_0000_0000_0000_0000);
    pub const COMMAND_PREPROCESS_WRITE_NV: Self = Self(0b100_0000_0000_0000_0000);
    pub const FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR: Self =
        Self(0b1000_0000_0000_0000_0000_0000);
    // NV shading-rate-image names alias the KHR fragment-shading-rate bits.
    pub const SHADING_RATE_IMAGE_READ_NV: Self = Self::FRAGMENT_SHADING_RATE_ATTACHMENT_READ_KHR;
    pub const ACCELERATION_STRUCTURE_READ_KHR: Self = Self(0b10_0000_0000_0000_0000_0000);
    pub const ACCELERATION_STRUCTURE_WRITE_KHR: Self = Self(0b100_0000_0000_0000_0000_0000);
    // Legacy NV ray-tracing names alias the KHR acceleration-structure bits.
    pub const ACCELERATION_STRUCTURE_READ_NV: Self = Self::ACCELERATION_STRUCTURE_READ_KHR;
    pub const ACCELERATION_STRUCTURE_WRITE_NV: Self = Self::ACCELERATION_STRUCTURE_WRITE_KHR;
    pub const FRAGMENT_DENSITY_MAP_READ_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000);
    pub const COLOR_ATTACHMENT_READ_NONCOHERENT_EXT: Self = Self(0b1000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_synchronization2'"]
impl EventCreateFlags {
    pub const DEVICE_ONLY_KHR: Self = Self::DEVICE_ONLY;
}
#[doc = "Generated from 'VK_KHR_synchronization2'"]
impl ImageLayout {
    // Promoted-to-core aliases.
    pub const READ_ONLY_OPTIMAL_KHR: Self = Self::READ_ONLY_OPTIMAL;
    pub const ATTACHMENT_OPTIMAL_KHR: Self = Self::ATTACHMENT_OPTIMAL;
}
#[doc = "Generated from 'VK_KHR_synchronization2'"]
impl PipelineStageFlags {
    pub const NONE_KHR: Self = Self::NONE;
}
#[doc = "Generated from 'VK_KHR_synchronization2'"]
impl PipelineStageFlags2 {
    // Cross-extension stage bits in the 64-bit PipelineStageFlags2 space.
    pub const TRANSFORM_FEEDBACK_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000);
    #[doc = "A pipeline stage for conditional rendering predicate fetch"]
    pub const CONDITIONAL_RENDERING_EXT: Self = Self(0b100_0000_0000_0000_0000);
    pub const COMMAND_PREPROCESS_NV: Self = Self(0b10_0000_0000_0000_0000);
    pub const FRAGMENT_SHADING_RATE_ATTACHMENT_KHR: Self = Self(0b100_0000_0000_0000_0000_0000);
    pub const SHADING_RATE_IMAGE_NV: Self = Self::FRAGMENT_SHADING_RATE_ATTACHMENT_KHR;
    pub const ACCELERATION_STRUCTURE_BUILD_KHR: Self = Self(0b10_0000_0000_0000_0000_0000_0000);
    pub const RAY_TRACING_SHADER_KHR: Self = Self(0b10_0000_0000_0000_0000_0000);
    pub const RAY_TRACING_SHADER_NV: Self = Self::RAY_TRACING_SHADER_KHR;
    pub const ACCELERATION_STRUCTURE_BUILD_NV: Self = Self::ACCELERATION_STRUCTURE_BUILD_KHR;
    pub const FRAGMENT_DENSITY_PROCESS_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000);
    // Note: the NV aliases below reference the EXT constants declared after
    // them — legal for associated consts, which have no ordering requirement.
    pub const TASK_SHADER_NV: Self = Self::TASK_SHADER_EXT;
    pub const MESH_SHADER_NV: Self = Self::MESH_SHADER_EXT;
    pub const TASK_SHADER_EXT: Self = Self(0b1000_0000_0000_0000_0000);
    pub const MESH_SHADER_EXT: Self = Self(0b1_0000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_KHR_synchronization2'"]
impl StructureType {
    // Promoted-to-core aliases, plus two NV-specific sType values that were
    // never promoted and keep their registry numbers.
    pub const MEMORY_BARRIER_2_KHR: Self = Self::MEMORY_BARRIER_2;
    pub const BUFFER_MEMORY_BARRIER_2_KHR: Self = Self::BUFFER_MEMORY_BARRIER_2;
    pub const IMAGE_MEMORY_BARRIER_2_KHR: Self = Self::IMAGE_MEMORY_BARRIER_2;
    pub const DEPENDENCY_INFO_KHR: Self = Self::DEPENDENCY_INFO;
    pub const SUBMIT_INFO_2_KHR: Self = Self::SUBMIT_INFO_2;
    pub const SEMAPHORE_SUBMIT_INFO_KHR: Self = Self::SEMAPHORE_SUBMIT_INFO;
    pub const COMMAND_BUFFER_SUBMIT_INFO_KHR: Self = Self::COMMAND_BUFFER_SUBMIT_INFO;
    pub const PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR: Self =
        Self::PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES;
    pub const QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV: Self = Self(1_000_314_008);
    pub const CHECKPOINT_DATA_2_NV: Self = Self(1_000_314_009);
}
/// Stub table for the reserved placeholder extension `VK_AMD_extension_316`
/// (no commands defined).
#[derive(Clone)]
pub struct AmdExtension316Fn {}
unsafe impl Send for AmdExtension316Fn {}
unsafe impl Sync for AmdExtension316Fn {}
impl AmdExtension316Fn {
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated.
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_316\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// Nothing to resolve; the loader closure is never invoked.
    pub fn load<F>(_get_proc_addr: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl ExtDescriptorBufferFn {
    // Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_descriptor_buffer\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
// Raw function-pointer signatures for the VK_EXT_descriptor_buffer commands,
// matching the C API.
#[allow(non_camel_case_types)]
pub type PFN_vkGetDescriptorSetLayoutSizeEXT = unsafe extern "system" fn(
    device: Device,
    layout: DescriptorSetLayout,
    p_layout_size_in_bytes: *mut DeviceSize,
);
#[allow(non_camel_case_types)]
pub type PFN_vkGetDescriptorSetLayoutBindingOffsetEXT = unsafe extern "system" fn(
    device: Device,
    layout: DescriptorSetLayout,
    binding: u32,
    p_offset: *mut DeviceSize,
);
#[allow(non_camel_case_types)]
pub type PFN_vkGetDescriptorEXT = unsafe extern "system" fn(
    device: Device,
    p_descriptor_info: *const DescriptorGetInfoEXT,
    data_size: usize,
    p_descriptor: *mut c_void,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBindDescriptorBuffersEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    buffer_count: u32,
    p_binding_infos: *const DescriptorBufferBindingInfoEXT,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetDescriptorBufferOffsetsEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    pipeline_bind_point: PipelineBindPoint,
    layout: PipelineLayout,
    first_set: u32,
    set_count: u32,
    p_buffer_indices: *const u32,
    p_offsets: *const DeviceSize,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    pipeline_bind_point: PipelineBindPoint,
    layout: PipelineLayout,
    set: u32,
);
// The five "opaque capture descriptor data" getters below share the same
// shape: a *const info struct in, raw bytes out through p_data, Result back.
#[allow(non_camel_case_types)]
pub type PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT = unsafe extern "system" fn(
    device: Device,
    p_info: *const BufferCaptureDescriptorDataInfoEXT,
    p_data: *mut c_void,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetImageOpaqueCaptureDescriptorDataEXT = unsafe extern "system" fn(
    device: Device,
    p_info: *const ImageCaptureDescriptorDataInfoEXT,
    p_data: *mut c_void,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT = unsafe extern "system" fn(
    device: Device,
    p_info: *const ImageViewCaptureDescriptorDataInfoEXT,
    p_data: *mut c_void,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT = unsafe extern "system" fn(
    device: Device,
    p_info: *const SamplerCaptureDescriptorDataInfoEXT,
    p_data: *mut c_void,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT =
    unsafe extern "system" fn(
        device: Device,
        p_info: *const AccelerationStructureCaptureDescriptorDataInfoEXT,
        p_data: *mut c_void,
    ) -> Result;
// Function table for VK_EXT_descriptor_buffer.
#[derive(Clone)]
pub struct ExtDescriptorBufferFn {
    pub get_descriptor_set_layout_size_ext: PFN_vkGetDescriptorSetLayoutSizeEXT,
    pub get_descriptor_set_layout_binding_offset_ext: PFN_vkGetDescriptorSetLayoutBindingOffsetEXT,
    pub get_descriptor_ext: PFN_vkGetDescriptorEXT,
    pub cmd_bind_descriptor_buffers_ext: PFN_vkCmdBindDescriptorBuffersEXT,
    pub cmd_set_descriptor_buffer_offsets_ext: PFN_vkCmdSetDescriptorBufferOffsetsEXT,
    pub cmd_bind_descriptor_buffer_embedded_samplers_ext:
        PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT,
    pub get_buffer_opaque_capture_descriptor_data_ext:
        PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT,
    pub get_image_opaque_capture_descriptor_data_ext: PFN_vkGetImageOpaqueCaptureDescriptorDataEXT,
    pub get_image_view_opaque_capture_descriptor_data_ext:
        PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT,
    pub get_sampler_opaque_capture_descriptor_data_ext:
        PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT,
    pub get_acceleration_structure_opaque_capture_descriptor_data_ext:
        PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT,
}
unsafe impl Send for ExtDescriptorBufferFn {}
unsafe impl Sync for ExtDescriptorBufferFn {}
+impl ExtDescriptorBufferFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_descriptor_set_layout_size_ext: unsafe {
+ unsafe extern "system" fn get_descriptor_set_layout_size_ext(
+ _device: Device,
+ _layout: DescriptorSetLayout,
+ _p_layout_size_in_bytes: *mut DeviceSize,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_descriptor_set_layout_size_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDescriptorSetLayoutSizeEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_descriptor_set_layout_size_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_descriptor_set_layout_binding_offset_ext: unsafe {
+ unsafe extern "system" fn get_descriptor_set_layout_binding_offset_ext(
+ _device: Device,
+ _layout: DescriptorSetLayout,
+ _binding: u32,
+ _p_offset: *mut DeviceSize,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_descriptor_set_layout_binding_offset_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDescriptorSetLayoutBindingOffsetEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_descriptor_set_layout_binding_offset_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_descriptor_ext: unsafe {
+ unsafe extern "system" fn get_descriptor_ext(
+ _device: Device,
+ _p_descriptor_info: *const DescriptorGetInfoEXT,
+ _data_size: usize,
+ _p_descriptor: *mut c_void,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(get_descriptor_ext)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetDescriptorEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_descriptor_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_bind_descriptor_buffers_ext: unsafe {
+ unsafe extern "system" fn cmd_bind_descriptor_buffers_ext(
+ _command_buffer: CommandBuffer,
+ _buffer_count: u32,
+ _p_binding_infos: *const DescriptorBufferBindingInfoEXT,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_bind_descriptor_buffers_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdBindDescriptorBuffersEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_bind_descriptor_buffers_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_descriptor_buffer_offsets_ext: unsafe {
+ unsafe extern "system" fn cmd_set_descriptor_buffer_offsets_ext(
+ _command_buffer: CommandBuffer,
+ _pipeline_bind_point: PipelineBindPoint,
+ _layout: PipelineLayout,
+ _first_set: u32,
+ _set_count: u32,
+ _p_buffer_indices: *const u32,
+ _p_offsets: *const DeviceSize,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_descriptor_buffer_offsets_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetDescriptorBufferOffsetsEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_descriptor_buffer_offsets_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_bind_descriptor_buffer_embedded_samplers_ext: unsafe {
+ unsafe extern "system" fn cmd_bind_descriptor_buffer_embedded_samplers_ext(
+ _command_buffer: CommandBuffer,
+ _pipeline_bind_point: PipelineBindPoint,
+ _layout: PipelineLayout,
+ _set: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_bind_descriptor_buffer_embedded_samplers_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdBindDescriptorBufferEmbeddedSamplersEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_bind_descriptor_buffer_embedded_samplers_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_buffer_opaque_capture_descriptor_data_ext: unsafe {
+ unsafe extern "system" fn get_buffer_opaque_capture_descriptor_data_ext(
+ _device: Device,
+ _p_info: *const BufferCaptureDescriptorDataInfoEXT,
+ _p_data: *mut c_void,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_buffer_opaque_capture_descriptor_data_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetBufferOpaqueCaptureDescriptorDataEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_buffer_opaque_capture_descriptor_data_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_image_opaque_capture_descriptor_data_ext: unsafe {
+ unsafe extern "system" fn get_image_opaque_capture_descriptor_data_ext(
+ _device: Device,
+ _p_info: *const ImageCaptureDescriptorDataInfoEXT,
+ _p_data: *mut c_void,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_image_opaque_capture_descriptor_data_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetImageOpaqueCaptureDescriptorDataEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_image_opaque_capture_descriptor_data_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_image_view_opaque_capture_descriptor_data_ext: unsafe {
+ unsafe extern "system" fn get_image_view_opaque_capture_descriptor_data_ext(
+ _device: Device,
+ _p_info: *const ImageViewCaptureDescriptorDataInfoEXT,
+ _p_data: *mut c_void,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_image_view_opaque_capture_descriptor_data_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetImageViewOpaqueCaptureDescriptorDataEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_image_view_opaque_capture_descriptor_data_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_sampler_opaque_capture_descriptor_data_ext: unsafe {
+ unsafe extern "system" fn get_sampler_opaque_capture_descriptor_data_ext(
+ _device: Device,
+ _p_info: *const SamplerCaptureDescriptorDataInfoEXT,
+ _p_data: *mut c_void,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_sampler_opaque_capture_descriptor_data_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetSamplerOpaqueCaptureDescriptorDataEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_sampler_opaque_capture_descriptor_data_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_acceleration_structure_opaque_capture_descriptor_data_ext: unsafe {
+ unsafe extern "system" fn get_acceleration_structure_opaque_capture_descriptor_data_ext(
+ _device: Device,
+ _p_info: *const AccelerationStructureCaptureDescriptorDataInfoEXT,
+ _p_data: *mut c_void,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_acceleration_structure_opaque_capture_descriptor_data_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_acceleration_structure_opaque_capture_descriptor_data_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_descriptor_buffer'"]
+impl AccelerationStructureCreateFlagsKHR {
+ pub const DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT: Self = Self(0b1000);
+}
+#[doc = "Generated from 'VK_EXT_descriptor_buffer'"]
+impl AccessFlags2 {
+ pub const DESCRIPTOR_BUFFER_READ_EXT: Self =
+ Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_descriptor_buffer'"]
+impl BufferCreateFlags {
+ pub const DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT: Self = Self(0b10_0000);
+}
+#[doc = "Generated from 'VK_EXT_descriptor_buffer'"]
+impl BufferUsageFlags {
+ pub const SAMPLER_DESCRIPTOR_BUFFER_EXT: Self = Self(0b10_0000_0000_0000_0000_0000);
+ pub const RESOURCE_DESCRIPTOR_BUFFER_EXT: Self = Self(0b100_0000_0000_0000_0000_0000);
+ pub const PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_EXT: Self =
+ Self(0b100_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_descriptor_buffer'"]
+impl DescriptorSetLayoutCreateFlags {
+ pub const DESCRIPTOR_BUFFER_EXT: Self = Self(0b1_0000);
+ pub const EMBEDDED_IMMUTABLE_SAMPLERS_EXT: Self = Self(0b10_0000);
+}
+#[doc = "Generated from 'VK_EXT_descriptor_buffer'"]
+impl ImageCreateFlags {
+ pub const DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT: Self = Self(0b1_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_descriptor_buffer'"]
+impl ImageViewCreateFlags {
+ pub const DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT: Self = Self(0b100);
+}
+#[doc = "Generated from 'VK_EXT_descriptor_buffer'"]
+impl PipelineCreateFlags {
+ pub const DESCRIPTOR_BUFFER_EXT: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_descriptor_buffer'"]
+impl SamplerCreateFlags {
+ pub const DESCRIPTOR_BUFFER_CAPTURE_REPLAY_EXT: Self = Self(0b1000);
+}
+#[doc = "Generated from 'VK_EXT_descriptor_buffer'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT: Self = Self(1_000_316_000);
+ pub const PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT: Self =
+ Self(1_000_316_001);
+ pub const PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT: Self = Self(1_000_316_002);
+ pub const DESCRIPTOR_ADDRESS_INFO_EXT: Self = Self(1_000_316_003);
+ pub const DESCRIPTOR_GET_INFO_EXT: Self = Self(1_000_316_004);
+ pub const BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: Self = Self(1_000_316_005);
+ pub const IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: Self = Self(1_000_316_006);
+ pub const IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: Self = Self(1_000_316_007);
+ pub const SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: Self = Self(1_000_316_008);
+ pub const OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT: Self = Self(1_000_316_010);
+ pub const DESCRIPTOR_BUFFER_BINDING_INFO_EXT: Self = Self(1_000_316_011);
+ pub const DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT: Self =
+ Self(1_000_316_012);
+ pub const ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT: Self = Self(1_000_316_009);
+}
+impl AmdExtension318Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_318\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension318Fn {}
+unsafe impl Send for AmdExtension318Fn {}
+unsafe impl Sync for AmdExtension318Fn {}
+impl AmdExtension318Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdExtension319Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_319\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension319Fn {}
+unsafe impl Send for AmdExtension319Fn {}
+unsafe impl Sync for AmdExtension319Fn {}
+impl AmdExtension319Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_AMD_extension_319'"]
+impl DescriptorSetLayoutCreateFlags {
+ pub const RESERVED_3_AMD: Self = Self(0b1000);
+}
+#[doc = "Generated from 'VK_AMD_extension_319'"]
+impl PipelineLayoutCreateFlags {
+ pub const RESERVED_0_AMD: Self = Self(0b1);
+}
+impl AmdExtension320Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_320\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension320Fn {}
+unsafe impl Send for AmdExtension320Fn {}
+unsafe impl Sync for AmdExtension320Fn {}
+impl AmdExtension320Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtGraphicsPipelineLibraryFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_graphics_pipeline_library\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtGraphicsPipelineLibraryFn {}
+unsafe impl Send for ExtGraphicsPipelineLibraryFn {}
+unsafe impl Sync for ExtGraphicsPipelineLibraryFn {}
+impl ExtGraphicsPipelineLibraryFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_graphics_pipeline_library'"]
+impl PipelineCreateFlags {
+ pub const RETAIN_LINK_TIME_OPTIMIZATION_INFO_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000);
+ pub const LINK_TIME_OPTIMIZATION_EXT: Self = Self(0b100_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_graphics_pipeline_library'"]
+impl PipelineLayoutCreateFlags {
+ pub const INDEPENDENT_SETS_EXT: Self = Self(0b10);
+}
+#[doc = "Generated from 'VK_EXT_graphics_pipeline_library'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT: Self = Self(1_000_320_000);
+ pub const PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT: Self = Self(1_000_320_001);
+ pub const GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT: Self = Self(1_000_320_002);
+}
+impl AmdShaderEarlyAndLateFragmentTestsFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_AMD_shader_early_and_late_fragment_tests\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct AmdShaderEarlyAndLateFragmentTestsFn {}
+unsafe impl Send for AmdShaderEarlyAndLateFragmentTestsFn {}
+unsafe impl Sync for AmdShaderEarlyAndLateFragmentTestsFn {}
+impl AmdShaderEarlyAndLateFragmentTestsFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_AMD_shader_early_and_late_fragment_tests'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD: Self =
+ Self(1_000_321_000);
+}
+impl KhrFragmentShaderBarycentricFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_fragment_shader_barycentric\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrFragmentShaderBarycentricFn {}
+unsafe impl Send for KhrFragmentShaderBarycentricFn {}
+unsafe impl Sync for KhrFragmentShaderBarycentricFn {}
+impl KhrFragmentShaderBarycentricFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_fragment_shader_barycentric'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR: Self = Self(1_000_203_000);
+ pub const PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR: Self =
+ Self(1_000_322_000);
+}
+impl KhrShaderSubgroupUniformControlFlowFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_KHR_shader_subgroup_uniform_control_flow\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrShaderSubgroupUniformControlFlowFn {}
+unsafe impl Send for KhrShaderSubgroupUniformControlFlowFn {}
+unsafe impl Sync for KhrShaderSubgroupUniformControlFlowFn {}
+impl KhrShaderSubgroupUniformControlFlowFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_shader_subgroup_uniform_control_flow'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR: Self =
+ Self(1_000_323_000);
+}
+impl KhrExtension325Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_325\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct KhrExtension325Fn {}
+unsafe impl Send for KhrExtension325Fn {}
+unsafe impl Sync for KhrExtension325Fn {}
+impl KhrExtension325Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl KhrZeroInitializeWorkgroupMemoryFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_KHR_zero_initialize_workgroup_memory\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrZeroInitializeWorkgroupMemoryFn {}
+unsafe impl Send for KhrZeroInitializeWorkgroupMemoryFn {}
+unsafe impl Sync for KhrZeroInitializeWorkgroupMemoryFn {}
+impl KhrZeroInitializeWorkgroupMemoryFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_zero_initialize_workgroup_memory'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR: Self =
+ Self::PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES;
+}
+impl NvFragmentShadingRateEnumsFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_fragment_shading_rate_enums\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetFragmentShadingRateEnumNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ shading_rate: FragmentShadingRateNV,
+ combiner_ops: *const [FragmentShadingRateCombinerOpKHR; 2],
+);
+#[derive(Clone)]
+pub struct NvFragmentShadingRateEnumsFn {
+ pub cmd_set_fragment_shading_rate_enum_nv: PFN_vkCmdSetFragmentShadingRateEnumNV,
+}
+unsafe impl Send for NvFragmentShadingRateEnumsFn {}
+unsafe impl Sync for NvFragmentShadingRateEnumsFn {}
+impl NvFragmentShadingRateEnumsFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_set_fragment_shading_rate_enum_nv: unsafe {
+ unsafe extern "system" fn cmd_set_fragment_shading_rate_enum_nv(
+ _command_buffer: CommandBuffer,
+ _shading_rate: FragmentShadingRateNV,
+ _combiner_ops: *const [FragmentShadingRateCombinerOpKHR; 2],
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_fragment_shading_rate_enum_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetFragmentShadingRateEnumNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_fragment_shading_rate_enum_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_NV_fragment_shading_rate_enums'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV: Self = Self(1_000_326_000);
+ pub const PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV: Self = Self(1_000_326_001);
+ pub const PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV: Self = Self(1_000_326_002);
+}
+impl NvRayTracingMotionBlurFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_ray_tracing_motion_blur\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct NvRayTracingMotionBlurFn {}
+unsafe impl Send for NvRayTracingMotionBlurFn {}
+unsafe impl Sync for NvRayTracingMotionBlurFn {}
+impl NvRayTracingMotionBlurFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_NV_ray_tracing_motion_blur'"]
+impl AccelerationStructureCreateFlagsKHR {
+ pub const MOTION_NV: Self = Self(0b100);
+}
+#[doc = "Generated from 'VK_NV_ray_tracing_motion_blur'"]
+impl BuildAccelerationStructureFlagsKHR {
+ pub const MOTION_NV: Self = Self(0b10_0000);
+}
+#[doc = "Generated from 'VK_NV_ray_tracing_motion_blur'"]
+impl PipelineCreateFlags {
+ pub const RAY_TRACING_ALLOW_MOTION_NV: Self = Self(0b1_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_NV_ray_tracing_motion_blur'"]
+impl StructureType {
+ pub const ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV: Self = Self(1_000_327_000);
+ pub const PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV: Self = Self(1_000_327_001);
+ pub const ACCELERATION_STRUCTURE_MOTION_INFO_NV: Self = Self(1_000_327_002);
+}
+impl ExtMeshShaderFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_mesh_shader\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDrawMeshTasksEXT = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ group_count_x: u32,
+ group_count_y: u32,
+ group_count_z: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDrawMeshTasksIndirectEXT = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ buffer: Buffer,
+ offset: DeviceSize,
+ draw_count: u32,
+ stride: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDrawMeshTasksIndirectCountEXT = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ buffer: Buffer,
+ offset: DeviceSize,
+ count_buffer: Buffer,
+ count_buffer_offset: DeviceSize,
+ max_draw_count: u32,
+ stride: u32,
+);
+#[derive(Clone)]
+pub struct ExtMeshShaderFn {
+ pub cmd_draw_mesh_tasks_ext: PFN_vkCmdDrawMeshTasksEXT,
+ pub cmd_draw_mesh_tasks_indirect_ext: PFN_vkCmdDrawMeshTasksIndirectEXT,
+ pub cmd_draw_mesh_tasks_indirect_count_ext: PFN_vkCmdDrawMeshTasksIndirectCountEXT,
+}
+unsafe impl Send for ExtMeshShaderFn {}
+unsafe impl Sync for ExtMeshShaderFn {}
+impl ExtMeshShaderFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_draw_mesh_tasks_ext: unsafe {
+ unsafe extern "system" fn cmd_draw_mesh_tasks_ext(
+ _command_buffer: CommandBuffer,
+ _group_count_x: u32,
+ _group_count_y: u32,
+ _group_count_z: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_draw_mesh_tasks_ext)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawMeshTasksEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_draw_mesh_tasks_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_draw_mesh_tasks_indirect_ext: unsafe {
+ unsafe extern "system" fn cmd_draw_mesh_tasks_indirect_ext(
+ _command_buffer: CommandBuffer,
+ _buffer: Buffer,
+ _offset: DeviceSize,
+ _draw_count: u32,
+ _stride: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_draw_mesh_tasks_indirect_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdDrawMeshTasksIndirectEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_draw_mesh_tasks_indirect_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_draw_mesh_tasks_indirect_count_ext: unsafe {
+ unsafe extern "system" fn cmd_draw_mesh_tasks_indirect_count_ext(
+ _command_buffer: CommandBuffer,
+ _buffer: Buffer,
+ _offset: DeviceSize,
+ _count_buffer: Buffer,
+ _count_buffer_offset: DeviceSize,
+ _max_draw_count: u32,
+ _stride: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_draw_mesh_tasks_indirect_count_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdDrawMeshTasksIndirectCountEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_draw_mesh_tasks_indirect_count_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_mesh_shader'"]
+impl IndirectCommandsTokenTypeNV {
+ pub const DRAW_MESH_TASKS: Self = Self(1_000_328_000);
+}
+#[doc = "Generated from 'VK_EXT_mesh_shader'"]
+impl PipelineStageFlags {
+ pub const TASK_SHADER_EXT: Self = Self(0b1000_0000_0000_0000_0000);
+ pub const MESH_SHADER_EXT: Self = Self(0b1_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_mesh_shader'"]
+impl QueryPipelineStatisticFlags {
+ pub const TASK_SHADER_INVOCATIONS_EXT: Self = Self(0b1000_0000_0000);
+ pub const MESH_SHADER_INVOCATIONS_EXT: Self = Self(0b1_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_mesh_shader'"]
+impl QueryType {
+ pub const MESH_PRIMITIVES_GENERATED_EXT: Self = Self(1_000_328_000);
+}
+#[doc = "Generated from 'VK_EXT_mesh_shader'"]
+impl ShaderStageFlags {
+ pub const TASK_EXT: Self = Self(0b100_0000);
+ pub const MESH_EXT: Self = Self(0b1000_0000);
+}
+#[doc = "Generated from 'VK_EXT_mesh_shader'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT: Self = Self(1_000_328_000);
+ pub const PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT: Self = Self(1_000_328_001);
+}
+impl NvExtension330Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_330\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension330Fn {}
+unsafe impl Send for NvExtension330Fn {}
+unsafe impl Sync for NvExtension330Fn {}
+impl NvExtension330Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtYcbcr2plane444FormatsFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_ycbcr_2plane_444_formats\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtYcbcr2plane444FormatsFn {}
+unsafe impl Send for ExtYcbcr2plane444FormatsFn {}
+unsafe impl Sync for ExtYcbcr2plane444FormatsFn {}
+impl ExtYcbcr2plane444FormatsFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_ycbcr_2plane_444_formats'"]
+impl Format {
+ pub const G8_B8R8_2PLANE_444_UNORM_EXT: Self = Self::G8_B8R8_2PLANE_444_UNORM;
+ pub const G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT: Self =
+ Self::G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16;
+ pub const G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT: Self =
+ Self::G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16;
+ pub const G16_B16R16_2PLANE_444_UNORM_EXT: Self = Self::G16_B16R16_2PLANE_444_UNORM;
+}
+#[doc = "Generated from 'VK_EXT_ycbcr_2plane_444_formats'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT: Self = Self(1_000_330_000);
+}
+impl NvExtension332Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_332\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension332Fn {}
+unsafe impl Send for NvExtension332Fn {}
+unsafe impl Sync for NvExtension332Fn {}
+impl NvExtension332Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtFragmentDensityMap2Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_fragment_density_map2\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtFragmentDensityMap2Fn {}
+unsafe impl Send for ExtFragmentDensityMap2Fn {}
+unsafe impl Sync for ExtFragmentDensityMap2Fn {}
+impl ExtFragmentDensityMap2Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_fragment_density_map2'"]
+impl ImageViewCreateFlags {
+ pub const FRAGMENT_DENSITY_MAP_DEFERRED_EXT: Self = Self(0b10);
+}
+#[doc = "Generated from 'VK_EXT_fragment_density_map2'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT: Self = Self(1_000_332_000);
+ pub const PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT: Self = Self(1_000_332_001);
+}
+impl QcomRotatedCopyCommandsFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_rotated_copy_commands\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct QcomRotatedCopyCommandsFn {}
+unsafe impl Send for QcomRotatedCopyCommandsFn {}
+unsafe impl Sync for QcomRotatedCopyCommandsFn {}
+impl QcomRotatedCopyCommandsFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_QCOM_rotated_copy_commands'"]
+impl StructureType {
+ pub const COPY_COMMAND_TRANSFORM_INFO_QCOM: Self = Self(1_000_333_000);
+}
+impl KhrExtension335Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_335\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct KhrExtension335Fn {}
+unsafe impl Send for KhrExtension335Fn {}
+unsafe impl Sync for KhrExtension335Fn {}
+impl KhrExtension335Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtImageRobustnessFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_image_robustness\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtImageRobustnessFn {}
+unsafe impl Send for ExtImageRobustnessFn {}
+unsafe impl Sync for ExtImageRobustnessFn {}
+impl ExtImageRobustnessFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_image_robustness'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT: Self =
+ Self::PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES;
+}
+impl KhrWorkgroupMemoryExplicitLayoutFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_KHR_workgroup_memory_explicit_layout\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct KhrWorkgroupMemoryExplicitLayoutFn {}
+unsafe impl Send for KhrWorkgroupMemoryExplicitLayoutFn {}
+unsafe impl Sync for KhrWorkgroupMemoryExplicitLayoutFn {}
+impl KhrWorkgroupMemoryExplicitLayoutFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_workgroup_memory_explicit_layout'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR: Self =
+ Self(1_000_336_000);
+}
+impl KhrCopyCommands2Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_copy_commands2\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyBuffer2 = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ p_copy_buffer_info: *const CopyBufferInfo2,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyImage2 = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ p_copy_image_info: *const CopyImageInfo2,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyBufferToImage2 = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ p_copy_buffer_to_image_info: *const CopyBufferToImageInfo2,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyImageToBuffer2 = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ p_copy_image_to_buffer_info: *const CopyImageToBufferInfo2,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBlitImage2 = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ p_blit_image_info: *const BlitImageInfo2,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdResolveImage2 = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ p_resolve_image_info: *const ResolveImageInfo2,
+);
+#[derive(Clone)]
+pub struct KhrCopyCommands2Fn {
+ pub cmd_copy_buffer2_khr: PFN_vkCmdCopyBuffer2,
+ pub cmd_copy_image2_khr: PFN_vkCmdCopyImage2,
+ pub cmd_copy_buffer_to_image2_khr: PFN_vkCmdCopyBufferToImage2,
+ pub cmd_copy_image_to_buffer2_khr: PFN_vkCmdCopyImageToBuffer2,
+ pub cmd_blit_image2_khr: PFN_vkCmdBlitImage2,
+ pub cmd_resolve_image2_khr: PFN_vkCmdResolveImage2,
+}
+unsafe impl Send for KhrCopyCommands2Fn {}
+unsafe impl Sync for KhrCopyCommands2Fn {}
+impl KhrCopyCommands2Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_copy_buffer2_khr: unsafe {
+ unsafe extern "system" fn cmd_copy_buffer2_khr(
+ _command_buffer: CommandBuffer,
+ _p_copy_buffer_info: *const CopyBufferInfo2,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_copy_buffer2_khr)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyBuffer2KHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_copy_buffer2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_copy_image2_khr: unsafe {
+ unsafe extern "system" fn cmd_copy_image2_khr(
+ _command_buffer: CommandBuffer,
+ _p_copy_image_info: *const CopyImageInfo2,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_copy_image2_khr)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyImage2KHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_copy_image2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_copy_buffer_to_image2_khr: unsafe {
+ unsafe extern "system" fn cmd_copy_buffer_to_image2_khr(
+ _command_buffer: CommandBuffer,
+ _p_copy_buffer_to_image_info: *const CopyBufferToImageInfo2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_copy_buffer_to_image2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdCopyBufferToImage2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_copy_buffer_to_image2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_copy_image_to_buffer2_khr: unsafe {
+ unsafe extern "system" fn cmd_copy_image_to_buffer2_khr(
+ _command_buffer: CommandBuffer,
+ _p_copy_image_to_buffer_info: *const CopyImageToBufferInfo2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_copy_image_to_buffer2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdCopyImageToBuffer2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_copy_image_to_buffer2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_blit_image2_khr: unsafe {
+ unsafe extern "system" fn cmd_blit_image2_khr(
+ _command_buffer: CommandBuffer,
+ _p_blit_image_info: *const BlitImageInfo2,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_blit_image2_khr)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBlitImage2KHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_blit_image2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_resolve_image2_khr: unsafe {
+ unsafe extern "system" fn cmd_resolve_image2_khr(
+ _command_buffer: CommandBuffer,
+ _p_resolve_image_info: *const ResolveImageInfo2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_resolve_image2_khr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdResolveImage2KHR\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_resolve_image2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_copy_commands2'"]
+impl StructureType {
+ pub const COPY_BUFFER_INFO_2_KHR: Self = Self::COPY_BUFFER_INFO_2;
+ pub const COPY_IMAGE_INFO_2_KHR: Self = Self::COPY_IMAGE_INFO_2;
+ pub const COPY_BUFFER_TO_IMAGE_INFO_2_KHR: Self = Self::COPY_BUFFER_TO_IMAGE_INFO_2;
+ pub const COPY_IMAGE_TO_BUFFER_INFO_2_KHR: Self = Self::COPY_IMAGE_TO_BUFFER_INFO_2;
+ pub const BLIT_IMAGE_INFO_2_KHR: Self = Self::BLIT_IMAGE_INFO_2;
+ pub const RESOLVE_IMAGE_INFO_2_KHR: Self = Self::RESOLVE_IMAGE_INFO_2;
+ pub const BUFFER_COPY_2_KHR: Self = Self::BUFFER_COPY_2;
+ pub const IMAGE_COPY_2_KHR: Self = Self::IMAGE_COPY_2;
+ pub const IMAGE_BLIT_2_KHR: Self = Self::IMAGE_BLIT_2;
+ pub const BUFFER_IMAGE_COPY_2_KHR: Self = Self::BUFFER_IMAGE_COPY_2;
+ pub const IMAGE_RESOLVE_2_KHR: Self = Self::IMAGE_RESOLVE_2;
+}
+impl ExtImageCompressionControlFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_image_compression_control\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetImageSubresourceLayout2EXT = unsafe extern "system" fn(
+ device: Device,
+ image: Image,
+ p_subresource: *const ImageSubresource2EXT,
+ p_layout: *mut SubresourceLayout2EXT,
+);
+#[derive(Clone)]
+pub struct ExtImageCompressionControlFn {
+ pub get_image_subresource_layout2_ext: PFN_vkGetImageSubresourceLayout2EXT,
+}
+unsafe impl Send for ExtImageCompressionControlFn {}
+unsafe impl Sync for ExtImageCompressionControlFn {}
+impl ExtImageCompressionControlFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_image_subresource_layout2_ext: unsafe {
+ unsafe extern "system" fn get_image_subresource_layout2_ext(
+ _device: Device,
+ _image: Image,
+ _p_subresource: *const ImageSubresource2EXT,
+ _p_layout: *mut SubresourceLayout2EXT,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_image_subresource_layout2_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetImageSubresourceLayout2EXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_image_subresource_layout2_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_image_compression_control'"]
+impl Result {
+ pub const ERROR_COMPRESSION_EXHAUSTED_EXT: Self = Self(-1_000_338_000);
+}
+#[doc = "Generated from 'VK_EXT_image_compression_control'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT: Self = Self(1_000_338_000);
+ pub const IMAGE_COMPRESSION_CONTROL_EXT: Self = Self(1_000_338_001);
+ pub const SUBRESOURCE_LAYOUT_2_EXT: Self = Self(1_000_338_002);
+ pub const IMAGE_SUBRESOURCE_2_EXT: Self = Self(1_000_338_003);
+ pub const IMAGE_COMPRESSION_PROPERTIES_EXT: Self = Self(1_000_338_004);
+}
+impl ExtAttachmentFeedbackLoopLayoutFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_EXT_attachment_feedback_loop_layout\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct ExtAttachmentFeedbackLoopLayoutFn {}
+unsafe impl Send for ExtAttachmentFeedbackLoopLayoutFn {}
+unsafe impl Sync for ExtAttachmentFeedbackLoopLayoutFn {}
+impl ExtAttachmentFeedbackLoopLayoutFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_attachment_feedback_loop_layout'"]
+impl DependencyFlags {
+ #[doc = "Dependency may be a feedback loop"]
+ pub const FEEDBACK_LOOP_EXT: Self = Self(0b1000);
+}
+#[doc = "Generated from 'VK_EXT_attachment_feedback_loop_layout'"]
+impl ImageLayout {
+ pub const ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT: Self = Self(1_000_339_000);
+}
+#[doc = "Generated from 'VK_EXT_attachment_feedback_loop_layout'"]
+impl ImageUsageFlags {
+ pub const ATTACHMENT_FEEDBACK_LOOP_EXT: Self = Self(0b1000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_attachment_feedback_loop_layout'"]
+impl PipelineCreateFlags {
+ pub const COLOR_ATTACHMENT_FEEDBACK_LOOP_EXT: Self = Self(0b10_0000_0000_0000_0000_0000_0000);
+ pub const DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_EXT: Self =
+ Self(0b100_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_attachment_feedback_loop_layout'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT: Self =
+ Self(1_000_339_000);
+}
+impl Ext4444FormatsFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_4444_formats\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct Ext4444FormatsFn {}
+unsafe impl Send for Ext4444FormatsFn {}
+unsafe impl Sync for Ext4444FormatsFn {}
+impl Ext4444FormatsFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_4444_formats'"]
+impl Format {
+ pub const A4R4G4B4_UNORM_PACK16_EXT: Self = Self::A4R4G4B4_UNORM_PACK16;
+ pub const A4B4G4R4_UNORM_PACK16_EXT: Self = Self::A4B4G4R4_UNORM_PACK16;
+}
+#[doc = "Generated from 'VK_EXT_4444_formats'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT: Self = Self(1_000_340_000);
+}
+impl ExtDeviceFaultFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_device_fault\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDeviceFaultInfoEXT = unsafe extern "system" fn(
+ device: Device,
+ p_fault_counts: *mut DeviceFaultCountsEXT,
+ p_fault_info: *mut DeviceFaultInfoEXT,
+) -> Result;
+#[derive(Clone)]
+pub struct ExtDeviceFaultFn {
+ pub get_device_fault_info_ext: PFN_vkGetDeviceFaultInfoEXT,
+}
+unsafe impl Send for ExtDeviceFaultFn {}
+unsafe impl Sync for ExtDeviceFaultFn {}
+impl ExtDeviceFaultFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_device_fault_info_ext: unsafe {
+ unsafe extern "system" fn get_device_fault_info_ext(
+ _device: Device,
+ _p_fault_counts: *mut DeviceFaultCountsEXT,
+ _p_fault_info: *mut DeviceFaultInfoEXT,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_device_fault_info_ext)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetDeviceFaultInfoEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_fault_info_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_device_fault'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_FAULT_FEATURES_EXT: Self = Self(1_000_341_000);
+ pub const DEVICE_FAULT_COUNTS_EXT: Self = Self(1_000_341_001);
+ pub const DEVICE_FAULT_INFO_EXT: Self = Self(1_000_341_002);
+}
+impl ArmRasterizationOrderAttachmentAccessFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_ARM_rasterization_order_attachment_access\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ArmRasterizationOrderAttachmentAccessFn {}
+unsafe impl Send for ArmRasterizationOrderAttachmentAccessFn {}
+unsafe impl Sync for ArmRasterizationOrderAttachmentAccessFn {}
+impl ArmRasterizationOrderAttachmentAccessFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_ARM_rasterization_order_attachment_access'"]
+impl PipelineColorBlendStateCreateFlags {
+ pub const RASTERIZATION_ORDER_ATTACHMENT_ACCESS_ARM: Self =
+ Self::RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXT;
+}
+#[doc = "Generated from 'VK_ARM_rasterization_order_attachment_access'"]
+impl PipelineDepthStencilStateCreateFlags {
+ pub const RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_ARM: Self =
+ Self::RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT;
+ pub const RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_ARM: Self =
+ Self::RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT;
+}
+#[doc = "Generated from 'VK_ARM_rasterization_order_attachment_access'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM: Self =
+ Self::PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT;
+}
+#[doc = "Generated from 'VK_ARM_rasterization_order_attachment_access'"]
+impl SubpassDescriptionFlags {
+ pub const RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_ARM: Self =
+ Self::RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_EXT;
+ pub const RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_ARM: Self =
+ Self::RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT;
+ pub const RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_ARM: Self =
+ Self::RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT;
+}
+impl ArmExtension344Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_ARM_extension_344\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ArmExtension344Fn {}
+unsafe impl Send for ArmExtension344Fn {}
+unsafe impl Sync for ArmExtension344Fn {}
+impl ArmExtension344Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtRgba10x6FormatsFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_rgba10x6_formats\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtRgba10x6FormatsFn {}
+unsafe impl Send for ExtRgba10x6FormatsFn {}
+unsafe impl Sync for ExtRgba10x6FormatsFn {}
+impl ExtRgba10x6FormatsFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_rgba10x6_formats'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT: Self = Self(1_000_344_000);
+}
+impl NvAcquireWinrtDisplayFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_acquire_winrt_display\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkAcquireWinrtDisplayNV =
+ unsafe extern "system" fn(physical_device: PhysicalDevice, display: DisplayKHR) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetWinrtDisplayNV = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ device_relative_id: u32,
+ p_display: *mut DisplayKHR,
+) -> Result;
+#[derive(Clone)]
+pub struct NvAcquireWinrtDisplayFn {
+ pub acquire_winrt_display_nv: PFN_vkAcquireWinrtDisplayNV,
+ pub get_winrt_display_nv: PFN_vkGetWinrtDisplayNV,
+}
+unsafe impl Send for NvAcquireWinrtDisplayFn {}
+unsafe impl Sync for NvAcquireWinrtDisplayFn {}
+impl NvAcquireWinrtDisplayFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ acquire_winrt_display_nv: unsafe {
+ unsafe extern "system" fn acquire_winrt_display_nv(
+ _physical_device: PhysicalDevice,
+ _display: DisplayKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(acquire_winrt_display_nv)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkAcquireWinrtDisplayNV\0");
+ let val = _f(cname);
+ if val.is_null() {
+ acquire_winrt_display_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_winrt_display_nv: unsafe {
+ unsafe extern "system" fn get_winrt_display_nv(
+ _physical_device: PhysicalDevice,
+ _device_relative_id: u32,
+ _p_display: *mut DisplayKHR,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(get_winrt_display_nv)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetWinrtDisplayNV\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_winrt_display_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+impl ExtDirectfbSurfaceFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_directfb_surface\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateDirectFBSurfaceEXT = unsafe extern "system" fn(
+ instance: Instance,
+ p_create_info: *const DirectFBSurfaceCreateInfoEXT,
+ p_allocator: *const AllocationCallbacks,
+ p_surface: *mut SurfaceKHR,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT =
+ unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ queue_family_index: u32,
+ dfb: *mut IDirectFB,
+ ) -> Bool32;
+#[derive(Clone)]
+pub struct ExtDirectfbSurfaceFn {
+ pub create_direct_fb_surface_ext: PFN_vkCreateDirectFBSurfaceEXT,
+ pub get_physical_device_direct_fb_presentation_support_ext:
+ PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT,
+}
+unsafe impl Send for ExtDirectfbSurfaceFn {}
+unsafe impl Sync for ExtDirectfbSurfaceFn {}
+impl ExtDirectfbSurfaceFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_direct_fb_surface_ext: unsafe {
+ unsafe extern "system" fn create_direct_fb_surface_ext(
+ _instance: Instance,
+ _p_create_info: *const DirectFBSurfaceCreateInfoEXT,
+ _p_allocator: *const AllocationCallbacks,
+ _p_surface: *mut SurfaceKHR,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_direct_fb_surface_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreateDirectFBSurfaceEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_direct_fb_surface_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_direct_fb_presentation_support_ext: unsafe {
+ unsafe extern "system" fn get_physical_device_direct_fb_presentation_support_ext(
+ _physical_device: PhysicalDevice,
+ _queue_family_index: u32,
+ _dfb: *mut IDirectFB,
+ ) -> Bool32 {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_direct_fb_presentation_support_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceDirectFBPresentationSupportEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_direct_fb_presentation_support_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_directfb_surface'"]
+impl StructureType {
+ pub const DIRECTFB_SURFACE_CREATE_INFO_EXT: Self = Self(1_000_346_000);
+}
+impl KhrExtension350Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_350\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct KhrExtension350Fn {}
+unsafe impl Send for KhrExtension350Fn {}
+unsafe impl Sync for KhrExtension350Fn {}
+impl KhrExtension350Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl NvExtension351Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_351\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension351Fn {}
+unsafe impl Send for NvExtension351Fn {}
+unsafe impl Sync for NvExtension351Fn {}
+impl NvExtension351Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ValveMutableDescriptorTypeFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_VALVE_mutable_descriptor_type\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ValveMutableDescriptorTypeFn {}
+unsafe impl Send for ValveMutableDescriptorTypeFn {}
+unsafe impl Sync for ValveMutableDescriptorTypeFn {}
+impl ValveMutableDescriptorTypeFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_VALVE_mutable_descriptor_type'"]
+impl DescriptorPoolCreateFlags {
+ pub const HOST_ONLY_VALVE: Self = Self::HOST_ONLY_EXT;
+}
+#[doc = "Generated from 'VK_VALVE_mutable_descriptor_type'"]
+impl DescriptorSetLayoutCreateFlags {
+ pub const HOST_ONLY_POOL_VALVE: Self = Self::HOST_ONLY_POOL_EXT;
+}
+#[doc = "Generated from 'VK_VALVE_mutable_descriptor_type'"]
+impl DescriptorType {
+ pub const MUTABLE_VALVE: Self = Self::MUTABLE_EXT;
+}
+#[doc = "Generated from 'VK_VALVE_mutable_descriptor_type'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE: Self =
+ Self::PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT;
+ pub const MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE: Self =
+ Self::MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT;
+}
+impl ExtVertexInputDynamicStateFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_vertex_input_dynamic_state\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetVertexInputEXT = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ vertex_binding_description_count: u32,
+ p_vertex_binding_descriptions: *const VertexInputBindingDescription2EXT,
+ vertex_attribute_description_count: u32,
+ p_vertex_attribute_descriptions: *const VertexInputAttributeDescription2EXT,
+);
+#[derive(Clone)]
+pub struct ExtVertexInputDynamicStateFn {
+ pub cmd_set_vertex_input_ext: PFN_vkCmdSetVertexInputEXT,
+}
+unsafe impl Send for ExtVertexInputDynamicStateFn {}
+unsafe impl Sync for ExtVertexInputDynamicStateFn {}
+impl ExtVertexInputDynamicStateFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_set_vertex_input_ext: unsafe {
+ unsafe extern "system" fn cmd_set_vertex_input_ext(
+ _command_buffer: CommandBuffer,
+ _vertex_binding_description_count: u32,
+ _p_vertex_binding_descriptions: *const VertexInputBindingDescription2EXT,
+ _vertex_attribute_description_count: u32,
+ _p_vertex_attribute_descriptions: *const VertexInputAttributeDescription2EXT,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_vertex_input_ext)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetVertexInputEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_vertex_input_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_vertex_input_dynamic_state'"]
+impl DynamicState {
+ pub const VERTEX_INPUT_EXT: Self = Self(1_000_352_000);
+}
+#[doc = "Generated from 'VK_EXT_vertex_input_dynamic_state'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT: Self = Self(1_000_352_000);
+ pub const VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT: Self = Self(1_000_352_001);
+ pub const VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT: Self = Self(1_000_352_002);
+}
+impl ExtPhysicalDeviceDrmFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_physical_device_drm\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtPhysicalDeviceDrmFn {}
+unsafe impl Send for ExtPhysicalDeviceDrmFn {}
+unsafe impl Sync for ExtPhysicalDeviceDrmFn {}
+impl ExtPhysicalDeviceDrmFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_physical_device_drm'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_DRM_PROPERTIES_EXT: Self = Self(1_000_353_000);
+}
+impl ExtDeviceAddressBindingReportFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_EXT_device_address_binding_report\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtDeviceAddressBindingReportFn {}
+unsafe impl Send for ExtDeviceAddressBindingReportFn {}
+unsafe impl Sync for ExtDeviceAddressBindingReportFn {}
+impl ExtDeviceAddressBindingReportFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_device_address_binding_report'"]
+impl DebugUtilsMessageTypeFlagsEXT {
+ pub const DEVICE_ADDRESS_BINDING: Self = Self(0b1000);
+}
+#[doc = "Generated from 'VK_EXT_device_address_binding_report'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT: Self = Self(1_000_354_000);
+ pub const DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT: Self = Self(1_000_354_001);
+}
+impl ExtDepthClipControlFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_depth_clip_control\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtDepthClipControlFn {}
+unsafe impl Send for ExtDepthClipControlFn {}
+unsafe impl Sync for ExtDepthClipControlFn {}
+impl ExtDepthClipControlFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_depth_clip_control'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT: Self = Self(1_000_355_000);
+ pub const PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT: Self = Self(1_000_355_001);
+}
+impl ExtPrimitiveTopologyListRestartFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_EXT_primitive_topology_list_restart\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtPrimitiveTopologyListRestartFn {}
+unsafe impl Send for ExtPrimitiveTopologyListRestartFn {}
+unsafe impl Sync for ExtPrimitiveTopologyListRestartFn {}
+impl ExtPrimitiveTopologyListRestartFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_primitive_topology_list_restart'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT: Self =
+ Self(1_000_356_000);
+}
+impl KhrExtension358Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_358\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct KhrExtension358Fn {}
+unsafe impl Send for KhrExtension358Fn {}
+unsafe impl Sync for KhrExtension358Fn {}
+impl KhrExtension358Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension359Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_359\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension359Fn {}
+unsafe impl Send for ExtExtension359Fn {}
+unsafe impl Sync for ExtExtension359Fn {}
+impl ExtExtension359Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension360Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_360\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension360Fn {}
+unsafe impl Send for ExtExtension360Fn {}
+unsafe impl Sync for ExtExtension360Fn {}
+impl ExtExtension360Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl KhrFormatFeatureFlags2Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_format_feature_flags2\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct KhrFormatFeatureFlags2Fn {}
+unsafe impl Send for KhrFormatFeatureFlags2Fn {}
+unsafe impl Sync for KhrFormatFeatureFlags2Fn {}
+impl KhrFormatFeatureFlags2Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_KHR_format_feature_flags2'"]
+impl StructureType {
+ pub const FORMAT_PROPERTIES_3_KHR: Self = Self::FORMAT_PROPERTIES_3;
+}
+impl ExtExtension362Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_362\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension362Fn {}
+unsafe impl Send for ExtExtension362Fn {}
+unsafe impl Sync for ExtExtension362Fn {}
+impl ExtExtension362Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension363Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_363\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension363Fn {}
+unsafe impl Send for ExtExtension363Fn {}
+unsafe impl Sync for ExtExtension363Fn {}
+impl ExtExtension363Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl FuchsiaExtension364Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_FUCHSIA_extension_364\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct FuchsiaExtension364Fn {}
+unsafe impl Send for FuchsiaExtension364Fn {}
+unsafe impl Sync for FuchsiaExtension364Fn {}
+impl FuchsiaExtension364Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl FuchsiaExternalMemoryFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_FUCHSIA_external_memory\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetMemoryZirconHandleFUCHSIA = unsafe extern "system" fn(
+ device: Device,
+ p_get_zircon_handle_info: *const MemoryGetZirconHandleInfoFUCHSIA,
+ p_zircon_handle: *mut zx_handle_t,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA = unsafe extern "system" fn(
+ device: Device,
+ handle_type: ExternalMemoryHandleTypeFlags,
+ zircon_handle: zx_handle_t,
+ p_memory_zircon_handle_properties: *mut MemoryZirconHandlePropertiesFUCHSIA,
+) -> Result;
+#[derive(Clone)]
+pub struct FuchsiaExternalMemoryFn {
+ pub get_memory_zircon_handle_fuchsia: PFN_vkGetMemoryZirconHandleFUCHSIA,
+ pub get_memory_zircon_handle_properties_fuchsia: PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA,
+}
+unsafe impl Send for FuchsiaExternalMemoryFn {}
+unsafe impl Sync for FuchsiaExternalMemoryFn {}
+impl FuchsiaExternalMemoryFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_memory_zircon_handle_fuchsia: unsafe {
+ unsafe extern "system" fn get_memory_zircon_handle_fuchsia(
+ _device: Device,
+ _p_get_zircon_handle_info: *const MemoryGetZirconHandleInfoFUCHSIA,
+ _p_zircon_handle: *mut zx_handle_t,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_memory_zircon_handle_fuchsia)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetMemoryZirconHandleFUCHSIA\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_memory_zircon_handle_fuchsia
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_memory_zircon_handle_properties_fuchsia: unsafe {
+ unsafe extern "system" fn get_memory_zircon_handle_properties_fuchsia(
+ _device: Device,
+ _handle_type: ExternalMemoryHandleTypeFlags,
+ _zircon_handle: zx_handle_t,
+ _p_memory_zircon_handle_properties: *mut MemoryZirconHandlePropertiesFUCHSIA,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_memory_zircon_handle_properties_fuchsia)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetMemoryZirconHandlePropertiesFUCHSIA\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_memory_zircon_handle_properties_fuchsia
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_FUCHSIA_external_memory'"]
+impl ExternalMemoryHandleTypeFlags {
+ pub const ZIRCON_VMO_FUCHSIA: Self = Self(0b1000_0000_0000);
+}
+#[doc = "Generated from 'VK_FUCHSIA_external_memory'"]
+impl StructureType {
+ pub const IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA: Self = Self(1_000_364_000);
+ pub const MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA: Self = Self(1_000_364_001);
+ pub const MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA: Self = Self(1_000_364_002);
+}
+impl FuchsiaExternalSemaphoreFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_FUCHSIA_external_semaphore\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkImportSemaphoreZirconHandleFUCHSIA = unsafe extern "system" fn(
+ device: Device,
+ p_import_semaphore_zircon_handle_info: *const ImportSemaphoreZirconHandleInfoFUCHSIA,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetSemaphoreZirconHandleFUCHSIA = unsafe extern "system" fn(
+ device: Device,
+ p_get_zircon_handle_info: *const SemaphoreGetZirconHandleInfoFUCHSIA,
+ p_zircon_handle: *mut zx_handle_t,
+) -> Result;
+#[derive(Clone)]
+pub struct FuchsiaExternalSemaphoreFn {
+ pub import_semaphore_zircon_handle_fuchsia: PFN_vkImportSemaphoreZirconHandleFUCHSIA,
+ pub get_semaphore_zircon_handle_fuchsia: PFN_vkGetSemaphoreZirconHandleFUCHSIA,
+}
+unsafe impl Send for FuchsiaExternalSemaphoreFn {}
+unsafe impl Sync for FuchsiaExternalSemaphoreFn {}
+impl FuchsiaExternalSemaphoreFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ import_semaphore_zircon_handle_fuchsia: unsafe {
+ unsafe extern "system" fn import_semaphore_zircon_handle_fuchsia(
+ _device: Device,
+ _p_import_semaphore_zircon_handle_info : * const ImportSemaphoreZirconHandleInfoFUCHSIA,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(import_semaphore_zircon_handle_fuchsia)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkImportSemaphoreZirconHandleFUCHSIA\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ import_semaphore_zircon_handle_fuchsia
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_semaphore_zircon_handle_fuchsia: unsafe {
+ unsafe extern "system" fn get_semaphore_zircon_handle_fuchsia(
+ _device: Device,
+ _p_get_zircon_handle_info: *const SemaphoreGetZirconHandleInfoFUCHSIA,
+ _p_zircon_handle: *mut zx_handle_t,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_semaphore_zircon_handle_fuchsia)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetSemaphoreZirconHandleFUCHSIA\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_semaphore_zircon_handle_fuchsia
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_FUCHSIA_external_semaphore'"]
+impl ExternalSemaphoreHandleTypeFlags {
+ pub const ZIRCON_EVENT_FUCHSIA: Self = Self(0b1000_0000);
+}
+#[doc = "Generated from 'VK_FUCHSIA_external_semaphore'"]
+impl StructureType {
+ pub const IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA: Self = Self(1_000_365_000);
+ pub const SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA: Self = Self(1_000_365_001);
+}
impl FuchsiaBufferCollectionFn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_FUCHSIA_buffer_collection\0")
        }
    }
    // Extension revision these bindings were generated against.
    pub const SPEC_VERSION: u32 = 2u32;
}
// Raw Vulkan function-pointer signatures for VK_FUCHSIA_buffer_collection.
#[allow(non_camel_case_types)]
pub type PFN_vkCreateBufferCollectionFUCHSIA = unsafe extern "system" fn(
    device: Device,
    p_create_info: *const BufferCollectionCreateInfoFUCHSIA,
    p_allocator: *const AllocationCallbacks,
    p_collection: *mut BufferCollectionFUCHSIA,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkSetBufferCollectionImageConstraintsFUCHSIA = unsafe extern "system" fn(
    device: Device,
    collection: BufferCollectionFUCHSIA,
    p_image_constraints_info: *const ImageConstraintsInfoFUCHSIA,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA = unsafe extern "system" fn(
    device: Device,
    collection: BufferCollectionFUCHSIA,
    p_buffer_constraints_info: *const BufferConstraintsInfoFUCHSIA,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkDestroyBufferCollectionFUCHSIA = unsafe extern "system" fn(
    device: Device,
    collection: BufferCollectionFUCHSIA,
    p_allocator: *const AllocationCallbacks,
);
#[allow(non_camel_case_types)]
pub type PFN_vkGetBufferCollectionPropertiesFUCHSIA = unsafe extern "system" fn(
    device: Device,
    collection: BufferCollectionFUCHSIA,
    p_properties: *mut BufferCollectionPropertiesFUCHSIA,
) -> Result;
/// Loaded function-pointer table for `VK_FUCHSIA_buffer_collection`.
#[derive(Clone)]
pub struct FuchsiaBufferCollectionFn {
    pub create_buffer_collection_fuchsia: PFN_vkCreateBufferCollectionFUCHSIA,
    pub set_buffer_collection_image_constraints_fuchsia:
        PFN_vkSetBufferCollectionImageConstraintsFUCHSIA,
    pub set_buffer_collection_buffer_constraints_fuchsia:
        PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA,
    pub destroy_buffer_collection_fuchsia: PFN_vkDestroyBufferCollectionFUCHSIA,
    pub get_buffer_collection_properties_fuchsia: PFN_vkGetBufferCollectionPropertiesFUCHSIA,
}
// SAFETY: the table only holds plain function pointers, which are freely shareable.
unsafe impl Send for FuchsiaBufferCollectionFn {}
unsafe impl Sync for FuchsiaBufferCollectionFn {}
impl FuchsiaBufferCollectionFn {
    /// Resolves the `VK_FUCHSIA_buffer_collection` device commands via `_f`.
    ///
    /// For each entry point the Vulkan symbol name is passed to `_f`; a null
    /// return is replaced by a panicking stub of identical signature so that
    /// calling an unavailable function fails loudly instead of dereferencing
    /// null.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            create_buffer_collection_fuchsia: unsafe {
                // Panicking placeholder stored when the loader cannot resolve the symbol.
                unsafe extern "system" fn create_buffer_collection_fuchsia(
                    _device: Device,
                    _p_create_info: *const BufferCollectionCreateInfoFUCHSIA,
                    _p_allocator: *const AllocationCallbacks,
                    _p_collection: *mut BufferCollectionFUCHSIA,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(create_buffer_collection_fuchsia)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCreateBufferCollectionFUCHSIA\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    create_buffer_collection_fuchsia
                } else {
                    // Reinterpret the raw pointer as the typed PFN; the loader
                    // contract guarantees the signature for this symbol name.
                    ::std::mem::transmute(val)
                }
            },
            set_buffer_collection_image_constraints_fuchsia: unsafe {
                unsafe extern "system" fn set_buffer_collection_image_constraints_fuchsia(
                    _device: Device,
                    _collection: BufferCollectionFUCHSIA,
                    _p_image_constraints_info: *const ImageConstraintsInfoFUCHSIA,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(set_buffer_collection_image_constraints_fuchsia)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkSetBufferCollectionImageConstraintsFUCHSIA\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    set_buffer_collection_image_constraints_fuchsia
                } else {
                    ::std::mem::transmute(val)
                }
            },
            set_buffer_collection_buffer_constraints_fuchsia: unsafe {
                unsafe extern "system" fn set_buffer_collection_buffer_constraints_fuchsia(
                    _device: Device,
                    _collection: BufferCollectionFUCHSIA,
                    _p_buffer_constraints_info: *const BufferConstraintsInfoFUCHSIA,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(set_buffer_collection_buffer_constraints_fuchsia)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkSetBufferCollectionBufferConstraintsFUCHSIA\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    set_buffer_collection_buffer_constraints_fuchsia
                } else {
                    ::std::mem::transmute(val)
                }
            },
            destroy_buffer_collection_fuchsia: unsafe {
                unsafe extern "system" fn destroy_buffer_collection_fuchsia(
                    _device: Device,
                    _collection: BufferCollectionFUCHSIA,
                    _p_allocator: *const AllocationCallbacks,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(destroy_buffer_collection_fuchsia)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkDestroyBufferCollectionFUCHSIA\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    destroy_buffer_collection_fuchsia
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_buffer_collection_properties_fuchsia: unsafe {
                unsafe extern "system" fn get_buffer_collection_properties_fuchsia(
                    _device: Device,
                    _collection: BufferCollectionFUCHSIA,
                    _p_properties: *mut BufferCollectionPropertiesFUCHSIA,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_buffer_collection_properties_fuchsia)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetBufferCollectionPropertiesFUCHSIA\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_buffer_collection_properties_fuchsia
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_FUCHSIA_buffer_collection'"]
impl DebugReportObjectTypeEXT {
    // Debug-report object type for VkBufferCollectionFUCHSIA handles.
    pub const BUFFER_COLLECTION_FUCHSIA: Self = Self(1_000_366_000);
}
#[doc = "Generated from 'VK_FUCHSIA_buffer_collection'"]
impl ObjectType {
    #[doc = "VkBufferCollectionFUCHSIA"]
    pub const BUFFER_COLLECTION_FUCHSIA: Self = Self(1_000_366_000);
}
#[doc = "Generated from 'VK_FUCHSIA_buffer_collection'"]
impl StructureType {
    // sType values for extension number 367 (base 1_000_366_000).
    pub const BUFFER_COLLECTION_CREATE_INFO_FUCHSIA: Self = Self(1_000_366_000);
    pub const IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA: Self = Self(1_000_366_001);
    pub const BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA: Self = Self(1_000_366_002);
    pub const BUFFER_COLLECTION_PROPERTIES_FUCHSIA: Self = Self(1_000_366_003);
    pub const BUFFER_CONSTRAINTS_INFO_FUCHSIA: Self = Self(1_000_366_004);
    pub const BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA: Self = Self(1_000_366_005);
    pub const IMAGE_CONSTRAINTS_INFO_FUCHSIA: Self = Self(1_000_366_006);
    pub const IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA: Self = Self(1_000_366_007);
    pub const SYSMEM_COLOR_SPACE_FUCHSIA: Self = Self(1_000_366_008);
    pub const BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA: Self = Self(1_000_366_009);
}
impl FuchsiaExtension368Fn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_FUCHSIA_extension_368\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
/// Reserved/placeholder extension: defines no commands, so the table is empty.
#[derive(Clone)]
pub struct FuchsiaExtension368Fn {}
// SAFETY: zero-sized type; nothing to share.
unsafe impl Send for FuchsiaExtension368Fn {}
unsafe impl Sync for FuchsiaExtension368Fn {}
impl FuchsiaExtension368Fn {
    // `_f` is unused; the parameter exists for uniformity with other Fn tables.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl QcomExtension369Fn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_extension_369\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
/// Reserved/placeholder extension: defines no commands, so the table is empty.
#[derive(Clone)]
pub struct QcomExtension369Fn {}
unsafe impl Send for QcomExtension369Fn {}
unsafe impl Sync for QcomExtension369Fn {}
impl QcomExtension369Fn {
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_QCOM_extension_369'"]
impl DescriptorBindingFlags {
    // Bit 4 is reserved by this placeholder extension.
    pub const RESERVED_4_QCOM: Self = Self(0b1_0000);
}
impl HuaweiSubpassShadingFn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_HUAWEI_subpass_shading\0") }
    }
    // Extension revision these bindings were generated against.
    pub const SPEC_VERSION: u32 = 2u32;
}
// Raw Vulkan function-pointer signatures for VK_HUAWEI_subpass_shading.
#[allow(non_camel_case_types)]
pub type PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = unsafe extern "system" fn(
    device: Device,
    renderpass: RenderPass,
    p_max_workgroup_size: *mut Extent2D,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSubpassShadingHUAWEI = unsafe extern "system" fn(command_buffer: CommandBuffer);
/// Loaded function-pointer table for `VK_HUAWEI_subpass_shading`.
#[derive(Clone)]
pub struct HuaweiSubpassShadingFn {
    pub get_device_subpass_shading_max_workgroup_size_huawei:
        PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI,
    pub cmd_subpass_shading_huawei: PFN_vkCmdSubpassShadingHUAWEI,
}
// SAFETY: the table only holds plain function pointers, which are freely shareable.
unsafe impl Send for HuaweiSubpassShadingFn {}
unsafe impl Sync for HuaweiSubpassShadingFn {}
impl HuaweiSubpassShadingFn {
    /// Resolves the `VK_HUAWEI_subpass_shading` commands via `_f`; a null
    /// lookup result is replaced by a panicking stub of identical signature.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            get_device_subpass_shading_max_workgroup_size_huawei: unsafe {
                // Panicking placeholder stored when the symbol is unavailable.
                unsafe extern "system" fn get_device_subpass_shading_max_workgroup_size_huawei(
                    _device: Device,
                    _renderpass: RenderPass,
                    _p_max_workgroup_size: *mut Extent2D,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_device_subpass_shading_max_workgroup_size_huawei)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_device_subpass_shading_max_workgroup_size_huawei
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_subpass_shading_huawei: unsafe {
                unsafe extern "system" fn cmd_subpass_shading_huawei(
                    _command_buffer: CommandBuffer,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_subpass_shading_huawei)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSubpassShadingHUAWEI\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_subpass_shading_huawei
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_HUAWEI_subpass_shading'"]
impl PipelineBindPoint {
    pub const SUBPASS_SHADING_HUAWEI: Self = Self(1_000_369_003);
}
#[doc = "Generated from 'VK_HUAWEI_subpass_shading'"]
impl PipelineStageFlags2 {
    // Bit 39 of the 64-bit flags2 space.
    pub const SUBPASS_SHADING_HUAWEI: Self =
        Self(0b1000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_HUAWEI_subpass_shading'"]
impl ShaderStageFlags {
    // Bit 14: the subpass-shading shader stage.
    pub const SUBPASS_SHADING_HUAWEI: Self = Self(0b100_0000_0000_0000);
}
#[doc = "Generated from 'VK_HUAWEI_subpass_shading'"]
impl StructureType {
    // sType values for extension number 370 (base 1_000_369_000).
    pub const SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI: Self = Self(1_000_369_000);
    pub const PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI: Self = Self(1_000_369_001);
    pub const PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI: Self = Self(1_000_369_002);
}
impl HuaweiInvocationMaskFn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_HUAWEI_invocation_mask\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
// Raw Vulkan function-pointer signature for VK_HUAWEI_invocation_mask.
#[allow(non_camel_case_types)]
pub type PFN_vkCmdBindInvocationMaskHUAWEI = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    image_view: ImageView,
    image_layout: ImageLayout,
);
/// Loaded function-pointer table for `VK_HUAWEI_invocation_mask`.
#[derive(Clone)]
pub struct HuaweiInvocationMaskFn {
    pub cmd_bind_invocation_mask_huawei: PFN_vkCmdBindInvocationMaskHUAWEI,
}
// SAFETY: the table only holds plain function pointers, which are freely shareable.
unsafe impl Send for HuaweiInvocationMaskFn {}
unsafe impl Sync for HuaweiInvocationMaskFn {}
+impl HuaweiInvocationMaskFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_bind_invocation_mask_huawei: unsafe {
+ unsafe extern "system" fn cmd_bind_invocation_mask_huawei(
+ _command_buffer: CommandBuffer,
+ _image_view: ImageView,
+ _image_layout: ImageLayout,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_bind_invocation_mask_huawei)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdBindInvocationMaskHUAWEI\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_bind_invocation_mask_huawei
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
#[doc = "Generated from 'VK_HUAWEI_invocation_mask'"]
impl AccessFlags2 {
    // Bit 39 of the 64-bit flags2 space.
    pub const INVOCATION_MASK_READ_HUAWEI: Self =
        Self(0b1000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_HUAWEI_invocation_mask'"]
impl ImageUsageFlags {
    // Bit 18: image may be bound as an invocation mask.
    pub const INVOCATION_MASK_HUAWEI: Self = Self(0b100_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_HUAWEI_invocation_mask'"]
impl PipelineStageFlags2 {
    // Bit 40 of the 64-bit flags2 space.
    pub const INVOCATION_MASK_HUAWEI: Self =
        Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_HUAWEI_invocation_mask'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI: Self = Self(1_000_370_000);
}
impl NvExternalMemoryRdmaFn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_external_memory_rdma\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
// Raw Vulkan function-pointer signature for VK_NV_external_memory_rdma.
#[allow(non_camel_case_types)]
pub type PFN_vkGetMemoryRemoteAddressNV = unsafe extern "system" fn(
    device: Device,
    p_memory_get_remote_address_info: *const MemoryGetRemoteAddressInfoNV,
    p_address: *mut RemoteAddressNV,
) -> Result;
/// Loaded function-pointer table for `VK_NV_external_memory_rdma`.
#[derive(Clone)]
pub struct NvExternalMemoryRdmaFn {
    pub get_memory_remote_address_nv: PFN_vkGetMemoryRemoteAddressNV,
}
// SAFETY: the table only holds plain function pointers, which are freely shareable.
unsafe impl Send for NvExternalMemoryRdmaFn {}
unsafe impl Sync for NvExternalMemoryRdmaFn {}
+impl NvExternalMemoryRdmaFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_memory_remote_address_nv: unsafe {
+ unsafe extern "system" fn get_memory_remote_address_nv(
+ _device: Device,
+ _p_memory_get_remote_address_info: *const MemoryGetRemoteAddressInfoNV,
+ _p_address: *mut RemoteAddressNV,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_memory_remote_address_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetMemoryRemoteAddressNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_memory_remote_address_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
#[doc = "Generated from 'VK_NV_external_memory_rdma'"]
impl ExternalMemoryHandleTypeFlags {
    // Bit 12: memory importable/exportable as an RDMA address.
    pub const RDMA_ADDRESS_NV: Self = Self(0b1_0000_0000_0000);
}
#[doc = "Generated from 'VK_NV_external_memory_rdma'"]
impl MemoryPropertyFlags {
    // Bit 8: memory heap supports RDMA access.
    pub const RDMA_CAPABLE_NV: Self = Self(0b1_0000_0000);
}
#[doc = "Generated from 'VK_NV_external_memory_rdma'"]
impl StructureType {
    // sType values for extension number 372 (base 1_000_371_000).
    pub const MEMORY_GET_REMOTE_ADDRESS_INFO_NV: Self = Self(1_000_371_000);
    pub const PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV: Self = Self(1_000_371_001);
}
impl ExtPipelinePropertiesFn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_pipeline_properties\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
// Raw Vulkan function-pointer signature for VK_EXT_pipeline_properties.
#[allow(non_camel_case_types)]
pub type PFN_vkGetPipelinePropertiesEXT = unsafe extern "system" fn(
    device: Device,
    p_pipeline_info: *const PipelineInfoEXT,
    p_pipeline_properties: *mut BaseOutStructure,
) -> Result;
/// Loaded function-pointer table for `VK_EXT_pipeline_properties`.
#[derive(Clone)]
pub struct ExtPipelinePropertiesFn {
    pub get_pipeline_properties_ext: PFN_vkGetPipelinePropertiesEXT,
}
// SAFETY: the table only holds plain function pointers, which are freely shareable.
unsafe impl Send for ExtPipelinePropertiesFn {}
unsafe impl Sync for ExtPipelinePropertiesFn {}
impl ExtPipelinePropertiesFn {
    /// Resolves the `VK_EXT_pipeline_properties` command via `_f`; a null
    /// lookup result is replaced by a panicking stub of identical signature.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            get_pipeline_properties_ext: unsafe {
                // Panicking placeholder stored when the symbol is unavailable.
                unsafe extern "system" fn get_pipeline_properties_ext(
                    _device: Device,
                    _p_pipeline_info: *const PipelineInfoEXT,
                    _p_pipeline_properties: *mut BaseOutStructure,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_pipeline_properties_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetPipelinePropertiesEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_pipeline_properties_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_EXT_pipeline_properties'"]
impl StructureType {
    // sType values for extension number 373 (base 1_000_372_000).
    pub const PIPELINE_PROPERTIES_IDENTIFIER_EXT: Self = Self(1_000_372_000);
    pub const PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT: Self = Self(1_000_372_001);
    // Alias of the KHR value rather than a fresh enumerant.
    pub const PIPELINE_INFO_EXT: Self = Self::PIPELINE_INFO_KHR;
}
impl NvExtension374Fn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_374\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
/// Reserved/placeholder extension: defines no commands, so the table is empty.
#[derive(Clone)]
pub struct NvExtension374Fn {}
// SAFETY: zero-sized type; nothing to share.
unsafe impl Send for NvExtension374Fn {}
unsafe impl Sync for NvExtension374Fn {}
impl NvExtension374Fn {
    // `_f` is unused; the parameter exists for uniformity with other Fn tables.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_NV_extension_374'"]
impl ExternalFenceHandleTypeFlags {
    // Bits 4 and 5 are reserved by this placeholder extension.
    pub const RESERVED_4_NV: Self = Self(0b1_0000);
    pub const RESERVED_5_NV: Self = Self(0b10_0000);
}
#[doc = "Generated from 'VK_NV_extension_374'"]
impl ExternalSemaphoreHandleTypeFlags {
    pub const RESERVED_5_NV: Self = Self(0b10_0000);
}
impl NvExtension375Fn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_375\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
/// Reserved/placeholder extension: defines no commands, so the table is empty.
#[derive(Clone)]
pub struct NvExtension375Fn {}
unsafe impl Send for NvExtension375Fn {}
unsafe impl Sync for NvExtension375Fn {}
impl NvExtension375Fn {
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_NV_extension_375'"]
impl ExternalMemoryHandleTypeFlags {
    // Bit 13 is reserved by this placeholder extension.
    pub const RESERVED_13_NV: Self = Self(0b10_0000_0000_0000);
}
impl ExtExtension376Fn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_376\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
/// Reserved/placeholder extension: defines no commands, so the table is empty.
#[derive(Clone)]
pub struct ExtExtension376Fn {}
unsafe impl Send for ExtExtension376Fn {}
unsafe impl Sync for ExtExtension376Fn {}
impl ExtExtension376Fn {
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl ExtMultisampledRenderToSingleSampledFn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                b"VK_EXT_multisampled_render_to_single_sampled\0",
            )
        }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
/// Structure-only extension: defines no commands, so the table is empty.
#[derive(Clone)]
pub struct ExtMultisampledRenderToSingleSampledFn {}
// SAFETY: zero-sized type; nothing to share.
unsafe impl Send for ExtMultisampledRenderToSingleSampledFn {}
unsafe impl Sync for ExtMultisampledRenderToSingleSampledFn {}
impl ExtMultisampledRenderToSingleSampledFn {
    // `_f` is unused; the parameter exists for uniformity with other Fn tables.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_EXT_multisampled_render_to_single_sampled'"]
impl ImageCreateFlags {
    // Bit 18 of VkImageCreateFlags.
    pub const MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXT: Self = Self(0b100_0000_0000_0000_0000);
}
#[doc = "Generated from 'VK_EXT_multisampled_render_to_single_sampled'"]
impl StructureType {
    // sType values for extension number 377 (base 1_000_376_000).
    pub const PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT: Self =
        Self(1_000_376_000);
    pub const SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT: Self = Self(1_000_376_001);
    pub const MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT: Self = Self(1_000_376_002);
}
impl ExtExtendedDynamicState2Fn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extended_dynamic_state2\0")
        }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
// Raw Vulkan function-pointer signatures for VK_EXT_extended_dynamic_state2.
// Aliases without the EXT suffix correspond to commands promoted to core 1.3.
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetPatchControlPointsEXT =
    unsafe extern "system" fn(command_buffer: CommandBuffer, patch_control_points: u32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetRasterizerDiscardEnable =
    unsafe extern "system" fn(command_buffer: CommandBuffer, rasterizer_discard_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetDepthBiasEnable =
    unsafe extern "system" fn(command_buffer: CommandBuffer, depth_bias_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetLogicOpEXT =
    unsafe extern "system" fn(command_buffer: CommandBuffer, logic_op: LogicOp);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetPrimitiveRestartEnable =
    unsafe extern "system" fn(command_buffer: CommandBuffer, primitive_restart_enable: Bool32);
/// Loaded function-pointer table for `VK_EXT_extended_dynamic_state2`.
#[derive(Clone)]
pub struct ExtExtendedDynamicState2Fn {
    pub cmd_set_patch_control_points_ext: PFN_vkCmdSetPatchControlPointsEXT,
    pub cmd_set_rasterizer_discard_enable_ext: PFN_vkCmdSetRasterizerDiscardEnable,
    pub cmd_set_depth_bias_enable_ext: PFN_vkCmdSetDepthBiasEnable,
    pub cmd_set_logic_op_ext: PFN_vkCmdSetLogicOpEXT,
    pub cmd_set_primitive_restart_enable_ext: PFN_vkCmdSetPrimitiveRestartEnable,
}
// SAFETY: the table only holds plain function pointers, which are freely shareable.
unsafe impl Send for ExtExtendedDynamicState2Fn {}
unsafe impl Sync for ExtExtendedDynamicState2Fn {}
impl ExtExtendedDynamicState2Fn {
    /// Resolves the `VK_EXT_extended_dynamic_state2` commands via `_f`.
    ///
    /// Each symbol is looked up by its `EXT`-suffixed name; a null result is
    /// replaced by a panicking stub of identical signature so a missing
    /// function fails loudly instead of dereferencing null.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            cmd_set_patch_control_points_ext: unsafe {
                // Panicking placeholder stored when the symbol is unavailable.
                unsafe extern "system" fn cmd_set_patch_control_points_ext(
                    _command_buffer: CommandBuffer,
                    _patch_control_points: u32,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_patch_control_points_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdSetPatchControlPointsEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_patch_control_points_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_set_rasterizer_discard_enable_ext: unsafe {
                unsafe extern "system" fn cmd_set_rasterizer_discard_enable_ext(
                    _command_buffer: CommandBuffer,
                    _rasterizer_discard_enable: Bool32,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_rasterizer_discard_enable_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdSetRasterizerDiscardEnableEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_rasterizer_discard_enable_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_set_depth_bias_enable_ext: unsafe {
                unsafe extern "system" fn cmd_set_depth_bias_enable_ext(
                    _command_buffer: CommandBuffer,
                    _depth_bias_enable: Bool32,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_depth_bias_enable_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdSetDepthBiasEnableEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_depth_bias_enable_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_set_logic_op_ext: unsafe {
                unsafe extern "system" fn cmd_set_logic_op_ext(
                    _command_buffer: CommandBuffer,
                    _logic_op: LogicOp,
                ) {
                    panic!(concat!("Unable to load ", stringify!(cmd_set_logic_op_ext)))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetLogicOpEXT\0");
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_logic_op_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
            cmd_set_primitive_restart_enable_ext: unsafe {
                unsafe extern "system" fn cmd_set_primitive_restart_enable_ext(
                    _command_buffer: CommandBuffer,
                    _primitive_restart_enable: Bool32,
                ) {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(cmd_set_primitive_restart_enable_ext)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkCmdSetPrimitiveRestartEnableEXT\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    cmd_set_primitive_restart_enable_ext
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_EXT_extended_dynamic_state2'"]
impl DynamicState {
    #[doc = "Not promoted to 1.3"]
    pub const PATCH_CONTROL_POINTS_EXT: Self = Self(1_000_377_000);
    // Promoted to core 1.3: EXT names alias the core enumerants.
    pub const RASTERIZER_DISCARD_ENABLE_EXT: Self = Self::RASTERIZER_DISCARD_ENABLE;
    pub const DEPTH_BIAS_ENABLE_EXT: Self = Self::DEPTH_BIAS_ENABLE;
    #[doc = "Not promoted to 1.3"]
    pub const LOGIC_OP_EXT: Self = Self(1_000_377_003);
    pub const PRIMITIVE_RESTART_ENABLE_EXT: Self = Self::PRIMITIVE_RESTART_ENABLE;
}
#[doc = "Generated from 'VK_EXT_extended_dynamic_state2'"]
impl StructureType {
    #[doc = "Not promoted to 1.3"]
    pub const PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT: Self = Self(1_000_377_000);
}
impl QnxScreenSurfaceFn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QNX_screen_surface\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
// Raw Vulkan function-pointer signatures for VK_QNX_screen_surface
// (instance-level commands, unlike the device-level tables above).
#[allow(non_camel_case_types)]
pub type PFN_vkCreateScreenSurfaceQNX = unsafe extern "system" fn(
    instance: Instance,
    p_create_info: *const ScreenSurfaceCreateInfoQNX,
    p_allocator: *const AllocationCallbacks,
    p_surface: *mut SurfaceKHR,
) -> Result;
#[allow(non_camel_case_types)]
pub type PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX = unsafe extern "system" fn(
    physical_device: PhysicalDevice,
    queue_family_index: u32,
    window: *mut _screen_window,
) -> Bool32;
/// Loaded function-pointer table for `VK_QNX_screen_surface`.
#[derive(Clone)]
pub struct QnxScreenSurfaceFn {
    pub create_screen_surface_qnx: PFN_vkCreateScreenSurfaceQNX,
    pub get_physical_device_screen_presentation_support_qnx:
        PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX,
}
// SAFETY: the table only holds plain function pointers, which are freely shareable.
unsafe impl Send for QnxScreenSurfaceFn {}
unsafe impl Sync for QnxScreenSurfaceFn {}
impl QnxScreenSurfaceFn {
    /// Resolves the `VK_QNX_screen_surface` commands via `_f`; a null lookup
    /// result is replaced by a panicking stub of identical signature.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {
            create_screen_surface_qnx: unsafe {
                // Panicking placeholder stored when the symbol is unavailable.
                unsafe extern "system" fn create_screen_surface_qnx(
                    _instance: Instance,
                    _p_create_info: *const ScreenSurfaceCreateInfoQNX,
                    _p_allocator: *const AllocationCallbacks,
                    _p_surface: *mut SurfaceKHR,
                ) -> Result {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(create_screen_surface_qnx)
                    ))
                }
                let cname =
                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateScreenSurfaceQNX\0");
                let val = _f(cname);
                if val.is_null() {
                    create_screen_surface_qnx
                } else {
                    ::std::mem::transmute(val)
                }
            },
            get_physical_device_screen_presentation_support_qnx: unsafe {
                unsafe extern "system" fn get_physical_device_screen_presentation_support_qnx(
                    _physical_device: PhysicalDevice,
                    _queue_family_index: u32,
                    _window: *mut _screen_window,
                ) -> Bool32 {
                    panic!(concat!(
                        "Unable to load ",
                        stringify!(get_physical_device_screen_presentation_support_qnx)
                    ))
                }
                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
                    b"vkGetPhysicalDeviceScreenPresentationSupportQNX\0",
                );
                let val = _f(cname);
                if val.is_null() {
                    get_physical_device_screen_presentation_support_qnx
                } else {
                    ::std::mem::transmute(val)
                }
            },
        }
    }
}
#[doc = "Generated from 'VK_QNX_screen_surface'"]
impl StructureType {
    pub const SCREEN_SURFACE_CREATE_INFO_QNX: Self = Self(1_000_378_000);
}
impl KhrExtension380Fn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_380\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
/// Reserved/placeholder extension: defines no commands, so the table is empty.
#[derive(Clone)]
pub struct KhrExtension380Fn {}
// SAFETY: zero-sized type; nothing to share.
unsafe impl Send for KhrExtension380Fn {}
unsafe impl Sync for KhrExtension380Fn {}
impl KhrExtension380Fn {
    // `_f` is unused; the parameter exists for uniformity with other Fn tables.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl KhrExtension381Fn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_381\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
/// Reserved/placeholder extension: defines no commands, so the table is empty.
#[derive(Clone)]
pub struct KhrExtension381Fn {}
unsafe impl Send for KhrExtension381Fn {}
unsafe impl Sync for KhrExtension381Fn {}
impl KhrExtension381Fn {
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl ExtColorWriteEnableFn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_color_write_enable\0") }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
// Raw Vulkan function-pointer signature for VK_EXT_color_write_enable.
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetColorWriteEnableEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    attachment_count: u32,
    p_color_write_enables: *const Bool32,
);
/// Loaded function-pointer table for `VK_EXT_color_write_enable`.
#[derive(Clone)]
pub struct ExtColorWriteEnableFn {
    pub cmd_set_color_write_enable_ext: PFN_vkCmdSetColorWriteEnableEXT,
}
// SAFETY: the table only holds plain function pointers, which are freely shareable.
unsafe impl Send for ExtColorWriteEnableFn {}
unsafe impl Sync for ExtColorWriteEnableFn {}
+impl ExtColorWriteEnableFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_set_color_write_enable_ext: unsafe {
+ unsafe extern "system" fn cmd_set_color_write_enable_ext(
+ _command_buffer: CommandBuffer,
+ _attachment_count: u32,
+ _p_color_write_enables: *const Bool32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_color_write_enable_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetColorWriteEnableEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_color_write_enable_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
#[doc = "Generated from 'VK_EXT_color_write_enable'"]
impl DynamicState {
    pub const COLOR_WRITE_ENABLE_EXT: Self = Self(1_000_381_000);
}
#[doc = "Generated from 'VK_EXT_color_write_enable'"]
impl StructureType {
    // sType values for extension number 382 (base 1_000_381_000).
    pub const PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT: Self = Self(1_000_381_000);
    pub const PIPELINE_COLOR_WRITE_CREATE_INFO_EXT: Self = Self(1_000_381_001);
}
impl ExtPrimitivesGeneratedQueryFn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_primitives_generated_query\0")
        }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
/// Enum-only extension: defines no commands, so the table is empty.
#[derive(Clone)]
pub struct ExtPrimitivesGeneratedQueryFn {}
// SAFETY: zero-sized type; nothing to share.
unsafe impl Send for ExtPrimitivesGeneratedQueryFn {}
unsafe impl Sync for ExtPrimitivesGeneratedQueryFn {}
impl ExtPrimitivesGeneratedQueryFn {
    // `_f` is unused; the parameter exists for uniformity with other Fn tables.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_EXT_primitives_generated_query'"]
impl QueryType {
    pub const PRIMITIVES_GENERATED_EXT: Self = Self(1_000_382_000);
}
#[doc = "Generated from 'VK_EXT_primitives_generated_query'"]
impl StructureType {
    pub const PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT: Self = Self(1_000_382_000);
}
impl ExtExtension384Fn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_384\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
/// Reserved/placeholder extension: defines no commands, so the table is empty.
#[derive(Clone)]
pub struct ExtExtension384Fn {}
// SAFETY: zero-sized type; nothing to share.
unsafe impl Send for ExtExtension384Fn {}
unsafe impl Sync for ExtExtension384Fn {}
impl ExtExtension384Fn {
    // `_f` is unused; the parameter exists for uniformity with other Fn tables.
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl MesaExtension385Fn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_MESA_extension_385\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
/// Reserved/placeholder extension: defines no commands, so the table is empty.
#[derive(Clone)]
pub struct MesaExtension385Fn {}
unsafe impl Send for MesaExtension385Fn {}
unsafe impl Sync for MesaExtension385Fn {}
impl MesaExtension385Fn {
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl GoogleExtension386Fn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_extension_386\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
}
/// Reserved/placeholder extension: defines no commands, so the table is empty.
#[derive(Clone)]
pub struct GoogleExtension386Fn {}
unsafe impl Send for GoogleExtension386Fn {}
unsafe impl Sync for GoogleExtension386Fn {}
impl GoogleExtension386Fn {
    pub fn load<F>(mut _f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl KhrRayTracingMaintenance1Fn {
    /// Extension name string as advertised in `VkExtensionProperties`.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_ray_tracing_maintenance1\0")
        }
    }
    pub const SPEC_VERSION: u32 = 1u32;
}
// Raw Vulkan function-pointer signature for VK_KHR_ray_tracing_maintenance1.
#[allow(non_camel_case_types)]
pub type PFN_vkCmdTraceRaysIndirect2KHR = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    indirect_device_address: DeviceAddress,
);
/// Loaded function-pointer table for `VK_KHR_ray_tracing_maintenance1`.
#[derive(Clone)]
pub struct KhrRayTracingMaintenance1Fn {
    pub cmd_trace_rays_indirect2_khr: PFN_vkCmdTraceRaysIndirect2KHR,
}
// SAFETY: the table only holds plain function pointers, which are freely shareable.
unsafe impl Send for KhrRayTracingMaintenance1Fn {}
unsafe impl Sync for KhrRayTracingMaintenance1Fn {}
+impl KhrRayTracingMaintenance1Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_trace_rays_indirect2_khr: unsafe {
+ unsafe extern "system" fn cmd_trace_rays_indirect2_khr(
+ _command_buffer: CommandBuffer,
+ _indirect_device_address: DeviceAddress,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_trace_rays_indirect2_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdTraceRaysIndirect2KHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_trace_rays_indirect2_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_ray_tracing_maintenance1'"]
+impl AccessFlags2 {
+ pub const SHADER_BINDING_TABLE_READ_KHR: Self =
+ Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_KHR_ray_tracing_maintenance1'"]
+impl PipelineStageFlags2 {
+ pub const ACCELERATION_STRUCTURE_COPY_KHR: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_KHR_ray_tracing_maintenance1'"]
+impl QueryType {
+ pub const ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR: Self =
+ Self(1_000_386_000);
+ pub const ACCELERATION_STRUCTURE_SIZE_KHR: Self = Self(1_000_386_001);
+}
+#[doc = "Generated from 'VK_KHR_ray_tracing_maintenance1'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR: Self = Self(1_000_386_000);
+}
+impl ExtExtension388Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_388\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_EXT_extension_388`.
+#[derive(Clone)]
+pub struct ExtExtension388Fn {}
+unsafe impl Send for ExtExtension388Fn {}
+unsafe impl Sync for ExtExtension388Fn {}
+impl ExtExtension388Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl ExtGlobalPriorityQueryFn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_global_priority_query\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+/// Function table for `VK_EXT_global_priority_query` (structure-only extension; no commands).
+#[derive(Clone)]
+pub struct ExtGlobalPriorityQueryFn {}
+unsafe impl Send for ExtGlobalPriorityQueryFn {}
+unsafe impl Sync for ExtGlobalPriorityQueryFn {}
+impl ExtGlobalPriorityQueryFn {
+    /// No-op loader: this extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_EXT_global_priority_query'"]
+impl StructureType {
+    // EXT aliases of the corresponding KHR structure-type values.
+    pub const PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT: Self =
+        Self::PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR;
+    pub const QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT: Self =
+        Self::QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR;
+}
+impl ExtExtension390Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_390\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_EXT_extension_390`.
+#[derive(Clone)]
+pub struct ExtExtension390Fn {}
+unsafe impl Send for ExtExtension390Fn {}
+unsafe impl Sync for ExtExtension390Fn {}
+impl ExtExtension390Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl ExtExtension391Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_391\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_EXT_extension_391`.
+#[derive(Clone)]
+pub struct ExtExtension391Fn {}
+unsafe impl Send for ExtExtension391Fn {}
+unsafe impl Sync for ExtExtension391Fn {}
+impl ExtExtension391Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl ExtImageViewMinLodFn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_image_view_min_lod\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+/// Function table for `VK_EXT_image_view_min_lod` (structure-only extension; no commands).
+#[derive(Clone)]
+pub struct ExtImageViewMinLodFn {}
+unsafe impl Send for ExtImageViewMinLodFn {}
+unsafe impl Sync for ExtImageViewMinLodFn {}
+impl ExtImageViewMinLodFn {
+    /// No-op loader: this extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_EXT_image_view_min_lod'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT: Self = Self(1_000_391_000);
+    pub const IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT: Self = Self(1_000_391_001);
+}
+impl ExtMultiDrawFn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_multi_draw\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDrawMultiEXT = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    draw_count: u32,
+    p_vertex_info: *const MultiDrawInfoEXT,
+    instance_count: u32,
+    first_instance: u32,
+    stride: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDrawMultiIndexedEXT = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    draw_count: u32,
+    p_index_info: *const MultiDrawIndexedInfoEXT,
+    instance_count: u32,
+    first_instance: u32,
+    stride: u32,
+    p_vertex_offset: *const i32,
+);
+/// Function table for `VK_EXT_multi_draw` device commands.
+#[derive(Clone)]
+pub struct ExtMultiDrawFn {
+    pub cmd_draw_multi_ext: PFN_vkCmdDrawMultiEXT,
+    pub cmd_draw_multi_indexed_ext: PFN_vkCmdDrawMultiIndexedEXT,
+}
+unsafe impl Send for ExtMultiDrawFn {}
+unsafe impl Sync for ExtMultiDrawFn {}
+impl ExtMultiDrawFn {
+    /// Resolves the extension's entry points through `_f` (e.g. `vkGetDeviceProcAddr`).
+    /// When the loader returns NULL, the slot is filled with a stub that panics on call.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            cmd_draw_multi_ext: unsafe {
+                // Fallback stub used when the driver does not expose the command.
+                unsafe extern "system" fn cmd_draw_multi_ext(
+                    _command_buffer: CommandBuffer,
+                    _draw_count: u32,
+                    _p_vertex_info: *const MultiDrawInfoEXT,
+                    _instance_count: u32,
+                    _first_instance: u32,
+                    _stride: u32,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_draw_multi_ext)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawMultiEXT\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_draw_multi_ext
+                } else {
+                    // SAFETY: non-null pointer resolved for this exact command name.
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_draw_multi_indexed_ext: unsafe {
+                unsafe extern "system" fn cmd_draw_multi_indexed_ext(
+                    _command_buffer: CommandBuffer,
+                    _draw_count: u32,
+                    _p_index_info: *const MultiDrawIndexedInfoEXT,
+                    _instance_count: u32,
+                    _first_instance: u32,
+                    _stride: u32,
+                    _p_vertex_offset: *const i32,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_draw_multi_indexed_ext)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawMultiIndexedEXT\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_draw_multi_indexed_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+#[doc = "Generated from 'VK_EXT_multi_draw'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT: Self = Self(1_000_392_000);
+    pub const PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT: Self = Self(1_000_392_001);
+}
+impl ExtImage2dViewOf3dFn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_image_2d_view_of_3d\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+/// Function table for `VK_EXT_image_2d_view_of_3d` (structure-only extension; no commands).
+#[derive(Clone)]
+pub struct ExtImage2dViewOf3dFn {}
+unsafe impl Send for ExtImage2dViewOf3dFn {}
+unsafe impl Sync for ExtImage2dViewOf3dFn {}
+impl ExtImage2dViewOf3dFn {
+    /// No-op loader: this extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_EXT_image_2d_view_of_3d'"]
+impl ImageCreateFlags {
+    #[doc = "Image is created with a layout where individual slices are capable of being used as 2D images"]
+    pub const TYPE_2D_VIEW_COMPATIBLE_EXT: Self = Self(0b10_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_image_2d_view_of_3d'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT: Self = Self(1_000_393_000);
+}
+impl KhrPortabilityEnumerationFn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe {
+            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_portability_enumeration\0")
+        }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+/// Function table for `VK_KHR_portability_enumeration` (flag-only extension; no commands).
+#[derive(Clone)]
+pub struct KhrPortabilityEnumerationFn {}
+unsafe impl Send for KhrPortabilityEnumerationFn {}
+unsafe impl Sync for KhrPortabilityEnumerationFn {}
+impl KhrPortabilityEnumerationFn {
+    /// No-op loader: this extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_KHR_portability_enumeration'"]
+impl InstanceCreateFlags {
+    pub const ENUMERATE_PORTABILITY_KHR: Self = Self(0b1);
+}
+impl KhrExtension396Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_396\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_KHR_extension_396`.
+#[derive(Clone)]
+pub struct KhrExtension396Fn {}
+unsafe impl Send for KhrExtension396Fn {}
+unsafe impl Sync for KhrExtension396Fn {}
+impl KhrExtension396Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl ExtOpacityMicromapFn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_opacity_micromap\0") }
+    }
+    pub const SPEC_VERSION: u32 = 2u32;
+}
+// Raw function-pointer signatures for the VK_EXT_opacity_micromap device commands.
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateMicromapEXT = unsafe extern "system" fn(
+    device: Device,
+    p_create_info: *const MicromapCreateInfoEXT,
+    p_allocator: *const AllocationCallbacks,
+    p_micromap: *mut MicromapEXT,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyMicromapEXT = unsafe extern "system" fn(
+    device: Device,
+    micromap: MicromapEXT,
+    p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBuildMicromapsEXT = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    info_count: u32,
+    p_infos: *const MicromapBuildInfoEXT,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkBuildMicromapsEXT = unsafe extern "system" fn(
+    device: Device,
+    deferred_operation: DeferredOperationKHR,
+    info_count: u32,
+    p_infos: *const MicromapBuildInfoEXT,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCopyMicromapEXT = unsafe extern "system" fn(
+    device: Device,
+    deferred_operation: DeferredOperationKHR,
+    p_info: *const CopyMicromapInfoEXT,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCopyMicromapToMemoryEXT = unsafe extern "system" fn(
+    device: Device,
+    deferred_operation: DeferredOperationKHR,
+    p_info: *const CopyMicromapToMemoryInfoEXT,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCopyMemoryToMicromapEXT = unsafe extern "system" fn(
+    device: Device,
+    deferred_operation: DeferredOperationKHR,
+    p_info: *const CopyMemoryToMicromapInfoEXT,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkWriteMicromapsPropertiesEXT = unsafe extern "system" fn(
+    device: Device,
+    micromap_count: u32,
+    p_micromaps: *const MicromapEXT,
+    query_type: QueryType,
+    data_size: usize,
+    p_data: *mut c_void,
+    stride: usize,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyMicromapEXT =
+    unsafe extern "system" fn(command_buffer: CommandBuffer, p_info: *const CopyMicromapInfoEXT);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyMicromapToMemoryEXT = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    p_info: *const CopyMicromapToMemoryInfoEXT,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyMemoryToMicromapEXT = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    p_info: *const CopyMemoryToMicromapInfoEXT,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdWriteMicromapsPropertiesEXT = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    micromap_count: u32,
+    p_micromaps: *const MicromapEXT,
+    query_type: QueryType,
+    query_pool: QueryPool,
+    first_query: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDeviceMicromapCompatibilityEXT = unsafe extern "system" fn(
+    device: Device,
+    p_version_info: *const MicromapVersionInfoEXT,
+    p_compatibility: *mut AccelerationStructureCompatibilityKHR,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetMicromapBuildSizesEXT = unsafe extern "system" fn(
+    device: Device,
+    build_type: AccelerationStructureBuildTypeKHR,
+    p_build_info: *const MicromapBuildInfoEXT,
+    p_size_info: *mut MicromapBuildSizesInfoEXT,
+);
+/// Function table for `VK_EXT_opacity_micromap` device commands.
+#[derive(Clone)]
+pub struct ExtOpacityMicromapFn {
+    pub create_micromap_ext: PFN_vkCreateMicromapEXT,
+    pub destroy_micromap_ext: PFN_vkDestroyMicromapEXT,
+    pub cmd_build_micromaps_ext: PFN_vkCmdBuildMicromapsEXT,
+    pub build_micromaps_ext: PFN_vkBuildMicromapsEXT,
+    pub copy_micromap_ext: PFN_vkCopyMicromapEXT,
+    pub copy_micromap_to_memory_ext: PFN_vkCopyMicromapToMemoryEXT,
+    pub copy_memory_to_micromap_ext: PFN_vkCopyMemoryToMicromapEXT,
+    pub write_micromaps_properties_ext: PFN_vkWriteMicromapsPropertiesEXT,
+    pub cmd_copy_micromap_ext: PFN_vkCmdCopyMicromapEXT,
+    pub cmd_copy_micromap_to_memory_ext: PFN_vkCmdCopyMicromapToMemoryEXT,
+    pub cmd_copy_memory_to_micromap_ext: PFN_vkCmdCopyMemoryToMicromapEXT,
+    pub cmd_write_micromaps_properties_ext: PFN_vkCmdWriteMicromapsPropertiesEXT,
+    pub get_device_micromap_compatibility_ext: PFN_vkGetDeviceMicromapCompatibilityEXT,
+    pub get_micromap_build_sizes_ext: PFN_vkGetMicromapBuildSizesEXT,
+}
+unsafe impl Send for ExtOpacityMicromapFn {}
+unsafe impl Sync for ExtOpacityMicromapFn {}
+impl ExtOpacityMicromapFn {
+    /// Resolves every entry point through `_f` (e.g. `vkGetDeviceProcAddr`).
+    /// Each slot whose loader lookup returns NULL is filled with a stub that
+    /// panics when called, so a missing command fails loudly at call time.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            create_micromap_ext: unsafe {
+                // Fallback stub used when the driver does not expose the command.
+                unsafe extern "system" fn create_micromap_ext(
+                    _device: Device,
+                    _p_create_info: *const MicromapCreateInfoEXT,
+                    _p_allocator: *const AllocationCallbacks,
+                    _p_micromap: *mut MicromapEXT,
+                ) -> Result {
+                    panic!(concat!("Unable to load ", stringify!(create_micromap_ext)))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateMicromapEXT\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    create_micromap_ext
+                } else {
+                    // SAFETY: non-null pointer resolved for this exact command name,
+                    // so it matches the PFN signature.
+                    ::std::mem::transmute(val)
+                }
+            },
+            destroy_micromap_ext: unsafe {
+                unsafe extern "system" fn destroy_micromap_ext(
+                    _device: Device,
+                    _micromap: MicromapEXT,
+                    _p_allocator: *const AllocationCallbacks,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(destroy_micromap_ext)))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyMicromapEXT\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    destroy_micromap_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_build_micromaps_ext: unsafe {
+                unsafe extern "system" fn cmd_build_micromaps_ext(
+                    _command_buffer: CommandBuffer,
+                    _info_count: u32,
+                    _p_infos: *const MicromapBuildInfoEXT,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_build_micromaps_ext)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBuildMicromapsEXT\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_build_micromaps_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            build_micromaps_ext: unsafe {
+                unsafe extern "system" fn build_micromaps_ext(
+                    _device: Device,
+                    _deferred_operation: DeferredOperationKHR,
+                    _info_count: u32,
+                    _p_infos: *const MicromapBuildInfoEXT,
+                ) -> Result {
+                    panic!(concat!("Unable to load ", stringify!(build_micromaps_ext)))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkBuildMicromapsEXT\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    build_micromaps_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            copy_micromap_ext: unsafe {
+                unsafe extern "system" fn copy_micromap_ext(
+                    _device: Device,
+                    _deferred_operation: DeferredOperationKHR,
+                    _p_info: *const CopyMicromapInfoEXT,
+                ) -> Result {
+                    panic!(concat!("Unable to load ", stringify!(copy_micromap_ext)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCopyMicromapEXT\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    copy_micromap_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            copy_micromap_to_memory_ext: unsafe {
+                unsafe extern "system" fn copy_micromap_to_memory_ext(
+                    _device: Device,
+                    _deferred_operation: DeferredOperationKHR,
+                    _p_info: *const CopyMicromapToMemoryInfoEXT,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(copy_micromap_to_memory_ext)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCopyMicromapToMemoryEXT\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    copy_micromap_to_memory_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            copy_memory_to_micromap_ext: unsafe {
+                unsafe extern "system" fn copy_memory_to_micromap_ext(
+                    _device: Device,
+                    _deferred_operation: DeferredOperationKHR,
+                    _p_info: *const CopyMemoryToMicromapInfoEXT,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(copy_memory_to_micromap_ext)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCopyMemoryToMicromapEXT\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    copy_memory_to_micromap_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            write_micromaps_properties_ext: unsafe {
+                unsafe extern "system" fn write_micromaps_properties_ext(
+                    _device: Device,
+                    _micromap_count: u32,
+                    _p_micromaps: *const MicromapEXT,
+                    _query_type: QueryType,
+                    _data_size: usize,
+                    _p_data: *mut c_void,
+                    _stride: usize,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(write_micromaps_properties_ext)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkWriteMicromapsPropertiesEXT\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    write_micromaps_properties_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_copy_micromap_ext: unsafe {
+                unsafe extern "system" fn cmd_copy_micromap_ext(
+                    _command_buffer: CommandBuffer,
+                    _p_info: *const CopyMicromapInfoEXT,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_copy_micromap_ext)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyMicromapEXT\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_copy_micromap_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_copy_micromap_to_memory_ext: unsafe {
+                unsafe extern "system" fn cmd_copy_micromap_to_memory_ext(
+                    _command_buffer: CommandBuffer,
+                    _p_info: *const CopyMicromapToMemoryInfoEXT,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_copy_micromap_to_memory_ext)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdCopyMicromapToMemoryEXT\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_copy_micromap_to_memory_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_copy_memory_to_micromap_ext: unsafe {
+                unsafe extern "system" fn cmd_copy_memory_to_micromap_ext(
+                    _command_buffer: CommandBuffer,
+                    _p_info: *const CopyMemoryToMicromapInfoEXT,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_copy_memory_to_micromap_ext)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdCopyMemoryToMicromapEXT\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_copy_memory_to_micromap_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_write_micromaps_properties_ext: unsafe {
+                unsafe extern "system" fn cmd_write_micromaps_properties_ext(
+                    _command_buffer: CommandBuffer,
+                    _micromap_count: u32,
+                    _p_micromaps: *const MicromapEXT,
+                    _query_type: QueryType,
+                    _query_pool: QueryPool,
+                    _first_query: u32,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_write_micromaps_properties_ext)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdWriteMicromapsPropertiesEXT\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_write_micromaps_properties_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_device_micromap_compatibility_ext: unsafe {
+                unsafe extern "system" fn get_device_micromap_compatibility_ext(
+                    _device: Device,
+                    _p_version_info: *const MicromapVersionInfoEXT,
+                    _p_compatibility: *mut AccelerationStructureCompatibilityKHR,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_device_micromap_compatibility_ext)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetDeviceMicromapCompatibilityEXT\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_device_micromap_compatibility_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_micromap_build_sizes_ext: unsafe {
+                unsafe extern "system" fn get_micromap_build_sizes_ext(
+                    _device: Device,
+                    _build_type: AccelerationStructureBuildTypeKHR,
+                    _p_build_info: *const MicromapBuildInfoEXT,
+                    _p_size_info: *mut MicromapBuildSizesInfoEXT,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_micromap_build_sizes_ext)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetMicromapBuildSizesEXT\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_micromap_build_sizes_ext
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+// Flag bits and enum values contributed by VK_EXT_opacity_micromap to core
+// bitmask/enum types; values mirror the Vulkan registry (extension number 397,
+// enum offsets 1_000_396_xxx).
+#[doc = "Generated from 'VK_EXT_opacity_micromap'"]
+impl AccessFlags2 {
+    pub const MICROMAP_READ_EXT: Self =
+        Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+    pub const MICROMAP_WRITE_EXT: Self =
+        Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_opacity_micromap'"]
+impl BufferUsageFlags {
+    pub const MICROMAP_BUILD_INPUT_READ_ONLY_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000);
+    pub const MICROMAP_STORAGE_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_opacity_micromap'"]
+impl BuildAccelerationStructureFlagsKHR {
+    pub const ALLOW_OPACITY_MICROMAP_UPDATE_EXT: Self = Self(0b100_0000);
+    pub const ALLOW_DISABLE_OPACITY_MICROMAPS_EXT: Self = Self(0b1000_0000);
+    pub const ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT: Self = Self(0b1_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_opacity_micromap'"]
+impl GeometryInstanceFlagsKHR {
+    pub const FORCE_OPACITY_MICROMAP_2_STATE_EXT: Self = Self(0b1_0000);
+    pub const DISABLE_OPACITY_MICROMAPS_EXT: Self = Self(0b10_0000);
+}
+#[doc = "Generated from 'VK_EXT_opacity_micromap'"]
+impl ObjectType {
+    pub const MICROMAP_EXT: Self = Self(1_000_396_000);
+}
+#[doc = "Generated from 'VK_EXT_opacity_micromap'"]
+impl PipelineCreateFlags {
+    pub const RAY_TRACING_OPACITY_MICROMAP_EXT: Self = Self(0b1_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_opacity_micromap'"]
+impl PipelineStageFlags2 {
+    pub const MICROMAP_BUILD_EXT: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_opacity_micromap'"]
+impl QueryType {
+    pub const MICROMAP_SERIALIZATION_SIZE_EXT: Self = Self(1_000_396_000);
+    pub const MICROMAP_COMPACTED_SIZE_EXT: Self = Self(1_000_396_001);
+}
+#[doc = "Generated from 'VK_EXT_opacity_micromap'"]
+impl StructureType {
+    pub const MICROMAP_BUILD_INFO_EXT: Self = Self(1_000_396_000);
+    pub const MICROMAP_VERSION_INFO_EXT: Self = Self(1_000_396_001);
+    pub const COPY_MICROMAP_INFO_EXT: Self = Self(1_000_396_002);
+    pub const COPY_MICROMAP_TO_MEMORY_INFO_EXT: Self = Self(1_000_396_003);
+    pub const COPY_MEMORY_TO_MICROMAP_INFO_EXT: Self = Self(1_000_396_004);
+    pub const PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT: Self = Self(1_000_396_005);
+    pub const PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT: Self = Self(1_000_396_006);
+    pub const MICROMAP_CREATE_INFO_EXT: Self = Self(1_000_396_007);
+    pub const MICROMAP_BUILD_SIZES_INFO_EXT: Self = Self(1_000_396_008);
+    pub const ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT: Self = Self(1_000_396_009);
+}
+impl NvExtension398Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_398\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_NV_extension_398`.
+#[derive(Clone)]
+pub struct NvExtension398Fn {}
+unsafe impl Send for NvExtension398Fn {}
+unsafe impl Sync for NvExtension398Fn {}
+impl NvExtension398Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+// Flag bits reserved by the placeholder extension VK_NV_extension_398.
+#[doc = "Generated from 'VK_NV_extension_398'"]
+impl BuildAccelerationStructureFlagsKHR {
+    pub const RESERVED_9_NV: Self = Self(0b10_0000_0000);
+    pub const RESERVED_10_NV: Self = Self(0b100_0000_0000);
+}
+#[doc = "Generated from 'VK_NV_extension_398'"]
+impl PipelineCreateFlags {
+    pub const RESERVED_28_NV: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000);
+}
+impl JuiceExtension399Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_JUICE_extension_399\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_JUICE_extension_399`.
+#[derive(Clone)]
+pub struct JuiceExtension399Fn {}
+unsafe impl Send for JuiceExtension399Fn {}
+unsafe impl Sync for JuiceExtension399Fn {}
+impl JuiceExtension399Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl JuiceExtension400Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_JUICE_extension_400\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_JUICE_extension_400`.
+#[derive(Clone)]
+pub struct JuiceExtension400Fn {}
+unsafe impl Send for JuiceExtension400Fn {}
+unsafe impl Sync for JuiceExtension400Fn {}
+impl JuiceExtension400Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl ExtLoadStoreOpNoneFn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_load_store_op_none\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+/// Function table for `VK_EXT_load_store_op_none` (enum-only extension; no commands).
+#[derive(Clone)]
+pub struct ExtLoadStoreOpNoneFn {}
+unsafe impl Send for ExtLoadStoreOpNoneFn {}
+unsafe impl Sync for ExtLoadStoreOpNoneFn {}
+impl ExtLoadStoreOpNoneFn {
+    /// No-op loader: this extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_EXT_load_store_op_none'"]
+impl AttachmentLoadOp {
+    pub const NONE_EXT: Self = Self(1_000_400_000);
+}
+#[doc = "Generated from 'VK_EXT_load_store_op_none'"]
+impl AttachmentStoreOp {
+    // Alias of the core (promoted) NONE value.
+    pub const NONE_EXT: Self = Self::NONE;
+}
+impl FbExtension402Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_FB_extension_402\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_FB_extension_402`.
+#[derive(Clone)]
+pub struct FbExtension402Fn {}
+unsafe impl Send for FbExtension402Fn {}
+unsafe impl Sync for FbExtension402Fn {}
+impl FbExtension402Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl FbExtension403Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_FB_extension_403\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_FB_extension_403`.
+#[derive(Clone)]
+pub struct FbExtension403Fn {}
+unsafe impl Send for FbExtension403Fn {}
+unsafe impl Sync for FbExtension403Fn {}
+impl FbExtension403Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl FbExtension404Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_FB_extension_404\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_FB_extension_404`.
+#[derive(Clone)]
+pub struct FbExtension404Fn {}
+unsafe impl Send for FbExtension404Fn {}
+unsafe impl Sync for FbExtension404Fn {}
+impl FbExtension404Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl HuaweiExtension405Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_HUAWEI_extension_405\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_HUAWEI_extension_405`.
+#[derive(Clone)]
+pub struct HuaweiExtension405Fn {}
+unsafe impl Send for HuaweiExtension405Fn {}
+unsafe impl Sync for HuaweiExtension405Fn {}
+impl HuaweiExtension405Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_HUAWEI_extension_405'"]
+impl PipelineStageFlags2 {
+    // NOTE: "RESEVED" (sic) mirrors the misspelled name in the Vulkan registry;
+    // do not rename, as this constant is part of the generated public API.
+    pub const RESEVED_41_HUAWEI: Self =
+        Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_HUAWEI_extension_405'"]
+impl ShaderStageFlags {
+    pub const RESERVED_19_HUAWEI: Self = Self(0b1000_0000_0000_0000_0000);
+}
+impl HuaweiExtension406Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_HUAWEI_extension_406\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_HUAWEI_extension_406`.
+#[derive(Clone)]
+pub struct HuaweiExtension406Fn {}
+unsafe impl Send for HuaweiExtension406Fn {}
+unsafe impl Sync for HuaweiExtension406Fn {}
+impl HuaweiExtension406Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl GgpExtension407Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GGP_extension_407\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_GGP_extension_407`.
+#[derive(Clone)]
+pub struct GgpExtension407Fn {}
+unsafe impl Send for GgpExtension407Fn {}
+unsafe impl Sync for GgpExtension407Fn {}
+impl GgpExtension407Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl GgpExtension408Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GGP_extension_408\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_GGP_extension_408`.
+#[derive(Clone)]
+pub struct GgpExtension408Fn {}
+unsafe impl Send for GgpExtension408Fn {}
+unsafe impl Sync for GgpExtension408Fn {}
+impl GgpExtension408Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl GgpExtension409Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GGP_extension_409\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_GGP_extension_409`.
+#[derive(Clone)]
+pub struct GgpExtension409Fn {}
+unsafe impl Send for GgpExtension409Fn {}
+unsafe impl Sync for GgpExtension409Fn {}
+impl GgpExtension409Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl GgpExtension410Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GGP_extension_410\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_GGP_extension_410`.
+#[derive(Clone)]
+pub struct GgpExtension410Fn {}
+unsafe impl Send for GgpExtension410Fn {}
+unsafe impl Sync for GgpExtension410Fn {}
+impl GgpExtension410Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl GgpExtension411Fn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GGP_extension_411\0") }
+    }
+    pub const SPEC_VERSION: u32 = 0u32;
+}
+/// Empty function table for the reserved placeholder extension `VK_GGP_extension_411`.
+#[derive(Clone)]
+pub struct GgpExtension411Fn {}
+unsafe impl Send for GgpExtension411Fn {}
+unsafe impl Sync for GgpExtension411Fn {}
+impl GgpExtension411Fn {
+    /// No-op loader: this placeholder extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+impl ExtBorderColorSwizzleFn {
+    /// Extension name as a NUL-terminated C string.
+    #[inline]
+    pub const fn name() -> &'static ::std::ffi::CStr {
+        // SAFETY: the byte literal is NUL-terminated with no interior NULs.
+        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_border_color_swizzle\0") }
+    }
+    pub const SPEC_VERSION: u32 = 1u32;
+}
+/// Function table for `VK_EXT_border_color_swizzle` (structure-only extension; no commands).
+#[derive(Clone)]
+pub struct ExtBorderColorSwizzleFn {}
+unsafe impl Send for ExtBorderColorSwizzleFn {}
+unsafe impl Sync for ExtBorderColorSwizzleFn {}
+impl ExtBorderColorSwizzleFn {
+    /// No-op loader: this extension defines no entry points.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[doc = "Generated from 'VK_EXT_border_color_swizzle'"]
+impl StructureType {
+    pub const PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT: Self = Self(1_000_411_000);
+    pub const SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT: Self = Self(1_000_411_001);
+}
+impl ExtPageableDeviceLocalMemoryFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_EXT_pageable_device_local_memory\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkSetDeviceMemoryPriorityEXT =
+ unsafe extern "system" fn(device: Device, memory: DeviceMemory, priority: f32);
+#[derive(Clone)]
+pub struct ExtPageableDeviceLocalMemoryFn {
+ pub set_device_memory_priority_ext: PFN_vkSetDeviceMemoryPriorityEXT,
+}
+unsafe impl Send for ExtPageableDeviceLocalMemoryFn {}
+unsafe impl Sync for ExtPageableDeviceLocalMemoryFn {}
+impl ExtPageableDeviceLocalMemoryFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ set_device_memory_priority_ext: unsafe {
+ unsafe extern "system" fn set_device_memory_priority_ext(
+ _device: Device,
+ _memory: DeviceMemory,
+ _priority: f32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(set_device_memory_priority_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkSetDeviceMemoryPriorityEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ set_device_memory_priority_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_pageable_device_local_memory'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT: Self = Self(1_000_412_000);
+}
+impl KhrMaintenance4Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_maintenance4\0") }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDeviceBufferMemoryRequirements = unsafe extern "system" fn(
+ device: Device,
+ p_info: *const DeviceBufferMemoryRequirements,
+ p_memory_requirements: *mut MemoryRequirements2,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDeviceImageMemoryRequirements = unsafe extern "system" fn(
+ device: Device,
+ p_info: *const DeviceImageMemoryRequirements,
+ p_memory_requirements: *mut MemoryRequirements2,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDeviceImageSparseMemoryRequirements = unsafe extern "system" fn(
+ device: Device,
+ p_info: *const DeviceImageMemoryRequirements,
+ p_sparse_memory_requirement_count: *mut u32,
+ p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2,
+);
+#[derive(Clone)]
+pub struct KhrMaintenance4Fn {
+ pub get_device_buffer_memory_requirements_khr: PFN_vkGetDeviceBufferMemoryRequirements,
+ pub get_device_image_memory_requirements_khr: PFN_vkGetDeviceImageMemoryRequirements,
+ pub get_device_image_sparse_memory_requirements_khr:
+ PFN_vkGetDeviceImageSparseMemoryRequirements,
+}
+unsafe impl Send for KhrMaintenance4Fn {}
+unsafe impl Sync for KhrMaintenance4Fn {}
+impl KhrMaintenance4Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_device_buffer_memory_requirements_khr: unsafe {
+ unsafe extern "system" fn get_device_buffer_memory_requirements_khr(
+ _device: Device,
+ _p_info: *const DeviceBufferMemoryRequirements,
+ _p_memory_requirements: *mut MemoryRequirements2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_device_buffer_memory_requirements_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDeviceBufferMemoryRequirementsKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_buffer_memory_requirements_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_device_image_memory_requirements_khr: unsafe {
+ unsafe extern "system" fn get_device_image_memory_requirements_khr(
+ _device: Device,
+ _p_info: *const DeviceImageMemoryRequirements,
+ _p_memory_requirements: *mut MemoryRequirements2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_device_image_memory_requirements_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDeviceImageMemoryRequirementsKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_image_memory_requirements_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_device_image_sparse_memory_requirements_khr: unsafe {
+ unsafe extern "system" fn get_device_image_sparse_memory_requirements_khr(
+ _device: Device,
+ _p_info: *const DeviceImageMemoryRequirements,
+ _p_sparse_memory_requirement_count: *mut u32,
+ _p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_device_image_sparse_memory_requirements_khr)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDeviceImageSparseMemoryRequirementsKHR\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_image_sparse_memory_requirements_khr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_KHR_maintenance4'"]
+impl ImageAspectFlags {
+ pub const NONE_KHR: Self = Self::NONE;
+}
+#[doc = "Generated from 'VK_KHR_maintenance4'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR: Self =
+ Self::PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES;
+ pub const PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR: Self =
+ Self::PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES;
+ pub const DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR: Self = Self::DEVICE_BUFFER_MEMORY_REQUIREMENTS;
+ pub const DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR: Self = Self::DEVICE_IMAGE_MEMORY_REQUIREMENTS;
+}
+impl HuaweiExtension415Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_HUAWEI_extension_415\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct HuaweiExtension415Fn {}
+unsafe impl Send for HuaweiExtension415Fn {}
+unsafe impl Sync for HuaweiExtension415Fn {}
+impl HuaweiExtension415Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ArmExtension416Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_ARM_extension_416\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ArmExtension416Fn {}
+unsafe impl Send for ArmExtension416Fn {}
+unsafe impl Sync for ArmExtension416Fn {}
+impl ArmExtension416Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl KhrExtension417Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_417\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct KhrExtension417Fn {}
+unsafe impl Send for KhrExtension417Fn {}
+unsafe impl Sync for KhrExtension417Fn {}
+impl KhrExtension417Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ArmExtension418Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_ARM_extension_418\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ArmExtension418Fn {}
+unsafe impl Send for ArmExtension418Fn {}
+unsafe impl Sync for ArmExtension418Fn {}
+impl ArmExtension418Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension419Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_419\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension419Fn {}
+unsafe impl Send for ExtExtension419Fn {}
+unsafe impl Sync for ExtExtension419Fn {}
+impl ExtExtension419Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_extension_419'"]
+impl ImageViewCreateFlags {
+ pub const RESERVED_3_EXT: Self = Self(0b1000);
+}
+impl ExtExtension420Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_420\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension420Fn {}
+unsafe impl Send for ExtExtension420Fn {}
+unsafe impl Sync for ExtExtension420Fn {}
+impl ExtExtension420Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_extension_420'"]
+impl SwapchainCreateFlagsKHR {
+ pub const RESERVED_4_EXT: Self = Self(0b1_0000);
+}
+impl ValveDescriptorSetHostMappingFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_VALVE_descriptor_set_host_mapping\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE = unsafe extern "system" fn(
+ device: Device,
+ p_binding_reference: *const DescriptorSetBindingReferenceVALVE,
+ p_host_mapping: *mut DescriptorSetLayoutHostMappingInfoVALVE,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDescriptorSetHostMappingVALVE = unsafe extern "system" fn(
+ device: Device,
+ descriptor_set: DescriptorSet,
+ pp_data: *mut *mut c_void,
+);
+#[derive(Clone)]
+pub struct ValveDescriptorSetHostMappingFn {
+ pub get_descriptor_set_layout_host_mapping_info_valve:
+ PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE,
+ pub get_descriptor_set_host_mapping_valve: PFN_vkGetDescriptorSetHostMappingVALVE,
+}
+unsafe impl Send for ValveDescriptorSetHostMappingFn {}
+unsafe impl Sync for ValveDescriptorSetHostMappingFn {}
+impl ValveDescriptorSetHostMappingFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_descriptor_set_layout_host_mapping_info_valve: unsafe {
+ unsafe extern "system" fn get_descriptor_set_layout_host_mapping_info_valve(
+ _device: Device,
+ _p_binding_reference: *const DescriptorSetBindingReferenceVALVE,
+ _p_host_mapping: *mut DescriptorSetLayoutHostMappingInfoVALVE,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_descriptor_set_layout_host_mapping_info_valve)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDescriptorSetLayoutHostMappingInfoVALVE\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_descriptor_set_layout_host_mapping_info_valve
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_descriptor_set_host_mapping_valve: unsafe {
+ unsafe extern "system" fn get_descriptor_set_host_mapping_valve(
+ _device: Device,
+ _descriptor_set: DescriptorSet,
+ _pp_data: *mut *mut c_void,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_descriptor_set_host_mapping_valve)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDescriptorSetHostMappingVALVE\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_descriptor_set_host_mapping_valve
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_VALVE_descriptor_set_host_mapping'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE: Self =
+ Self(1_000_420_000);
+ pub const DESCRIPTOR_SET_BINDING_REFERENCE_VALVE: Self = Self(1_000_420_001);
+ pub const DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE: Self = Self(1_000_420_002);
+}
+impl ExtDepthClampZeroOneFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_depth_clamp_zero_one\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtDepthClampZeroOneFn {}
+unsafe impl Send for ExtDepthClampZeroOneFn {}
+unsafe impl Sync for ExtDepthClampZeroOneFn {}
+impl ExtDepthClampZeroOneFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_depth_clamp_zero_one'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT: Self = Self(1_000_421_000);
+}
+impl ExtNonSeamlessCubeMapFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_non_seamless_cube_map\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtNonSeamlessCubeMapFn {}
+unsafe impl Send for ExtNonSeamlessCubeMapFn {}
+unsafe impl Sync for ExtNonSeamlessCubeMapFn {}
+impl ExtNonSeamlessCubeMapFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_non_seamless_cube_map'"]
+impl SamplerCreateFlags {
+ pub const NON_SEAMLESS_CUBE_MAP_EXT: Self = Self(0b100);
+}
+#[doc = "Generated from 'VK_EXT_non_seamless_cube_map'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT: Self = Self(1_000_422_000);
+}
+impl ArmExtension424Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_ARM_extension_424\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ArmExtension424Fn {}
+unsafe impl Send for ArmExtension424Fn {}
+unsafe impl Sync for ArmExtension424Fn {}
+impl ArmExtension424Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ArmExtension425Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_ARM_extension_425\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ArmExtension425Fn {}
+unsafe impl Send for ArmExtension425Fn {}
+unsafe impl Sync for ArmExtension425Fn {}
+impl ArmExtension425Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl QcomFragmentDensityMapOffsetFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_QCOM_fragment_density_map_offset\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct QcomFragmentDensityMapOffsetFn {}
+unsafe impl Send for QcomFragmentDensityMapOffsetFn {}
+unsafe impl Sync for QcomFragmentDensityMapOffsetFn {}
+impl QcomFragmentDensityMapOffsetFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_QCOM_fragment_density_map_offset'"]
+impl ImageCreateFlags {
+ pub const FRAGMENT_DENSITY_MAP_OFFSET_QCOM: Self = Self(0b1000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_QCOM_fragment_density_map_offset'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM: Self = Self(1_000_425_000);
+ pub const PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM: Self =
+ Self(1_000_425_001);
+ pub const SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM: Self = Self(1_000_425_002);
+}
+impl NvCopyMemoryIndirectFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_copy_memory_indirect\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyMemoryIndirectNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ copy_buffer_address: DeviceAddress,
+ copy_count: u32,
+ stride: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyMemoryToImageIndirectNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ copy_buffer_address: DeviceAddress,
+ copy_count: u32,
+ stride: u32,
+ dst_image: Image,
+ dst_image_layout: ImageLayout,
+ p_image_subresources: *const ImageSubresourceLayers,
+);
+#[derive(Clone)]
+pub struct NvCopyMemoryIndirectFn {
+ pub cmd_copy_memory_indirect_nv: PFN_vkCmdCopyMemoryIndirectNV,
+ pub cmd_copy_memory_to_image_indirect_nv: PFN_vkCmdCopyMemoryToImageIndirectNV,
+}
+unsafe impl Send for NvCopyMemoryIndirectFn {}
+unsafe impl Sync for NvCopyMemoryIndirectFn {}
+impl NvCopyMemoryIndirectFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_copy_memory_indirect_nv: unsafe {
+ unsafe extern "system" fn cmd_copy_memory_indirect_nv(
+ _command_buffer: CommandBuffer,
+ _copy_buffer_address: DeviceAddress,
+ _copy_count: u32,
+ _stride: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_copy_memory_indirect_nv)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyMemoryIndirectNV\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_copy_memory_indirect_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_copy_memory_to_image_indirect_nv: unsafe {
+ unsafe extern "system" fn cmd_copy_memory_to_image_indirect_nv(
+ _command_buffer: CommandBuffer,
+ _copy_buffer_address: DeviceAddress,
+ _copy_count: u32,
+ _stride: u32,
+ _dst_image: Image,
+ _dst_image_layout: ImageLayout,
+ _p_image_subresources: *const ImageSubresourceLayers,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_copy_memory_to_image_indirect_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdCopyMemoryToImageIndirectNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_copy_memory_to_image_indirect_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_NV_copy_memory_indirect'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV: Self = Self(1_000_426_000);
+ pub const PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV: Self = Self(1_000_426_001);
+}
+impl NvMemoryDecompressionFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_memory_decompression\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDecompressMemoryNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ decompress_region_count: u32,
+ p_decompress_memory_regions: *const DecompressMemoryRegionNV,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDecompressMemoryIndirectCountNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ indirect_commands_address: DeviceAddress,
+ indirect_commands_count_address: DeviceAddress,
+ stride: u32,
+);
+#[derive(Clone)]
+pub struct NvMemoryDecompressionFn {
+ pub cmd_decompress_memory_nv: PFN_vkCmdDecompressMemoryNV,
+ pub cmd_decompress_memory_indirect_count_nv: PFN_vkCmdDecompressMemoryIndirectCountNV,
+}
+unsafe impl Send for NvMemoryDecompressionFn {}
+unsafe impl Sync for NvMemoryDecompressionFn {}
+impl NvMemoryDecompressionFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_decompress_memory_nv: unsafe {
+ unsafe extern "system" fn cmd_decompress_memory_nv(
+ _command_buffer: CommandBuffer,
+ _decompress_region_count: u32,
+ _p_decompress_memory_regions: *const DecompressMemoryRegionNV,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_decompress_memory_nv)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDecompressMemoryNV\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_decompress_memory_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_decompress_memory_indirect_count_nv: unsafe {
+ unsafe extern "system" fn cmd_decompress_memory_indirect_count_nv(
+ _command_buffer: CommandBuffer,
+ _indirect_commands_address: DeviceAddress,
+ _indirect_commands_count_address: DeviceAddress,
+ _stride: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_decompress_memory_indirect_count_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdDecompressMemoryIndirectCountNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_decompress_memory_indirect_count_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_NV_memory_decompression'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV: Self = Self(1_000_427_000);
+ pub const PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV: Self = Self(1_000_427_001);
+}
+impl NvExtension429Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_429\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension429Fn {}
+unsafe impl Send for NvExtension429Fn {}
+unsafe impl Sync for NvExtension429Fn {}
+impl NvExtension429Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl NvExtension430Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_430\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension430Fn {}
+unsafe impl Send for NvExtension430Fn {}
+unsafe impl Sync for NvExtension430Fn {}
+impl NvExtension430Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl NvLinearColorAttachmentFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_linear_color_attachment\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct NvLinearColorAttachmentFn {}
+unsafe impl Send for NvLinearColorAttachmentFn {}
+unsafe impl Sync for NvLinearColorAttachmentFn {}
+impl NvLinearColorAttachmentFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_NV_linear_color_attachment'"]
+impl FormatFeatureFlags2 {
+ #[doc = "Format support linear image as render target, it cannot be mixed with non linear attachment"]
+ pub const LINEAR_COLOR_ATTACHMENT_NV: Self =
+ Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_NV_linear_color_attachment'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV: Self = Self(1_000_430_000);
+}
+impl NvExtension432Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_432\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension432Fn {}
+unsafe impl Send for NvExtension432Fn {}
+unsafe impl Sync for NvExtension432Fn {}
+impl NvExtension432Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl NvExtension433Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_433\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension433Fn {}
+unsafe impl Send for NvExtension433Fn {}
+unsafe impl Sync for NvExtension433Fn {}
+impl NvExtension433Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl GoogleSurfacelessQueryFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_surfaceless_query\0") }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct GoogleSurfacelessQueryFn {}
+unsafe impl Send for GoogleSurfacelessQueryFn {}
+unsafe impl Sync for GoogleSurfacelessQueryFn {}
+impl GoogleSurfacelessQueryFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl KhrExtension435Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_KHR_extension_435\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct KhrExtension435Fn {}
+unsafe impl Send for KhrExtension435Fn {}
+unsafe impl Sync for KhrExtension435Fn {}
+impl KhrExtension435Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl NvExtension436Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_436\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension436Fn {}
+unsafe impl Send for NvExtension436Fn {}
+unsafe impl Sync for NvExtension436Fn {}
+impl NvExtension436Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension437Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_437\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension437Fn {}
+unsafe impl Send for ExtExtension437Fn {}
+unsafe impl Sync for ExtExtension437Fn {}
+impl ExtExtension437Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtImageCompressionControlSwapchainFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_EXT_image_compression_control_swapchain\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtImageCompressionControlSwapchainFn {}
+unsafe impl Send for ExtImageCompressionControlSwapchainFn {}
+unsafe impl Sync for ExtImageCompressionControlSwapchainFn {}
+impl ExtImageCompressionControlSwapchainFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_image_compression_control_swapchain'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT: Self =
+ Self(1_000_437_000);
+}
+impl SecExtension439Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_SEC_extension_439\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct SecExtension439Fn {}
+unsafe impl Send for SecExtension439Fn {}
+unsafe impl Sync for SecExtension439Fn {}
+impl SecExtension439Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl QcomExtension440Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_extension_440\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct QcomExtension440Fn {}
+unsafe impl Send for QcomExtension440Fn {}
+unsafe impl Sync for QcomExtension440Fn {}
+impl QcomExtension440Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_QCOM_extension_440'"]
+impl DeviceQueueCreateFlags {
+ pub const RESERVED_1_QCOM: Self = Self(0b10);
+}
+#[doc = "Generated from 'VK_QCOM_extension_440'"]
+impl QueueFlags {
+ pub const RESERVED_7_QCOM: Self = Self(0b1000_0000);
+}
+impl QcomImageProcessingFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_image_processing\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct QcomImageProcessingFn {}
+unsafe impl Send for QcomImageProcessingFn {}
+unsafe impl Sync for QcomImageProcessingFn {}
+impl QcomImageProcessingFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_QCOM_image_processing'"]
+impl DescriptorType {
+ pub const SAMPLE_WEIGHT_IMAGE_QCOM: Self = Self(1_000_440_000);
+ pub const BLOCK_MATCH_IMAGE_QCOM: Self = Self(1_000_440_001);
+}
+#[doc = "Generated from 'VK_QCOM_image_processing'"]
+impl FormatFeatureFlags2 {
+ pub const WEIGHT_IMAGE_QCOM: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const WEIGHT_SAMPLED_IMAGE_QCOM: Self =
+ Self(0b1000_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const BLOCK_MATCHING_QCOM: Self = Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const BOX_FILTER_SAMPLED_QCOM: Self =
+ Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_QCOM_image_processing'"]
+impl ImageUsageFlags {
+ pub const SAMPLE_WEIGHT_QCOM: Self = Self(0b1_0000_0000_0000_0000_0000);
+ pub const SAMPLE_BLOCK_MATCH_QCOM: Self = Self(0b10_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_QCOM_image_processing'"]
+impl SamplerCreateFlags {
+ pub const IMAGE_PROCESSING_QCOM: Self = Self(0b1_0000);
+}
+#[doc = "Generated from 'VK_QCOM_image_processing'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM: Self = Self(1_000_440_000);
+ pub const PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM: Self = Self(1_000_440_001);
+ pub const IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM: Self = Self(1_000_440_002);
+}
+impl CoreaviExtension442Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_COREAVI_extension_442\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct CoreaviExtension442Fn {}
+unsafe impl Send for CoreaviExtension442Fn {}
+unsafe impl Sync for CoreaviExtension442Fn {}
+impl CoreaviExtension442Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl CoreaviExtension443Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_COREAVI_extension_443\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct CoreaviExtension443Fn {}
+unsafe impl Send for CoreaviExtension443Fn {}
+unsafe impl Sync for CoreaviExtension443Fn {}
+impl CoreaviExtension443Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
/// `VK_COREAVI_extension_444`: reserved extension slot (version 0, no commands),
/// so this struct carries no function pointers.
#[derive(Clone)]
pub struct CoreaviExtension444Fn {}
unsafe impl Send for CoreaviExtension444Fn {}
unsafe impl Sync for CoreaviExtension444Fn {}
impl CoreaviExtension444Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_COREAVI_extension_444\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// There are no commands to resolve; the loader callback is accepted but never invoked.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
#[doc = "Generated from 'VK_COREAVI_extension_444'"]
impl CommandPoolResetFlags {
    // Bit 1 (0x2) of VkCommandPoolResetFlagBits, reserved by the CoreAVI placeholder extension.
    pub const RESERVED_1_COREAVI: Self = Self(0b10);
}
/// `VK_COREAVI_extension_445`: reserved extension slot (version 0, no commands),
/// so this struct carries no function pointers.
#[derive(Clone)]
pub struct CoreaviExtension445Fn {}
unsafe impl Send for CoreaviExtension445Fn {}
unsafe impl Sync for CoreaviExtension445Fn {}
impl CoreaviExtension445Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_COREAVI_extension_445\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// There are no commands to resolve; the loader callback is accepted but never invoked.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// `VK_COREAVI_extension_446`: reserved extension slot (version 0, no commands),
/// so this struct carries no function pointers.
#[derive(Clone)]
pub struct CoreaviExtension446Fn {}
unsafe impl Send for CoreaviExtension446Fn {}
unsafe impl Sync for CoreaviExtension446Fn {}
impl CoreaviExtension446Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_COREAVI_extension_446\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// There are no commands to resolve; the loader callback is accepted but never invoked.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// `VK_COREAVI_extension_447`: reserved extension slot (version 0, no commands),
/// so this struct carries no function pointers.
#[derive(Clone)]
pub struct CoreaviExtension447Fn {}
unsafe impl Send for CoreaviExtension447Fn {}
unsafe impl Sync for CoreaviExtension447Fn {}
impl CoreaviExtension447Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_COREAVI_extension_447\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// There are no commands to resolve; the loader callback is accepted but never invoked.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// `VK_SEC_extension_448`: reserved extension slot (version 0, no commands),
/// so this struct carries no function pointers.
#[derive(Clone)]
pub struct SecExtension448Fn {}
unsafe impl Send for SecExtension448Fn {}
unsafe impl Sync for SecExtension448Fn {}
impl SecExtension448Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_SEC_extension_448\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// There are no commands to resolve; the loader callback is accepted but never invoked.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// `VK_SEC_extension_449`: reserved extension slot (version 0, no commands),
/// so this struct carries no function pointers.
#[derive(Clone)]
pub struct SecExtension449Fn {}
unsafe impl Send for SecExtension449Fn {}
unsafe impl Sync for SecExtension449Fn {}
impl SecExtension449Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_SEC_extension_449\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// There are no commands to resolve; the loader callback is accepted but never invoked.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// `VK_SEC_extension_450`: reserved extension slot (version 0, no commands),
/// so this struct carries no function pointers.
#[derive(Clone)]
pub struct SecExtension450Fn {}
unsafe impl Send for SecExtension450Fn {}
unsafe impl Sync for SecExtension450Fn {}
impl SecExtension450Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_SEC_extension_450\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// There are no commands to resolve; the loader callback is accepted but never invoked.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// `VK_SEC_extension_451`: reserved extension slot (version 0, no commands),
/// so this struct carries no function pointers.
#[derive(Clone)]
pub struct SecExtension451Fn {}
unsafe impl Send for SecExtension451Fn {}
unsafe impl Sync for SecExtension451Fn {}
impl SecExtension451Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_SEC_extension_451\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// There are no commands to resolve; the loader callback is accepted but never invoked.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// `VK_NV_extension_452`: reserved extension slot (version 0, no commands),
/// so this struct carries no function pointers.
#[derive(Clone)]
pub struct NvExtension452Fn {}
unsafe impl Send for NvExtension452Fn {}
unsafe impl Sync for NvExtension452Fn {}
impl NvExtension452Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_452\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// There are no commands to resolve; the loader callback is accepted but never invoked.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// `VK_ARM_extension_453`: reserved extension slot (version 0, no commands),
/// so this struct carries no function pointers.
#[derive(Clone)]
pub struct ArmExtension453Fn {}
unsafe impl Send for ArmExtension453Fn {}
unsafe impl Sync for ArmExtension453Fn {}
impl ArmExtension453Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_ARM_extension_453\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// There are no commands to resolve; the loader callback is accepted but never invoked.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// `VK_GOOGLE_extension_454`: reserved extension slot (version 0, no commands),
/// so this struct carries no function pointers.
#[derive(Clone)]
pub struct GoogleExtension454Fn {}
unsafe impl Send for GoogleExtension454Fn {}
unsafe impl Sync for GoogleExtension454Fn {}
impl GoogleExtension454Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_extension_454\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// There are no commands to resolve; the loader callback is accepted but never invoked.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
/// `VK_GOOGLE_extension_455`: reserved extension slot (version 0, no commands),
/// so this struct carries no function pointers.
#[derive(Clone)]
pub struct GoogleExtension455Fn {}
unsafe impl Send for GoogleExtension455Fn {}
unsafe impl Sync for GoogleExtension455Fn {}
impl GoogleExtension455Fn {
    /// Extension name as a NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_GOOGLE_extension_455\0") }
    }
    pub const SPEC_VERSION: u32 = 0u32;
    /// There are no commands to resolve; the loader callback is accepted but never invoked.
    pub fn load<F>(_f: F) -> Self
    where
        F: FnMut(&::std::ffi::CStr) -> *const c_void,
    {
        Self {}
    }
}
impl ExtExtendedDynamicState3Fn {
    /// Canonical extension name, `VK_EXT_extended_dynamic_state3`, as a
    /// NUL-terminated C string.
    #[inline]
    pub const fn name() -> &'static ::std::ffi::CStr {
        // SAFETY: the byte literal is NUL-terminated and contains no interior NULs.
        unsafe {
            ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extended_dynamic_state3\0")
        }
    }
    /// Extension revision these bindings were generated against.
    pub const SPEC_VERSION: u32 = 2u32;
}
// Raw function-pointer signatures for the commands of
// VK_EXT_extended_dynamic_state3. EXT-suffixed entries are the core extension
// commands; NV-suffixed entries (further below) are the interactions with
// NVIDIA extensions exposed through the same extension.
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetTessellationDomainOriginEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    domain_origin: TessellationDomainOrigin,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetDepthClampEnableEXT =
    unsafe extern "system" fn(command_buffer: CommandBuffer, depth_clamp_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetPolygonModeEXT =
    unsafe extern "system" fn(command_buffer: CommandBuffer, polygon_mode: PolygonMode);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetRasterizationSamplesEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    rasterization_samples: SampleCountFlags,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetSampleMaskEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    samples: SampleCountFlags,
    p_sample_mask: *const SampleMask,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetAlphaToCoverageEnableEXT =
    unsafe extern "system" fn(command_buffer: CommandBuffer, alpha_to_coverage_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetAlphaToOneEnableEXT =
    unsafe extern "system" fn(command_buffer: CommandBuffer, alpha_to_one_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetLogicOpEnableEXT =
    unsafe extern "system" fn(command_buffer: CommandBuffer, logic_op_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetColorBlendEnableEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    first_attachment: u32,
    attachment_count: u32,
    p_color_blend_enables: *const Bool32,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetColorBlendEquationEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    first_attachment: u32,
    attachment_count: u32,
    p_color_blend_equations: *const ColorBlendEquationEXT,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetColorWriteMaskEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    first_attachment: u32,
    attachment_count: u32,
    p_color_write_masks: *const ColorComponentFlags,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetRasterizationStreamEXT =
    unsafe extern "system" fn(command_buffer: CommandBuffer, rasterization_stream: u32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetConservativeRasterizationModeEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    conservative_rasterization_mode: ConservativeRasterizationModeEXT,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    extra_primitive_overestimation_size: f32,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetDepthClipEnableEXT =
    unsafe extern "system" fn(command_buffer: CommandBuffer, depth_clip_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetSampleLocationsEnableEXT =
    unsafe extern "system" fn(command_buffer: CommandBuffer, sample_locations_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetColorBlendAdvancedEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    first_attachment: u32,
    attachment_count: u32,
    p_color_blend_advanced: *const ColorBlendAdvancedEXT,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetProvokingVertexModeEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    provoking_vertex_mode: ProvokingVertexModeEXT,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetLineRasterizationModeEXT = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    line_rasterization_mode: LineRasterizationModeEXT,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetLineStippleEnableEXT =
    unsafe extern "system" fn(command_buffer: CommandBuffer, stippled_line_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetDepthClipNegativeOneToOneEXT =
    unsafe extern "system" fn(command_buffer: CommandBuffer, negative_one_to_one: Bool32);
// NV-suffixed interactions start here.
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetViewportWScalingEnableNV =
    unsafe extern "system" fn(command_buffer: CommandBuffer, viewport_w_scaling_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetViewportSwizzleNV = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    first_viewport: u32,
    viewport_count: u32,
    p_viewport_swizzles: *const ViewportSwizzleNV,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetCoverageToColorEnableNV =
    unsafe extern "system" fn(command_buffer: CommandBuffer, coverage_to_color_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetCoverageToColorLocationNV =
    unsafe extern "system" fn(command_buffer: CommandBuffer, coverage_to_color_location: u32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetCoverageModulationModeNV = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    coverage_modulation_mode: CoverageModulationModeNV,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetCoverageModulationTableEnableNV = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    coverage_modulation_table_enable: Bool32,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetCoverageModulationTableNV = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    coverage_modulation_table_count: u32,
    p_coverage_modulation_table: *const f32,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetShadingRateImageEnableNV =
    unsafe extern "system" fn(command_buffer: CommandBuffer, shading_rate_image_enable: Bool32);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetRepresentativeFragmentTestEnableNV = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    representative_fragment_test_enable: Bool32,
);
#[allow(non_camel_case_types)]
pub type PFN_vkCmdSetCoverageReductionModeNV = unsafe extern "system" fn(
    command_buffer: CommandBuffer,
    coverage_reduction_mode: CoverageReductionModeNV,
);
/// Function-pointer table for `VK_EXT_extended_dynamic_state3`, filled in by
/// [`Self::load`]. Entries for commands the driver does not expose are stubs
/// that panic when called.
#[derive(Clone)]
pub struct ExtExtendedDynamicState3Fn {
    pub cmd_set_tessellation_domain_origin_ext: PFN_vkCmdSetTessellationDomainOriginEXT,
    pub cmd_set_depth_clamp_enable_ext: PFN_vkCmdSetDepthClampEnableEXT,
    pub cmd_set_polygon_mode_ext: PFN_vkCmdSetPolygonModeEXT,
    pub cmd_set_rasterization_samples_ext: PFN_vkCmdSetRasterizationSamplesEXT,
    pub cmd_set_sample_mask_ext: PFN_vkCmdSetSampleMaskEXT,
    pub cmd_set_alpha_to_coverage_enable_ext: PFN_vkCmdSetAlphaToCoverageEnableEXT,
    pub cmd_set_alpha_to_one_enable_ext: PFN_vkCmdSetAlphaToOneEnableEXT,
    pub cmd_set_logic_op_enable_ext: PFN_vkCmdSetLogicOpEnableEXT,
    pub cmd_set_color_blend_enable_ext: PFN_vkCmdSetColorBlendEnableEXT,
    pub cmd_set_color_blend_equation_ext: PFN_vkCmdSetColorBlendEquationEXT,
    pub cmd_set_color_write_mask_ext: PFN_vkCmdSetColorWriteMaskEXT,
    pub cmd_set_rasterization_stream_ext: PFN_vkCmdSetRasterizationStreamEXT,
    pub cmd_set_conservative_rasterization_mode_ext: PFN_vkCmdSetConservativeRasterizationModeEXT,
    pub cmd_set_extra_primitive_overestimation_size_ext:
        PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT,
    pub cmd_set_depth_clip_enable_ext: PFN_vkCmdSetDepthClipEnableEXT,
    pub cmd_set_sample_locations_enable_ext: PFN_vkCmdSetSampleLocationsEnableEXT,
    pub cmd_set_color_blend_advanced_ext: PFN_vkCmdSetColorBlendAdvancedEXT,
    pub cmd_set_provoking_vertex_mode_ext: PFN_vkCmdSetProvokingVertexModeEXT,
    pub cmd_set_line_rasterization_mode_ext: PFN_vkCmdSetLineRasterizationModeEXT,
    pub cmd_set_line_stipple_enable_ext: PFN_vkCmdSetLineStippleEnableEXT,
    pub cmd_set_depth_clip_negative_one_to_one_ext: PFN_vkCmdSetDepthClipNegativeOneToOneEXT,
    pub cmd_set_viewport_w_scaling_enable_nv: PFN_vkCmdSetViewportWScalingEnableNV,
    pub cmd_set_viewport_swizzle_nv: PFN_vkCmdSetViewportSwizzleNV,
    pub cmd_set_coverage_to_color_enable_nv: PFN_vkCmdSetCoverageToColorEnableNV,
    pub cmd_set_coverage_to_color_location_nv: PFN_vkCmdSetCoverageToColorLocationNV,
    pub cmd_set_coverage_modulation_mode_nv: PFN_vkCmdSetCoverageModulationModeNV,
    pub cmd_set_coverage_modulation_table_enable_nv: PFN_vkCmdSetCoverageModulationTableEnableNV,
    pub cmd_set_coverage_modulation_table_nv: PFN_vkCmdSetCoverageModulationTableNV,
    pub cmd_set_shading_rate_image_enable_nv: PFN_vkCmdSetShadingRateImageEnableNV,
    pub cmd_set_representative_fragment_test_enable_nv:
        PFN_vkCmdSetRepresentativeFragmentTestEnableNV,
    pub cmd_set_coverage_reduction_mode_nv: PFN_vkCmdSetCoverageReductionModeNV,
}
// SAFETY: the struct holds only plain function pointers, which may be freely
// sent and shared across threads.
unsafe impl Send for ExtExtendedDynamicState3Fn {}
unsafe impl Sync for ExtExtendedDynamicState3Fn {}
+impl ExtExtendedDynamicState3Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ cmd_set_tessellation_domain_origin_ext: unsafe {
+ unsafe extern "system" fn cmd_set_tessellation_domain_origin_ext(
+ _command_buffer: CommandBuffer,
+ _domain_origin: TessellationDomainOrigin,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_tessellation_domain_origin_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetTessellationDomainOriginEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_tessellation_domain_origin_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_depth_clamp_enable_ext: unsafe {
+ unsafe extern "system" fn cmd_set_depth_clamp_enable_ext(
+ _command_buffer: CommandBuffer,
+ _depth_clamp_enable: Bool32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_depth_clamp_enable_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetDepthClampEnableEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_depth_clamp_enable_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_polygon_mode_ext: unsafe {
+ unsafe extern "system" fn cmd_set_polygon_mode_ext(
+ _command_buffer: CommandBuffer,
+ _polygon_mode: PolygonMode,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_polygon_mode_ext)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetPolygonModeEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_polygon_mode_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_rasterization_samples_ext: unsafe {
+ unsafe extern "system" fn cmd_set_rasterization_samples_ext(
+ _command_buffer: CommandBuffer,
+ _rasterization_samples: SampleCountFlags,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_rasterization_samples_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetRasterizationSamplesEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_rasterization_samples_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_sample_mask_ext: unsafe {
+ unsafe extern "system" fn cmd_set_sample_mask_ext(
+ _command_buffer: CommandBuffer,
+ _samples: SampleCountFlags,
+ _p_sample_mask: *const SampleMask,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_sample_mask_ext)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetSampleMaskEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_sample_mask_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_alpha_to_coverage_enable_ext: unsafe {
+ unsafe extern "system" fn cmd_set_alpha_to_coverage_enable_ext(
+ _command_buffer: CommandBuffer,
+ _alpha_to_coverage_enable: Bool32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_alpha_to_coverage_enable_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetAlphaToCoverageEnableEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_alpha_to_coverage_enable_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_alpha_to_one_enable_ext: unsafe {
+ unsafe extern "system" fn cmd_set_alpha_to_one_enable_ext(
+ _command_buffer: CommandBuffer,
+ _alpha_to_one_enable: Bool32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_alpha_to_one_enable_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetAlphaToOneEnableEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_alpha_to_one_enable_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_logic_op_enable_ext: unsafe {
+ unsafe extern "system" fn cmd_set_logic_op_enable_ext(
+ _command_buffer: CommandBuffer,
+ _logic_op_enable: Bool32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_logic_op_enable_ext)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetLogicOpEnableEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_logic_op_enable_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_color_blend_enable_ext: unsafe {
+ unsafe extern "system" fn cmd_set_color_blend_enable_ext(
+ _command_buffer: CommandBuffer,
+ _first_attachment: u32,
+ _attachment_count: u32,
+ _p_color_blend_enables: *const Bool32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_color_blend_enable_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetColorBlendEnableEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_color_blend_enable_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_color_blend_equation_ext: unsafe {
+ unsafe extern "system" fn cmd_set_color_blend_equation_ext(
+ _command_buffer: CommandBuffer,
+ _first_attachment: u32,
+ _attachment_count: u32,
+ _p_color_blend_equations: *const ColorBlendEquationEXT,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_color_blend_equation_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetColorBlendEquationEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_color_blend_equation_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_color_write_mask_ext: unsafe {
+ unsafe extern "system" fn cmd_set_color_write_mask_ext(
+ _command_buffer: CommandBuffer,
+ _first_attachment: u32,
+ _attachment_count: u32,
+ _p_color_write_masks: *const ColorComponentFlags,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_color_write_mask_ext)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetColorWriteMaskEXT\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_color_write_mask_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_rasterization_stream_ext: unsafe {
+ unsafe extern "system" fn cmd_set_rasterization_stream_ext(
+ _command_buffer: CommandBuffer,
+ _rasterization_stream: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_rasterization_stream_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetRasterizationStreamEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_rasterization_stream_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_conservative_rasterization_mode_ext: unsafe {
+ unsafe extern "system" fn cmd_set_conservative_rasterization_mode_ext(
+ _command_buffer: CommandBuffer,
+ _conservative_rasterization_mode: ConservativeRasterizationModeEXT,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_conservative_rasterization_mode_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetConservativeRasterizationModeEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_conservative_rasterization_mode_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_extra_primitive_overestimation_size_ext: unsafe {
+ unsafe extern "system" fn cmd_set_extra_primitive_overestimation_size_ext(
+ _command_buffer: CommandBuffer,
+ _extra_primitive_overestimation_size: f32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_extra_primitive_overestimation_size_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetExtraPrimitiveOverestimationSizeEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_extra_primitive_overestimation_size_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_depth_clip_enable_ext: unsafe {
+ unsafe extern "system" fn cmd_set_depth_clip_enable_ext(
+ _command_buffer: CommandBuffer,
+ _depth_clip_enable: Bool32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_depth_clip_enable_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetDepthClipEnableEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_depth_clip_enable_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_sample_locations_enable_ext: unsafe {
+ unsafe extern "system" fn cmd_set_sample_locations_enable_ext(
+ _command_buffer: CommandBuffer,
+ _sample_locations_enable: Bool32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_sample_locations_enable_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetSampleLocationsEnableEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_sample_locations_enable_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_color_blend_advanced_ext: unsafe {
+ unsafe extern "system" fn cmd_set_color_blend_advanced_ext(
+ _command_buffer: CommandBuffer,
+ _first_attachment: u32,
+ _attachment_count: u32,
+ _p_color_blend_advanced: *const ColorBlendAdvancedEXT,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_color_blend_advanced_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetColorBlendAdvancedEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_color_blend_advanced_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_provoking_vertex_mode_ext: unsafe {
+ unsafe extern "system" fn cmd_set_provoking_vertex_mode_ext(
+ _command_buffer: CommandBuffer,
+ _provoking_vertex_mode: ProvokingVertexModeEXT,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_provoking_vertex_mode_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetProvokingVertexModeEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_provoking_vertex_mode_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_line_rasterization_mode_ext: unsafe {
+ unsafe extern "system" fn cmd_set_line_rasterization_mode_ext(
+ _command_buffer: CommandBuffer,
+ _line_rasterization_mode: LineRasterizationModeEXT,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_line_rasterization_mode_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetLineRasterizationModeEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_line_rasterization_mode_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_line_stipple_enable_ext: unsafe {
+ unsafe extern "system" fn cmd_set_line_stipple_enable_ext(
+ _command_buffer: CommandBuffer,
+ _stippled_line_enable: Bool32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_line_stipple_enable_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetLineStippleEnableEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_line_stipple_enable_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_depth_clip_negative_one_to_one_ext: unsafe {
+ unsafe extern "system" fn cmd_set_depth_clip_negative_one_to_one_ext(
+ _command_buffer: CommandBuffer,
+ _negative_one_to_one: Bool32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_depth_clip_negative_one_to_one_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetDepthClipNegativeOneToOneEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_depth_clip_negative_one_to_one_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_viewport_w_scaling_enable_nv: unsafe {
+ unsafe extern "system" fn cmd_set_viewport_w_scaling_enable_nv(
+ _command_buffer: CommandBuffer,
+ _viewport_w_scaling_enable: Bool32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_viewport_w_scaling_enable_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetViewportWScalingEnableNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_viewport_w_scaling_enable_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_viewport_swizzle_nv: unsafe {
+ unsafe extern "system" fn cmd_set_viewport_swizzle_nv(
+ _command_buffer: CommandBuffer,
+ _first_viewport: u32,
+ _viewport_count: u32,
+ _p_viewport_swizzles: *const ViewportSwizzleNV,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_viewport_swizzle_nv)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetViewportSwizzleNV\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_viewport_swizzle_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_coverage_to_color_enable_nv: unsafe {
+ unsafe extern "system" fn cmd_set_coverage_to_color_enable_nv(
+ _command_buffer: CommandBuffer,
+ _coverage_to_color_enable: Bool32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_coverage_to_color_enable_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetCoverageToColorEnableNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_coverage_to_color_enable_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_coverage_to_color_location_nv: unsafe {
+ unsafe extern "system" fn cmd_set_coverage_to_color_location_nv(
+ _command_buffer: CommandBuffer,
+ _coverage_to_color_location: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_coverage_to_color_location_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetCoverageToColorLocationNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_coverage_to_color_location_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_coverage_modulation_mode_nv: unsafe {
+ unsafe extern "system" fn cmd_set_coverage_modulation_mode_nv(
+ _command_buffer: CommandBuffer,
+ _coverage_modulation_mode: CoverageModulationModeNV,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_coverage_modulation_mode_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetCoverageModulationModeNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_coverage_modulation_mode_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_coverage_modulation_table_enable_nv: unsafe {
+ unsafe extern "system" fn cmd_set_coverage_modulation_table_enable_nv(
+ _command_buffer: CommandBuffer,
+ _coverage_modulation_table_enable: Bool32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_coverage_modulation_table_enable_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetCoverageModulationTableEnableNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_coverage_modulation_table_enable_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_coverage_modulation_table_nv: unsafe {
+ unsafe extern "system" fn cmd_set_coverage_modulation_table_nv(
+ _command_buffer: CommandBuffer,
+ _coverage_modulation_table_count: u32,
+ _p_coverage_modulation_table: *const f32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_coverage_modulation_table_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetCoverageModulationTableNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_coverage_modulation_table_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_shading_rate_image_enable_nv: unsafe {
+ unsafe extern "system" fn cmd_set_shading_rate_image_enable_nv(
+ _command_buffer: CommandBuffer,
+ _shading_rate_image_enable: Bool32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_shading_rate_image_enable_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetShadingRateImageEnableNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_shading_rate_image_enable_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_representative_fragment_test_enable_nv: unsafe {
+ unsafe extern "system" fn cmd_set_representative_fragment_test_enable_nv(
+ _command_buffer: CommandBuffer,
+ _representative_fragment_test_enable: Bool32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_representative_fragment_test_enable_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetRepresentativeFragmentTestEnableNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_representative_fragment_test_enable_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_coverage_reduction_mode_nv: unsafe {
+ unsafe extern "system" fn cmd_set_coverage_reduction_mode_nv(
+ _command_buffer: CommandBuffer,
+ _coverage_reduction_mode: CoverageReductionModeNV,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_coverage_reduction_mode_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetCoverageReductionModeNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_coverage_reduction_mode_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
#[doc = "Generated from 'VK_EXT_extended_dynamic_state3'"]
impl DynamicState {
    // One dynamic-state token per vkCmdSet* command of this extension.
    // Values follow the registry scheme 1_000_000_000 + 1000 * (ext_nr - 1) + offset
    // with extension number 456 and offsets 2..=32.
    pub const TESSELLATION_DOMAIN_ORIGIN_EXT: Self = Self(1_000_455_002);
    pub const DEPTH_CLAMP_ENABLE_EXT: Self = Self(1_000_455_003);
    pub const POLYGON_MODE_EXT: Self = Self(1_000_455_004);
    pub const RASTERIZATION_SAMPLES_EXT: Self = Self(1_000_455_005);
    pub const SAMPLE_MASK_EXT: Self = Self(1_000_455_006);
    pub const ALPHA_TO_COVERAGE_ENABLE_EXT: Self = Self(1_000_455_007);
    pub const ALPHA_TO_ONE_ENABLE_EXT: Self = Self(1_000_455_008);
    pub const LOGIC_OP_ENABLE_EXT: Self = Self(1_000_455_009);
    pub const COLOR_BLEND_ENABLE_EXT: Self = Self(1_000_455_010);
    pub const COLOR_BLEND_EQUATION_EXT: Self = Self(1_000_455_011);
    pub const COLOR_WRITE_MASK_EXT: Self = Self(1_000_455_012);
    pub const RASTERIZATION_STREAM_EXT: Self = Self(1_000_455_013);
    pub const CONSERVATIVE_RASTERIZATION_MODE_EXT: Self = Self(1_000_455_014);
    pub const EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT: Self = Self(1_000_455_015);
    pub const DEPTH_CLIP_ENABLE_EXT: Self = Self(1_000_455_016);
    pub const SAMPLE_LOCATIONS_ENABLE_EXT: Self = Self(1_000_455_017);
    pub const COLOR_BLEND_ADVANCED_EXT: Self = Self(1_000_455_018);
    pub const PROVOKING_VERTEX_MODE_EXT: Self = Self(1_000_455_019);
    pub const LINE_RASTERIZATION_MODE_EXT: Self = Self(1_000_455_020);
    pub const LINE_STIPPLE_ENABLE_EXT: Self = Self(1_000_455_021);
    pub const DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT: Self = Self(1_000_455_022);
    pub const VIEWPORT_W_SCALING_ENABLE_NV: Self = Self(1_000_455_023);
    pub const VIEWPORT_SWIZZLE_NV: Self = Self(1_000_455_024);
    pub const COVERAGE_TO_COLOR_ENABLE_NV: Self = Self(1_000_455_025);
    pub const COVERAGE_TO_COLOR_LOCATION_NV: Self = Self(1_000_455_026);
    pub const COVERAGE_MODULATION_MODE_NV: Self = Self(1_000_455_027);
    pub const COVERAGE_MODULATION_TABLE_ENABLE_NV: Self = Self(1_000_455_028);
    pub const COVERAGE_MODULATION_TABLE_NV: Self = Self(1_000_455_029);
    pub const SHADING_RATE_IMAGE_ENABLE_NV: Self = Self(1_000_455_030);
    pub const REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV: Self = Self(1_000_455_031);
    pub const COVERAGE_REDUCTION_MODE_NV: Self = Self(1_000_455_032);
}
+#[doc = "Generated from 'VK_EXT_extended_dynamic_state3'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT: Self = Self(1_000_455_000);
+ pub const PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT: Self = Self(1_000_455_001);
+}
+impl ExtExtension457Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_457\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension457Fn {}
+unsafe impl Send for ExtExtension457Fn {}
+unsafe impl Sync for ExtExtension457Fn {}
+impl ExtExtension457Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension458Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_458\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension458Fn {}
+unsafe impl Send for ExtExtension458Fn {}
+unsafe impl Sync for ExtExtension458Fn {}
+impl ExtExtension458Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtSubpassMergeFeedbackFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_subpass_merge_feedback\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct ExtSubpassMergeFeedbackFn {}
+unsafe impl Send for ExtSubpassMergeFeedbackFn {}
+unsafe impl Sync for ExtSubpassMergeFeedbackFn {}
+impl ExtSubpassMergeFeedbackFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_subpass_merge_feedback'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT: Self = Self(1_000_458_000);
+ pub const RENDER_PASS_CREATION_CONTROL_EXT: Self = Self(1_000_458_001);
+ pub const RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT: Self = Self(1_000_458_002);
+ pub const RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT: Self = Self(1_000_458_003);
+}
+impl LunargDirectDriverLoadingFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_LUNARG_direct_driver_loading\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct LunargDirectDriverLoadingFn {}
+unsafe impl Send for LunargDirectDriverLoadingFn {}
+unsafe impl Sync for LunargDirectDriverLoadingFn {}
+impl LunargDirectDriverLoadingFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_LUNARG_direct_driver_loading'"]
+impl StructureType {
+ pub const DIRECT_DRIVER_LOADING_INFO_LUNARG: Self = Self(1_000_459_000);
+ pub const DIRECT_DRIVER_LOADING_LIST_LUNARG: Self = Self(1_000_459_001);
+}
+impl ExtExtension461Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_461\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension461Fn {}
+unsafe impl Send for ExtExtension461Fn {}
+unsafe impl Sync for ExtExtension461Fn {}
+impl ExtExtension461Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_extension_461'"]
+impl FormatFeatureFlags2 {
+ pub const RESERVED_39_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+}
+impl ExtExtension462Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_462\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension462Fn {}
+unsafe impl Send for ExtExtension462Fn {}
+unsafe impl Sync for ExtExtension462Fn {}
+impl ExtExtension462Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtShaderModuleIdentifierFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_shader_module_identifier\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetShaderModuleIdentifierEXT = unsafe extern "system" fn(
+ device: Device,
+ shader_module: ShaderModule,
+ p_identifier: *mut ShaderModuleIdentifierEXT,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetShaderModuleCreateInfoIdentifierEXT = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const ShaderModuleCreateInfo,
+ p_identifier: *mut ShaderModuleIdentifierEXT,
+);
+#[derive(Clone)]
+pub struct ExtShaderModuleIdentifierFn {
+ pub get_shader_module_identifier_ext: PFN_vkGetShaderModuleIdentifierEXT,
+ pub get_shader_module_create_info_identifier_ext: PFN_vkGetShaderModuleCreateInfoIdentifierEXT,
+}
+unsafe impl Send for ExtShaderModuleIdentifierFn {}
+unsafe impl Sync for ExtShaderModuleIdentifierFn {}
+impl ExtShaderModuleIdentifierFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_shader_module_identifier_ext: unsafe {
+ unsafe extern "system" fn get_shader_module_identifier_ext(
+ _device: Device,
+ _shader_module: ShaderModule,
+ _p_identifier: *mut ShaderModuleIdentifierEXT,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_shader_module_identifier_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetShaderModuleIdentifierEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_shader_module_identifier_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_shader_module_create_info_identifier_ext: unsafe {
+ unsafe extern "system" fn get_shader_module_create_info_identifier_ext(
+ _device: Device,
+ _p_create_info: *const ShaderModuleCreateInfo,
+ _p_identifier: *mut ShaderModuleIdentifierEXT,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_shader_module_create_info_identifier_ext)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetShaderModuleCreateInfoIdentifierEXT\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_shader_module_create_info_identifier_ext
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_EXT_shader_module_identifier'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT: Self = Self(1_000_462_000);
+ pub const PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT: Self = Self(1_000_462_001);
+ pub const PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT: Self = Self(1_000_462_002);
+ pub const SHADER_MODULE_IDENTIFIER_EXT: Self = Self(1_000_462_003);
+}
+impl ExtRasterizationOrderAttachmentAccessFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_EXT_rasterization_order_attachment_access\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtRasterizationOrderAttachmentAccessFn {}
+unsafe impl Send for ExtRasterizationOrderAttachmentAccessFn {}
+unsafe impl Sync for ExtRasterizationOrderAttachmentAccessFn {}
+impl ExtRasterizationOrderAttachmentAccessFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_rasterization_order_attachment_access'"]
+impl PipelineColorBlendStateCreateFlags {
+ pub const RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXT: Self = Self(0b1);
+}
+#[doc = "Generated from 'VK_EXT_rasterization_order_attachment_access'"]
+impl PipelineDepthStencilStateCreateFlags {
+ pub const RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT: Self = Self(0b1);
+ pub const RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT: Self = Self(0b10);
+}
+#[doc = "Generated from 'VK_EXT_rasterization_order_attachment_access'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT: Self =
+ Self(1_000_342_000);
+}
+#[doc = "Generated from 'VK_EXT_rasterization_order_attachment_access'"]
+impl SubpassDescriptionFlags {
+ pub const RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_EXT: Self = Self(0b1_0000);
+ pub const RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_EXT: Self = Self(0b10_0000);
+ pub const RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_EXT: Self = Self(0b100_0000);
+}
+impl NvOpticalFlowFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_optical_flow\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_optical_flow_image_format_info: *const OpticalFlowImageFormatInfoNV,
+ p_format_count: *mut u32,
+ p_image_format_properties: *mut OpticalFlowImageFormatPropertiesNV,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateOpticalFlowSessionNV = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const OpticalFlowSessionCreateInfoNV,
+ p_allocator: *const AllocationCallbacks,
+ p_session: *mut OpticalFlowSessionNV,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyOpticalFlowSessionNV = unsafe extern "system" fn(
+ device: Device,
+ session: OpticalFlowSessionNV,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkBindOpticalFlowSessionImageNV = unsafe extern "system" fn(
+ device: Device,
+ session: OpticalFlowSessionNV,
+ binding_point: OpticalFlowSessionBindingPointNV,
+ view: ImageView,
+ layout: ImageLayout,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdOpticalFlowExecuteNV = unsafe extern "system" fn(
+ command_buffer: CommandBuffer,
+ session: OpticalFlowSessionNV,
+ p_execute_info: *const OpticalFlowExecuteInfoNV,
+);
+#[derive(Clone)]
+pub struct NvOpticalFlowFn {
+ pub get_physical_device_optical_flow_image_formats_nv:
+ PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV,
+ pub create_optical_flow_session_nv: PFN_vkCreateOpticalFlowSessionNV,
+ pub destroy_optical_flow_session_nv: PFN_vkDestroyOpticalFlowSessionNV,
+ pub bind_optical_flow_session_image_nv: PFN_vkBindOpticalFlowSessionImageNV,
+ pub cmd_optical_flow_execute_nv: PFN_vkCmdOpticalFlowExecuteNV,
+}
+unsafe impl Send for NvOpticalFlowFn {}
+unsafe impl Sync for NvOpticalFlowFn {}
+impl NvOpticalFlowFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_physical_device_optical_flow_image_formats_nv: unsafe {
+ unsafe extern "system" fn get_physical_device_optical_flow_image_formats_nv(
+ _physical_device: PhysicalDevice,
+ _p_optical_flow_image_format_info: *const OpticalFlowImageFormatInfoNV,
+ _p_format_count: *mut u32,
+ _p_image_format_properties: *mut OpticalFlowImageFormatPropertiesNV,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_optical_flow_image_formats_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceOpticalFlowImageFormatsNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_optical_flow_image_formats_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_optical_flow_session_nv: unsafe {
+ unsafe extern "system" fn create_optical_flow_session_nv(
+ _device: Device,
+ _p_create_info: *const OpticalFlowSessionCreateInfoNV,
+ _p_allocator: *const AllocationCallbacks,
+ _p_session: *mut OpticalFlowSessionNV,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_optical_flow_session_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreateOpticalFlowSessionNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_optical_flow_session_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_optical_flow_session_nv: unsafe {
+ unsafe extern "system" fn destroy_optical_flow_session_nv(
+ _device: Device,
+ _session: OpticalFlowSessionNV,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_optical_flow_session_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkDestroyOpticalFlowSessionNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_optical_flow_session_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ bind_optical_flow_session_image_nv: unsafe {
+ unsafe extern "system" fn bind_optical_flow_session_image_nv(
+ _device: Device,
+ _session: OpticalFlowSessionNV,
+ _binding_point: OpticalFlowSessionBindingPointNV,
+ _view: ImageView,
+ _layout: ImageLayout,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(bind_optical_flow_session_image_nv)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkBindOpticalFlowSessionImageNV\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ bind_optical_flow_session_image_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_optical_flow_execute_nv: unsafe {
+ unsafe extern "system" fn cmd_optical_flow_execute_nv(
+ _command_buffer: CommandBuffer,
+ _session: OpticalFlowSessionNV,
+ _p_execute_info: *const OpticalFlowExecuteInfoNV,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_optical_flow_execute_nv)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdOpticalFlowExecuteNV\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_optical_flow_execute_nv
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_NV_optical_flow'"]
+impl AccessFlags2 {
+ pub const OPTICAL_FLOW_READ_NV: Self =
+ Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const OPTICAL_FLOW_WRITE_NV: Self =
+ Self(0b1000_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_NV_optical_flow'"]
+impl Format {
+ pub const R16G16_S10_5_NV: Self = Self(1_000_464_000);
+}
+#[doc = "Generated from 'VK_NV_optical_flow'"]
+impl FormatFeatureFlags2 {
+ pub const OPTICAL_FLOW_IMAGE_NV: Self =
+ Self(0b1_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const OPTICAL_FLOW_VECTOR_NV: Self =
+ Self(0b10_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+ pub const OPTICAL_FLOW_COST_NV: Self =
+ Self(0b100_0000_0000_0000_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_NV_optical_flow'"]
+impl ObjectType {
+ pub const OPTICAL_FLOW_SESSION_NV: Self = Self(1_000_464_000);
+}
+#[doc = "Generated from 'VK_NV_optical_flow'"]
+impl PipelineStageFlags2 {
+ pub const OPTICAL_FLOW_NV: Self = Self(0b10_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_NV_optical_flow'"]
+impl QueueFlags {
+ pub const OPTICAL_FLOW_NV: Self = Self(0b1_0000_0000);
+}
+#[doc = "Generated from 'VK_NV_optical_flow'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV: Self = Self(1_000_464_000);
+ pub const PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV: Self = Self(1_000_464_001);
+ pub const OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV: Self = Self(1_000_464_002);
+ pub const OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV: Self = Self(1_000_464_003);
+ pub const OPTICAL_FLOW_SESSION_CREATE_INFO_NV: Self = Self(1_000_464_004);
+ pub const OPTICAL_FLOW_EXECUTE_INFO_NV: Self = Self(1_000_464_005);
+ pub const OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV: Self = Self(1_000_464_010);
+}
+impl ExtLegacyDitheringFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_legacy_dithering\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtLegacyDitheringFn {}
+unsafe impl Send for ExtLegacyDitheringFn {}
+unsafe impl Sync for ExtLegacyDitheringFn {}
+impl ExtLegacyDitheringFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_legacy_dithering'"]
+impl RenderingFlags {
+ pub const ENABLE_LEGACY_DITHERING_EXT: Self = Self(0b1000);
+}
+#[doc = "Generated from 'VK_EXT_legacy_dithering'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT: Self = Self(1_000_465_000);
+}
+#[doc = "Generated from 'VK_EXT_legacy_dithering'"]
+impl SubpassDescriptionFlags {
+ pub const ENABLE_LEGACY_DITHERING_EXT: Self = Self(0b1000_0000);
+}
+impl ExtPipelineProtectedAccessFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_pipeline_protected_access\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtPipelineProtectedAccessFn {}
+unsafe impl Send for ExtPipelineProtectedAccessFn {}
+unsafe impl Sync for ExtPipelineProtectedAccessFn {}
+impl ExtPipelineProtectedAccessFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_pipeline_protected_access'"]
+impl PipelineCreateFlags {
+ pub const NO_PROTECTED_ACCESS_EXT: Self = Self(0b1000_0000_0000_0000_0000_0000_0000);
+ pub const PROTECTED_ACCESS_ONLY_EXT: Self = Self(0b100_0000_0000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_EXT_pipeline_protected_access'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT: Self = Self(1_000_466_000);
+}
+impl ExtExtension468Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_468\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension468Fn {}
+unsafe impl Send for ExtExtension468Fn {}
+unsafe impl Sync for ExtExtension468Fn {}
+impl ExtExtension468Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AndroidExtension469Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_ANDROID_extension_469\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AndroidExtension469Fn {}
+unsafe impl Send for AndroidExtension469Fn {}
+unsafe impl Sync for AndroidExtension469Fn {}
+impl AndroidExtension469Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdExtension470Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_470\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension470Fn {}
+unsafe impl Send for AmdExtension470Fn {}
+unsafe impl Sync for AmdExtension470Fn {}
+impl AmdExtension470Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdExtension471Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_471\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension471Fn {}
+unsafe impl Send for AmdExtension471Fn {}
+unsafe impl Sync for AmdExtension471Fn {}
+impl AmdExtension471Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdExtension472Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_472\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension472Fn {}
+unsafe impl Send for AmdExtension472Fn {}
+unsafe impl Sync for AmdExtension472Fn {}
+impl AmdExtension472Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdExtension473Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_473\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension473Fn {}
+unsafe impl Send for AmdExtension473Fn {}
+unsafe impl Sync for AmdExtension473Fn {}
+impl AmdExtension473Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdExtension474Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_474\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension474Fn {}
+unsafe impl Send for AmdExtension474Fn {}
+unsafe impl Sync for AmdExtension474Fn {}
+impl AmdExtension474Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdExtension475Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_475\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension475Fn {}
+unsafe impl Send for AmdExtension475Fn {}
+unsafe impl Sync for AmdExtension475Fn {}
+impl AmdExtension475Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdExtension476Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_476\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension476Fn {}
+unsafe impl Send for AmdExtension476Fn {}
+unsafe impl Sync for AmdExtension476Fn {}
+impl AmdExtension476Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdExtension477Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_477\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension477Fn {}
+unsafe impl Send for AmdExtension477Fn {}
+unsafe impl Sync for AmdExtension477Fn {}
+impl AmdExtension477Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdExtension478Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_478\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension478Fn {}
+unsafe impl Send for AmdExtension478Fn {}
+unsafe impl Sync for AmdExtension478Fn {}
+impl AmdExtension478Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl AmdExtension479Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_AMD_extension_479\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct AmdExtension479Fn {}
+unsafe impl Send for AmdExtension479Fn {}
+unsafe impl Sync for AmdExtension479Fn {}
+impl AmdExtension479Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension480Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_480\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension480Fn {}
+unsafe impl Send for ExtExtension480Fn {}
+unsafe impl Sync for ExtExtension480Fn {}
+impl ExtExtension480Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension481Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_481\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension481Fn {}
+unsafe impl Send for ExtExtension481Fn {}
+unsafe impl Sync for ExtExtension481Fn {}
+impl ExtExtension481Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension482Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_482\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension482Fn {}
+unsafe impl Send for ExtExtension482Fn {}
+unsafe impl Sync for ExtExtension482Fn {}
+impl ExtExtension482Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension483Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_483\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension483Fn {}
+unsafe impl Send for ExtExtension483Fn {}
+unsafe impl Sync for ExtExtension483Fn {}
+impl ExtExtension483Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_extension_483'"]
+impl ShaderStageFlags {
+ pub const EXT_483_RESERVE_15: Self = Self(0b1000_0000_0000_0000);
+ pub const EXT_483_RESERVE_16: Self = Self(0b1_0000_0000_0000_0000);
+ pub const EXT_483_RESERVE_17: Self = Self(0b10_0000_0000_0000_0000);
+}
+impl ExtExtension484Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_484\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension484Fn {}
+unsafe impl Send for ExtExtension484Fn {}
+unsafe impl Sync for ExtExtension484Fn {}
+impl ExtExtension484Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl QcomTilePropertiesFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_QCOM_tile_properties\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetFramebufferTilePropertiesQCOM = unsafe extern "system" fn(
+ device: Device,
+ framebuffer: Framebuffer,
+ p_properties_count: *mut u32,
+ p_properties: *mut TilePropertiesQCOM,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDynamicRenderingTilePropertiesQCOM = unsafe extern "system" fn(
+ device: Device,
+ p_rendering_info: *const RenderingInfo,
+ p_properties: *mut TilePropertiesQCOM,
+) -> Result;
+#[derive(Clone)]
+pub struct QcomTilePropertiesFn {
+ pub get_framebuffer_tile_properties_qcom: PFN_vkGetFramebufferTilePropertiesQCOM,
+ pub get_dynamic_rendering_tile_properties_qcom: PFN_vkGetDynamicRenderingTilePropertiesQCOM,
+}
+unsafe impl Send for QcomTilePropertiesFn {}
+unsafe impl Sync for QcomTilePropertiesFn {}
+impl QcomTilePropertiesFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_framebuffer_tile_properties_qcom: unsafe {
+ unsafe extern "system" fn get_framebuffer_tile_properties_qcom(
+ _device: Device,
+ _framebuffer: Framebuffer,
+ _p_properties_count: *mut u32,
+ _p_properties: *mut TilePropertiesQCOM,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_framebuffer_tile_properties_qcom)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetFramebufferTilePropertiesQCOM\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_framebuffer_tile_properties_qcom
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_dynamic_rendering_tile_properties_qcom: unsafe {
+ unsafe extern "system" fn get_dynamic_rendering_tile_properties_qcom(
+ _device: Device,
+ _p_rendering_info: *const RenderingInfo,
+ _p_properties: *mut TilePropertiesQCOM,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_dynamic_rendering_tile_properties_qcom)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDynamicRenderingTilePropertiesQCOM\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_dynamic_rendering_tile_properties_qcom
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[doc = "Generated from 'VK_QCOM_tile_properties'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM: Self = Self(1_000_484_000);
+ pub const TILE_PROPERTIES_QCOM: Self = Self(1_000_484_001);
+}
+impl SecAmigoProfilingFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_SEC_amigo_profiling\0") }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct SecAmigoProfilingFn {}
+unsafe impl Send for SecAmigoProfilingFn {}
+unsafe impl Sync for SecAmigoProfilingFn {}
+impl SecAmigoProfilingFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_SEC_amigo_profiling'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC: Self = Self(1_000_485_000);
+ pub const AMIGO_PROFILING_SUBMIT_INFO_SEC: Self = Self(1_000_485_001);
+}
+impl ExtExtension487Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_487\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension487Fn {}
+unsafe impl Send for ExtExtension487Fn {}
+unsafe impl Sync for ExtExtension487Fn {}
+impl ExtExtension487Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension488Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_488\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension488Fn {}
+unsafe impl Send for ExtExtension488Fn {}
+unsafe impl Sync for ExtExtension488Fn {}
+impl ExtExtension488Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl QcomMultiviewPerViewViewportsFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_QCOM_multiview_per_view_viewports\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct QcomMultiviewPerViewViewportsFn {}
+unsafe impl Send for QcomMultiviewPerViewViewportsFn {}
+unsafe impl Sync for QcomMultiviewPerViewViewportsFn {}
+impl QcomMultiviewPerViewViewportsFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_QCOM_multiview_per_view_viewports'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM: Self =
+ Self(1_000_488_000);
+}
+impl NvExtension490Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_490\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension490Fn {}
+unsafe impl Send for NvExtension490Fn {}
+unsafe impl Sync for NvExtension490Fn {}
+impl NvExtension490Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl NvRayTracingInvocationReorderFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"VK_NV_ray_tracing_invocation_reorder\0",
+ )
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct NvRayTracingInvocationReorderFn {}
+unsafe impl Send for NvRayTracingInvocationReorderFn {}
+unsafe impl Sync for NvRayTracingInvocationReorderFn {}
+impl NvRayTracingInvocationReorderFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_NV_ray_tracing_invocation_reorder'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV: Self =
+ Self(1_000_490_000);
+ pub const PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV: Self =
+ Self(1_000_490_001);
+}
+impl NvExtension492Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_492\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension492Fn {}
+unsafe impl Send for NvExtension492Fn {}
+unsafe impl Sync for NvExtension492Fn {}
+impl NvExtension492Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl NvExtension493Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_493\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension493Fn {}
+unsafe impl Send for NvExtension493Fn {}
+unsafe impl Sync for NvExtension493Fn {}
+impl NvExtension493Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl NvExtension494Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_494\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension494Fn {}
+unsafe impl Send for NvExtension494Fn {}
+unsafe impl Sync for NvExtension494Fn {}
+impl NvExtension494Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtMutableDescriptorTypeFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe {
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_mutable_descriptor_type\0")
+ }
+ }
+ pub const SPEC_VERSION: u32 = 1u32;
+}
+#[derive(Clone)]
+pub struct ExtMutableDescriptorTypeFn {}
+unsafe impl Send for ExtMutableDescriptorTypeFn {}
+unsafe impl Sync for ExtMutableDescriptorTypeFn {}
+impl ExtMutableDescriptorTypeFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_EXT_mutable_descriptor_type'"]
+impl DescriptorPoolCreateFlags {
+ pub const HOST_ONLY_EXT: Self = Self(0b100);
+}
+#[doc = "Generated from 'VK_EXT_mutable_descriptor_type'"]
+impl DescriptorSetLayoutCreateFlags {
+ pub const HOST_ONLY_POOL_EXT: Self = Self(0b100);
+}
+#[doc = "Generated from 'VK_EXT_mutable_descriptor_type'"]
+impl DescriptorType {
+ pub const MUTABLE_EXT: Self = Self(1_000_351_000);
+}
+#[doc = "Generated from 'VK_EXT_mutable_descriptor_type'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT: Self = Self(1_000_351_000);
+ pub const MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT: Self = Self(1_000_351_002);
+}
+impl ExtExtension496Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_496\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension496Fn {}
+unsafe impl Send for ExtExtension496Fn {}
+unsafe impl Sync for ExtExtension496Fn {}
+impl ExtExtension496Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension497Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_497\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension497Fn {}
+unsafe impl Send for ExtExtension497Fn {}
+unsafe impl Sync for ExtExtension497Fn {}
+impl ExtExtension497Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ArmShaderCoreBuiltinsFn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_ARM_shader_core_builtins\0") }
+ }
+ pub const SPEC_VERSION: u32 = 2u32;
+}
+#[derive(Clone)]
+pub struct ArmShaderCoreBuiltinsFn {}
+unsafe impl Send for ArmShaderCoreBuiltinsFn {}
+unsafe impl Sync for ArmShaderCoreBuiltinsFn {}
+impl ArmShaderCoreBuiltinsFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+#[doc = "Generated from 'VK_ARM_shader_core_builtins'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM: Self = Self(1_000_497_000);
+ pub const PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM: Self = Self(1_000_497_001);
+}
+impl ExtExtension499Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_499\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension499Fn {}
+unsafe impl Send for ExtExtension499Fn {}
+unsafe impl Sync for ExtExtension499Fn {}
+impl ExtExtension499Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension500Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_500\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension500Fn {}
+unsafe impl Send for ExtExtension500Fn {}
+unsafe impl Sync for ExtExtension500Fn {}
+impl ExtExtension500Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension501Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_501\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension501Fn {}
+unsafe impl Send for ExtExtension501Fn {}
+unsafe impl Sync for ExtExtension501Fn {}
+impl ExtExtension501Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension502Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_502\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension502Fn {}
+unsafe impl Send for ExtExtension502Fn {}
+unsafe impl Sync for ExtExtension502Fn {}
+impl ExtExtension502Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension503Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_503\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension503Fn {}
+unsafe impl Send for ExtExtension503Fn {}
+unsafe impl Sync for ExtExtension503Fn {}
+impl ExtExtension503Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl NvExtension504Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_504\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension504Fn {}
+unsafe impl Send for NvExtension504Fn {}
+unsafe impl Sync for NvExtension504Fn {}
+impl NvExtension504Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl ExtExtension505Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_EXT_extension_505\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct ExtExtension505Fn {}
+unsafe impl Send for ExtExtension505Fn {}
+unsafe impl Sync for ExtExtension505Fn {}
+impl ExtExtension505Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
+impl NvExtension506Fn {
+ #[inline]
+ pub const fn name() -> &'static ::std::ffi::CStr {
+ unsafe { ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"VK_NV_extension_506\0") }
+ }
+ pub const SPEC_VERSION: u32 = 0u32;
+}
+#[derive(Clone)]
+pub struct NvExtension506Fn {}
+unsafe impl Send for NvExtension506Fn {}
+unsafe impl Sync for NvExtension506Fn {}
+impl NvExtension506Fn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {}
+ }
+}
diff --git a/third_party/rust/ash/src/vk/feature_extensions.rs b/third_party/rust/ash/src/vk/feature_extensions.rs
new file mode 100644
index 0000000000..4b1820c9a7
--- /dev/null
+++ b/third_party/rust/ash/src/vk/feature_extensions.rs
@@ -0,0 +1,451 @@
+use crate::vk::bitflags::*;
+use crate::vk::enums::*;
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl BufferCreateFlags {
+ #[doc = "Buffer requires protected memory"]
+ pub const PROTECTED: Self = Self(0b1000);
+}
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl CommandPoolCreateFlags {
+ #[doc = "Command buffers allocated from pool are protected command buffers"]
+ pub const PROTECTED: Self = Self(0b100);
+}
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl DependencyFlags {
+ #[doc = "Dependency is across devices"]
+ pub const DEVICE_GROUP: Self = Self(0b100);
+ pub const VIEW_LOCAL: Self = Self(0b10);
+}
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl DeviceQueueCreateFlags {
+ #[doc = "Queue is a protected-capable device queue"]
+ pub const PROTECTED: Self = Self(0b1);
+}
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl Format {
+ pub const G8B8G8R8_422_UNORM: Self = Self(1_000_156_000);
+ pub const B8G8R8G8_422_UNORM: Self = Self(1_000_156_001);
+ pub const G8_B8_R8_3PLANE_420_UNORM: Self = Self(1_000_156_002);
+ pub const G8_B8R8_2PLANE_420_UNORM: Self = Self(1_000_156_003);
+ pub const G8_B8_R8_3PLANE_422_UNORM: Self = Self(1_000_156_004);
+ pub const G8_B8R8_2PLANE_422_UNORM: Self = Self(1_000_156_005);
+ pub const G8_B8_R8_3PLANE_444_UNORM: Self = Self(1_000_156_006);
+ pub const R10X6_UNORM_PACK16: Self = Self(1_000_156_007);
+ pub const R10X6G10X6_UNORM_2PACK16: Self = Self(1_000_156_008);
+ pub const R10X6G10X6B10X6A10X6_UNORM_4PACK16: Self = Self(1_000_156_009);
+ pub const G10X6B10X6G10X6R10X6_422_UNORM_4PACK16: Self = Self(1_000_156_010);
+ pub const B10X6G10X6R10X6G10X6_422_UNORM_4PACK16: Self = Self(1_000_156_011);
+ pub const G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16: Self = Self(1_000_156_012);
+ pub const G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16: Self = Self(1_000_156_013);
+ pub const G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16: Self = Self(1_000_156_014);
+ pub const G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16: Self = Self(1_000_156_015);
+ pub const G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16: Self = Self(1_000_156_016);
+ pub const R12X4_UNORM_PACK16: Self = Self(1_000_156_017);
+ pub const R12X4G12X4_UNORM_2PACK16: Self = Self(1_000_156_018);
+ pub const R12X4G12X4B12X4A12X4_UNORM_4PACK16: Self = Self(1_000_156_019);
+ pub const G12X4B12X4G12X4R12X4_422_UNORM_4PACK16: Self = Self(1_000_156_020);
+ pub const B12X4G12X4R12X4G12X4_422_UNORM_4PACK16: Self = Self(1_000_156_021);
+ pub const G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16: Self = Self(1_000_156_022);
+ pub const G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16: Self = Self(1_000_156_023);
+ pub const G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16: Self = Self(1_000_156_024);
+ pub const G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16: Self = Self(1_000_156_025);
+ pub const G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16: Self = Self(1_000_156_026);
+ pub const G16B16G16R16_422_UNORM: Self = Self(1_000_156_027);
+ pub const B16G16R16G16_422_UNORM: Self = Self(1_000_156_028);
+ pub const G16_B16_R16_3PLANE_420_UNORM: Self = Self(1_000_156_029);
+ pub const G16_B16R16_2PLANE_420_UNORM: Self = Self(1_000_156_030);
+ pub const G16_B16_R16_3PLANE_422_UNORM: Self = Self(1_000_156_031);
+ pub const G16_B16R16_2PLANE_422_UNORM: Self = Self(1_000_156_032);
+ pub const G16_B16_R16_3PLANE_444_UNORM: Self = Self(1_000_156_033);
+}
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl FormatFeatureFlags {
+ #[doc = "Format can be used as the source image of image transfer commands"]
+ pub const TRANSFER_SRC: Self = Self(0b100_0000_0000_0000);
+ #[doc = "Format can be used as the destination image of image transfer commands"]
+ pub const TRANSFER_DST: Self = Self(0b1000_0000_0000_0000);
+ #[doc = "Format can have midpoint rather than cosited chroma samples"]
+ pub const MIDPOINT_CHROMA_SAMPLES: Self = Self(0b10_0000_0000_0000_0000);
+ #[doc = "Format can be used with linear filtering whilst color conversion is enabled"]
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER: Self = Self(0b100_0000_0000_0000_0000);
+ #[doc = "Format can have different chroma, min and mag filters"]
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER: Self =
+ Self(0b1000_0000_0000_0000_0000);
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT: Self =
+ Self(0b1_0000_0000_0000_0000_0000);
+ pub const SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE: Self =
+ Self(0b10_0000_0000_0000_0000_0000);
+ #[doc = "Format supports disjoint planes"]
+ pub const DISJOINT: Self = Self(0b100_0000_0000_0000_0000_0000);
+ #[doc = "Format can have cosited rather than midpoint chroma samples"]
+ pub const COSITED_CHROMA_SAMPLES: Self = Self(0b1000_0000_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl ImageAspectFlags {
+ pub const PLANE_0: Self = Self(0b1_0000);
+ pub const PLANE_1: Self = Self(0b10_0000);
+ pub const PLANE_2: Self = Self(0b100_0000);
+}
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl ImageCreateFlags {
+ pub const ALIAS: Self = Self(0b100_0000_0000);
+ #[doc = "Allows using VkBindImageMemoryDeviceGroupInfo::pSplitInstanceBindRegions when binding memory to the image"]
+ pub const SPLIT_INSTANCE_BIND_REGIONS: Self = Self(0b100_0000);
+ #[doc = "The 3D image can be viewed as a 2D or 2D array image"]
+ pub const TYPE_2D_ARRAY_COMPATIBLE: Self = Self(0b10_0000);
+ pub const BLOCK_TEXEL_VIEW_COMPATIBLE: Self = Self(0b1000_0000);
+ pub const EXTENDED_USAGE: Self = Self(0b1_0000_0000);
+ #[doc = "Image requires protected memory"]
+ pub const PROTECTED: Self = Self(0b1000_0000_0000);
+ pub const DISJOINT: Self = Self(0b10_0000_0000);
+}
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl ImageLayout {
+ pub const DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL: Self = Self(1_000_117_000);
+ pub const DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL: Self = Self(1_000_117_001);
+}
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl MemoryHeapFlags {
+ #[doc = "If set, heap allocations allocate multiple instances by default"]
+ pub const MULTI_INSTANCE: Self = Self(0b10);
+}
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl MemoryPropertyFlags {
+ #[doc = "Memory is protected"]
+ pub const PROTECTED: Self = Self(0b10_0000);
+}
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl ObjectType {
+ pub const SAMPLER_YCBCR_CONVERSION: Self = Self(1_000_156_000);
+ pub const DESCRIPTOR_UPDATE_TEMPLATE: Self = Self(1_000_085_000);
+}
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl PipelineCreateFlags {
+ pub const VIEW_INDEX_FROM_DEVICE_INDEX: Self = Self(0b1000);
+ pub const DISPATCH_BASE: Self = Self(0b1_0000);
+}
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl QueueFlags {
+ #[doc = "Queues may support protected operations"]
+ pub const PROTECTED: Self = Self(0b1_0000);
+}
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl Result {
+ pub const ERROR_OUT_OF_POOL_MEMORY: Self = Self(-1_000_069_000);
+ pub const ERROR_INVALID_EXTERNAL_HANDLE: Self = Self(-1_000_072_003);
+}
+#[doc = "Generated from 'VK_VERSION_1_1'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_SUBGROUP_PROPERTIES: Self = Self(1_000_094_000);
+ pub const BIND_BUFFER_MEMORY_INFO: Self = Self(1_000_157_000);
+ pub const BIND_IMAGE_MEMORY_INFO: Self = Self(1_000_157_001);
+ pub const PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: Self = Self(1_000_083_000);
+ pub const MEMORY_DEDICATED_REQUIREMENTS: Self = Self(1_000_127_000);
+ pub const MEMORY_DEDICATED_ALLOCATE_INFO: Self = Self(1_000_127_001);
+ pub const MEMORY_ALLOCATE_FLAGS_INFO: Self = Self(1_000_060_000);
+ pub const DEVICE_GROUP_RENDER_PASS_BEGIN_INFO: Self = Self(1_000_060_003);
+ pub const DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO: Self = Self(1_000_060_004);
+ pub const DEVICE_GROUP_SUBMIT_INFO: Self = Self(1_000_060_005);
+ pub const DEVICE_GROUP_BIND_SPARSE_INFO: Self = Self(1_000_060_006);
+ pub const BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO: Self = Self(1_000_060_013);
+ pub const BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO: Self = Self(1_000_060_014);
+ pub const PHYSICAL_DEVICE_GROUP_PROPERTIES: Self = Self(1_000_070_000);
+ pub const DEVICE_GROUP_DEVICE_CREATE_INFO: Self = Self(1_000_070_001);
+ pub const BUFFER_MEMORY_REQUIREMENTS_INFO_2: Self = Self(1_000_146_000);
+ pub const IMAGE_MEMORY_REQUIREMENTS_INFO_2: Self = Self(1_000_146_001);
+ pub const IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2: Self = Self(1_000_146_002);
+ pub const MEMORY_REQUIREMENTS_2: Self = Self(1_000_146_003);
+ pub const SPARSE_IMAGE_MEMORY_REQUIREMENTS_2: Self = Self(1_000_146_004);
+ pub const PHYSICAL_DEVICE_FEATURES_2: Self = Self(1_000_059_000);
+ pub const PHYSICAL_DEVICE_PROPERTIES_2: Self = Self(1_000_059_001);
+ pub const FORMAT_PROPERTIES_2: Self = Self(1_000_059_002);
+ pub const IMAGE_FORMAT_PROPERTIES_2: Self = Self(1_000_059_003);
+ pub const PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2: Self = Self(1_000_059_004);
+ pub const QUEUE_FAMILY_PROPERTIES_2: Self = Self(1_000_059_005);
+ pub const PHYSICAL_DEVICE_MEMORY_PROPERTIES_2: Self = Self(1_000_059_006);
+ pub const SPARSE_IMAGE_FORMAT_PROPERTIES_2: Self = Self(1_000_059_007);
+ pub const PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2: Self = Self(1_000_059_008);
+ pub const PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: Self = Self(1_000_117_000);
+ pub const RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO: Self = Self(1_000_117_001);
+ pub const IMAGE_VIEW_USAGE_CREATE_INFO: Self = Self(1_000_117_002);
+ pub const PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO: Self = Self(1_000_117_003);
+ pub const RENDER_PASS_MULTIVIEW_CREATE_INFO: Self = Self(1_000_053_000);
+ pub const PHYSICAL_DEVICE_MULTIVIEW_FEATURES: Self = Self(1_000_053_001);
+ pub const PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES: Self = Self(1_000_053_002);
+ pub const PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: Self = Self(1_000_120_000);
+ pub const PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES: Self =
+ Self::PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES;
+ pub const PROTECTED_SUBMIT_INFO: Self = Self(1_000_145_000);
+ pub const PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: Self = Self(1_000_145_001);
+ pub const PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES: Self = Self(1_000_145_002);
+ pub const DEVICE_QUEUE_INFO_2: Self = Self(1_000_145_003);
+ pub const SAMPLER_YCBCR_CONVERSION_CREATE_INFO: Self = Self(1_000_156_000);
+ pub const SAMPLER_YCBCR_CONVERSION_INFO: Self = Self(1_000_156_001);
+ pub const BIND_IMAGE_PLANE_MEMORY_INFO: Self = Self(1_000_156_002);
+ pub const IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO: Self = Self(1_000_156_003);
+ pub const PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: Self = Self(1_000_156_004);
+ pub const SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES: Self = Self(1_000_156_005);
+ pub const DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO: Self = Self(1_000_085_000);
+ pub const PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO: Self = Self(1_000_071_000);
+ pub const EXTERNAL_IMAGE_FORMAT_PROPERTIES: Self = Self(1_000_071_001);
+ pub const PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO: Self = Self(1_000_071_002);
+ pub const EXTERNAL_BUFFER_PROPERTIES: Self = Self(1_000_071_003);
+ pub const PHYSICAL_DEVICE_ID_PROPERTIES: Self = Self(1_000_071_004);
+ pub const EXTERNAL_MEMORY_BUFFER_CREATE_INFO: Self = Self(1_000_072_000);
+ pub const EXTERNAL_MEMORY_IMAGE_CREATE_INFO: Self = Self(1_000_072_001);
+ pub const EXPORT_MEMORY_ALLOCATE_INFO: Self = Self(1_000_072_002);
+ pub const PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO: Self = Self(1_000_112_000);
+ pub const EXTERNAL_FENCE_PROPERTIES: Self = Self(1_000_112_001);
+ pub const EXPORT_FENCE_CREATE_INFO: Self = Self(1_000_113_000);
+ pub const EXPORT_SEMAPHORE_CREATE_INFO: Self = Self(1_000_077_000);
+ pub const PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO: Self = Self(1_000_076_000);
+ pub const EXTERNAL_SEMAPHORE_PROPERTIES: Self = Self(1_000_076_001);
+ pub const PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES: Self = Self(1_000_168_000);
+ pub const DESCRIPTOR_SET_LAYOUT_SUPPORT: Self = Self(1_000_168_001);
+ pub const PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: Self = Self(1_000_063_000);
+ pub const PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES: Self =
+ Self::PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES;
+}
+#[doc = "Generated from 'VK_VERSION_1_2'"]
+impl BufferCreateFlags {
+ pub const DEVICE_ADDRESS_CAPTURE_REPLAY: Self = Self(0b1_0000);
+}
+#[doc = "Generated from 'VK_VERSION_1_2'"]
+impl BufferUsageFlags {
+ pub const SHADER_DEVICE_ADDRESS: Self = Self(0b10_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_VERSION_1_2'"]
+impl DescriptorPoolCreateFlags {
+ pub const UPDATE_AFTER_BIND: Self = Self(0b10);
+}
+#[doc = "Generated from 'VK_VERSION_1_2'"]
+impl DescriptorSetLayoutCreateFlags {
+ pub const UPDATE_AFTER_BIND_POOL: Self = Self(0b10);
+}
+#[doc = "Generated from 'VK_VERSION_1_2'"]
+impl FormatFeatureFlags {
+ #[doc = "Format can be used with min/max reduction filtering"]
+ pub const SAMPLED_IMAGE_FILTER_MINMAX: Self = Self(0b1_0000_0000_0000_0000);
+}
+#[doc = "Generated from 'VK_VERSION_1_2'"]
+impl FramebufferCreateFlags {
+ pub const IMAGELESS: Self = Self(0b1);
+}
+#[doc = "Generated from 'VK_VERSION_1_2'"]
+impl ImageLayout {
+ pub const DEPTH_ATTACHMENT_OPTIMAL: Self = Self(1_000_241_000);
+ pub const DEPTH_READ_ONLY_OPTIMAL: Self = Self(1_000_241_001);
+ pub const STENCIL_ATTACHMENT_OPTIMAL: Self = Self(1_000_241_002);
+ pub const STENCIL_READ_ONLY_OPTIMAL: Self = Self(1_000_241_003);
+}
+#[doc = "Generated from 'VK_VERSION_1_2'"]
+impl MemoryAllocateFlags {
+ pub const DEVICE_ADDRESS: Self = Self(0b10);
+ pub const DEVICE_ADDRESS_CAPTURE_REPLAY: Self = Self(0b100);
+}
+#[doc = "Generated from 'VK_VERSION_1_2'"]
+impl Result {
+ pub const ERROR_FRAGMENTATION: Self = Self(-1_000_161_000);
+ pub const ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS: Self = Self(-1_000_257_000);
+}
+#[doc = "Generated from 'VK_VERSION_1_2'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_VULKAN_1_1_FEATURES: Self = Self(49);
+ pub const PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES: Self = Self(50);
+ pub const PHYSICAL_DEVICE_VULKAN_1_2_FEATURES: Self = Self(51);
+ pub const PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES: Self = Self(52);
+ pub const IMAGE_FORMAT_LIST_CREATE_INFO: Self = Self(1_000_147_000);
+ pub const ATTACHMENT_DESCRIPTION_2: Self = Self(1_000_109_000);
+ pub const ATTACHMENT_REFERENCE_2: Self = Self(1_000_109_001);
+ pub const SUBPASS_DESCRIPTION_2: Self = Self(1_000_109_002);
+ pub const SUBPASS_DEPENDENCY_2: Self = Self(1_000_109_003);
+ pub const RENDER_PASS_CREATE_INFO_2: Self = Self(1_000_109_004);
+ pub const SUBPASS_BEGIN_INFO: Self = Self(1_000_109_005);
+ pub const SUBPASS_END_INFO: Self = Self(1_000_109_006);
+ pub const PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES: Self = Self(1_000_177_000);
+ pub const PHYSICAL_DEVICE_DRIVER_PROPERTIES: Self = Self(1_000_196_000);
+ pub const PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES: Self = Self(1_000_180_000);
+ pub const PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES: Self = Self(1_000_082_000);
+ pub const PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES: Self = Self(1_000_197_000);
+ pub const DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO: Self = Self(1_000_161_000);
+ pub const PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES: Self = Self(1_000_161_001);
+ pub const PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES: Self = Self(1_000_161_002);
+ pub const DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO: Self = Self(1_000_161_003);
+ pub const DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT: Self = Self(1_000_161_004);
+ pub const PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES: Self = Self(1_000_199_000);
+ pub const SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE: Self = Self(1_000_199_001);
+ pub const PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES: Self = Self(1_000_221_000);
+ pub const IMAGE_STENCIL_USAGE_CREATE_INFO: Self = Self(1_000_246_000);
+ pub const PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES: Self = Self(1_000_130_000);
+ pub const SAMPLER_REDUCTION_MODE_CREATE_INFO: Self = Self(1_000_130_001);
+ pub const PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES: Self = Self(1_000_211_000);
+ pub const PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES: Self = Self(1_000_108_000);
+ pub const FRAMEBUFFER_ATTACHMENTS_CREATE_INFO: Self = Self(1_000_108_001);
+ pub const FRAMEBUFFER_ATTACHMENT_IMAGE_INFO: Self = Self(1_000_108_002);
+ pub const RENDER_PASS_ATTACHMENT_BEGIN_INFO: Self = Self(1_000_108_003);
+ pub const PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES: Self = Self(1_000_253_000);
+ pub const PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES: Self = Self(1_000_175_000);
+ pub const PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES: Self = Self(1_000_241_000);
+ pub const ATTACHMENT_REFERENCE_STENCIL_LAYOUT: Self = Self(1_000_241_001);
+ pub const ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT: Self = Self(1_000_241_002);
+ pub const PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES: Self = Self(1_000_261_000);
+ pub const PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES: Self = Self(1_000_207_000);
+ pub const PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES: Self = Self(1_000_207_001);
+ pub const SEMAPHORE_TYPE_CREATE_INFO: Self = Self(1_000_207_002);
+ pub const TIMELINE_SEMAPHORE_SUBMIT_INFO: Self = Self(1_000_207_003);
+ pub const SEMAPHORE_WAIT_INFO: Self = Self(1_000_207_004);
+ pub const SEMAPHORE_SIGNAL_INFO: Self = Self(1_000_207_005);
+ pub const PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES: Self = Self(1_000_257_000);
+ pub const BUFFER_DEVICE_ADDRESS_INFO: Self = Self(1_000_244_001);
+ pub const BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO: Self = Self(1_000_257_002);
+ pub const MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO: Self = Self(1_000_257_003);
+ pub const DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO: Self = Self(1_000_257_004);
+}
+#[doc = "Generated from 'VK_VERSION_1_3'"]
+impl AccessFlags {
+ pub const NONE: Self = Self(0);
+}
+#[doc = "Generated from 'VK_VERSION_1_3'"]
+impl AttachmentStoreOp {
+ pub const NONE: Self = Self(1_000_301_000);
+}
+#[doc = "Generated from 'VK_VERSION_1_3'"]
+impl DescriptorType {
+ pub const INLINE_UNIFORM_BLOCK: Self = Self(1_000_138_000);
+}
+#[doc = "Generated from 'VK_VERSION_1_3'"]
+impl DynamicState {
+ pub const CULL_MODE: Self = Self(1_000_267_000);
+ pub const FRONT_FACE: Self = Self(1_000_267_001);
+ pub const PRIMITIVE_TOPOLOGY: Self = Self(1_000_267_002);
+ pub const VIEWPORT_WITH_COUNT: Self = Self(1_000_267_003);
+ pub const SCISSOR_WITH_COUNT: Self = Self(1_000_267_004);
+ pub const VERTEX_INPUT_BINDING_STRIDE: Self = Self(1_000_267_005);
+ pub const DEPTH_TEST_ENABLE: Self = Self(1_000_267_006);
+ pub const DEPTH_WRITE_ENABLE: Self = Self(1_000_267_007);
+ pub const DEPTH_COMPARE_OP: Self = Self(1_000_267_008);
+ pub const DEPTH_BOUNDS_TEST_ENABLE: Self = Self(1_000_267_009);
+ pub const STENCIL_TEST_ENABLE: Self = Self(1_000_267_010);
+ pub const STENCIL_OP: Self = Self(1_000_267_011);
+ pub const RASTERIZER_DISCARD_ENABLE: Self = Self(1_000_377_001);
+ pub const DEPTH_BIAS_ENABLE: Self = Self(1_000_377_002);
+ pub const PRIMITIVE_RESTART_ENABLE: Self = Self(1_000_377_004);
+}
+#[doc = "Generated from 'VK_VERSION_1_3'"]
+impl EventCreateFlags {
+ pub const DEVICE_ONLY: Self = Self(0b1);
+}
+#[doc = "Generated from 'VK_VERSION_1_3'"]
+impl Format {
+ pub const G8_B8R8_2PLANE_444_UNORM: Self = Self(1_000_330_000);
+ pub const G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16: Self = Self(1_000_330_001);
+ pub const G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16: Self = Self(1_000_330_002);
+ pub const G16_B16R16_2PLANE_444_UNORM: Self = Self(1_000_330_003);
+ pub const A4R4G4B4_UNORM_PACK16: Self = Self(1_000_340_000);
+ pub const A4B4G4R4_UNORM_PACK16: Self = Self(1_000_340_001);
+ pub const ASTC_4X4_SFLOAT_BLOCK: Self = Self(1_000_066_000);
+ pub const ASTC_5X4_SFLOAT_BLOCK: Self = Self(1_000_066_001);
+ pub const ASTC_5X5_SFLOAT_BLOCK: Self = Self(1_000_066_002);
+ pub const ASTC_6X5_SFLOAT_BLOCK: Self = Self(1_000_066_003);
+ pub const ASTC_6X6_SFLOAT_BLOCK: Self = Self(1_000_066_004);
+ pub const ASTC_8X5_SFLOAT_BLOCK: Self = Self(1_000_066_005);
+ pub const ASTC_8X6_SFLOAT_BLOCK: Self = Self(1_000_066_006);
+ pub const ASTC_8X8_SFLOAT_BLOCK: Self = Self(1_000_066_007);
+ pub const ASTC_10X5_SFLOAT_BLOCK: Self = Self(1_000_066_008);
+ pub const ASTC_10X6_SFLOAT_BLOCK: Self = Self(1_000_066_009);
+ pub const ASTC_10X8_SFLOAT_BLOCK: Self = Self(1_000_066_010);
+ pub const ASTC_10X10_SFLOAT_BLOCK: Self = Self(1_000_066_011);
+ pub const ASTC_12X10_SFLOAT_BLOCK: Self = Self(1_000_066_012);
+ pub const ASTC_12X12_SFLOAT_BLOCK: Self = Self(1_000_066_013);
+}
+#[doc = "Generated from 'VK_VERSION_1_3'"]
+impl ImageAspectFlags {
+ pub const NONE: Self = Self(0);
+}
+#[doc = "Generated from 'VK_VERSION_1_3'"]
+impl ImageLayout {
+ pub const READ_ONLY_OPTIMAL: Self = Self(1_000_314_000);
+ pub const ATTACHMENT_OPTIMAL: Self = Self(1_000_314_001);
+}
+#[doc = "Generated from 'VK_VERSION_1_3'"]
+impl ObjectType {
+ pub const PRIVATE_DATA_SLOT: Self = Self(1_000_295_000);
+}
+#[doc = "Generated from 'VK_VERSION_1_3'"]
+impl PipelineCacheCreateFlags {
+ pub const EXTERNALLY_SYNCHRONIZED: Self = Self(0b1);
+}
+#[doc = "Generated from 'VK_VERSION_1_3'"]
+impl PipelineCreateFlags {
+ pub const FAIL_ON_PIPELINE_COMPILE_REQUIRED: Self = Self(0b1_0000_0000);
+ pub const EARLY_RETURN_ON_FAILURE: Self = Self(0b10_0000_0000);
+}
+#[doc = "Generated from 'VK_VERSION_1_3'"]
+impl PipelineShaderStageCreateFlags {
+ pub const ALLOW_VARYING_SUBGROUP_SIZE: Self = Self(0b1);
+ pub const REQUIRE_FULL_SUBGROUPS: Self = Self(0b10);
+}
+#[doc = "Generated from 'VK_VERSION_1_3'"]
+impl PipelineStageFlags {
+ pub const NONE: Self = Self(0);
+}
+#[doc = "Generated from 'VK_VERSION_1_3'"]
+impl Result {
+ pub const PIPELINE_COMPILE_REQUIRED: Self = Self(1_000_297_000);
+}
+#[doc = "Generated from 'VK_VERSION_1_3'"]
+impl StructureType {
+ pub const PHYSICAL_DEVICE_VULKAN_1_3_FEATURES: Self = Self(53);
+ pub const PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES: Self = Self(54);
+ pub const PIPELINE_CREATION_FEEDBACK_CREATE_INFO: Self = Self(1_000_192_000);
+ pub const PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES: Self = Self(1_000_215_000);
+ pub const PHYSICAL_DEVICE_TOOL_PROPERTIES: Self = Self(1_000_245_000);
+ pub const PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES: Self =
+ Self(1_000_276_000);
+ pub const PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES: Self = Self(1_000_295_000);
+ pub const DEVICE_PRIVATE_DATA_CREATE_INFO: Self = Self(1_000_295_001);
+ pub const PRIVATE_DATA_SLOT_CREATE_INFO: Self = Self(1_000_295_002);
+ pub const PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES: Self = Self(1_000_297_000);
+ pub const MEMORY_BARRIER_2: Self = Self(1_000_314_000);
+ pub const BUFFER_MEMORY_BARRIER_2: Self = Self(1_000_314_001);
+ pub const IMAGE_MEMORY_BARRIER_2: Self = Self(1_000_314_002);
+ pub const DEPENDENCY_INFO: Self = Self(1_000_314_003);
+ pub const SUBMIT_INFO_2: Self = Self(1_000_314_004);
+ pub const SEMAPHORE_SUBMIT_INFO: Self = Self(1_000_314_005);
+ pub const COMMAND_BUFFER_SUBMIT_INFO: Self = Self(1_000_314_006);
+ pub const PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES: Self = Self(1_000_314_007);
+ pub const PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES: Self = Self(1_000_325_000);
+ pub const PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES: Self = Self(1_000_335_000);
+ pub const COPY_BUFFER_INFO_2: Self = Self(1_000_337_000);
+ pub const COPY_IMAGE_INFO_2: Self = Self(1_000_337_001);
+ pub const COPY_BUFFER_TO_IMAGE_INFO_2: Self = Self(1_000_337_002);
+ pub const COPY_IMAGE_TO_BUFFER_INFO_2: Self = Self(1_000_337_003);
+ pub const BLIT_IMAGE_INFO_2: Self = Self(1_000_337_004);
+ pub const RESOLVE_IMAGE_INFO_2: Self = Self(1_000_337_005);
+ pub const BUFFER_COPY_2: Self = Self(1_000_337_006);
+ pub const IMAGE_COPY_2: Self = Self(1_000_337_007);
+ pub const IMAGE_BLIT_2: Self = Self(1_000_337_008);
+ pub const BUFFER_IMAGE_COPY_2: Self = Self(1_000_337_009);
+ pub const IMAGE_RESOLVE_2: Self = Self(1_000_337_010);
+ pub const PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES: Self = Self(1_000_225_000);
+ pub const PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO: Self = Self(1_000_225_001);
+ pub const PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES: Self = Self(1_000_225_002);
+ pub const PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES: Self = Self(1_000_138_000);
+ pub const PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES: Self = Self(1_000_138_001);
+ pub const WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK: Self = Self(1_000_138_002);
+ pub const DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO: Self = Self(1_000_138_003);
+ pub const PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES: Self = Self(1_000_066_000);
+ pub const RENDERING_INFO: Self = Self(1_000_044_000);
+ pub const RENDERING_ATTACHMENT_INFO: Self = Self(1_000_044_001);
+ pub const PIPELINE_RENDERING_CREATE_INFO: Self = Self(1_000_044_002);
+ pub const PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES: Self = Self(1_000_044_003);
+ pub const COMMAND_BUFFER_INHERITANCE_RENDERING_INFO: Self = Self(1_000_044_004);
+ pub const PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES: Self = Self(1_000_280_000);
+ pub const PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES: Self = Self(1_000_280_001);
+ pub const PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES: Self = Self(1_000_281_001);
+ pub const FORMAT_PROPERTIES_3: Self = Self(1_000_360_000);
+ pub const PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES: Self = Self(1_000_413_000);
+ pub const PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES: Self = Self(1_000_413_001);
+ pub const DEVICE_BUFFER_MEMORY_REQUIREMENTS: Self = Self(1_000_413_002);
+ pub const DEVICE_IMAGE_MEMORY_REQUIREMENTS: Self = Self(1_000_413_003);
+}
diff --git a/third_party/rust/ash/src/vk/features.rs b/third_party/rust/ash/src/vk/features.rs
new file mode 100644
index 0000000000..4a52e377ef
--- /dev/null
+++ b/third_party/rust/ash/src/vk/features.rs
@@ -0,0 +1,5353 @@
+use crate::vk::bitflags::*;
+use crate::vk::definitions::*;
+use crate::vk::enums::*;
+use std::os::raw::*;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetInstanceProcAddr =
+ unsafe extern "system" fn(instance: Instance, p_name: *const c_char) -> PFN_vkVoidFunction;
+#[derive(Clone)]
+pub struct StaticFn {
+ pub get_instance_proc_addr: PFN_vkGetInstanceProcAddr,
+}
+unsafe impl Send for StaticFn {}
+unsafe impl Sync for StaticFn {}
+impl StaticFn {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ get_instance_proc_addr: unsafe {
+ unsafe extern "system" fn get_instance_proc_addr(
+ _instance: Instance,
+ _p_name: *const c_char,
+ ) -> PFN_vkVoidFunction {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_instance_proc_addr)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetInstanceProcAddr\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_instance_proc_addr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateInstance = unsafe extern "system" fn(
+ p_create_info: *const InstanceCreateInfo,
+ p_allocator: *const AllocationCallbacks,
+ p_instance: *mut Instance,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkEnumerateInstanceExtensionProperties = unsafe extern "system" fn(
+ p_layer_name: *const c_char,
+ p_property_count: *mut u32,
+ p_properties: *mut ExtensionProperties,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkEnumerateInstanceLayerProperties = unsafe extern "system" fn(
+ p_property_count: *mut u32,
+ p_properties: *mut LayerProperties,
+) -> Result;
+#[derive(Clone)]
+pub struct EntryFnV1_0 {
+ pub create_instance: PFN_vkCreateInstance,
+ pub enumerate_instance_extension_properties: PFN_vkEnumerateInstanceExtensionProperties,
+ pub enumerate_instance_layer_properties: PFN_vkEnumerateInstanceLayerProperties,
+}
+unsafe impl Send for EntryFnV1_0 {}
+unsafe impl Sync for EntryFnV1_0 {}
+impl EntryFnV1_0 {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ create_instance: unsafe {
+ unsafe extern "system" fn create_instance(
+ _p_create_info: *const InstanceCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_instance: *mut Instance,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_instance)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateInstance\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_instance
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ enumerate_instance_extension_properties: unsafe {
+ unsafe extern "system" fn enumerate_instance_extension_properties(
+ _p_layer_name: *const c_char,
+ _p_property_count: *mut u32,
+ _p_properties: *mut ExtensionProperties,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(enumerate_instance_extension_properties)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkEnumerateInstanceExtensionProperties\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ enumerate_instance_extension_properties
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ enumerate_instance_layer_properties: unsafe {
+ unsafe extern "system" fn enumerate_instance_layer_properties(
+ _p_property_count: *mut u32,
+ _p_properties: *mut LayerProperties,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(enumerate_instance_layer_properties)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkEnumerateInstanceLayerProperties\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ enumerate_instance_layer_properties
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyInstance =
+ unsafe extern "system" fn(instance: Instance, p_allocator: *const AllocationCallbacks);
+#[allow(non_camel_case_types)]
+pub type PFN_vkEnumeratePhysicalDevices = unsafe extern "system" fn(
+ instance: Instance,
+ p_physical_device_count: *mut u32,
+ p_physical_devices: *mut PhysicalDevice,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceFeatures = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_features: *mut PhysicalDeviceFeatures,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceFormatProperties = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ format: Format,
+ p_format_properties: *mut FormatProperties,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceImageFormatProperties = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ format: Format,
+ ty: ImageType,
+ tiling: ImageTiling,
+ usage: ImageUsageFlags,
+ flags: ImageCreateFlags,
+ p_image_format_properties: *mut ImageFormatProperties,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceProperties = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_properties: *mut PhysicalDeviceProperties,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceQueueFamilyProperties = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_queue_family_property_count: *mut u32,
+ p_queue_family_properties: *mut QueueFamilyProperties,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceMemoryProperties = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_memory_properties: *mut PhysicalDeviceMemoryProperties,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDeviceProcAddr =
+ unsafe extern "system" fn(device: Device, p_name: *const c_char) -> PFN_vkVoidFunction;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateDevice = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_create_info: *const DeviceCreateInfo,
+ p_allocator: *const AllocationCallbacks,
+ p_device: *mut Device,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkEnumerateDeviceExtensionProperties = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_layer_name: *const c_char,
+ p_property_count: *mut u32,
+ p_properties: *mut ExtensionProperties,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkEnumerateDeviceLayerProperties = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ p_property_count: *mut u32,
+ p_properties: *mut LayerProperties,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPhysicalDeviceSparseImageFormatProperties = unsafe extern "system" fn(
+ physical_device: PhysicalDevice,
+ format: Format,
+ ty: ImageType,
+ samples: SampleCountFlags,
+ usage: ImageUsageFlags,
+ tiling: ImageTiling,
+ p_property_count: *mut u32,
+ p_properties: *mut SparseImageFormatProperties,
+);
+#[derive(Clone)]
+pub struct InstanceFnV1_0 {
+ pub destroy_instance: PFN_vkDestroyInstance,
+ pub enumerate_physical_devices: PFN_vkEnumeratePhysicalDevices,
+ pub get_physical_device_features: PFN_vkGetPhysicalDeviceFeatures,
+ pub get_physical_device_format_properties: PFN_vkGetPhysicalDeviceFormatProperties,
+ pub get_physical_device_image_format_properties: PFN_vkGetPhysicalDeviceImageFormatProperties,
+ pub get_physical_device_properties: PFN_vkGetPhysicalDeviceProperties,
+ pub get_physical_device_queue_family_properties: PFN_vkGetPhysicalDeviceQueueFamilyProperties,
+ pub get_physical_device_memory_properties: PFN_vkGetPhysicalDeviceMemoryProperties,
+ pub get_device_proc_addr: PFN_vkGetDeviceProcAddr,
+ pub create_device: PFN_vkCreateDevice,
+ pub enumerate_device_extension_properties: PFN_vkEnumerateDeviceExtensionProperties,
+ pub enumerate_device_layer_properties: PFN_vkEnumerateDeviceLayerProperties,
+ pub get_physical_device_sparse_image_format_properties:
+ PFN_vkGetPhysicalDeviceSparseImageFormatProperties,
+}
+unsafe impl Send for InstanceFnV1_0 {}
+unsafe impl Sync for InstanceFnV1_0 {}
+impl InstanceFnV1_0 {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ destroy_instance: unsafe {
+ unsafe extern "system" fn destroy_instance(
+ _instance: Instance,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_instance)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyInstance\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_instance
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ enumerate_physical_devices: unsafe {
+ unsafe extern "system" fn enumerate_physical_devices(
+ _instance: Instance,
+ _p_physical_device_count: *mut u32,
+ _p_physical_devices: *mut PhysicalDevice,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(enumerate_physical_devices)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkEnumeratePhysicalDevices\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ enumerate_physical_devices
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_features: unsafe {
+ unsafe extern "system" fn get_physical_device_features(
+ _physical_device: PhysicalDevice,
+ _p_features: *mut PhysicalDeviceFeatures,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_features)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceFeatures\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_features
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_format_properties: unsafe {
+ unsafe extern "system" fn get_physical_device_format_properties(
+ _physical_device: PhysicalDevice,
+ _format: Format,
+ _p_format_properties: *mut FormatProperties,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_format_properties)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceFormatProperties\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_format_properties
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_image_format_properties: unsafe {
+ unsafe extern "system" fn get_physical_device_image_format_properties(
+ _physical_device: PhysicalDevice,
+ _format: Format,
+ _ty: ImageType,
+ _tiling: ImageTiling,
+ _usage: ImageUsageFlags,
+ _flags: ImageCreateFlags,
+ _p_image_format_properties: *mut ImageFormatProperties,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_image_format_properties)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceImageFormatProperties\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_image_format_properties
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_properties: unsafe {
+ unsafe extern "system" fn get_physical_device_properties(
+ _physical_device: PhysicalDevice,
+ _p_properties: *mut PhysicalDeviceProperties,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_properties)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceProperties\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_properties
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_queue_family_properties: unsafe {
+ unsafe extern "system" fn get_physical_device_queue_family_properties(
+ _physical_device: PhysicalDevice,
+ _p_queue_family_property_count: *mut u32,
+ _p_queue_family_properties: *mut QueueFamilyProperties,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_queue_family_properties)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceQueueFamilyProperties\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_queue_family_properties
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_memory_properties: unsafe {
+ unsafe extern "system" fn get_physical_device_memory_properties(
+ _physical_device: PhysicalDevice,
+ _p_memory_properties: *mut PhysicalDeviceMemoryProperties,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_memory_properties)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceMemoryProperties\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_memory_properties
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_device_proc_addr: unsafe {
+ unsafe extern "system" fn get_device_proc_addr(
+ _device: Device,
+ _p_name: *const c_char,
+ ) -> PFN_vkVoidFunction {
+ panic!(concat!("Unable to load ", stringify!(get_device_proc_addr)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetDeviceProcAddr\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_proc_addr
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_device: unsafe {
+ unsafe extern "system" fn create_device(
+ _physical_device: PhysicalDevice,
+ _p_create_info: *const DeviceCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_device: *mut Device,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_device)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateDevice\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_device
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ enumerate_device_extension_properties: unsafe {
+ unsafe extern "system" fn enumerate_device_extension_properties(
+ _physical_device: PhysicalDevice,
+ _p_layer_name: *const c_char,
+ _p_property_count: *mut u32,
+ _p_properties: *mut ExtensionProperties,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(enumerate_device_extension_properties)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkEnumerateDeviceExtensionProperties\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ enumerate_device_extension_properties
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ enumerate_device_layer_properties: unsafe {
+ unsafe extern "system" fn enumerate_device_layer_properties(
+ _physical_device: PhysicalDevice,
+ _p_property_count: *mut u32,
+ _p_properties: *mut LayerProperties,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(enumerate_device_layer_properties)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkEnumerateDeviceLayerProperties\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ enumerate_device_layer_properties
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_physical_device_sparse_image_format_properties: unsafe {
+ unsafe extern "system" fn get_physical_device_sparse_image_format_properties(
+ _physical_device: PhysicalDevice,
+ _format: Format,
+ _ty: ImageType,
+ _samples: SampleCountFlags,
+ _usage: ImageUsageFlags,
+ _tiling: ImageTiling,
+ _p_property_count: *mut u32,
+ _p_properties: *mut SparseImageFormatProperties,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_physical_device_sparse_image_format_properties)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetPhysicalDeviceSparseImageFormatProperties\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_physical_device_sparse_image_format_properties
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyDevice =
+ unsafe extern "system" fn(device: Device, p_allocator: *const AllocationCallbacks);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDeviceQueue = unsafe extern "system" fn(
+ device: Device,
+ queue_family_index: u32,
+ queue_index: u32,
+ p_queue: *mut Queue,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkQueueSubmit = unsafe extern "system" fn(
+ queue: Queue,
+ submit_count: u32,
+ p_submits: *const SubmitInfo,
+ fence: Fence,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkQueueWaitIdle = unsafe extern "system" fn(queue: Queue) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDeviceWaitIdle = unsafe extern "system" fn(device: Device) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkAllocateMemory = unsafe extern "system" fn(
+ device: Device,
+ p_allocate_info: *const MemoryAllocateInfo,
+ p_allocator: *const AllocationCallbacks,
+ p_memory: *mut DeviceMemory,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkFreeMemory = unsafe extern "system" fn(
+ device: Device,
+ memory: DeviceMemory,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkMapMemory = unsafe extern "system" fn(
+ device: Device,
+ memory: DeviceMemory,
+ offset: DeviceSize,
+ size: DeviceSize,
+ flags: MemoryMapFlags,
+ pp_data: *mut *mut c_void,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkUnmapMemory = unsafe extern "system" fn(device: Device, memory: DeviceMemory);
+#[allow(non_camel_case_types)]
+pub type PFN_vkFlushMappedMemoryRanges = unsafe extern "system" fn(
+ device: Device,
+ memory_range_count: u32,
+ p_memory_ranges: *const MappedMemoryRange,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkInvalidateMappedMemoryRanges = unsafe extern "system" fn(
+ device: Device,
+ memory_range_count: u32,
+ p_memory_ranges: *const MappedMemoryRange,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetDeviceMemoryCommitment = unsafe extern "system" fn(
+ device: Device,
+ memory: DeviceMemory,
+ p_committed_memory_in_bytes: *mut DeviceSize,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkBindBufferMemory = unsafe extern "system" fn(
+ device: Device,
+ buffer: Buffer,
+ memory: DeviceMemory,
+ memory_offset: DeviceSize,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkBindImageMemory = unsafe extern "system" fn(
+ device: Device,
+ image: Image,
+ memory: DeviceMemory,
+ memory_offset: DeviceSize,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetBufferMemoryRequirements = unsafe extern "system" fn(
+ device: Device,
+ buffer: Buffer,
+ p_memory_requirements: *mut MemoryRequirements,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetImageMemoryRequirements = unsafe extern "system" fn(
+ device: Device,
+ image: Image,
+ p_memory_requirements: *mut MemoryRequirements,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetImageSparseMemoryRequirements = unsafe extern "system" fn(
+ device: Device,
+ image: Image,
+ p_sparse_memory_requirement_count: *mut u32,
+ p_sparse_memory_requirements: *mut SparseImageMemoryRequirements,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkQueueBindSparse = unsafe extern "system" fn(
+ queue: Queue,
+ bind_info_count: u32,
+ p_bind_info: *const BindSparseInfo,
+ fence: Fence,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateFence = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const FenceCreateInfo,
+ p_allocator: *const AllocationCallbacks,
+ p_fence: *mut Fence,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyFence = unsafe extern "system" fn(
+ device: Device,
+ fence: Fence,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkResetFences =
+ unsafe extern "system" fn(device: Device, fence_count: u32, p_fences: *const Fence) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetFenceStatus = unsafe extern "system" fn(device: Device, fence: Fence) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkWaitForFences = unsafe extern "system" fn(
+ device: Device,
+ fence_count: u32,
+ p_fences: *const Fence,
+ wait_all: Bool32,
+ timeout: u64,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateSemaphore = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const SemaphoreCreateInfo,
+ p_allocator: *const AllocationCallbacks,
+ p_semaphore: *mut Semaphore,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroySemaphore = unsafe extern "system" fn(
+ device: Device,
+ semaphore: Semaphore,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateEvent = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const EventCreateInfo,
+ p_allocator: *const AllocationCallbacks,
+ p_event: *mut Event,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyEvent = unsafe extern "system" fn(
+ device: Device,
+ event: Event,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetEventStatus = unsafe extern "system" fn(device: Device, event: Event) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkSetEvent = unsafe extern "system" fn(device: Device, event: Event) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkResetEvent = unsafe extern "system" fn(device: Device, event: Event) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateQueryPool = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const QueryPoolCreateInfo,
+ p_allocator: *const AllocationCallbacks,
+ p_query_pool: *mut QueryPool,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyQueryPool = unsafe extern "system" fn(
+ device: Device,
+ query_pool: QueryPool,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetQueryPoolResults = unsafe extern "system" fn(
+ device: Device,
+ query_pool: QueryPool,
+ first_query: u32,
+ query_count: u32,
+ data_size: usize,
+ p_data: *mut c_void,
+ stride: DeviceSize,
+ flags: QueryResultFlags,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateBuffer = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const BufferCreateInfo,
+ p_allocator: *const AllocationCallbacks,
+ p_buffer: *mut Buffer,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyBuffer = unsafe extern "system" fn(
+ device: Device,
+ buffer: Buffer,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateBufferView = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const BufferViewCreateInfo,
+ p_allocator: *const AllocationCallbacks,
+ p_view: *mut BufferView,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyBufferView = unsafe extern "system" fn(
+ device: Device,
+ buffer_view: BufferView,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateImage = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const ImageCreateInfo,
+ p_allocator: *const AllocationCallbacks,
+ p_image: *mut Image,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyImage = unsafe extern "system" fn(
+ device: Device,
+ image: Image,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetImageSubresourceLayout = unsafe extern "system" fn(
+ device: Device,
+ image: Image,
+ p_subresource: *const ImageSubresource,
+ p_layout: *mut SubresourceLayout,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateImageView = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const ImageViewCreateInfo,
+ p_allocator: *const AllocationCallbacks,
+ p_view: *mut ImageView,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyImageView = unsafe extern "system" fn(
+ device: Device,
+ image_view: ImageView,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateShaderModule = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const ShaderModuleCreateInfo,
+ p_allocator: *const AllocationCallbacks,
+ p_shader_module: *mut ShaderModule,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyShaderModule = unsafe extern "system" fn(
+ device: Device,
+ shader_module: ShaderModule,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreatePipelineCache = unsafe extern "system" fn(
+ device: Device,
+ p_create_info: *const PipelineCacheCreateInfo,
+ p_allocator: *const AllocationCallbacks,
+ p_pipeline_cache: *mut PipelineCache,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyPipelineCache = unsafe extern "system" fn(
+ device: Device,
+ pipeline_cache: PipelineCache,
+ p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetPipelineCacheData = unsafe extern "system" fn(
+ device: Device,
+ pipeline_cache: PipelineCache,
+ p_data_size: *mut usize,
+ p_data: *mut c_void,
+) -> Result;
+// ---------------------------------------------------------------------------
+// Raw `unsafe extern "system"` function-pointer type aliases. Each alias name
+// is the exact C entry-point name (e.g. `PFN_vkCmdDraw` <-> `vkCmdDraw`) and
+// its parameter list mirrors the C prototype of that Vulkan command.
+// NOTE(review): this file appears to be machine-generated vendored bindings
+// (third_party/rust/ash) — prefer regenerating over hand-editing aliases.
+#[allow(non_camel_case_types)]
+pub type PFN_vkMergePipelineCaches = unsafe extern "system" fn(
+    device: Device,
+    dst_cache: PipelineCache,
+    src_cache_count: u32,
+    p_src_caches: *const PipelineCache,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateGraphicsPipelines = unsafe extern "system" fn(
+    device: Device,
+    pipeline_cache: PipelineCache,
+    create_info_count: u32,
+    p_create_infos: *const GraphicsPipelineCreateInfo,
+    p_allocator: *const AllocationCallbacks,
+    p_pipelines: *mut Pipeline,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateComputePipelines = unsafe extern "system" fn(
+    device: Device,
+    pipeline_cache: PipelineCache,
+    create_info_count: u32,
+    p_create_infos: *const ComputePipelineCreateInfo,
+    p_allocator: *const AllocationCallbacks,
+    p_pipelines: *mut Pipeline,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyPipeline = unsafe extern "system" fn(
+    device: Device,
+    pipeline: Pipeline,
+    p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreatePipelineLayout = unsafe extern "system" fn(
+    device: Device,
+    p_create_info: *const PipelineLayoutCreateInfo,
+    p_allocator: *const AllocationCallbacks,
+    p_pipeline_layout: *mut PipelineLayout,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyPipelineLayout = unsafe extern "system" fn(
+    device: Device,
+    pipeline_layout: PipelineLayout,
+    p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateSampler = unsafe extern "system" fn(
+    device: Device,
+    p_create_info: *const SamplerCreateInfo,
+    p_allocator: *const AllocationCallbacks,
+    p_sampler: *mut Sampler,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroySampler = unsafe extern "system" fn(
+    device: Device,
+    sampler: Sampler,
+    p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateDescriptorSetLayout = unsafe extern "system" fn(
+    device: Device,
+    p_create_info: *const DescriptorSetLayoutCreateInfo,
+    p_allocator: *const AllocationCallbacks,
+    p_set_layout: *mut DescriptorSetLayout,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyDescriptorSetLayout = unsafe extern "system" fn(
+    device: Device,
+    descriptor_set_layout: DescriptorSetLayout,
+    p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateDescriptorPool = unsafe extern "system" fn(
+    device: Device,
+    p_create_info: *const DescriptorPoolCreateInfo,
+    p_allocator: *const AllocationCallbacks,
+    p_descriptor_pool: *mut DescriptorPool,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyDescriptorPool = unsafe extern "system" fn(
+    device: Device,
+    descriptor_pool: DescriptorPool,
+    p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkResetDescriptorPool = unsafe extern "system" fn(
+    device: Device,
+    descriptor_pool: DescriptorPool,
+    flags: DescriptorPoolResetFlags,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkAllocateDescriptorSets = unsafe extern "system" fn(
+    device: Device,
+    p_allocate_info: *const DescriptorSetAllocateInfo,
+    p_descriptor_sets: *mut DescriptorSet,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkFreeDescriptorSets = unsafe extern "system" fn(
+    device: Device,
+    descriptor_pool: DescriptorPool,
+    descriptor_set_count: u32,
+    p_descriptor_sets: *const DescriptorSet,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkUpdateDescriptorSets = unsafe extern "system" fn(
+    device: Device,
+    descriptor_write_count: u32,
+    p_descriptor_writes: *const WriteDescriptorSet,
+    descriptor_copy_count: u32,
+    p_descriptor_copies: *const CopyDescriptorSet,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateFramebuffer = unsafe extern "system" fn(
+    device: Device,
+    p_create_info: *const FramebufferCreateInfo,
+    p_allocator: *const AllocationCallbacks,
+    p_framebuffer: *mut Framebuffer,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyFramebuffer = unsafe extern "system" fn(
+    device: Device,
+    framebuffer: Framebuffer,
+    p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateRenderPass = unsafe extern "system" fn(
+    device: Device,
+    p_create_info: *const RenderPassCreateInfo,
+    p_allocator: *const AllocationCallbacks,
+    p_render_pass: *mut RenderPass,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyRenderPass = unsafe extern "system" fn(
+    device: Device,
+    render_pass: RenderPass,
+    p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkGetRenderAreaGranularity = unsafe extern "system" fn(
+    device: Device,
+    render_pass: RenderPass,
+    p_granularity: *mut Extent2D,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCreateCommandPool = unsafe extern "system" fn(
+    device: Device,
+    p_create_info: *const CommandPoolCreateInfo,
+    p_allocator: *const AllocationCallbacks,
+    p_command_pool: *mut CommandPool,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkDestroyCommandPool = unsafe extern "system" fn(
+    device: Device,
+    command_pool: CommandPool,
+    p_allocator: *const AllocationCallbacks,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkResetCommandPool = unsafe extern "system" fn(
+    device: Device,
+    command_pool: CommandPool,
+    flags: CommandPoolResetFlags,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkAllocateCommandBuffers = unsafe extern "system" fn(
+    device: Device,
+    p_allocate_info: *const CommandBufferAllocateInfo,
+    p_command_buffers: *mut CommandBuffer,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkFreeCommandBuffers = unsafe extern "system" fn(
+    device: Device,
+    command_pool: CommandPool,
+    command_buffer_count: u32,
+    p_command_buffers: *const CommandBuffer,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkBeginCommandBuffer = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    p_begin_info: *const CommandBufferBeginInfo,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkEndCommandBuffer =
+    unsafe extern "system" fn(command_buffer: CommandBuffer) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkResetCommandBuffer = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    flags: CommandBufferResetFlags,
+) -> Result;
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBindPipeline = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    pipeline_bind_point: PipelineBindPoint,
+    pipeline: Pipeline,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetViewport = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    first_viewport: u32,
+    viewport_count: u32,
+    p_viewports: *const Viewport,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetScissor = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    first_scissor: u32,
+    scissor_count: u32,
+    p_scissors: *const Rect2D,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetLineWidth =
+    unsafe extern "system" fn(command_buffer: CommandBuffer, line_width: f32);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetDepthBias = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    depth_bias_constant_factor: f32,
+    depth_bias_clamp: f32,
+    depth_bias_slope_factor: f32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetBlendConstants =
+    unsafe extern "system" fn(command_buffer: CommandBuffer, blend_constants: *const [f32; 4]);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetDepthBounds = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    min_depth_bounds: f32,
+    max_depth_bounds: f32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetStencilCompareMask = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    face_mask: StencilFaceFlags,
+    compare_mask: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetStencilWriteMask = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    face_mask: StencilFaceFlags,
+    write_mask: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetStencilReference = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    face_mask: StencilFaceFlags,
+    reference: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBindDescriptorSets = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    pipeline_bind_point: PipelineBindPoint,
+    layout: PipelineLayout,
+    first_set: u32,
+    descriptor_set_count: u32,
+    p_descriptor_sets: *const DescriptorSet,
+    dynamic_offset_count: u32,
+    p_dynamic_offsets: *const u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBindIndexBuffer = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    buffer: Buffer,
+    offset: DeviceSize,
+    index_type: IndexType,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBindVertexBuffers = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    first_binding: u32,
+    binding_count: u32,
+    p_buffers: *const Buffer,
+    p_offsets: *const DeviceSize,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDraw = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    vertex_count: u32,
+    instance_count: u32,
+    first_vertex: u32,
+    first_instance: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDrawIndexed = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    index_count: u32,
+    instance_count: u32,
+    first_index: u32,
+    vertex_offset: i32,
+    first_instance: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDrawIndirect = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    buffer: Buffer,
+    offset: DeviceSize,
+    draw_count: u32,
+    stride: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDrawIndexedIndirect = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    buffer: Buffer,
+    offset: DeviceSize,
+    draw_count: u32,
+    stride: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDispatch = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    group_count_x: u32,
+    group_count_y: u32,
+    group_count_z: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdDispatchIndirect =
+    unsafe extern "system" fn(command_buffer: CommandBuffer, buffer: Buffer, offset: DeviceSize);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyBuffer = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    src_buffer: Buffer,
+    dst_buffer: Buffer,
+    region_count: u32,
+    p_regions: *const BufferCopy,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyImage = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    src_image: Image,
+    src_image_layout: ImageLayout,
+    dst_image: Image,
+    dst_image_layout: ImageLayout,
+    region_count: u32,
+    p_regions: *const ImageCopy,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBlitImage = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    src_image: Image,
+    src_image_layout: ImageLayout,
+    dst_image: Image,
+    dst_image_layout: ImageLayout,
+    region_count: u32,
+    p_regions: *const ImageBlit,
+    filter: Filter,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyBufferToImage = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    src_buffer: Buffer,
+    dst_image: Image,
+    dst_image_layout: ImageLayout,
+    region_count: u32,
+    p_regions: *const BufferImageCopy,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyImageToBuffer = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    src_image: Image,
+    src_image_layout: ImageLayout,
+    dst_buffer: Buffer,
+    region_count: u32,
+    p_regions: *const BufferImageCopy,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdUpdateBuffer = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    dst_buffer: Buffer,
+    dst_offset: DeviceSize,
+    data_size: DeviceSize,
+    p_data: *const c_void,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdFillBuffer = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    dst_buffer: Buffer,
+    dst_offset: DeviceSize,
+    size: DeviceSize,
+    data: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdClearColorImage = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    image: Image,
+    image_layout: ImageLayout,
+    p_color: *const ClearColorValue,
+    range_count: u32,
+    p_ranges: *const ImageSubresourceRange,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdClearDepthStencilImage = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    image: Image,
+    image_layout: ImageLayout,
+    p_depth_stencil: *const ClearDepthStencilValue,
+    range_count: u32,
+    p_ranges: *const ImageSubresourceRange,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdClearAttachments = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    attachment_count: u32,
+    p_attachments: *const ClearAttachment,
+    rect_count: u32,
+    p_rects: *const ClearRect,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdResolveImage = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    src_image: Image,
+    src_image_layout: ImageLayout,
+    dst_image: Image,
+    dst_image_layout: ImageLayout,
+    region_count: u32,
+    p_regions: *const ImageResolve,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdSetEvent = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    event: Event,
+    stage_mask: PipelineStageFlags,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdResetEvent = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    event: Event,
+    stage_mask: PipelineStageFlags,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdWaitEvents = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    event_count: u32,
+    p_events: *const Event,
+    src_stage_mask: PipelineStageFlags,
+    dst_stage_mask: PipelineStageFlags,
+    memory_barrier_count: u32,
+    p_memory_barriers: *const MemoryBarrier,
+    buffer_memory_barrier_count: u32,
+    p_buffer_memory_barriers: *const BufferMemoryBarrier,
+    image_memory_barrier_count: u32,
+    p_image_memory_barriers: *const ImageMemoryBarrier,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdPipelineBarrier = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    src_stage_mask: PipelineStageFlags,
+    dst_stage_mask: PipelineStageFlags,
+    dependency_flags: DependencyFlags,
+    memory_barrier_count: u32,
+    p_memory_barriers: *const MemoryBarrier,
+    buffer_memory_barrier_count: u32,
+    p_buffer_memory_barriers: *const BufferMemoryBarrier,
+    image_memory_barrier_count: u32,
+    p_image_memory_barriers: *const ImageMemoryBarrier,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBeginQuery = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    query_pool: QueryPool,
+    query: u32,
+    flags: QueryControlFlags,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdEndQuery =
+    unsafe extern "system" fn(command_buffer: CommandBuffer, query_pool: QueryPool, query: u32);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdResetQueryPool = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    query_pool: QueryPool,
+    first_query: u32,
+    query_count: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdWriteTimestamp = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    pipeline_stage: PipelineStageFlags,
+    query_pool: QueryPool,
+    query: u32,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdCopyQueryPoolResults = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    query_pool: QueryPool,
+    first_query: u32,
+    query_count: u32,
+    dst_buffer: Buffer,
+    dst_offset: DeviceSize,
+    stride: DeviceSize,
+    flags: QueryResultFlags,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdPushConstants = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    layout: PipelineLayout,
+    stage_flags: ShaderStageFlags,
+    offset: u32,
+    size: u32,
+    p_values: *const c_void,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdBeginRenderPass = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    p_render_pass_begin: *const RenderPassBeginInfo,
+    contents: SubpassContents,
+);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdNextSubpass =
+    unsafe extern "system" fn(command_buffer: CommandBuffer, contents: SubpassContents);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdEndRenderPass = unsafe extern "system" fn(command_buffer: CommandBuffer);
+#[allow(non_camel_case_types)]
+pub type PFN_vkCmdExecuteCommands = unsafe extern "system" fn(
+    command_buffer: CommandBuffer,
+    command_buffer_count: u32,
+    p_command_buffers: *const CommandBuffer,
+);
+/// Dispatch table holding the core Vulkan 1.0 device-level and
+/// command-buffer-level entry points. Each field is the snake_case form of
+/// the `PFN_vk*` function-pointer alias it stores; the pointers are resolved
+/// at runtime by `DeviceFnV1_0::load` (defined below in this file), which
+/// substitutes a panicking stub for any entry point the loader cannot find.
+#[derive(Clone)]
+pub struct DeviceFnV1_0 {
+    pub destroy_device: PFN_vkDestroyDevice,
+    pub get_device_queue: PFN_vkGetDeviceQueue,
+    pub queue_submit: PFN_vkQueueSubmit,
+    pub queue_wait_idle: PFN_vkQueueWaitIdle,
+    pub device_wait_idle: PFN_vkDeviceWaitIdle,
+    pub allocate_memory: PFN_vkAllocateMemory,
+    pub free_memory: PFN_vkFreeMemory,
+    pub map_memory: PFN_vkMapMemory,
+    pub unmap_memory: PFN_vkUnmapMemory,
+    pub flush_mapped_memory_ranges: PFN_vkFlushMappedMemoryRanges,
+    pub invalidate_mapped_memory_ranges: PFN_vkInvalidateMappedMemoryRanges,
+    pub get_device_memory_commitment: PFN_vkGetDeviceMemoryCommitment,
+    pub bind_buffer_memory: PFN_vkBindBufferMemory,
+    pub bind_image_memory: PFN_vkBindImageMemory,
+    pub get_buffer_memory_requirements: PFN_vkGetBufferMemoryRequirements,
+    pub get_image_memory_requirements: PFN_vkGetImageMemoryRequirements,
+    pub get_image_sparse_memory_requirements: PFN_vkGetImageSparseMemoryRequirements,
+    pub queue_bind_sparse: PFN_vkQueueBindSparse,
+    pub create_fence: PFN_vkCreateFence,
+    pub destroy_fence: PFN_vkDestroyFence,
+    pub reset_fences: PFN_vkResetFences,
+    pub get_fence_status: PFN_vkGetFenceStatus,
+    pub wait_for_fences: PFN_vkWaitForFences,
+    pub create_semaphore: PFN_vkCreateSemaphore,
+    pub destroy_semaphore: PFN_vkDestroySemaphore,
+    pub create_event: PFN_vkCreateEvent,
+    pub destroy_event: PFN_vkDestroyEvent,
+    pub get_event_status: PFN_vkGetEventStatus,
+    pub set_event: PFN_vkSetEvent,
+    pub reset_event: PFN_vkResetEvent,
+    pub create_query_pool: PFN_vkCreateQueryPool,
+    pub destroy_query_pool: PFN_vkDestroyQueryPool,
+    pub get_query_pool_results: PFN_vkGetQueryPoolResults,
+    pub create_buffer: PFN_vkCreateBuffer,
+    pub destroy_buffer: PFN_vkDestroyBuffer,
+    pub create_buffer_view: PFN_vkCreateBufferView,
+    pub destroy_buffer_view: PFN_vkDestroyBufferView,
+    pub create_image: PFN_vkCreateImage,
+    pub destroy_image: PFN_vkDestroyImage,
+    pub get_image_subresource_layout: PFN_vkGetImageSubresourceLayout,
+    pub create_image_view: PFN_vkCreateImageView,
+    pub destroy_image_view: PFN_vkDestroyImageView,
+    pub create_shader_module: PFN_vkCreateShaderModule,
+    pub destroy_shader_module: PFN_vkDestroyShaderModule,
+    pub create_pipeline_cache: PFN_vkCreatePipelineCache,
+    pub destroy_pipeline_cache: PFN_vkDestroyPipelineCache,
+    pub get_pipeline_cache_data: PFN_vkGetPipelineCacheData,
+    pub merge_pipeline_caches: PFN_vkMergePipelineCaches,
+    pub create_graphics_pipelines: PFN_vkCreateGraphicsPipelines,
+    pub create_compute_pipelines: PFN_vkCreateComputePipelines,
+    pub destroy_pipeline: PFN_vkDestroyPipeline,
+    pub create_pipeline_layout: PFN_vkCreatePipelineLayout,
+    pub destroy_pipeline_layout: PFN_vkDestroyPipelineLayout,
+    pub create_sampler: PFN_vkCreateSampler,
+    pub destroy_sampler: PFN_vkDestroySampler,
+    pub create_descriptor_set_layout: PFN_vkCreateDescriptorSetLayout,
+    pub destroy_descriptor_set_layout: PFN_vkDestroyDescriptorSetLayout,
+    pub create_descriptor_pool: PFN_vkCreateDescriptorPool,
+    pub destroy_descriptor_pool: PFN_vkDestroyDescriptorPool,
+    pub reset_descriptor_pool: PFN_vkResetDescriptorPool,
+    pub allocate_descriptor_sets: PFN_vkAllocateDescriptorSets,
+    pub free_descriptor_sets: PFN_vkFreeDescriptorSets,
+    pub update_descriptor_sets: PFN_vkUpdateDescriptorSets,
+    pub create_framebuffer: PFN_vkCreateFramebuffer,
+    pub destroy_framebuffer: PFN_vkDestroyFramebuffer,
+    pub create_render_pass: PFN_vkCreateRenderPass,
+    pub destroy_render_pass: PFN_vkDestroyRenderPass,
+    pub get_render_area_granularity: PFN_vkGetRenderAreaGranularity,
+    pub create_command_pool: PFN_vkCreateCommandPool,
+    pub destroy_command_pool: PFN_vkDestroyCommandPool,
+    pub reset_command_pool: PFN_vkResetCommandPool,
+    pub allocate_command_buffers: PFN_vkAllocateCommandBuffers,
+    pub free_command_buffers: PFN_vkFreeCommandBuffers,
+    pub begin_command_buffer: PFN_vkBeginCommandBuffer,
+    pub end_command_buffer: PFN_vkEndCommandBuffer,
+    pub reset_command_buffer: PFN_vkResetCommandBuffer,
+    pub cmd_bind_pipeline: PFN_vkCmdBindPipeline,
+    pub cmd_set_viewport: PFN_vkCmdSetViewport,
+    pub cmd_set_scissor: PFN_vkCmdSetScissor,
+    pub cmd_set_line_width: PFN_vkCmdSetLineWidth,
+    pub cmd_set_depth_bias: PFN_vkCmdSetDepthBias,
+    pub cmd_set_blend_constants: PFN_vkCmdSetBlendConstants,
+    pub cmd_set_depth_bounds: PFN_vkCmdSetDepthBounds,
+    pub cmd_set_stencil_compare_mask: PFN_vkCmdSetStencilCompareMask,
+    pub cmd_set_stencil_write_mask: PFN_vkCmdSetStencilWriteMask,
+    pub cmd_set_stencil_reference: PFN_vkCmdSetStencilReference,
+    pub cmd_bind_descriptor_sets: PFN_vkCmdBindDescriptorSets,
+    pub cmd_bind_index_buffer: PFN_vkCmdBindIndexBuffer,
+    pub cmd_bind_vertex_buffers: PFN_vkCmdBindVertexBuffers,
+    pub cmd_draw: PFN_vkCmdDraw,
+    pub cmd_draw_indexed: PFN_vkCmdDrawIndexed,
+    pub cmd_draw_indirect: PFN_vkCmdDrawIndirect,
+    pub cmd_draw_indexed_indirect: PFN_vkCmdDrawIndexedIndirect,
+    pub cmd_dispatch: PFN_vkCmdDispatch,
+    pub cmd_dispatch_indirect: PFN_vkCmdDispatchIndirect,
+    pub cmd_copy_buffer: PFN_vkCmdCopyBuffer,
+    pub cmd_copy_image: PFN_vkCmdCopyImage,
+    pub cmd_blit_image: PFN_vkCmdBlitImage,
+    pub cmd_copy_buffer_to_image: PFN_vkCmdCopyBufferToImage,
+    pub cmd_copy_image_to_buffer: PFN_vkCmdCopyImageToBuffer,
+    pub cmd_update_buffer: PFN_vkCmdUpdateBuffer,
+    pub cmd_fill_buffer: PFN_vkCmdFillBuffer,
+    pub cmd_clear_color_image: PFN_vkCmdClearColorImage,
+    pub cmd_clear_depth_stencil_image: PFN_vkCmdClearDepthStencilImage,
+    pub cmd_clear_attachments: PFN_vkCmdClearAttachments,
+    pub cmd_resolve_image: PFN_vkCmdResolveImage,
+    pub cmd_set_event: PFN_vkCmdSetEvent,
+    pub cmd_reset_event: PFN_vkCmdResetEvent,
+    pub cmd_wait_events: PFN_vkCmdWaitEvents,
+    pub cmd_pipeline_barrier: PFN_vkCmdPipelineBarrier,
+    pub cmd_begin_query: PFN_vkCmdBeginQuery,
+    pub cmd_end_query: PFN_vkCmdEndQuery,
+    pub cmd_reset_query_pool: PFN_vkCmdResetQueryPool,
+    pub cmd_write_timestamp: PFN_vkCmdWriteTimestamp,
+    pub cmd_copy_query_pool_results: PFN_vkCmdCopyQueryPoolResults,
+    pub cmd_push_constants: PFN_vkCmdPushConstants,
+    pub cmd_begin_render_pass: PFN_vkCmdBeginRenderPass,
+    pub cmd_next_subpass: PFN_vkCmdNextSubpass,
+    pub cmd_end_render_pass: PFN_vkCmdEndRenderPass,
+    pub cmd_execute_commands: PFN_vkCmdExecuteCommands,
+}
+// SAFETY: `DeviceFnV1_0` contains only `unsafe extern "system"` function
+// pointers — plain code addresses with no interior mutable state — so moving
+// or sharing the table across threads cannot introduce data races.
+unsafe impl Send for DeviceFnV1_0 {}
+unsafe impl Sync for DeviceFnV1_0 {}
+impl DeviceFnV1_0 {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ destroy_device: unsafe {
+ unsafe extern "system" fn destroy_device(
+ _device: Device,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_device)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyDevice\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_device
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_device_queue: unsafe {
+ unsafe extern "system" fn get_device_queue(
+ _device: Device,
+ _queue_family_index: u32,
+ _queue_index: u32,
+ _p_queue: *mut Queue,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(get_device_queue)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetDeviceQueue\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_queue
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ queue_submit: unsafe {
+ unsafe extern "system" fn queue_submit(
+ _queue: Queue,
+ _submit_count: u32,
+ _p_submits: *const SubmitInfo,
+ _fence: Fence,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(queue_submit)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkQueueSubmit\0");
+ let val = _f(cname);
+ if val.is_null() {
+ queue_submit
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ queue_wait_idle: unsafe {
+ unsafe extern "system" fn queue_wait_idle(_queue: Queue) -> Result {
+ panic!(concat!("Unable to load ", stringify!(queue_wait_idle)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkQueueWaitIdle\0");
+ let val = _f(cname);
+ if val.is_null() {
+ queue_wait_idle
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ device_wait_idle: unsafe {
+ unsafe extern "system" fn device_wait_idle(_device: Device) -> Result {
+ panic!(concat!("Unable to load ", stringify!(device_wait_idle)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDeviceWaitIdle\0");
+ let val = _f(cname);
+ if val.is_null() {
+ device_wait_idle
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ allocate_memory: unsafe {
+ unsafe extern "system" fn allocate_memory(
+ _device: Device,
+ _p_allocate_info: *const MemoryAllocateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_memory: *mut DeviceMemory,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(allocate_memory)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkAllocateMemory\0");
+ let val = _f(cname);
+ if val.is_null() {
+ allocate_memory
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ free_memory: unsafe {
+ unsafe extern "system" fn free_memory(
+ _device: Device,
+ _memory: DeviceMemory,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(free_memory)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkFreeMemory\0");
+ let val = _f(cname);
+ if val.is_null() {
+ free_memory
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ map_memory: unsafe {
+ unsafe extern "system" fn map_memory(
+ _device: Device,
+ _memory: DeviceMemory,
+ _offset: DeviceSize,
+ _size: DeviceSize,
+ _flags: MemoryMapFlags,
+ _pp_data: *mut *mut c_void,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(map_memory)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkMapMemory\0");
+ let val = _f(cname);
+ if val.is_null() {
+ map_memory
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ unmap_memory: unsafe {
+ unsafe extern "system" fn unmap_memory(_device: Device, _memory: DeviceMemory) {
+ panic!(concat!("Unable to load ", stringify!(unmap_memory)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkUnmapMemory\0");
+ let val = _f(cname);
+ if val.is_null() {
+ unmap_memory
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ flush_mapped_memory_ranges: unsafe {
+ unsafe extern "system" fn flush_mapped_memory_ranges(
+ _device: Device,
+ _memory_range_count: u32,
+ _p_memory_ranges: *const MappedMemoryRange,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(flush_mapped_memory_ranges)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkFlushMappedMemoryRanges\0");
+ let val = _f(cname);
+ if val.is_null() {
+ flush_mapped_memory_ranges
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ invalidate_mapped_memory_ranges: unsafe {
+ unsafe extern "system" fn invalidate_mapped_memory_ranges(
+ _device: Device,
+ _memory_range_count: u32,
+ _p_memory_ranges: *const MappedMemoryRange,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(invalidate_mapped_memory_ranges)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkInvalidateMappedMemoryRanges\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ invalidate_mapped_memory_ranges
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_device_memory_commitment: unsafe {
+ unsafe extern "system" fn get_device_memory_commitment(
+ _device: Device,
+ _memory: DeviceMemory,
+ _p_committed_memory_in_bytes: *mut DeviceSize,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_device_memory_commitment)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDeviceMemoryCommitment\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_memory_commitment
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ bind_buffer_memory: unsafe {
+ unsafe extern "system" fn bind_buffer_memory(
+ _device: Device,
+ _buffer: Buffer,
+ _memory: DeviceMemory,
+ _memory_offset: DeviceSize,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(bind_buffer_memory)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkBindBufferMemory\0");
+ let val = _f(cname);
+ if val.is_null() {
+ bind_buffer_memory
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ bind_image_memory: unsafe {
+ unsafe extern "system" fn bind_image_memory(
+ _device: Device,
+ _image: Image,
+ _memory: DeviceMemory,
+ _memory_offset: DeviceSize,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(bind_image_memory)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkBindImageMemory\0");
+ let val = _f(cname);
+ if val.is_null() {
+ bind_image_memory
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_buffer_memory_requirements: unsafe {
+ unsafe extern "system" fn get_buffer_memory_requirements(
+ _device: Device,
+ _buffer: Buffer,
+ _p_memory_requirements: *mut MemoryRequirements,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_buffer_memory_requirements)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetBufferMemoryRequirements\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_buffer_memory_requirements
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_image_memory_requirements: unsafe {
+ unsafe extern "system" fn get_image_memory_requirements(
+ _device: Device,
+ _image: Image,
+ _p_memory_requirements: *mut MemoryRequirements,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_image_memory_requirements)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetImageMemoryRequirements\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_image_memory_requirements
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_image_sparse_memory_requirements: unsafe {
+ unsafe extern "system" fn get_image_sparse_memory_requirements(
+ _device: Device,
+ _image: Image,
+ _p_sparse_memory_requirement_count: *mut u32,
+ _p_sparse_memory_requirements: *mut SparseImageMemoryRequirements,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_image_sparse_memory_requirements)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetImageSparseMemoryRequirements\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_image_sparse_memory_requirements
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ queue_bind_sparse: unsafe {
+ unsafe extern "system" fn queue_bind_sparse(
+ _queue: Queue,
+ _bind_info_count: u32,
+ _p_bind_info: *const BindSparseInfo,
+ _fence: Fence,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(queue_bind_sparse)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkQueueBindSparse\0");
+ let val = _f(cname);
+ if val.is_null() {
+ queue_bind_sparse
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_fence: unsafe {
+ unsafe extern "system" fn create_fence(
+ _device: Device,
+ _p_create_info: *const FenceCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_fence: *mut Fence,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_fence)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateFence\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_fence
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_fence: unsafe {
+ unsafe extern "system" fn destroy_fence(
+ _device: Device,
+ _fence: Fence,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_fence)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyFence\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_fence
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ reset_fences: unsafe {
+ unsafe extern "system" fn reset_fences(
+ _device: Device,
+ _fence_count: u32,
+ _p_fences: *const Fence,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(reset_fences)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkResetFences\0");
+ let val = _f(cname);
+ if val.is_null() {
+ reset_fences
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_fence_status: unsafe {
+ unsafe extern "system" fn get_fence_status(
+ _device: Device,
+ _fence: Fence,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(get_fence_status)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetFenceStatus\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_fence_status
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ wait_for_fences: unsafe {
+ unsafe extern "system" fn wait_for_fences(
+ _device: Device,
+ _fence_count: u32,
+ _p_fences: *const Fence,
+ _wait_all: Bool32,
+ _timeout: u64,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(wait_for_fences)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkWaitForFences\0");
+ let val = _f(cname);
+ if val.is_null() {
+ wait_for_fences
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_semaphore: unsafe {
+ unsafe extern "system" fn create_semaphore(
+ _device: Device,
+ _p_create_info: *const SemaphoreCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_semaphore: *mut Semaphore,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_semaphore)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateSemaphore\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_semaphore
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_semaphore: unsafe {
+ unsafe extern "system" fn destroy_semaphore(
+ _device: Device,
+ _semaphore: Semaphore,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_semaphore)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroySemaphore\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_semaphore
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_event: unsafe {
+ unsafe extern "system" fn create_event(
+ _device: Device,
+ _p_create_info: *const EventCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_event: *mut Event,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_event)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateEvent\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_event
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_event: unsafe {
+ unsafe extern "system" fn destroy_event(
+ _device: Device,
+ _event: Event,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_event)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyEvent\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_event
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_event_status: unsafe {
+ unsafe extern "system" fn get_event_status(
+ _device: Device,
+ _event: Event,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(get_event_status)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetEventStatus\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_event_status
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ set_event: unsafe {
+ unsafe extern "system" fn set_event(_device: Device, _event: Event) -> Result {
+ panic!(concat!("Unable to load ", stringify!(set_event)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkSetEvent\0");
+ let val = _f(cname);
+ if val.is_null() {
+ set_event
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ reset_event: unsafe {
+ unsafe extern "system" fn reset_event(_device: Device, _event: Event) -> Result {
+ panic!(concat!("Unable to load ", stringify!(reset_event)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkResetEvent\0");
+ let val = _f(cname);
+ if val.is_null() {
+ reset_event
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_query_pool: unsafe {
+ unsafe extern "system" fn create_query_pool(
+ _device: Device,
+ _p_create_info: *const QueryPoolCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_query_pool: *mut QueryPool,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_query_pool)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateQueryPool\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_query_pool
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_query_pool: unsafe {
+ unsafe extern "system" fn destroy_query_pool(
+ _device: Device,
+ _query_pool: QueryPool,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_query_pool)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyQueryPool\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_query_pool
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_query_pool_results: unsafe {
+ unsafe extern "system" fn get_query_pool_results(
+ _device: Device,
+ _query_pool: QueryPool,
+ _first_query: u32,
+ _query_count: u32,
+ _data_size: usize,
+ _p_data: *mut c_void,
+ _stride: DeviceSize,
+ _flags: QueryResultFlags,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_query_pool_results)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetQueryPoolResults\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_query_pool_results
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_buffer: unsafe {
+ unsafe extern "system" fn create_buffer(
+ _device: Device,
+ _p_create_info: *const BufferCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_buffer: *mut Buffer,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_buffer)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateBuffer\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_buffer
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_buffer: unsafe {
+ unsafe extern "system" fn destroy_buffer(
+ _device: Device,
+ _buffer: Buffer,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_buffer)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyBuffer\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_buffer
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_buffer_view: unsafe {
+ unsafe extern "system" fn create_buffer_view(
+ _device: Device,
+ _p_create_info: *const BufferViewCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_view: *mut BufferView,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_buffer_view)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateBufferView\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_buffer_view
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_buffer_view: unsafe {
+ unsafe extern "system" fn destroy_buffer_view(
+ _device: Device,
+ _buffer_view: BufferView,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_buffer_view)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyBufferView\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_buffer_view
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_image: unsafe {
+ unsafe extern "system" fn create_image(
+ _device: Device,
+ _p_create_info: *const ImageCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_image: *mut Image,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_image)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateImage\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_image
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_image: unsafe {
+ unsafe extern "system" fn destroy_image(
+ _device: Device,
+ _image: Image,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_image)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyImage\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_image
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_image_subresource_layout: unsafe {
+ unsafe extern "system" fn get_image_subresource_layout(
+ _device: Device,
+ _image: Image,
+ _p_subresource: *const ImageSubresource,
+ _p_layout: *mut SubresourceLayout,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_image_subresource_layout)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetImageSubresourceLayout\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_image_subresource_layout
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_image_view: unsafe {
+ unsafe extern "system" fn create_image_view(
+ _device: Device,
+ _p_create_info: *const ImageViewCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_view: *mut ImageView,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_image_view)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateImageView\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_image_view
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_image_view: unsafe {
+ unsafe extern "system" fn destroy_image_view(
+ _device: Device,
+ _image_view: ImageView,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_image_view)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyImageView\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_image_view
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_shader_module: unsafe {
+ unsafe extern "system" fn create_shader_module(
+ _device: Device,
+ _p_create_info: *const ShaderModuleCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_shader_module: *mut ShaderModule,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_shader_module)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateShaderModule\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_shader_module
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_shader_module: unsafe {
+ unsafe extern "system" fn destroy_shader_module(
+ _device: Device,
+ _shader_module: ShaderModule,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_shader_module)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyShaderModule\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_shader_module
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_pipeline_cache: unsafe {
+ unsafe extern "system" fn create_pipeline_cache(
+ _device: Device,
+ _p_create_info: *const PipelineCacheCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_pipeline_cache: *mut PipelineCache,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_pipeline_cache)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreatePipelineCache\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_pipeline_cache
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_pipeline_cache: unsafe {
+ unsafe extern "system" fn destroy_pipeline_cache(
+ _device: Device,
+ _pipeline_cache: PipelineCache,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_pipeline_cache)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyPipelineCache\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_pipeline_cache
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_pipeline_cache_data: unsafe {
+ unsafe extern "system" fn get_pipeline_cache_data(
+ _device: Device,
+ _pipeline_cache: PipelineCache,
+ _p_data_size: *mut usize,
+ _p_data: *mut c_void,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_pipeline_cache_data)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetPipelineCacheData\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_pipeline_cache_data
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ merge_pipeline_caches: unsafe {
+ unsafe extern "system" fn merge_pipeline_caches(
+ _device: Device,
+ _dst_cache: PipelineCache,
+ _src_cache_count: u32,
+ _p_src_caches: *const PipelineCache,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(merge_pipeline_caches)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkMergePipelineCaches\0");
+ let val = _f(cname);
+ if val.is_null() {
+ merge_pipeline_caches
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_graphics_pipelines: unsafe {
+ unsafe extern "system" fn create_graphics_pipelines(
+ _device: Device,
+ _pipeline_cache: PipelineCache,
+ _create_info_count: u32,
+ _p_create_infos: *const GraphicsPipelineCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_pipelines: *mut Pipeline,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_graphics_pipelines)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateGraphicsPipelines\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_graphics_pipelines
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_compute_pipelines: unsafe {
+ unsafe extern "system" fn create_compute_pipelines(
+ _device: Device,
+ _pipeline_cache: PipelineCache,
+ _create_info_count: u32,
+ _p_create_infos: *const ComputePipelineCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_pipelines: *mut Pipeline,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_compute_pipelines)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateComputePipelines\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_compute_pipelines
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_pipeline: unsafe {
+ unsafe extern "system" fn destroy_pipeline(
+ _device: Device,
+ _pipeline: Pipeline,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_pipeline)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyPipeline\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_pipeline
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_pipeline_layout: unsafe {
+ unsafe extern "system" fn create_pipeline_layout(
+ _device: Device,
+ _p_create_info: *const PipelineLayoutCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_pipeline_layout: *mut PipelineLayout,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_pipeline_layout)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreatePipelineLayout\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_pipeline_layout
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_pipeline_layout: unsafe {
+ unsafe extern "system" fn destroy_pipeline_layout(
+ _device: Device,
+ _pipeline_layout: PipelineLayout,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_pipeline_layout)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyPipelineLayout\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_pipeline_layout
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_sampler: unsafe {
+ unsafe extern "system" fn create_sampler(
+ _device: Device,
+ _p_create_info: *const SamplerCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_sampler: *mut Sampler,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_sampler)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateSampler\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_sampler
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_sampler: unsafe {
+ unsafe extern "system" fn destroy_sampler(
+ _device: Device,
+ _sampler: Sampler,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_sampler)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroySampler\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_sampler
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_descriptor_set_layout: unsafe {
+ unsafe extern "system" fn create_descriptor_set_layout(
+ _device: Device,
+ _p_create_info: *const DescriptorSetLayoutCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_set_layout: *mut DescriptorSetLayout,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_descriptor_set_layout)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreateDescriptorSetLayout\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_descriptor_set_layout
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_descriptor_set_layout: unsafe {
+ unsafe extern "system" fn destroy_descriptor_set_layout(
+ _device: Device,
+ _descriptor_set_layout: DescriptorSetLayout,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_descriptor_set_layout)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkDestroyDescriptorSetLayout\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_descriptor_set_layout
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_descriptor_pool: unsafe {
+ unsafe extern "system" fn create_descriptor_pool(
+ _device: Device,
+ _p_create_info: *const DescriptorPoolCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_descriptor_pool: *mut DescriptorPool,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_descriptor_pool)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateDescriptorPool\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_descriptor_pool
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_descriptor_pool: unsafe {
+ unsafe extern "system" fn destroy_descriptor_pool(
+ _device: Device,
+ _descriptor_pool: DescriptorPool,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_descriptor_pool)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyDescriptorPool\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_descriptor_pool
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ reset_descriptor_pool: unsafe {
+ unsafe extern "system" fn reset_descriptor_pool(
+ _device: Device,
+ _descriptor_pool: DescriptorPool,
+ _flags: DescriptorPoolResetFlags,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(reset_descriptor_pool)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkResetDescriptorPool\0");
+ let val = _f(cname);
+ if val.is_null() {
+ reset_descriptor_pool
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ allocate_descriptor_sets: unsafe {
+ unsafe extern "system" fn allocate_descriptor_sets(
+ _device: Device,
+ _p_allocate_info: *const DescriptorSetAllocateInfo,
+ _p_descriptor_sets: *mut DescriptorSet,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(allocate_descriptor_sets)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkAllocateDescriptorSets\0");
+ let val = _f(cname);
+ if val.is_null() {
+ allocate_descriptor_sets
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ free_descriptor_sets: unsafe {
+ unsafe extern "system" fn free_descriptor_sets(
+ _device: Device,
+ _descriptor_pool: DescriptorPool,
+ _descriptor_set_count: u32,
+ _p_descriptor_sets: *const DescriptorSet,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(free_descriptor_sets)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkFreeDescriptorSets\0");
+ let val = _f(cname);
+ if val.is_null() {
+ free_descriptor_sets
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ update_descriptor_sets: unsafe {
+ unsafe extern "system" fn update_descriptor_sets(
+ _device: Device,
+ _descriptor_write_count: u32,
+ _p_descriptor_writes: *const WriteDescriptorSet,
+ _descriptor_copy_count: u32,
+ _p_descriptor_copies: *const CopyDescriptorSet,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(update_descriptor_sets)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkUpdateDescriptorSets\0");
+ let val = _f(cname);
+ if val.is_null() {
+ update_descriptor_sets
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_framebuffer: unsafe {
+ unsafe extern "system" fn create_framebuffer(
+ _device: Device,
+ _p_create_info: *const FramebufferCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_framebuffer: *mut Framebuffer,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_framebuffer)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateFramebuffer\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_framebuffer
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_framebuffer: unsafe {
+ unsafe extern "system" fn destroy_framebuffer(
+ _device: Device,
+ _framebuffer: Framebuffer,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_framebuffer)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyFramebuffer\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_framebuffer
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_render_pass: unsafe {
+ unsafe extern "system" fn create_render_pass(
+ _device: Device,
+ _p_create_info: *const RenderPassCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_render_pass: *mut RenderPass,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_render_pass)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateRenderPass\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_render_pass
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_render_pass: unsafe {
+ unsafe extern "system" fn destroy_render_pass(
+ _device: Device,
+ _render_pass: RenderPass,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_render_pass)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyRenderPass\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_render_pass
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_render_area_granularity: unsafe {
+ unsafe extern "system" fn get_render_area_granularity(
+ _device: Device,
+ _render_pass: RenderPass,
+ _p_granularity: *mut Extent2D,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_render_area_granularity)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetRenderAreaGranularity\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_render_area_granularity
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_command_pool: unsafe {
+ unsafe extern "system" fn create_command_pool(
+ _device: Device,
+ _p_create_info: *const CommandPoolCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_command_pool: *mut CommandPool,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(create_command_pool)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateCommandPool\0");
+ let val = _f(cname);
+ if val.is_null() {
+ create_command_pool
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_command_pool: unsafe {
+ unsafe extern "system" fn destroy_command_pool(
+ _device: Device,
+ _command_pool: CommandPool,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(destroy_command_pool)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyCommandPool\0");
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_command_pool
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ reset_command_pool: unsafe {
+ unsafe extern "system" fn reset_command_pool(
+ _device: Device,
+ _command_pool: CommandPool,
+ _flags: CommandPoolResetFlags,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(reset_command_pool)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkResetCommandPool\0");
+ let val = _f(cname);
+ if val.is_null() {
+ reset_command_pool
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ allocate_command_buffers: unsafe {
+ unsafe extern "system" fn allocate_command_buffers(
+ _device: Device,
+ _p_allocate_info: *const CommandBufferAllocateInfo,
+ _p_command_buffers: *mut CommandBuffer,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(allocate_command_buffers)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkAllocateCommandBuffers\0");
+ let val = _f(cname);
+ if val.is_null() {
+ allocate_command_buffers
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ free_command_buffers: unsafe {
+ unsafe extern "system" fn free_command_buffers(
+ _device: Device,
+ _command_pool: CommandPool,
+ _command_buffer_count: u32,
+ _p_command_buffers: *const CommandBuffer,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(free_command_buffers)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkFreeCommandBuffers\0");
+ let val = _f(cname);
+ if val.is_null() {
+ free_command_buffers
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ begin_command_buffer: unsafe {
+ unsafe extern "system" fn begin_command_buffer(
+ _command_buffer: CommandBuffer,
+ _p_begin_info: *const CommandBufferBeginInfo,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(begin_command_buffer)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkBeginCommandBuffer\0");
+ let val = _f(cname);
+ if val.is_null() {
+ begin_command_buffer
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ end_command_buffer: unsafe {
+ unsafe extern "system" fn end_command_buffer(
+ _command_buffer: CommandBuffer,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(end_command_buffer)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkEndCommandBuffer\0");
+ let val = _f(cname);
+ if val.is_null() {
+ end_command_buffer
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ reset_command_buffer: unsafe {
+ unsafe extern "system" fn reset_command_buffer(
+ _command_buffer: CommandBuffer,
+ _flags: CommandBufferResetFlags,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(reset_command_buffer)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkResetCommandBuffer\0");
+ let val = _f(cname);
+ if val.is_null() {
+ reset_command_buffer
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_bind_pipeline: unsafe {
+ unsafe extern "system" fn cmd_bind_pipeline(
+ _command_buffer: CommandBuffer,
+ _pipeline_bind_point: PipelineBindPoint,
+ _pipeline: Pipeline,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_bind_pipeline)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBindPipeline\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_bind_pipeline
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_viewport: unsafe {
+ unsafe extern "system" fn cmd_set_viewport(
+ _command_buffer: CommandBuffer,
+ _first_viewport: u32,
+ _viewport_count: u32,
+ _p_viewports: *const Viewport,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_set_viewport)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetViewport\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_viewport
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_scissor: unsafe {
+ unsafe extern "system" fn cmd_set_scissor(
+ _command_buffer: CommandBuffer,
+ _first_scissor: u32,
+ _scissor_count: u32,
+ _p_scissors: *const Rect2D,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_set_scissor)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetScissor\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_scissor
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_line_width: unsafe {
+ unsafe extern "system" fn cmd_set_line_width(
+ _command_buffer: CommandBuffer,
+ _line_width: f32,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_set_line_width)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetLineWidth\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_line_width
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_depth_bias: unsafe {
+ unsafe extern "system" fn cmd_set_depth_bias(
+ _command_buffer: CommandBuffer,
+ _depth_bias_constant_factor: f32,
+ _depth_bias_clamp: f32,
+ _depth_bias_slope_factor: f32,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_set_depth_bias)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthBias\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_depth_bias
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_blend_constants: unsafe {
+ unsafe extern "system" fn cmd_set_blend_constants(
+ _command_buffer: CommandBuffer,
+ _blend_constants: *const [f32; 4],
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_blend_constants)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetBlendConstants\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_blend_constants
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_depth_bounds: unsafe {
+ unsafe extern "system" fn cmd_set_depth_bounds(
+ _command_buffer: CommandBuffer,
+ _min_depth_bounds: f32,
+ _max_depth_bounds: f32,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_set_depth_bounds)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthBounds\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_depth_bounds
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_stencil_compare_mask: unsafe {
+ unsafe extern "system" fn cmd_set_stencil_compare_mask(
+ _command_buffer: CommandBuffer,
+ _face_mask: StencilFaceFlags,
+ _compare_mask: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_stencil_compare_mask)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdSetStencilCompareMask\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_stencil_compare_mask
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_stencil_write_mask: unsafe {
+ unsafe extern "system" fn cmd_set_stencil_write_mask(
+ _command_buffer: CommandBuffer,
+ _face_mask: StencilFaceFlags,
+ _write_mask: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_stencil_write_mask)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetStencilWriteMask\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_stencil_write_mask
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_stencil_reference: unsafe {
+ unsafe extern "system" fn cmd_set_stencil_reference(
+ _command_buffer: CommandBuffer,
+ _face_mask: StencilFaceFlags,
+ _reference: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_set_stencil_reference)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetStencilReference\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_stencil_reference
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_bind_descriptor_sets: unsafe {
+ unsafe extern "system" fn cmd_bind_descriptor_sets(
+ _command_buffer: CommandBuffer,
+ _pipeline_bind_point: PipelineBindPoint,
+ _layout: PipelineLayout,
+ _first_set: u32,
+ _descriptor_set_count: u32,
+ _p_descriptor_sets: *const DescriptorSet,
+ _dynamic_offset_count: u32,
+ _p_dynamic_offsets: *const u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_bind_descriptor_sets)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBindDescriptorSets\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_bind_descriptor_sets
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_bind_index_buffer: unsafe {
+ unsafe extern "system" fn cmd_bind_index_buffer(
+ _command_buffer: CommandBuffer,
+ _buffer: Buffer,
+ _offset: DeviceSize,
+ _index_type: IndexType,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_bind_index_buffer)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBindIndexBuffer\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_bind_index_buffer
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_bind_vertex_buffers: unsafe {
+ unsafe extern "system" fn cmd_bind_vertex_buffers(
+ _command_buffer: CommandBuffer,
+ _first_binding: u32,
+ _binding_count: u32,
+ _p_buffers: *const Buffer,
+ _p_offsets: *const DeviceSize,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_bind_vertex_buffers)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBindVertexBuffers\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_bind_vertex_buffers
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_draw: unsafe {
+ unsafe extern "system" fn cmd_draw(
+ _command_buffer: CommandBuffer,
+ _vertex_count: u32,
+ _instance_count: u32,
+ _first_vertex: u32,
+ _first_instance: u32,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_draw)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDraw\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_draw
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_draw_indexed: unsafe {
+ unsafe extern "system" fn cmd_draw_indexed(
+ _command_buffer: CommandBuffer,
+ _index_count: u32,
+ _instance_count: u32,
+ _first_index: u32,
+ _vertex_offset: i32,
+ _first_instance: u32,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_draw_indexed)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndexed\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_draw_indexed
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_draw_indirect: unsafe {
+ unsafe extern "system" fn cmd_draw_indirect(
+ _command_buffer: CommandBuffer,
+ _buffer: Buffer,
+ _offset: DeviceSize,
+ _draw_count: u32,
+ _stride: u32,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_draw_indirect)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndirect\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_draw_indirect
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_draw_indexed_indirect: unsafe {
+ unsafe extern "system" fn cmd_draw_indexed_indirect(
+ _command_buffer: CommandBuffer,
+ _buffer: Buffer,
+ _offset: DeviceSize,
+ _draw_count: u32,
+ _stride: u32,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_draw_indexed_indirect)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndexedIndirect\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_draw_indexed_indirect
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_dispatch: unsafe {
+ unsafe extern "system" fn cmd_dispatch(
+ _command_buffer: CommandBuffer,
+ _group_count_x: u32,
+ _group_count_y: u32,
+ _group_count_z: u32,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_dispatch)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDispatch\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_dispatch
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_dispatch_indirect: unsafe {
+ unsafe extern "system" fn cmd_dispatch_indirect(
+ _command_buffer: CommandBuffer,
+ _buffer: Buffer,
+ _offset: DeviceSize,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_dispatch_indirect)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDispatchIndirect\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_dispatch_indirect
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_copy_buffer: unsafe {
+ unsafe extern "system" fn cmd_copy_buffer(
+ _command_buffer: CommandBuffer,
+ _src_buffer: Buffer,
+ _dst_buffer: Buffer,
+ _region_count: u32,
+ _p_regions: *const BufferCopy,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_copy_buffer)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyBuffer\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_copy_buffer
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_copy_image: unsafe {
+ unsafe extern "system" fn cmd_copy_image(
+ _command_buffer: CommandBuffer,
+ _src_image: Image,
+ _src_image_layout: ImageLayout,
+ _dst_image: Image,
+ _dst_image_layout: ImageLayout,
+ _region_count: u32,
+ _p_regions: *const ImageCopy,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_copy_image)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyImage\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_copy_image
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_blit_image: unsafe {
+ unsafe extern "system" fn cmd_blit_image(
+ _command_buffer: CommandBuffer,
+ _src_image: Image,
+ _src_image_layout: ImageLayout,
+ _dst_image: Image,
+ _dst_image_layout: ImageLayout,
+ _region_count: u32,
+ _p_regions: *const ImageBlit,
+ _filter: Filter,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_blit_image)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBlitImage\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_blit_image
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_copy_buffer_to_image: unsafe {
+ unsafe extern "system" fn cmd_copy_buffer_to_image(
+ _command_buffer: CommandBuffer,
+ _src_buffer: Buffer,
+ _dst_image: Image,
+ _dst_image_layout: ImageLayout,
+ _region_count: u32,
+ _p_regions: *const BufferImageCopy,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_copy_buffer_to_image)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyBufferToImage\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_copy_buffer_to_image
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_copy_image_to_buffer: unsafe {
+ unsafe extern "system" fn cmd_copy_image_to_buffer(
+ _command_buffer: CommandBuffer,
+ _src_image: Image,
+ _src_image_layout: ImageLayout,
+ _dst_buffer: Buffer,
+ _region_count: u32,
+ _p_regions: *const BufferImageCopy,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_copy_image_to_buffer)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyImageToBuffer\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_copy_image_to_buffer
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_update_buffer: unsafe {
+ unsafe extern "system" fn cmd_update_buffer(
+ _command_buffer: CommandBuffer,
+ _dst_buffer: Buffer,
+ _dst_offset: DeviceSize,
+ _data_size: DeviceSize,
+ _p_data: *const c_void,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_update_buffer)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdUpdateBuffer\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_update_buffer
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_fill_buffer: unsafe {
+ unsafe extern "system" fn cmd_fill_buffer(
+ _command_buffer: CommandBuffer,
+ _dst_buffer: Buffer,
+ _dst_offset: DeviceSize,
+ _size: DeviceSize,
+ _data: u32,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_fill_buffer)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdFillBuffer\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_fill_buffer
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_clear_color_image: unsafe {
+ unsafe extern "system" fn cmd_clear_color_image(
+ _command_buffer: CommandBuffer,
+ _image: Image,
+ _image_layout: ImageLayout,
+ _p_color: *const ClearColorValue,
+ _range_count: u32,
+ _p_ranges: *const ImageSubresourceRange,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_clear_color_image)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdClearColorImage\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_clear_color_image
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_clear_depth_stencil_image: unsafe {
+ unsafe extern "system" fn cmd_clear_depth_stencil_image(
+ _command_buffer: CommandBuffer,
+ _image: Image,
+ _image_layout: ImageLayout,
+ _p_depth_stencil: *const ClearDepthStencilValue,
+ _range_count: u32,
+ _p_ranges: *const ImageSubresourceRange,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_clear_depth_stencil_image)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCmdClearDepthStencilImage\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_clear_depth_stencil_image
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_clear_attachments: unsafe {
+ unsafe extern "system" fn cmd_clear_attachments(
+ _command_buffer: CommandBuffer,
+ _attachment_count: u32,
+ _p_attachments: *const ClearAttachment,
+ _rect_count: u32,
+ _p_rects: *const ClearRect,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_clear_attachments)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdClearAttachments\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_clear_attachments
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_resolve_image: unsafe {
+ unsafe extern "system" fn cmd_resolve_image(
+ _command_buffer: CommandBuffer,
+ _src_image: Image,
+ _src_image_layout: ImageLayout,
+ _dst_image: Image,
+ _dst_image_layout: ImageLayout,
+ _region_count: u32,
+ _p_regions: *const ImageResolve,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_resolve_image)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdResolveImage\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_resolve_image
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_event: unsafe {
+ unsafe extern "system" fn cmd_set_event(
+ _command_buffer: CommandBuffer,
+ _event: Event,
+ _stage_mask: PipelineStageFlags,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_set_event)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetEvent\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_event
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_reset_event: unsafe {
+ unsafe extern "system" fn cmd_reset_event(
+ _command_buffer: CommandBuffer,
+ _event: Event,
+ _stage_mask: PipelineStageFlags,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_reset_event)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdResetEvent\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_reset_event
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_wait_events: unsafe {
+ unsafe extern "system" fn cmd_wait_events(
+ _command_buffer: CommandBuffer,
+ _event_count: u32,
+ _p_events: *const Event,
+ _src_stage_mask: PipelineStageFlags,
+ _dst_stage_mask: PipelineStageFlags,
+ _memory_barrier_count: u32,
+ _p_memory_barriers: *const MemoryBarrier,
+ _buffer_memory_barrier_count: u32,
+ _p_buffer_memory_barriers: *const BufferMemoryBarrier,
+ _image_memory_barrier_count: u32,
+ _p_image_memory_barriers: *const ImageMemoryBarrier,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_wait_events)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdWaitEvents\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_wait_events
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_pipeline_barrier: unsafe {
+ unsafe extern "system" fn cmd_pipeline_barrier(
+ _command_buffer: CommandBuffer,
+ _src_stage_mask: PipelineStageFlags,
+ _dst_stage_mask: PipelineStageFlags,
+ _dependency_flags: DependencyFlags,
+ _memory_barrier_count: u32,
+ _p_memory_barriers: *const MemoryBarrier,
+ _buffer_memory_barrier_count: u32,
+ _p_buffer_memory_barriers: *const BufferMemoryBarrier,
+ _image_memory_barrier_count: u32,
+ _p_image_memory_barriers: *const ImageMemoryBarrier,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_pipeline_barrier)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdPipelineBarrier\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_pipeline_barrier
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_begin_query: unsafe {
+ unsafe extern "system" fn cmd_begin_query(
+ _command_buffer: CommandBuffer,
+ _query_pool: QueryPool,
+ _query: u32,
+ _flags: QueryControlFlags,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_begin_query)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginQuery\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_begin_query
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_end_query: unsafe {
+ unsafe extern "system" fn cmd_end_query(
+ _command_buffer: CommandBuffer,
+ _query_pool: QueryPool,
+ _query: u32,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_end_query)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdEndQuery\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_end_query
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_reset_query_pool: unsafe {
+ unsafe extern "system" fn cmd_reset_query_pool(
+ _command_buffer: CommandBuffer,
+ _query_pool: QueryPool,
+ _first_query: u32,
+ _query_count: u32,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_reset_query_pool)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdResetQueryPool\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_reset_query_pool
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_write_timestamp: unsafe {
+ unsafe extern "system" fn cmd_write_timestamp(
+ _command_buffer: CommandBuffer,
+ _pipeline_stage: PipelineStageFlags,
+ _query_pool: QueryPool,
+ _query: u32,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_write_timestamp)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdWriteTimestamp\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_write_timestamp
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_copy_query_pool_results: unsafe {
+ unsafe extern "system" fn cmd_copy_query_pool_results(
+ _command_buffer: CommandBuffer,
+ _query_pool: QueryPool,
+ _first_query: u32,
+ _query_count: u32,
+ _dst_buffer: Buffer,
+ _dst_offset: DeviceSize,
+ _stride: DeviceSize,
+ _flags: QueryResultFlags,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_copy_query_pool_results)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyQueryPoolResults\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_copy_query_pool_results
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_push_constants: unsafe {
+ unsafe extern "system" fn cmd_push_constants(
+ _command_buffer: CommandBuffer,
+ _layout: PipelineLayout,
+ _stage_flags: ShaderStageFlags,
+ _offset: u32,
+ _size: u32,
+ _p_values: *const c_void,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_push_constants)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdPushConstants\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_push_constants
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_begin_render_pass: unsafe {
+ unsafe extern "system" fn cmd_begin_render_pass(
+ _command_buffer: CommandBuffer,
+ _p_render_pass_begin: *const RenderPassBeginInfo,
+ _contents: SubpassContents,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(cmd_begin_render_pass)
+ ))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginRenderPass\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_begin_render_pass
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_next_subpass: unsafe {
+ unsafe extern "system" fn cmd_next_subpass(
+ _command_buffer: CommandBuffer,
+ _contents: SubpassContents,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_next_subpass)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdNextSubpass\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_next_subpass
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_end_render_pass: unsafe {
+ unsafe extern "system" fn cmd_end_render_pass(_command_buffer: CommandBuffer) {
+ panic!(concat!("Unable to load ", stringify!(cmd_end_render_pass)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdEndRenderPass\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_end_render_pass
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_execute_commands: unsafe {
+ unsafe extern "system" fn cmd_execute_commands(
+ _command_buffer: CommandBuffer,
+ _command_buffer_count: u32,
+ _p_command_buffers: *const CommandBuffer,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_execute_commands)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdExecuteCommands\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_execute_commands
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[allow(non_camel_case_types)]
+/// Raw function-pointer type for `vkEnumerateInstanceVersion` (Vulkan 1.1):
+/// writes the supported instance API version into `p_api_version` and
+/// returns a Vulkan `Result` code.
+pub type PFN_vkEnumerateInstanceVersion =
+    unsafe extern "system" fn(p_api_version: *mut u32) -> Result;
+/// Table of raw Vulkan 1.1 entry-level (pre-instance) function pointers.
+#[derive(Clone)]
+pub struct EntryFnV1_1 {
+    pub enumerate_instance_version: PFN_vkEnumerateInstanceVersion,
+}
+// SAFETY: the struct only stores plain `extern "system"` function pointers,
+// which carry no thread affinity; external synchronization requirements of
+// the calls themselves are governed by the Vulkan specification.
+unsafe impl Send for EntryFnV1_1 {}
+unsafe impl Sync for EntryFnV1_1 {}
+impl EntryFnV1_1 {
+    /// Resolves each 1.1 entry point by asking `_f` (typically a wrapper over
+    /// `vkGetInstanceProcAddr`) for the symbol by name. If the loader returns
+    /// null, the slot is filled with a stub that panics with the function
+    /// name, so a missing command fails loudly at call time instead of
+    /// jumping through a null pointer.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            enumerate_instance_version: unsafe {
+                // Panicking fallback used when the symbol is unavailable.
+                unsafe extern "system" fn enumerate_instance_version(
+                    _p_api_version: *mut u32,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(enumerate_instance_version)
+                    ))
+                }
+                // SAFETY: the byte literal is nul-terminated with no interior nul.
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkEnumerateInstanceVersion\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    enumerate_instance_version
+                } else {
+                    // Non-null: reinterpret the untyped loader result as the
+                    // typed PFN; soundness relies on the loader returning the
+                    // correct symbol for this exact name.
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+/// Table of raw Vulkan 1.1 instance-level function pointers
+/// (physical-device queries and group enumeration).
+#[derive(Clone)]
+pub struct InstanceFnV1_1 {
+    pub enumerate_physical_device_groups: crate::vk::PFN_vkEnumeratePhysicalDeviceGroups,
+    pub get_physical_device_features2: crate::vk::PFN_vkGetPhysicalDeviceFeatures2,
+    pub get_physical_device_properties2: crate::vk::PFN_vkGetPhysicalDeviceProperties2,
+    pub get_physical_device_format_properties2: crate::vk::PFN_vkGetPhysicalDeviceFormatProperties2,
+    pub get_physical_device_image_format_properties2:
+        crate::vk::PFN_vkGetPhysicalDeviceImageFormatProperties2,
+    pub get_physical_device_queue_family_properties2:
+        crate::vk::PFN_vkGetPhysicalDeviceQueueFamilyProperties2,
+    pub get_physical_device_memory_properties2: crate::vk::PFN_vkGetPhysicalDeviceMemoryProperties2,
+    pub get_physical_device_sparse_image_format_properties2:
+        crate::vk::PFN_vkGetPhysicalDeviceSparseImageFormatProperties2,
+    pub get_physical_device_external_buffer_properties:
+        crate::vk::PFN_vkGetPhysicalDeviceExternalBufferProperties,
+    pub get_physical_device_external_fence_properties:
+        crate::vk::PFN_vkGetPhysicalDeviceExternalFenceProperties,
+    pub get_physical_device_external_semaphore_properties:
+        crate::vk::PFN_vkGetPhysicalDeviceExternalSemaphoreProperties,
+}
+// SAFETY: only plain `extern "system"` function pointers are stored; they
+// carry no thread affinity of their own.
+unsafe impl Send for InstanceFnV1_1 {}
+unsafe impl Sync for InstanceFnV1_1 {}
+impl InstanceFnV1_1 {
+    /// Resolves every Vulkan 1.1 instance-level command through `_f`
+    /// (typically a wrapper over `vkGetInstanceProcAddr`). Each slot follows
+    /// the same generated pattern: look the symbol up by its exact C name;
+    /// if the loader returns null, install a stub that panics with the
+    /// function name, otherwise `transmute` the untyped pointer into the
+    /// typed PFN. Soundness of each transmute relies on the loader returning
+    /// the correct symbol for the requested name; the nul-terminated name
+    /// literals make the unchecked `CStr` construction sound.
+    pub fn load<F>(mut _f: F) -> Self
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            enumerate_physical_device_groups: unsafe {
+                // Panicking fallback used when the symbol is unavailable.
+                unsafe extern "system" fn enumerate_physical_device_groups(
+                    _instance: Instance,
+                    _p_physical_device_group_count: *mut u32,
+                    _p_physical_device_group_properties: *mut PhysicalDeviceGroupProperties,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(enumerate_physical_device_groups)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkEnumeratePhysicalDeviceGroups\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    enumerate_physical_device_groups
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_physical_device_features2: unsafe {
+                unsafe extern "system" fn get_physical_device_features2(
+                    _physical_device: PhysicalDevice,
+                    _p_features: *mut PhysicalDeviceFeatures2,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_physical_device_features2)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetPhysicalDeviceFeatures2\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_physical_device_features2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_physical_device_properties2: unsafe {
+                unsafe extern "system" fn get_physical_device_properties2(
+                    _physical_device: PhysicalDevice,
+                    _p_properties: *mut PhysicalDeviceProperties2,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_physical_device_properties2)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetPhysicalDeviceProperties2\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_physical_device_properties2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_physical_device_format_properties2: unsafe {
+                unsafe extern "system" fn get_physical_device_format_properties2(
+                    _physical_device: PhysicalDevice,
+                    _format: Format,
+                    _p_format_properties: *mut FormatProperties2,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_physical_device_format_properties2)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetPhysicalDeviceFormatProperties2\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_physical_device_format_properties2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_physical_device_image_format_properties2: unsafe {
+                unsafe extern "system" fn get_physical_device_image_format_properties2(
+                    _physical_device: PhysicalDevice,
+                    _p_image_format_info: *const PhysicalDeviceImageFormatInfo2,
+                    _p_image_format_properties: *mut ImageFormatProperties2,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_physical_device_image_format_properties2)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetPhysicalDeviceImageFormatProperties2\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_physical_device_image_format_properties2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_physical_device_queue_family_properties2: unsafe {
+                unsafe extern "system" fn get_physical_device_queue_family_properties2(
+                    _physical_device: PhysicalDevice,
+                    _p_queue_family_property_count: *mut u32,
+                    _p_queue_family_properties: *mut QueueFamilyProperties2,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_physical_device_queue_family_properties2)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetPhysicalDeviceQueueFamilyProperties2\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_physical_device_queue_family_properties2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_physical_device_memory_properties2: unsafe {
+                unsafe extern "system" fn get_physical_device_memory_properties2(
+                    _physical_device: PhysicalDevice,
+                    _p_memory_properties: *mut PhysicalDeviceMemoryProperties2,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_physical_device_memory_properties2)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetPhysicalDeviceMemoryProperties2\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_physical_device_memory_properties2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_physical_device_sparse_image_format_properties2: unsafe {
+                unsafe extern "system" fn get_physical_device_sparse_image_format_properties2(
+                    _physical_device: PhysicalDevice,
+                    _p_format_info: *const PhysicalDeviceSparseImageFormatInfo2,
+                    _p_property_count: *mut u32,
+                    _p_properties: *mut SparseImageFormatProperties2,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_physical_device_sparse_image_format_properties2)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetPhysicalDeviceSparseImageFormatProperties2\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_physical_device_sparse_image_format_properties2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_physical_device_external_buffer_properties: unsafe {
+                unsafe extern "system" fn get_physical_device_external_buffer_properties(
+                    _physical_device: PhysicalDevice,
+                    _p_external_buffer_info: *const PhysicalDeviceExternalBufferInfo,
+                    _p_external_buffer_properties: *mut ExternalBufferProperties,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_physical_device_external_buffer_properties)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetPhysicalDeviceExternalBufferProperties\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_physical_device_external_buffer_properties
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_physical_device_external_fence_properties: unsafe {
+                unsafe extern "system" fn get_physical_device_external_fence_properties(
+                    _physical_device: PhysicalDevice,
+                    _p_external_fence_info: *const PhysicalDeviceExternalFenceInfo,
+                    _p_external_fence_properties: *mut ExternalFenceProperties,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_physical_device_external_fence_properties)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetPhysicalDeviceExternalFenceProperties\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_physical_device_external_fence_properties
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_physical_device_external_semaphore_properties: unsafe {
+                unsafe extern "system" fn get_physical_device_external_semaphore_properties(
+                    _physical_device: PhysicalDevice,
+                    _p_external_semaphore_info: *const PhysicalDeviceExternalSemaphoreInfo,
+                    _p_external_semaphore_properties: *mut ExternalSemaphoreProperties,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_physical_device_external_semaphore_properties)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetPhysicalDeviceExternalSemaphoreProperties\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_physical_device_external_semaphore_properties
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+#[allow(non_camel_case_types)]
+/// Raw function-pointer type for `vkGetDeviceQueue2` (Vulkan 1.1): writes the
+/// queue described by `p_queue_info` into `p_queue`.
+pub type PFN_vkGetDeviceQueue2 = unsafe extern "system" fn(
+    device: Device,
+    p_queue_info: *const DeviceQueueInfo2,
+    p_queue: *mut Queue,
+);
+/// Table of raw Vulkan 1.1 device-level function pointers (memory binding,
+/// device groups, Y'CbCr conversion, descriptor update templates, etc.).
+#[derive(Clone)]
+pub struct DeviceFnV1_1 {
+    pub bind_buffer_memory2: crate::vk::PFN_vkBindBufferMemory2,
+    pub bind_image_memory2: crate::vk::PFN_vkBindImageMemory2,
+    pub get_device_group_peer_memory_features: crate::vk::PFN_vkGetDeviceGroupPeerMemoryFeatures,
+    pub cmd_set_device_mask: crate::vk::PFN_vkCmdSetDeviceMask,
+    pub cmd_dispatch_base: crate::vk::PFN_vkCmdDispatchBase,
+    pub get_image_memory_requirements2: crate::vk::PFN_vkGetImageMemoryRequirements2,
+    pub get_buffer_memory_requirements2: crate::vk::PFN_vkGetBufferMemoryRequirements2,
+    pub get_image_sparse_memory_requirements2: crate::vk::PFN_vkGetImageSparseMemoryRequirements2,
+    pub trim_command_pool: crate::vk::PFN_vkTrimCommandPool,
+    pub get_device_queue2: PFN_vkGetDeviceQueue2,
+    pub create_sampler_ycbcr_conversion: crate::vk::PFN_vkCreateSamplerYcbcrConversion,
+    pub destroy_sampler_ycbcr_conversion: crate::vk::PFN_vkDestroySamplerYcbcrConversion,
+    pub create_descriptor_update_template: crate::vk::PFN_vkCreateDescriptorUpdateTemplate,
+    pub destroy_descriptor_update_template: crate::vk::PFN_vkDestroyDescriptorUpdateTemplate,
+    pub update_descriptor_set_with_template: crate::vk::PFN_vkUpdateDescriptorSetWithTemplate,
+    pub get_descriptor_set_layout_support: crate::vk::PFN_vkGetDescriptorSetLayoutSupport,
+}
+// SAFETY: only plain `extern "system"` function pointers are stored; they
+// carry no thread affinity of their own.
+unsafe impl Send for DeviceFnV1_1 {}
+unsafe impl Sync for DeviceFnV1_1 {}
+impl DeviceFnV1_1 {
+ pub fn load<F>(mut _f: F) -> Self
+ where
+ F: FnMut(&::std::ffi::CStr) -> *const c_void,
+ {
+ Self {
+ bind_buffer_memory2: unsafe {
+ unsafe extern "system" fn bind_buffer_memory2(
+ _device: Device,
+ _bind_info_count: u32,
+ _p_bind_infos: *const BindBufferMemoryInfo,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(bind_buffer_memory2)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkBindBufferMemory2\0");
+ let val = _f(cname);
+ if val.is_null() {
+ bind_buffer_memory2
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ bind_image_memory2: unsafe {
+ unsafe extern "system" fn bind_image_memory2(
+ _device: Device,
+ _bind_info_count: u32,
+ _p_bind_infos: *const BindImageMemoryInfo,
+ ) -> Result {
+ panic!(concat!("Unable to load ", stringify!(bind_image_memory2)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkBindImageMemory2\0");
+ let val = _f(cname);
+ if val.is_null() {
+ bind_image_memory2
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_device_group_peer_memory_features: unsafe {
+ unsafe extern "system" fn get_device_group_peer_memory_features(
+ _device: Device,
+ _heap_index: u32,
+ _local_device_index: u32,
+ _remote_device_index: u32,
+ _p_peer_memory_features: *mut PeerMemoryFeatureFlags,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_device_group_peer_memory_features)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDeviceGroupPeerMemoryFeatures\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_group_peer_memory_features
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_set_device_mask: unsafe {
+ unsafe extern "system" fn cmd_set_device_mask(
+ _command_buffer: CommandBuffer,
+ _device_mask: u32,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_set_device_mask)))
+ }
+ let cname =
+ ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDeviceMask\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_set_device_mask
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ cmd_dispatch_base: unsafe {
+ unsafe extern "system" fn cmd_dispatch_base(
+ _command_buffer: CommandBuffer,
+ _base_group_x: u32,
+ _base_group_y: u32,
+ _base_group_z: u32,
+ _group_count_x: u32,
+ _group_count_y: u32,
+ _group_count_z: u32,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(cmd_dispatch_base)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDispatchBase\0");
+ let val = _f(cname);
+ if val.is_null() {
+ cmd_dispatch_base
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_image_memory_requirements2: unsafe {
+ unsafe extern "system" fn get_image_memory_requirements2(
+ _device: Device,
+ _p_info: *const ImageMemoryRequirementsInfo2,
+ _p_memory_requirements: *mut MemoryRequirements2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_image_memory_requirements2)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetImageMemoryRequirements2\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_image_memory_requirements2
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_buffer_memory_requirements2: unsafe {
+ unsafe extern "system" fn get_buffer_memory_requirements2(
+ _device: Device,
+ _p_info: *const BufferMemoryRequirementsInfo2,
+ _p_memory_requirements: *mut MemoryRequirements2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_buffer_memory_requirements2)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetBufferMemoryRequirements2\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_buffer_memory_requirements2
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_image_sparse_memory_requirements2: unsafe {
+ unsafe extern "system" fn get_image_sparse_memory_requirements2(
+ _device: Device,
+ _p_info: *const ImageSparseMemoryRequirementsInfo2,
+ _p_sparse_memory_requirement_count: *mut u32,
+ _p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_image_sparse_memory_requirements2)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetImageSparseMemoryRequirements2\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_image_sparse_memory_requirements2
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ trim_command_pool: unsafe {
+ unsafe extern "system" fn trim_command_pool(
+ _device: Device,
+ _command_pool: CommandPool,
+ _flags: CommandPoolTrimFlags,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(trim_command_pool)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkTrimCommandPool\0");
+ let val = _f(cname);
+ if val.is_null() {
+ trim_command_pool
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_device_queue2: unsafe {
+ unsafe extern "system" fn get_device_queue2(
+ _device: Device,
+ _p_queue_info: *const DeviceQueueInfo2,
+ _p_queue: *mut Queue,
+ ) {
+ panic!(concat!("Unable to load ", stringify!(get_device_queue2)))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetDeviceQueue2\0");
+ let val = _f(cname);
+ if val.is_null() {
+ get_device_queue2
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_sampler_ycbcr_conversion: unsafe {
+ unsafe extern "system" fn create_sampler_ycbcr_conversion(
+ _device: Device,
+ _p_create_info: *const SamplerYcbcrConversionCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_ycbcr_conversion: *mut SamplerYcbcrConversion,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_sampler_ycbcr_conversion)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreateSamplerYcbcrConversion\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_sampler_ycbcr_conversion
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_sampler_ycbcr_conversion: unsafe {
+ unsafe extern "system" fn destroy_sampler_ycbcr_conversion(
+ _device: Device,
+ _ycbcr_conversion: SamplerYcbcrConversion,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_sampler_ycbcr_conversion)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkDestroySamplerYcbcrConversion\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_sampler_ycbcr_conversion
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ create_descriptor_update_template: unsafe {
+ unsafe extern "system" fn create_descriptor_update_template(
+ _device: Device,
+ _p_create_info: *const DescriptorUpdateTemplateCreateInfo,
+ _p_allocator: *const AllocationCallbacks,
+ _p_descriptor_update_template: *mut DescriptorUpdateTemplate,
+ ) -> Result {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(create_descriptor_update_template)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkCreateDescriptorUpdateTemplate\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ create_descriptor_update_template
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ destroy_descriptor_update_template: unsafe {
+ unsafe extern "system" fn destroy_descriptor_update_template(
+ _device: Device,
+ _descriptor_update_template: DescriptorUpdateTemplate,
+ _p_allocator: *const AllocationCallbacks,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(destroy_descriptor_update_template)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkDestroyDescriptorUpdateTemplate\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ destroy_descriptor_update_template
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ update_descriptor_set_with_template: unsafe {
+ unsafe extern "system" fn update_descriptor_set_with_template(
+ _device: Device,
+ _descriptor_set: DescriptorSet,
+ _descriptor_update_template: DescriptorUpdateTemplate,
+ _p_data: *const c_void,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(update_descriptor_set_with_template)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkUpdateDescriptorSetWithTemplate\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ update_descriptor_set_with_template
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ get_descriptor_set_layout_support: unsafe {
+ unsafe extern "system" fn get_descriptor_set_layout_support(
+ _device: Device,
+ _p_create_info: *const DescriptorSetLayoutCreateInfo,
+ _p_support: *mut DescriptorSetLayoutSupport,
+ ) {
+ panic!(concat!(
+ "Unable to load ",
+ stringify!(get_descriptor_set_layout_support)
+ ))
+ }
+ let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+ b"vkGetDescriptorSetLayoutSupport\0",
+ );
+ let val = _f(cname);
+ if val.is_null() {
+ get_descriptor_set_layout_support
+ } else {
+ ::std::mem::transmute(val)
+ }
+ },
+ }
+ }
+}
+#[derive(Clone)]
+pub struct EntryFnV1_2 {} // Vulkan 1.2 promoted no new entry-level (pre-instance) commands to core, so this table is empty.
+unsafe impl Send for EntryFnV1_2 {} // SAFETY: zero-sized; a populated table would hold only plain function pointers with no thread affinity.
+unsafe impl Sync for EntryFnV1_2 {} // SAFETY: same as Send — no interior state to race on.
+impl EntryFnV1_2 {
+    pub fn load<F>(mut _f: F) -> Self // `_f` (a vkGetInstanceProcAddr-style lookup) is accepted for uniformity with the other tables but never invoked.
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[derive(Clone)]
+pub struct InstanceFnV1_2 {} // Vulkan 1.2 promoted no new instance-level commands to core, so this table is empty.
+unsafe impl Send for InstanceFnV1_2 {} // SAFETY: zero-sized; a populated table would hold only plain function pointers.
+unsafe impl Sync for InstanceFnV1_2 {} // SAFETY: same as Send — no interior state.
+impl InstanceFnV1_2 {
+    pub fn load<F>(mut _f: F) -> Self // `_f` is accepted for signature uniformity with the non-empty tables but never invoked.
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[derive(Clone)]
+pub struct DeviceFnV1_2 { // Function-pointer table for device-level commands promoted to core in Vulkan 1.2 (draw-indirect-count, render-pass2, timeline semaphores, buffer device address, ...).
+    pub cmd_draw_indirect_count: crate::vk::PFN_vkCmdDrawIndirectCount,
+    pub cmd_draw_indexed_indirect_count: crate::vk::PFN_vkCmdDrawIndexedIndirectCount,
+    pub create_render_pass2: crate::vk::PFN_vkCreateRenderPass2,
+    pub cmd_begin_render_pass2: crate::vk::PFN_vkCmdBeginRenderPass2,
+    pub cmd_next_subpass2: crate::vk::PFN_vkCmdNextSubpass2,
+    pub cmd_end_render_pass2: crate::vk::PFN_vkCmdEndRenderPass2,
+    pub reset_query_pool: crate::vk::PFN_vkResetQueryPool,
+    pub get_semaphore_counter_value: crate::vk::PFN_vkGetSemaphoreCounterValue,
+    pub wait_semaphores: crate::vk::PFN_vkWaitSemaphores,
+    pub signal_semaphore: crate::vk::PFN_vkSignalSemaphore,
+    pub get_buffer_device_address: crate::vk::PFN_vkGetBufferDeviceAddress,
+    pub get_buffer_opaque_capture_address: crate::vk::PFN_vkGetBufferOpaqueCaptureAddress,
+    pub get_device_memory_opaque_capture_address:
+        crate::vk::PFN_vkGetDeviceMemoryOpaqueCaptureAddress,
+}
+unsafe impl Send for DeviceFnV1_2 {} // SAFETY: holds only raw Vulkan function pointers, which are plain addresses with no thread affinity.
+unsafe impl Sync for DeviceFnV1_2 {} // SAFETY: the pointers are immutable after load; calling through them is already `unsafe` at each call site.
+impl DeviceFnV1_2 { // Loader for the Vulkan 1.2 device-level command table; every entry follows the same resolve-or-stub pattern.
+    pub fn load<F>(mut _f: F) -> Self // `_f` maps a NUL-terminated command name to a fn pointer (vkGetDeviceProcAddr semantics); a null result installs a panicking stub instead.
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            cmd_draw_indirect_count: unsafe {
+                unsafe extern "system" fn cmd_draw_indirect_count( // Fallback stub: panics with the command name if the driver did not expose it.
+                    _command_buffer: CommandBuffer,
+                    _buffer: Buffer,
+                    _offset: DeviceSize,
+                    _count_buffer: Buffer,
+                    _count_buffer_offset: DeviceSize,
+                    _max_draw_count: u32,
+                    _stride: u32,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_draw_indirect_count)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdDrawIndirectCount\0");
+                let val = _f(cname); // Query the driver; null means "command not available".
+                if val.is_null() {
+                    cmd_draw_indirect_count
+                } else {
+                    ::std::mem::transmute(val) // SAFETY: non-null pointer was returned for this exact command name; reinterpret it as the matching PFN type.
+                }
+            },
+            cmd_draw_indexed_indirect_count: unsafe {
+                unsafe extern "system" fn cmd_draw_indexed_indirect_count(
+                    _command_buffer: CommandBuffer,
+                    _buffer: Buffer,
+                    _offset: DeviceSize,
+                    _count_buffer: Buffer,
+                    _count_buffer_offset: DeviceSize,
+                    _max_draw_count: u32,
+                    _stride: u32,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_draw_indexed_indirect_count)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdDrawIndexedIndirectCount\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_draw_indexed_indirect_count
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            create_render_pass2: unsafe {
+                unsafe extern "system" fn create_render_pass2(
+                    _device: Device,
+                    _p_create_info: *const RenderPassCreateInfo2,
+                    _p_allocator: *const AllocationCallbacks,
+                    _p_render_pass: *mut RenderPass,
+                ) -> Result {
+                    panic!(concat!("Unable to load ", stringify!(create_render_pass2)))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreateRenderPass2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    create_render_pass2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_begin_render_pass2: unsafe {
+                unsafe extern "system" fn cmd_begin_render_pass2(
+                    _command_buffer: CommandBuffer,
+                    _p_render_pass_begin: *const RenderPassBeginInfo,
+                    _p_subpass_begin_info: *const SubpassBeginInfo,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_begin_render_pass2)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginRenderPass2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_begin_render_pass2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_next_subpass2: unsafe {
+                unsafe extern "system" fn cmd_next_subpass2(
+                    _command_buffer: CommandBuffer,
+                    _p_subpass_begin_info: *const SubpassBeginInfo,
+                    _p_subpass_end_info: *const SubpassEndInfo,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_next_subpass2)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdNextSubpass2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_next_subpass2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_end_render_pass2: unsafe {
+                unsafe extern "system" fn cmd_end_render_pass2(
+                    _command_buffer: CommandBuffer,
+                    _p_subpass_end_info: *const SubpassEndInfo,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_end_render_pass2)))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdEndRenderPass2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_end_render_pass2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            reset_query_pool: unsafe {
+                unsafe extern "system" fn reset_query_pool(
+                    _device: Device,
+                    _query_pool: QueryPool,
+                    _first_query: u32,
+                    _query_count: u32,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(reset_query_pool)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkResetQueryPool\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    reset_query_pool
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_semaphore_counter_value: unsafe {
+                unsafe extern "system" fn get_semaphore_counter_value(
+                    _device: Device,
+                    _semaphore: Semaphore,
+                    _p_value: *mut u64,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_semaphore_counter_value)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetSemaphoreCounterValue\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_semaphore_counter_value
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            wait_semaphores: unsafe {
+                unsafe extern "system" fn wait_semaphores(
+                    _device: Device,
+                    _p_wait_info: *const SemaphoreWaitInfo,
+                    _timeout: u64,
+                ) -> Result {
+                    panic!(concat!("Unable to load ", stringify!(wait_semaphores)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkWaitSemaphores\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    wait_semaphores
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            signal_semaphore: unsafe {
+                unsafe extern "system" fn signal_semaphore(
+                    _device: Device,
+                    _p_signal_info: *const SemaphoreSignalInfo,
+                ) -> Result {
+                    panic!(concat!("Unable to load ", stringify!(signal_semaphore)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkSignalSemaphore\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    signal_semaphore
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_buffer_device_address: unsafe {
+                unsafe extern "system" fn get_buffer_device_address(
+                    _device: Device,
+                    _p_info: *const BufferDeviceAddressInfo,
+                ) -> DeviceAddress {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_buffer_device_address)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetBufferDeviceAddress\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    get_buffer_device_address
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_buffer_opaque_capture_address: unsafe {
+                unsafe extern "system" fn get_buffer_opaque_capture_address(
+                    _device: Device,
+                    _p_info: *const BufferDeviceAddressInfo,
+                ) -> u64 {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_buffer_opaque_capture_address)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetBufferOpaqueCaptureAddress\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_buffer_opaque_capture_address
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_device_memory_opaque_capture_address: unsafe {
+                unsafe extern "system" fn get_device_memory_opaque_capture_address(
+                    _device: Device,
+                    _p_info: *const DeviceMemoryOpaqueCaptureAddressInfo,
+                ) -> u64 {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_device_memory_opaque_capture_address)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetDeviceMemoryOpaqueCaptureAddress\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_device_memory_opaque_capture_address
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
+#[derive(Clone)]
+pub struct EntryFnV1_3 {} // Vulkan 1.3 promoted no new entry-level (pre-instance) commands to core, so this table is empty.
+unsafe impl Send for EntryFnV1_3 {} // SAFETY: zero-sized; a populated table would hold only plain function pointers.
+unsafe impl Sync for EntryFnV1_3 {} // SAFETY: same as Send — no interior state.
+impl EntryFnV1_3 {
+    pub fn load<F>(mut _f: F) -> Self // `_f` is accepted for signature uniformity with the non-empty tables but never invoked.
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {}
+    }
+}
+#[derive(Clone)]
+pub struct InstanceFnV1_3 { // Function-pointer table for the single instance-level command promoted to core in Vulkan 1.3.
+    pub get_physical_device_tool_properties: crate::vk::PFN_vkGetPhysicalDeviceToolProperties,
+}
+unsafe impl Send for InstanceFnV1_3 {} // SAFETY: holds only a raw Vulkan function pointer with no thread affinity.
+unsafe impl Sync for InstanceFnV1_3 {} // SAFETY: the pointer is immutable after load.
+impl InstanceFnV1_3 {
+    pub fn load<F>(mut _f: F) -> Self // `_f` maps a NUL-terminated command name to a fn pointer; a null result installs a panicking stub.
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            get_physical_device_tool_properties: unsafe {
+                unsafe extern "system" fn get_physical_device_tool_properties( // Fallback stub: panics with the command name if the loader did not expose it.
+                    _physical_device: PhysicalDevice,
+                    _p_tool_count: *mut u32,
+                    _p_tool_properties: *mut PhysicalDeviceToolProperties,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_physical_device_tool_properties)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetPhysicalDeviceToolProperties\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_physical_device_tool_properties
+                } else {
+                    ::std::mem::transmute(val) // SAFETY: non-null pointer was returned for this exact command name; reinterpret as the matching PFN type.
+                }
+            },
+        }
+    }
+}
+#[derive(Clone)]
+pub struct DeviceFnV1_3 { // Function-pointer table for device-level commands promoted to core in Vulkan 1.3 (private data, synchronization2, copy-commands2, dynamic rendering, extended dynamic state).
+    pub create_private_data_slot: crate::vk::PFN_vkCreatePrivateDataSlot,
+    pub destroy_private_data_slot: crate::vk::PFN_vkDestroyPrivateDataSlot,
+    pub set_private_data: crate::vk::PFN_vkSetPrivateData,
+    pub get_private_data: crate::vk::PFN_vkGetPrivateData,
+    pub cmd_set_event2: crate::vk::PFN_vkCmdSetEvent2,
+    pub cmd_reset_event2: crate::vk::PFN_vkCmdResetEvent2,
+    pub cmd_wait_events2: crate::vk::PFN_vkCmdWaitEvents2,
+    pub cmd_pipeline_barrier2: crate::vk::PFN_vkCmdPipelineBarrier2,
+    pub cmd_write_timestamp2: crate::vk::PFN_vkCmdWriteTimestamp2,
+    pub queue_submit2: crate::vk::PFN_vkQueueSubmit2,
+    pub cmd_copy_buffer2: crate::vk::PFN_vkCmdCopyBuffer2,
+    pub cmd_copy_image2: crate::vk::PFN_vkCmdCopyImage2,
+    pub cmd_copy_buffer_to_image2: crate::vk::PFN_vkCmdCopyBufferToImage2,
+    pub cmd_copy_image_to_buffer2: crate::vk::PFN_vkCmdCopyImageToBuffer2,
+    pub cmd_blit_image2: crate::vk::PFN_vkCmdBlitImage2,
+    pub cmd_resolve_image2: crate::vk::PFN_vkCmdResolveImage2,
+    pub cmd_begin_rendering: crate::vk::PFN_vkCmdBeginRendering,
+    pub cmd_end_rendering: crate::vk::PFN_vkCmdEndRendering,
+    pub cmd_set_cull_mode: crate::vk::PFN_vkCmdSetCullMode,
+    pub cmd_set_front_face: crate::vk::PFN_vkCmdSetFrontFace,
+    pub cmd_set_primitive_topology: crate::vk::PFN_vkCmdSetPrimitiveTopology,
+    pub cmd_set_viewport_with_count: crate::vk::PFN_vkCmdSetViewportWithCount,
+    pub cmd_set_scissor_with_count: crate::vk::PFN_vkCmdSetScissorWithCount,
+    pub cmd_bind_vertex_buffers2: crate::vk::PFN_vkCmdBindVertexBuffers2,
+    pub cmd_set_depth_test_enable: crate::vk::PFN_vkCmdSetDepthTestEnable,
+    pub cmd_set_depth_write_enable: crate::vk::PFN_vkCmdSetDepthWriteEnable,
+    pub cmd_set_depth_compare_op: crate::vk::PFN_vkCmdSetDepthCompareOp,
+    pub cmd_set_depth_bounds_test_enable: crate::vk::PFN_vkCmdSetDepthBoundsTestEnable,
+    pub cmd_set_stencil_test_enable: crate::vk::PFN_vkCmdSetStencilTestEnable,
+    pub cmd_set_stencil_op: crate::vk::PFN_vkCmdSetStencilOp,
+    pub cmd_set_rasterizer_discard_enable: crate::vk::PFN_vkCmdSetRasterizerDiscardEnable,
+    pub cmd_set_depth_bias_enable: crate::vk::PFN_vkCmdSetDepthBiasEnable,
+    pub cmd_set_primitive_restart_enable: crate::vk::PFN_vkCmdSetPrimitiveRestartEnable,
+    pub get_device_buffer_memory_requirements: crate::vk::PFN_vkGetDeviceBufferMemoryRequirements,
+    pub get_device_image_memory_requirements: crate::vk::PFN_vkGetDeviceImageMemoryRequirements,
+    pub get_device_image_sparse_memory_requirements:
+        crate::vk::PFN_vkGetDeviceImageSparseMemoryRequirements,
+}
+unsafe impl Send for DeviceFnV1_3 {} // SAFETY: holds only raw Vulkan function pointers, which are plain addresses with no thread affinity.
+unsafe impl Sync for DeviceFnV1_3 {} // SAFETY: the pointers are immutable after load; calling through them is already `unsafe` at each call site.
+impl DeviceFnV1_3 { // Loader for the Vulkan 1.3 device-level command table; every entry follows the same resolve-or-stub pattern.
+    pub fn load<F>(mut _f: F) -> Self // `_f` maps a NUL-terminated command name to a fn pointer (vkGetDeviceProcAddr semantics); a null result installs a panicking stub instead.
+    where
+        F: FnMut(&::std::ffi::CStr) -> *const c_void,
+    {
+        Self {
+            create_private_data_slot: unsafe {
+                unsafe extern "system" fn create_private_data_slot( // Fallback stub: panics with the command name if the driver did not expose it.
+                    _device: Device,
+                    _p_create_info: *const PrivateDataSlotCreateInfo,
+                    _p_allocator: *const AllocationCallbacks,
+                    _p_private_data_slot: *mut PrivateDataSlot,
+                ) -> Result {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(create_private_data_slot)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCreatePrivateDataSlot\0");
+                let val = _f(cname); // Query the driver; null means "command not available".
+                if val.is_null() {
+                    create_private_data_slot
+                } else {
+                    ::std::mem::transmute(val) // SAFETY: non-null pointer was returned for this exact command name; reinterpret it as the matching PFN type.
+                }
+            },
+            destroy_private_data_slot: unsafe {
+                unsafe extern "system" fn destroy_private_data_slot(
+                    _device: Device,
+                    _private_data_slot: PrivateDataSlot,
+                    _p_allocator: *const AllocationCallbacks,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(destroy_private_data_slot)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkDestroyPrivateDataSlot\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    destroy_private_data_slot
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            set_private_data: unsafe {
+                unsafe extern "system" fn set_private_data(
+                    _device: Device,
+                    _object_type: ObjectType,
+                    _object_handle: u64,
+                    _private_data_slot: PrivateDataSlot,
+                    _data: u64,
+                ) -> Result {
+                    panic!(concat!("Unable to load ", stringify!(set_private_data)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkSetPrivateData\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    set_private_data
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_private_data: unsafe {
+                unsafe extern "system" fn get_private_data(
+                    _device: Device,
+                    _object_type: ObjectType,
+                    _object_handle: u64,
+                    _private_data_slot: PrivateDataSlot,
+                    _p_data: *mut u64,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(get_private_data)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkGetPrivateData\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    get_private_data
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_event2: unsafe {
+                unsafe extern "system" fn cmd_set_event2(
+                    _command_buffer: CommandBuffer,
+                    _event: Event,
+                    _p_dependency_info: *const DependencyInfo,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_set_event2)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetEvent2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_event2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_reset_event2: unsafe {
+                unsafe extern "system" fn cmd_reset_event2(
+                    _command_buffer: CommandBuffer,
+                    _event: Event,
+                    _stage_mask: PipelineStageFlags2,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_reset_event2)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdResetEvent2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_reset_event2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_wait_events2: unsafe {
+                unsafe extern "system" fn cmd_wait_events2(
+                    _command_buffer: CommandBuffer,
+                    _event_count: u32,
+                    _p_events: *const Event,
+                    _p_dependency_infos: *const DependencyInfo,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_wait_events2)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdWaitEvents2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_wait_events2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_pipeline_barrier2: unsafe {
+                unsafe extern "system" fn cmd_pipeline_barrier2(
+                    _command_buffer: CommandBuffer,
+                    _p_dependency_info: *const DependencyInfo,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_pipeline_barrier2)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdPipelineBarrier2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_pipeline_barrier2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_write_timestamp2: unsafe {
+                unsafe extern "system" fn cmd_write_timestamp2(
+                    _command_buffer: CommandBuffer,
+                    _stage: PipelineStageFlags2,
+                    _query_pool: QueryPool,
+                    _query: u32,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_write_timestamp2)))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdWriteTimestamp2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_write_timestamp2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            queue_submit2: unsafe {
+                unsafe extern "system" fn queue_submit2(
+                    _queue: Queue,
+                    _submit_count: u32,
+                    _p_submits: *const SubmitInfo2,
+                    _fence: Fence,
+                ) -> Result {
+                    panic!(concat!("Unable to load ", stringify!(queue_submit2)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkQueueSubmit2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    queue_submit2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_copy_buffer2: unsafe {
+                unsafe extern "system" fn cmd_copy_buffer2(
+                    _command_buffer: CommandBuffer,
+                    _p_copy_buffer_info: *const CopyBufferInfo2,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_copy_buffer2)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyBuffer2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_copy_buffer2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_copy_image2: unsafe {
+                unsafe extern "system" fn cmd_copy_image2(
+                    _command_buffer: CommandBuffer,
+                    _p_copy_image_info: *const CopyImageInfo2,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_copy_image2)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyImage2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_copy_image2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_copy_buffer_to_image2: unsafe {
+                unsafe extern "system" fn cmd_copy_buffer_to_image2(
+                    _command_buffer: CommandBuffer,
+                    _p_copy_buffer_to_image_info: *const CopyBufferToImageInfo2,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_copy_buffer_to_image2)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyBufferToImage2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_copy_buffer_to_image2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_copy_image_to_buffer2: unsafe {
+                unsafe extern "system" fn cmd_copy_image_to_buffer2(
+                    _command_buffer: CommandBuffer,
+                    _p_copy_image_to_buffer_info: *const CopyImageToBufferInfo2,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_copy_image_to_buffer2)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdCopyImageToBuffer2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_copy_image_to_buffer2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_blit_image2: unsafe {
+                unsafe extern "system" fn cmd_blit_image2(
+                    _command_buffer: CommandBuffer,
+                    _p_blit_image_info: *const BlitImageInfo2,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_blit_image2)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBlitImage2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_blit_image2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_resolve_image2: unsafe {
+                unsafe extern "system" fn cmd_resolve_image2(
+                    _command_buffer: CommandBuffer,
+                    _p_resolve_image_info: *const ResolveImageInfo2,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_resolve_image2)))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdResolveImage2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_resolve_image2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_begin_rendering: unsafe {
+                unsafe extern "system" fn cmd_begin_rendering(
+                    _command_buffer: CommandBuffer,
+                    _p_rendering_info: *const RenderingInfo,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_begin_rendering)))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBeginRendering\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_begin_rendering
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_end_rendering: unsafe {
+                unsafe extern "system" fn cmd_end_rendering(_command_buffer: CommandBuffer) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_end_rendering)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdEndRendering\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_end_rendering
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_cull_mode: unsafe {
+                unsafe extern "system" fn cmd_set_cull_mode(
+                    _command_buffer: CommandBuffer,
+                    _cull_mode: CullModeFlags,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_set_cull_mode)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetCullMode\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_cull_mode
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_front_face: unsafe {
+                unsafe extern "system" fn cmd_set_front_face(
+                    _command_buffer: CommandBuffer,
+                    _front_face: FrontFace,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_set_front_face)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetFrontFace\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_front_face
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_primitive_topology: unsafe {
+                unsafe extern "system" fn cmd_set_primitive_topology(
+                    _command_buffer: CommandBuffer,
+                    _primitive_topology: PrimitiveTopology,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_set_primitive_topology)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetPrimitiveTopology\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_primitive_topology
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_viewport_with_count: unsafe {
+                unsafe extern "system" fn cmd_set_viewport_with_count(
+                    _command_buffer: CommandBuffer,
+                    _viewport_count: u32,
+                    _p_viewports: *const Viewport,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_set_viewport_with_count)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetViewportWithCount\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_viewport_with_count
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_scissor_with_count: unsafe {
+                unsafe extern "system" fn cmd_set_scissor_with_count(
+                    _command_buffer: CommandBuffer,
+                    _scissor_count: u32,
+                    _p_scissors: *const Rect2D,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_set_scissor_with_count)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetScissorWithCount\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_scissor_with_count
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_bind_vertex_buffers2: unsafe {
+                unsafe extern "system" fn cmd_bind_vertex_buffers2(
+                    _command_buffer: CommandBuffer,
+                    _first_binding: u32,
+                    _binding_count: u32,
+                    _p_buffers: *const Buffer,
+                    _p_offsets: *const DeviceSize,
+                    _p_sizes: *const DeviceSize,
+                    _p_strides: *const DeviceSize,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_bind_vertex_buffers2)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdBindVertexBuffers2\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_bind_vertex_buffers2
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_depth_test_enable: unsafe {
+                unsafe extern "system" fn cmd_set_depth_test_enable(
+                    _command_buffer: CommandBuffer,
+                    _depth_test_enable: Bool32,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_set_depth_test_enable)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthTestEnable\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_depth_test_enable
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_depth_write_enable: unsafe {
+                unsafe extern "system" fn cmd_set_depth_write_enable(
+                    _command_buffer: CommandBuffer,
+                    _depth_write_enable: Bool32,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_set_depth_write_enable)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthWriteEnable\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_depth_write_enable
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_depth_compare_op: unsafe {
+                unsafe extern "system" fn cmd_set_depth_compare_op(
+                    _command_buffer: CommandBuffer,
+                    _depth_compare_op: CompareOp,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_set_depth_compare_op)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthCompareOp\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_depth_compare_op
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_depth_bounds_test_enable: unsafe {
+                unsafe extern "system" fn cmd_set_depth_bounds_test_enable(
+                    _command_buffer: CommandBuffer,
+                    _depth_bounds_test_enable: Bool32,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_set_depth_bounds_test_enable)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdSetDepthBoundsTestEnable\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_depth_bounds_test_enable
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_stencil_test_enable: unsafe {
+                unsafe extern "system" fn cmd_set_stencil_test_enable(
+                    _command_buffer: CommandBuffer,
+                    _stencil_test_enable: Bool32,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_set_stencil_test_enable)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetStencilTestEnable\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_stencil_test_enable
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_stencil_op: unsafe {
+                unsafe extern "system" fn cmd_set_stencil_op(
+                    _command_buffer: CommandBuffer,
+                    _face_mask: StencilFaceFlags,
+                    _fail_op: StencilOp,
+                    _pass_op: StencilOp,
+                    _depth_fail_op: StencilOp,
+                    _compare_op: CompareOp,
+                ) {
+                    panic!(concat!("Unable to load ", stringify!(cmd_set_stencil_op)))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetStencilOp\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_stencil_op
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_rasterizer_discard_enable: unsafe {
+                unsafe extern "system" fn cmd_set_rasterizer_discard_enable(
+                    _command_buffer: CommandBuffer,
+                    _rasterizer_discard_enable: Bool32,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_set_rasterizer_discard_enable)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdSetRasterizerDiscardEnable\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_rasterizer_discard_enable
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_depth_bias_enable: unsafe {
+                unsafe extern "system" fn cmd_set_depth_bias_enable(
+                    _command_buffer: CommandBuffer,
+                    _depth_bias_enable: Bool32,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_set_depth_bias_enable)
+                    ))
+                }
+                let cname =
+                    ::std::ffi::CStr::from_bytes_with_nul_unchecked(b"vkCmdSetDepthBiasEnable\0");
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_depth_bias_enable
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            cmd_set_primitive_restart_enable: unsafe {
+                unsafe extern "system" fn cmd_set_primitive_restart_enable(
+                    _command_buffer: CommandBuffer,
+                    _primitive_restart_enable: Bool32,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(cmd_set_primitive_restart_enable)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkCmdSetPrimitiveRestartEnable\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    cmd_set_primitive_restart_enable
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_device_buffer_memory_requirements: unsafe {
+                unsafe extern "system" fn get_device_buffer_memory_requirements(
+                    _device: Device,
+                    _p_info: *const DeviceBufferMemoryRequirements,
+                    _p_memory_requirements: *mut MemoryRequirements2,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_device_buffer_memory_requirements)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetDeviceBufferMemoryRequirements\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_device_buffer_memory_requirements
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_device_image_memory_requirements: unsafe {
+                unsafe extern "system" fn get_device_image_memory_requirements(
+                    _device: Device,
+                    _p_info: *const DeviceImageMemoryRequirements,
+                    _p_memory_requirements: *mut MemoryRequirements2,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_device_image_memory_requirements)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetDeviceImageMemoryRequirements\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_device_image_memory_requirements
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+            get_device_image_sparse_memory_requirements: unsafe {
+                unsafe extern "system" fn get_device_image_sparse_memory_requirements(
+                    _device: Device,
+                    _p_info: *const DeviceImageMemoryRequirements,
+                    _p_sparse_memory_requirement_count: *mut u32,
+                    _p_sparse_memory_requirements: *mut SparseImageMemoryRequirements2,
+                ) {
+                    panic!(concat!(
+                        "Unable to load ",
+                        stringify!(get_device_image_sparse_memory_requirements)
+                    ))
+                }
+                let cname = ::std::ffi::CStr::from_bytes_with_nul_unchecked(
+                    b"vkGetDeviceImageSparseMemoryRequirements\0",
+                );
+                let val = _f(cname);
+                if val.is_null() {
+                    get_device_image_sparse_memory_requirements
+                } else {
+                    ::std::mem::transmute(val)
+                }
+            },
+        }
+    }
+}
diff --git a/third_party/rust/ash/src/vk/macros.rs b/third_party/rust/ash/src/vk/macros.rs
new file mode 100644
index 0000000000..e986be3125
--- /dev/null
+++ b/third_party/rust/ash/src/vk/macros.rs
@@ -0,0 +1,162 @@
+#[macro_export]
+macro_rules! vk_bitflags_wrapped {
+ ($ name : ident , $ flag_type : ty) => {
+ impl Default for $name {
+ fn default() -> Self {
+ Self(0)
+ }
+ }
+ impl $name {
+ #[inline]
+ pub const fn empty() -> Self {
+ Self(0)
+ }
+ #[inline]
+ pub const fn from_raw(x: $flag_type) -> Self {
+ Self(x)
+ }
+ #[inline]
+ pub const fn as_raw(self) -> $flag_type {
+ self.0
+ }
+ #[inline]
+ pub const fn is_empty(self) -> bool {
+ self.0 == Self::empty().0
+ }
+ #[inline]
+ pub const fn intersects(self, other: Self) -> bool {
+ !Self(self.0 & other.0).is_empty()
+ }
+ #[doc = r" Returns whether `other` is a subset of `self`"]
+ #[inline]
+ pub const fn contains(self, other: Self) -> bool {
+ self.0 & other.0 == other.0
+ }
+ }
+ impl ::std::ops::BitOr for $name {
+ type Output = Self;
+ #[inline]
+ fn bitor(self, rhs: Self) -> Self {
+ Self(self.0 | rhs.0)
+ }
+ }
+ impl ::std::ops::BitOrAssign for $name {
+ #[inline]
+ fn bitor_assign(&mut self, rhs: Self) {
+ *self = *self | rhs
+ }
+ }
+ impl ::std::ops::BitAnd for $name {
+ type Output = Self;
+ #[inline]
+ fn bitand(self, rhs: Self) -> Self {
+ Self(self.0 & rhs.0)
+ }
+ }
+ impl ::std::ops::BitAndAssign for $name {
+ #[inline]
+ fn bitand_assign(&mut self, rhs: Self) {
+ *self = *self & rhs
+ }
+ }
+ impl ::std::ops::BitXor for $name {
+ type Output = Self;
+ #[inline]
+ fn bitxor(self, rhs: Self) -> Self {
+ Self(self.0 ^ rhs.0)
+ }
+ }
+ impl ::std::ops::BitXorAssign for $name {
+ #[inline]
+ fn bitxor_assign(&mut self, rhs: Self) {
+ *self = *self ^ rhs
+ }
+ }
+ impl ::std::ops::Not for $name {
+ type Output = Self;
+ #[inline]
+ fn not(self) -> Self {
+ Self(!self.0)
+ }
+ }
+ };
+}
+#[macro_export]
+macro_rules! handle_nondispatchable {
+ ($ name : ident , $ ty : ident) => {
+ handle_nondispatchable!($name, $ty, doc = "");
+ };
+ ($ name : ident , $ ty : ident , $ doc_link : meta) => {
+ #[repr(transparent)]
+ #[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Copy, Hash, Default)]
+ #[$doc_link]
+ pub struct $name(u64);
+ impl Handle for $name {
+ const TYPE: ObjectType = ObjectType::$ty;
+ fn as_raw(self) -> u64 {
+ self.0
+ }
+ fn from_raw(x: u64) -> Self {
+ Self(x)
+ }
+ }
+ impl $name {
+ pub const fn null() -> Self {
+ Self(0)
+ }
+ }
+ impl fmt::Pointer for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "0x{:x}", self.0)
+ }
+ }
+ impl fmt::Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ write!(f, "0x{:x}", self.0)
+ }
+ }
+ };
+}
+#[macro_export]
+macro_rules! define_handle {
+ ($ name : ident , $ ty : ident) => {
+ define_handle!($name, $ty, doc = "");
+ };
+ ($ name : ident , $ ty : ident , $ doc_link : meta) => {
+ #[repr(transparent)]
+ #[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Copy, Hash)]
+ #[$doc_link]
+ pub struct $name(*mut u8);
+ impl Default for $name {
+ fn default() -> Self {
+ Self::null()
+ }
+ }
+ impl Handle for $name {
+ const TYPE: ObjectType = ObjectType::$ty;
+ fn as_raw(self) -> u64 {
+ self.0 as u64
+ }
+ fn from_raw(x: u64) -> Self {
+ Self(x as _)
+ }
+ }
+ unsafe impl Send for $name {}
+ unsafe impl Sync for $name {}
+ impl $name {
+ pub const fn null() -> Self {
+ Self(::std::ptr::null_mut())
+ }
+ }
+ impl fmt::Pointer for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Pointer::fmt(&self.0, f)
+ }
+ }
+ impl fmt::Debug for $name {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Debug::fmt(&self.0, f)
+ }
+ }
+ };
+}
diff --git a/third_party/rust/ash/src/vk/native.rs b/third_party/rust/ash/src/vk/native.rs
new file mode 100644
index 0000000000..02edfba5c8
--- /dev/null
+++ b/third_party/rust/ash/src/vk/native.rs
@@ -0,0 +1,8998 @@
+/* automatically generated by rust-bindgen 0.61.0 */
+
+#[repr(C)]
+#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
+pub struct __BindgenBitfieldUnit<Storage> {
+ storage: Storage,
+}
+impl<Storage> __BindgenBitfieldUnit<Storage> {
+ #[inline]
+ pub const fn new(storage: Storage) -> Self {
+ Self { storage }
+ }
+}
+impl<Storage> __BindgenBitfieldUnit<Storage>
+where
+ Storage: AsRef<[u8]> + AsMut<[u8]>,
+{
+ #[inline]
+ pub fn get_bit(&self, index: usize) -> bool {
+ debug_assert!(index / 8 < self.storage.as_ref().len());
+ let byte_index = index / 8;
+ let byte = self.storage.as_ref()[byte_index];
+ let bit_index = if cfg!(target_endian = "big") {
+ 7 - (index % 8)
+ } else {
+ index % 8
+ };
+ let mask = 1 << bit_index;
+ byte & mask == mask
+ }
+ #[inline]
+ pub fn set_bit(&mut self, index: usize, val: bool) {
+ debug_assert!(index / 8 < self.storage.as_ref().len());
+ let byte_index = index / 8;
+ let byte = &mut self.storage.as_mut()[byte_index];
+ let bit_index = if cfg!(target_endian = "big") {
+ 7 - (index % 8)
+ } else {
+ index % 8
+ };
+ let mask = 1 << bit_index;
+ if val {
+ *byte |= mask;
+ } else {
+ *byte &= !mask;
+ }
+ }
+ #[inline]
+ pub fn get(&self, bit_offset: usize, bit_width: u8) -> u64 {
+ debug_assert!(bit_width <= 64);
+ debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
+ debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len());
+ let mut val = 0;
+ for i in 0..(bit_width as usize) {
+ if self.get_bit(i + bit_offset) {
+ let index = if cfg!(target_endian = "big") {
+ bit_width as usize - 1 - i
+ } else {
+ i
+ };
+ val |= 1 << index;
+ }
+ }
+ val
+ }
+ #[inline]
+ pub fn set(&mut self, bit_offset: usize, bit_width: u8, val: u64) {
+ debug_assert!(bit_width <= 64);
+ debug_assert!(bit_offset / 8 < self.storage.as_ref().len());
+ debug_assert!((bit_offset + (bit_width as usize)) / 8 <= self.storage.as_ref().len());
+ for i in 0..(bit_width as usize) {
+ let mask = 1 << i;
+ let val_bit_is_set = val & mask == mask;
+ let index = if cfg!(target_endian = "big") {
+ bit_width as usize - 1 - i
+ } else {
+ i
+ };
+ self.set_bit(index + bit_offset, val_bit_is_set);
+ }
+ }
+}
+pub const StdVideoH264ChromaFormatIdc_STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME:
+ StdVideoH264ChromaFormatIdc = 0;
+pub const StdVideoH264ChromaFormatIdc_STD_VIDEO_H264_CHROMA_FORMAT_IDC_420:
+ StdVideoH264ChromaFormatIdc = 1;
+pub const StdVideoH264ChromaFormatIdc_STD_VIDEO_H264_CHROMA_FORMAT_IDC_422:
+ StdVideoH264ChromaFormatIdc = 2;
+pub const StdVideoH264ChromaFormatIdc_STD_VIDEO_H264_CHROMA_FORMAT_IDC_444:
+ StdVideoH264ChromaFormatIdc = 3;
+pub const StdVideoH264ChromaFormatIdc_STD_VIDEO_H264_CHROMA_FORMAT_IDC_INVALID:
+ StdVideoH264ChromaFormatIdc = 2147483647;
+pub type StdVideoH264ChromaFormatIdc = ::std::os::raw::c_uint;
+pub const StdVideoH264ProfileIdc_STD_VIDEO_H264_PROFILE_IDC_BASELINE: StdVideoH264ProfileIdc = 66;
+pub const StdVideoH264ProfileIdc_STD_VIDEO_H264_PROFILE_IDC_MAIN: StdVideoH264ProfileIdc = 77;
+pub const StdVideoH264ProfileIdc_STD_VIDEO_H264_PROFILE_IDC_HIGH: StdVideoH264ProfileIdc = 100;
+pub const StdVideoH264ProfileIdc_STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE:
+ StdVideoH264ProfileIdc = 244;
+pub const StdVideoH264ProfileIdc_STD_VIDEO_H264_PROFILE_IDC_INVALID: StdVideoH264ProfileIdc =
+ 2147483647;
+pub type StdVideoH264ProfileIdc = ::std::os::raw::c_uint;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_1_0: StdVideoH264LevelIdc = 0;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_1_1: StdVideoH264LevelIdc = 1;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_1_2: StdVideoH264LevelIdc = 2;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_1_3: StdVideoH264LevelIdc = 3;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_2_0: StdVideoH264LevelIdc = 4;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_2_1: StdVideoH264LevelIdc = 5;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_2_2: StdVideoH264LevelIdc = 6;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_3_0: StdVideoH264LevelIdc = 7;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_3_1: StdVideoH264LevelIdc = 8;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_3_2: StdVideoH264LevelIdc = 9;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_4_0: StdVideoH264LevelIdc = 10;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_4_1: StdVideoH264LevelIdc = 11;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_4_2: StdVideoH264LevelIdc = 12;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_5_0: StdVideoH264LevelIdc = 13;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_5_1: StdVideoH264LevelIdc = 14;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_5_2: StdVideoH264LevelIdc = 15;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_6_0: StdVideoH264LevelIdc = 16;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_6_1: StdVideoH264LevelIdc = 17;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_6_2: StdVideoH264LevelIdc = 18;
+pub const StdVideoH264LevelIdc_STD_VIDEO_H264_LEVEL_IDC_INVALID: StdVideoH264LevelIdc = 2147483647;
+pub type StdVideoH264LevelIdc = ::std::os::raw::c_uint;
+pub const StdVideoH264PocType_STD_VIDEO_H264_POC_TYPE_0: StdVideoH264PocType = 0;
+pub const StdVideoH264PocType_STD_VIDEO_H264_POC_TYPE_1: StdVideoH264PocType = 1;
+pub const StdVideoH264PocType_STD_VIDEO_H264_POC_TYPE_2: StdVideoH264PocType = 2;
+pub const StdVideoH264PocType_STD_VIDEO_H264_POC_TYPE_INVALID: StdVideoH264PocType = 2147483647;
+pub type StdVideoH264PocType = ::std::os::raw::c_uint;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_UNSPECIFIED:
+ StdVideoH264AspectRatioIdc = 0;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_SQUARE:
+ StdVideoH264AspectRatioIdc = 1;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_12_11:
+ StdVideoH264AspectRatioIdc = 2;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_10_11:
+ StdVideoH264AspectRatioIdc = 3;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_16_11:
+ StdVideoH264AspectRatioIdc = 4;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_40_33:
+ StdVideoH264AspectRatioIdc = 5;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_24_11:
+ StdVideoH264AspectRatioIdc = 6;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_20_11:
+ StdVideoH264AspectRatioIdc = 7;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_32_11:
+ StdVideoH264AspectRatioIdc = 8;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_80_33:
+ StdVideoH264AspectRatioIdc = 9;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_18_11:
+ StdVideoH264AspectRatioIdc = 10;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_15_11:
+ StdVideoH264AspectRatioIdc = 11;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_64_33:
+ StdVideoH264AspectRatioIdc = 12;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_160_99:
+ StdVideoH264AspectRatioIdc = 13;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_4_3:
+ StdVideoH264AspectRatioIdc = 14;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_3_2:
+ StdVideoH264AspectRatioIdc = 15;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_2_1:
+ StdVideoH264AspectRatioIdc = 16;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_EXTENDED_SAR:
+ StdVideoH264AspectRatioIdc = 255;
+pub const StdVideoH264AspectRatioIdc_STD_VIDEO_H264_ASPECT_RATIO_IDC_INVALID:
+ StdVideoH264AspectRatioIdc = 2147483647;
+pub type StdVideoH264AspectRatioIdc = ::std::os::raw::c_uint;
+pub const StdVideoH264WeightedBipredIdc_STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_DEFAULT:
+ StdVideoH264WeightedBipredIdc = 0;
+pub const StdVideoH264WeightedBipredIdc_STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_EXPLICIT:
+ StdVideoH264WeightedBipredIdc = 1;
+pub const StdVideoH264WeightedBipredIdc_STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_IMPLICIT:
+ StdVideoH264WeightedBipredIdc = 2;
+pub const StdVideoH264WeightedBipredIdc_STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_INVALID:
+ StdVideoH264WeightedBipredIdc = 2147483647;
+pub type StdVideoH264WeightedBipredIdc = ::std::os::raw::c_uint;
+pub const StdVideoH264ModificationOfPicNumsIdc_STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_SUBTRACT : StdVideoH264ModificationOfPicNumsIdc = 0 ;
+pub const StdVideoH264ModificationOfPicNumsIdc_STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_ADD : StdVideoH264ModificationOfPicNumsIdc = 1 ;
+pub const StdVideoH264ModificationOfPicNumsIdc_STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_LONG_TERM : StdVideoH264ModificationOfPicNumsIdc = 2 ;
+pub const StdVideoH264ModificationOfPicNumsIdc_STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_END:
+ StdVideoH264ModificationOfPicNumsIdc = 3;
+pub const StdVideoH264ModificationOfPicNumsIdc_STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_INVALID : StdVideoH264ModificationOfPicNumsIdc = 2147483647 ;
+pub type StdVideoH264ModificationOfPicNumsIdc = ::std::os::raw::c_uint;
+pub const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_END:
+ StdVideoH264MemMgmtControlOp = 0;
+pub const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_SHORT_TERM:
+ StdVideoH264MemMgmtControlOp = 1;
+pub const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_LONG_TERM:
+ StdVideoH264MemMgmtControlOp = 2;
+pub const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_LONG_TERM:
+ StdVideoH264MemMgmtControlOp = 3;
+pub const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_SET_MAX_LONG_TERM_INDEX : StdVideoH264MemMgmtControlOp = 4 ;
+pub const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_ALL:
+ StdVideoH264MemMgmtControlOp = 5;
+pub const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_CURRENT_AS_LONG_TERM : StdVideoH264MemMgmtControlOp = 6 ;
+pub const StdVideoH264MemMgmtControlOp_STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_INVALID:
+ StdVideoH264MemMgmtControlOp = 2147483647;
+pub type StdVideoH264MemMgmtControlOp = ::std::os::raw::c_uint;
+pub const StdVideoH264CabacInitIdc_STD_VIDEO_H264_CABAC_INIT_IDC_0: StdVideoH264CabacInitIdc = 0;
+pub const StdVideoH264CabacInitIdc_STD_VIDEO_H264_CABAC_INIT_IDC_1: StdVideoH264CabacInitIdc = 1;
+pub const StdVideoH264CabacInitIdc_STD_VIDEO_H264_CABAC_INIT_IDC_2: StdVideoH264CabacInitIdc = 2;
+pub const StdVideoH264CabacInitIdc_STD_VIDEO_H264_CABAC_INIT_IDC_INVALID: StdVideoH264CabacInitIdc =
+ 2147483647;
+pub type StdVideoH264CabacInitIdc = ::std::os::raw::c_uint;
+pub const StdVideoH264DisableDeblockingFilterIdc_STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_DISABLED : StdVideoH264DisableDeblockingFilterIdc = 0 ;
+pub const StdVideoH264DisableDeblockingFilterIdc_STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_ENABLED : StdVideoH264DisableDeblockingFilterIdc = 1 ;
+pub const StdVideoH264DisableDeblockingFilterIdc_STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_PARTIAL : StdVideoH264DisableDeblockingFilterIdc = 2 ;
+pub const StdVideoH264DisableDeblockingFilterIdc_STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_INVALID : StdVideoH264DisableDeblockingFilterIdc = 2147483647 ;
+pub type StdVideoH264DisableDeblockingFilterIdc = ::std::os::raw::c_uint;
+pub const StdVideoH264SliceType_STD_VIDEO_H264_SLICE_TYPE_P: StdVideoH264SliceType = 0;
+pub const StdVideoH264SliceType_STD_VIDEO_H264_SLICE_TYPE_B: StdVideoH264SliceType = 1;
+pub const StdVideoH264SliceType_STD_VIDEO_H264_SLICE_TYPE_I: StdVideoH264SliceType = 2;
+pub const StdVideoH264SliceType_STD_VIDEO_H264_SLICE_TYPE_INVALID: StdVideoH264SliceType =
+ 2147483647;
+pub type StdVideoH264SliceType = ::std::os::raw::c_uint;
+pub const StdVideoH264PictureType_STD_VIDEO_H264_PICTURE_TYPE_P: StdVideoH264PictureType = 0;
+pub const StdVideoH264PictureType_STD_VIDEO_H264_PICTURE_TYPE_B: StdVideoH264PictureType = 1;
+pub const StdVideoH264PictureType_STD_VIDEO_H264_PICTURE_TYPE_I: StdVideoH264PictureType = 2;
+pub const StdVideoH264PictureType_STD_VIDEO_H264_PICTURE_TYPE_IDR: StdVideoH264PictureType = 5;
+pub const StdVideoH264PictureType_STD_VIDEO_H264_PICTURE_TYPE_INVALID: StdVideoH264PictureType =
+ 2147483647;
+pub type StdVideoH264PictureType = ::std::os::raw::c_uint;
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH264SpsVuiFlags {
+ pub _bitfield_align_1: [u8; 0],
+ pub _bitfield_1: __BindgenBitfieldUnit<[u8; 2usize]>,
+ pub __bindgen_padding_0: u16,
+}
+#[test]
+fn bindgen_test_layout_StdVideoH264SpsVuiFlags() {
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH264SpsVuiFlags>(),
+ 4usize,
+ concat!("Size of: ", stringify!(StdVideoH264SpsVuiFlags))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH264SpsVuiFlags>(),
+ 4usize,
+ concat!("Alignment of ", stringify!(StdVideoH264SpsVuiFlags))
+ );
+}
+impl StdVideoH264SpsVuiFlags {
+ #[inline]
+ pub fn aspect_ratio_info_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_aspect_ratio_info_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(0usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn overscan_info_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_overscan_info_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(1usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn overscan_appropriate_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_overscan_appropriate_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(2usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn video_signal_type_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_video_signal_type_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(3usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn video_full_range_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_video_full_range_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(4usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn color_description_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_color_description_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(5usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn chroma_loc_info_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_chroma_loc_info_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(6usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn timing_info_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_timing_info_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(7usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn fixed_frame_rate_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_fixed_frame_rate_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(8usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn bitstream_restriction_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(9usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_bitstream_restriction_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(9usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn nal_hrd_parameters_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(10usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_nal_hrd_parameters_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(10usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn vcl_hrd_parameters_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(11usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_vcl_hrd_parameters_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(11usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn new_bitfield_1(
+ aspect_ratio_info_present_flag: u32,
+ overscan_info_present_flag: u32,
+ overscan_appropriate_flag: u32,
+ video_signal_type_present_flag: u32,
+ video_full_range_flag: u32,
+ color_description_present_flag: u32,
+ chroma_loc_info_present_flag: u32,
+ timing_info_present_flag: u32,
+ fixed_frame_rate_flag: u32,
+ bitstream_restriction_flag: u32,
+ nal_hrd_parameters_present_flag: u32,
+ vcl_hrd_parameters_present_flag: u32,
+ ) -> __BindgenBitfieldUnit<[u8; 2usize]> {
+ let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 2usize]> = Default::default();
+ __bindgen_bitfield_unit.set(0usize, 1u8, {
+ let aspect_ratio_info_present_flag: u32 =
+ unsafe { ::std::mem::transmute(aspect_ratio_info_present_flag) };
+ aspect_ratio_info_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(1usize, 1u8, {
+ let overscan_info_present_flag: u32 =
+ unsafe { ::std::mem::transmute(overscan_info_present_flag) };
+ overscan_info_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(2usize, 1u8, {
+ let overscan_appropriate_flag: u32 =
+ unsafe { ::std::mem::transmute(overscan_appropriate_flag) };
+ overscan_appropriate_flag as u64
+ });
+ __bindgen_bitfield_unit.set(3usize, 1u8, {
+ let video_signal_type_present_flag: u32 =
+ unsafe { ::std::mem::transmute(video_signal_type_present_flag) };
+ video_signal_type_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(4usize, 1u8, {
+ let video_full_range_flag: u32 =
+ unsafe { ::std::mem::transmute(video_full_range_flag) };
+ video_full_range_flag as u64
+ });
+ __bindgen_bitfield_unit.set(5usize, 1u8, {
+ let color_description_present_flag: u32 =
+ unsafe { ::std::mem::transmute(color_description_present_flag) };
+ color_description_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(6usize, 1u8, {
+ let chroma_loc_info_present_flag: u32 =
+ unsafe { ::std::mem::transmute(chroma_loc_info_present_flag) };
+ chroma_loc_info_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(7usize, 1u8, {
+ let timing_info_present_flag: u32 =
+ unsafe { ::std::mem::transmute(timing_info_present_flag) };
+ timing_info_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(8usize, 1u8, {
+ let fixed_frame_rate_flag: u32 =
+ unsafe { ::std::mem::transmute(fixed_frame_rate_flag) };
+ fixed_frame_rate_flag as u64
+ });
+ __bindgen_bitfield_unit.set(9usize, 1u8, {
+ let bitstream_restriction_flag: u32 =
+ unsafe { ::std::mem::transmute(bitstream_restriction_flag) };
+ bitstream_restriction_flag as u64
+ });
+ __bindgen_bitfield_unit.set(10usize, 1u8, {
+ let nal_hrd_parameters_present_flag: u32 =
+ unsafe { ::std::mem::transmute(nal_hrd_parameters_present_flag) };
+ nal_hrd_parameters_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(11usize, 1u8, {
+ let vcl_hrd_parameters_present_flag: u32 =
+ unsafe { ::std::mem::transmute(vcl_hrd_parameters_present_flag) };
+ vcl_hrd_parameters_present_flag as u64
+ });
+ __bindgen_bitfield_unit
+ }
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH264HrdParameters {
+ pub cpb_cnt_minus1: u8,
+ pub bit_rate_scale: u8,
+ pub cpb_size_scale: u8,
+ pub reserved1: u8,
+ pub bit_rate_value_minus1: [u32; 32usize],
+ pub cpb_size_value_minus1: [u32; 32usize],
+ pub cbr_flag: [u8; 32usize],
+ pub initial_cpb_removal_delay_length_minus1: u32,
+ pub cpb_removal_delay_length_minus1: u32,
+ pub dpb_output_delay_length_minus1: u32,
+ pub time_offset_length: u32,
+}
+#[test]
+fn bindgen_test_layout_StdVideoH264HrdParameters() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoH264HrdParameters> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH264HrdParameters>(),
+ 308usize,
+ concat!("Size of: ", stringify!(StdVideoH264HrdParameters))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH264HrdParameters>(),
+ 4usize,
+ concat!("Alignment of ", stringify!(StdVideoH264HrdParameters))
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).cpb_cnt_minus1) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264HrdParameters),
+ "::",
+ stringify!(cpb_cnt_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).bit_rate_scale) as usize - ptr as usize },
+ 1usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264HrdParameters),
+ "::",
+ stringify!(bit_rate_scale)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).cpb_size_scale) as usize - ptr as usize },
+ 2usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264HrdParameters),
+ "::",
+ stringify!(cpb_size_scale)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize },
+ 3usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264HrdParameters),
+ "::",
+ stringify!(reserved1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).bit_rate_value_minus1) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264HrdParameters),
+ "::",
+ stringify!(bit_rate_value_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).cpb_size_value_minus1) as usize - ptr as usize },
+ 132usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264HrdParameters),
+ "::",
+ stringify!(cpb_size_value_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).cbr_flag) as usize - ptr as usize },
+ 260usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264HrdParameters),
+ "::",
+ stringify!(cbr_flag)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).initial_cpb_removal_delay_length_minus1) as usize
+ - ptr as usize
+ },
+ 292usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264HrdParameters),
+ "::",
+ stringify!(initial_cpb_removal_delay_length_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).cpb_removal_delay_length_minus1) as usize - ptr as usize
+ },
+ 296usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264HrdParameters),
+ "::",
+ stringify!(cpb_removal_delay_length_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).dpb_output_delay_length_minus1) as usize - ptr as usize
+ },
+ 300usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264HrdParameters),
+ "::",
+ stringify!(dpb_output_delay_length_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).time_offset_length) as usize - ptr as usize },
+ 304usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264HrdParameters),
+ "::",
+ stringify!(time_offset_length)
+ )
+ );
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH264SequenceParameterSetVui {
+ pub flags: StdVideoH264SpsVuiFlags,
+ pub aspect_ratio_idc: StdVideoH264AspectRatioIdc,
+ pub sar_width: u16,
+ pub sar_height: u16,
+ pub video_format: u8,
+ pub colour_primaries: u8,
+ pub transfer_characteristics: u8,
+ pub matrix_coefficients: u8,
+ pub num_units_in_tick: u32,
+ pub time_scale: u32,
+ pub max_num_reorder_frames: u8,
+ pub max_dec_frame_buffering: u8,
+ pub chroma_sample_loc_type_top_field: u8,
+ pub chroma_sample_loc_type_bottom_field: u8,
+ pub reserved1: u32,
+ pub pHrdParameters: *const StdVideoH264HrdParameters,
+}
+#[test]
+fn bindgen_test_layout_StdVideoH264SequenceParameterSetVui() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoH264SequenceParameterSetVui> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH264SequenceParameterSetVui>(),
+ 40usize,
+ concat!("Size of: ", stringify!(StdVideoH264SequenceParameterSetVui))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH264SequenceParameterSetVui>(),
+ 8usize,
+ concat!(
+ "Alignment of ",
+ stringify!(StdVideoH264SequenceParameterSetVui)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(flags)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).aspect_ratio_idc) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(aspect_ratio_idc)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).sar_width) as usize - ptr as usize },
+ 8usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(sar_width)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).sar_height) as usize - ptr as usize },
+ 10usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(sar_height)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).video_format) as usize - ptr as usize },
+ 12usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(video_format)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).colour_primaries) as usize - ptr as usize },
+ 13usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(colour_primaries)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).transfer_characteristics) as usize - ptr as usize },
+ 14usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(transfer_characteristics)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).matrix_coefficients) as usize - ptr as usize },
+ 15usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(matrix_coefficients)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).num_units_in_tick) as usize - ptr as usize },
+ 16usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(num_units_in_tick)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).time_scale) as usize - ptr as usize },
+ 20usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(time_scale)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).max_num_reorder_frames) as usize - ptr as usize },
+ 24usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(max_num_reorder_frames)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).max_dec_frame_buffering) as usize - ptr as usize },
+ 25usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(max_dec_frame_buffering)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).chroma_sample_loc_type_top_field) as usize - ptr as usize
+ },
+ 26usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(chroma_sample_loc_type_top_field)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).chroma_sample_loc_type_bottom_field) as usize - ptr as usize
+ },
+ 27usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(chroma_sample_loc_type_bottom_field)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize },
+ 28usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(reserved1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pHrdParameters) as usize - ptr as usize },
+ 32usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSetVui),
+ "::",
+ stringify!(pHrdParameters)
+ )
+ );
+}
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH264SpsFlags {
+ pub _bitfield_align_1: [u8; 0],
+ pub _bitfield_1: __BindgenBitfieldUnit<[u8; 2usize]>,
+ pub __bindgen_padding_0: u16,
+}
+#[test]
+fn bindgen_test_layout_StdVideoH264SpsFlags() {
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH264SpsFlags>(),
+ 4usize,
+ concat!("Size of: ", stringify!(StdVideoH264SpsFlags))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH264SpsFlags>(),
+ 4usize,
+ concat!("Alignment of ", stringify!(StdVideoH264SpsFlags))
+ );
+}
+impl StdVideoH264SpsFlags {
+ #[inline]
+ pub fn constraint_set0_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_constraint_set0_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(0usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn constraint_set1_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_constraint_set1_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(1usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn constraint_set2_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_constraint_set2_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(2usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn constraint_set3_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_constraint_set3_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(3usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn constraint_set4_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_constraint_set4_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(4usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn constraint_set5_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_constraint_set5_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(5usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn direct_8x8_inference_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_direct_8x8_inference_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(6usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn mb_adaptive_frame_field_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_mb_adaptive_frame_field_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(7usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn frame_mbs_only_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_frame_mbs_only_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(8usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn delta_pic_order_always_zero_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(9usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_delta_pic_order_always_zero_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(9usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn separate_colour_plane_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(10usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_separate_colour_plane_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(10usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn gaps_in_frame_num_value_allowed_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(11usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_gaps_in_frame_num_value_allowed_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(11usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn qpprime_y_zero_transform_bypass_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(12usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_qpprime_y_zero_transform_bypass_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(12usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn frame_cropping_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(13usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_frame_cropping_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(13usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn seq_scaling_matrix_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(14usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_seq_scaling_matrix_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(14usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn vui_parameters_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(15usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_vui_parameters_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(15usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn new_bitfield_1(
+ constraint_set0_flag: u32,
+ constraint_set1_flag: u32,
+ constraint_set2_flag: u32,
+ constraint_set3_flag: u32,
+ constraint_set4_flag: u32,
+ constraint_set5_flag: u32,
+ direct_8x8_inference_flag: u32,
+ mb_adaptive_frame_field_flag: u32,
+ frame_mbs_only_flag: u32,
+ delta_pic_order_always_zero_flag: u32,
+ separate_colour_plane_flag: u32,
+ gaps_in_frame_num_value_allowed_flag: u32,
+ qpprime_y_zero_transform_bypass_flag: u32,
+ frame_cropping_flag: u32,
+ seq_scaling_matrix_present_flag: u32,
+ vui_parameters_present_flag: u32,
+ ) -> __BindgenBitfieldUnit<[u8; 2usize]> {
+ let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 2usize]> = Default::default();
+ __bindgen_bitfield_unit.set(0usize, 1u8, {
+ let constraint_set0_flag: u32 = unsafe { ::std::mem::transmute(constraint_set0_flag) };
+ constraint_set0_flag as u64
+ });
+ __bindgen_bitfield_unit.set(1usize, 1u8, {
+ let constraint_set1_flag: u32 = unsafe { ::std::mem::transmute(constraint_set1_flag) };
+ constraint_set1_flag as u64
+ });
+ __bindgen_bitfield_unit.set(2usize, 1u8, {
+ let constraint_set2_flag: u32 = unsafe { ::std::mem::transmute(constraint_set2_flag) };
+ constraint_set2_flag as u64
+ });
+ __bindgen_bitfield_unit.set(3usize, 1u8, {
+ let constraint_set3_flag: u32 = unsafe { ::std::mem::transmute(constraint_set3_flag) };
+ constraint_set3_flag as u64
+ });
+ __bindgen_bitfield_unit.set(4usize, 1u8, {
+ let constraint_set4_flag: u32 = unsafe { ::std::mem::transmute(constraint_set4_flag) };
+ constraint_set4_flag as u64
+ });
+ __bindgen_bitfield_unit.set(5usize, 1u8, {
+ let constraint_set5_flag: u32 = unsafe { ::std::mem::transmute(constraint_set5_flag) };
+ constraint_set5_flag as u64
+ });
+ __bindgen_bitfield_unit.set(6usize, 1u8, {
+ let direct_8x8_inference_flag: u32 =
+ unsafe { ::std::mem::transmute(direct_8x8_inference_flag) };
+ direct_8x8_inference_flag as u64
+ });
+ __bindgen_bitfield_unit.set(7usize, 1u8, {
+ let mb_adaptive_frame_field_flag: u32 =
+ unsafe { ::std::mem::transmute(mb_adaptive_frame_field_flag) };
+ mb_adaptive_frame_field_flag as u64
+ });
+ __bindgen_bitfield_unit.set(8usize, 1u8, {
+ let frame_mbs_only_flag: u32 = unsafe { ::std::mem::transmute(frame_mbs_only_flag) };
+ frame_mbs_only_flag as u64
+ });
+ __bindgen_bitfield_unit.set(9usize, 1u8, {
+ let delta_pic_order_always_zero_flag: u32 =
+ unsafe { ::std::mem::transmute(delta_pic_order_always_zero_flag) };
+ delta_pic_order_always_zero_flag as u64
+ });
+ __bindgen_bitfield_unit.set(10usize, 1u8, {
+ let separate_colour_plane_flag: u32 =
+ unsafe { ::std::mem::transmute(separate_colour_plane_flag) };
+ separate_colour_plane_flag as u64
+ });
+ __bindgen_bitfield_unit.set(11usize, 1u8, {
+ let gaps_in_frame_num_value_allowed_flag: u32 =
+ unsafe { ::std::mem::transmute(gaps_in_frame_num_value_allowed_flag) };
+ gaps_in_frame_num_value_allowed_flag as u64
+ });
+ __bindgen_bitfield_unit.set(12usize, 1u8, {
+ let qpprime_y_zero_transform_bypass_flag: u32 =
+ unsafe { ::std::mem::transmute(qpprime_y_zero_transform_bypass_flag) };
+ qpprime_y_zero_transform_bypass_flag as u64
+ });
+ __bindgen_bitfield_unit.set(13usize, 1u8, {
+ let frame_cropping_flag: u32 = unsafe { ::std::mem::transmute(frame_cropping_flag) };
+ frame_cropping_flag as u64
+ });
+ __bindgen_bitfield_unit.set(14usize, 1u8, {
+ let seq_scaling_matrix_present_flag: u32 =
+ unsafe { ::std::mem::transmute(seq_scaling_matrix_present_flag) };
+ seq_scaling_matrix_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(15usize, 1u8, {
+ let vui_parameters_present_flag: u32 =
+ unsafe { ::std::mem::transmute(vui_parameters_present_flag) };
+ vui_parameters_present_flag as u64
+ });
+ __bindgen_bitfield_unit
+ }
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH264ScalingLists {
+ pub scaling_list_present_mask: u16,
+ pub use_default_scaling_matrix_mask: u16,
+ pub ScalingList4x4: [[u8; 16usize]; 6usize],
+ pub ScalingList8x8: [[u8; 64usize]; 6usize],
+}
+#[test]
+fn bindgen_test_layout_StdVideoH264ScalingLists() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoH264ScalingLists> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH264ScalingLists>(),
+ 484usize,
+ concat!("Size of: ", stringify!(StdVideoH264ScalingLists))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH264ScalingLists>(),
+ 2usize,
+ concat!("Alignment of ", stringify!(StdVideoH264ScalingLists))
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).scaling_list_present_mask) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264ScalingLists),
+ "::",
+ stringify!(scaling_list_present_mask)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).use_default_scaling_matrix_mask) as usize - ptr as usize
+ },
+ 2usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264ScalingLists),
+ "::",
+ stringify!(use_default_scaling_matrix_mask)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).ScalingList4x4) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264ScalingLists),
+ "::",
+ stringify!(ScalingList4x4)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).ScalingList8x8) as usize - ptr as usize },
+ 100usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264ScalingLists),
+ "::",
+ stringify!(ScalingList8x8)
+ )
+ );
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH264SequenceParameterSet {
+ pub flags: StdVideoH264SpsFlags,
+ pub profile_idc: StdVideoH264ProfileIdc,
+ pub level_idc: StdVideoH264LevelIdc,
+ pub chroma_format_idc: StdVideoH264ChromaFormatIdc,
+ pub seq_parameter_set_id: u8,
+ pub bit_depth_luma_minus8: u8,
+ pub bit_depth_chroma_minus8: u8,
+ pub log2_max_frame_num_minus4: u8,
+ pub pic_order_cnt_type: StdVideoH264PocType,
+ pub offset_for_non_ref_pic: i32,
+ pub offset_for_top_to_bottom_field: i32,
+ pub log2_max_pic_order_cnt_lsb_minus4: u8,
+ pub num_ref_frames_in_pic_order_cnt_cycle: u8,
+ pub max_num_ref_frames: u8,
+ pub reserved1: u8,
+ pub pic_width_in_mbs_minus1: u32,
+ pub pic_height_in_map_units_minus1: u32,
+ pub frame_crop_left_offset: u32,
+ pub frame_crop_right_offset: u32,
+ pub frame_crop_top_offset: u32,
+ pub frame_crop_bottom_offset: u32,
+ pub reserved2: u32,
+ pub pOffsetForRefFrame: *const i32,
+ pub pScalingLists: *const StdVideoH264ScalingLists,
+ pub pSequenceParameterSetVui: *const StdVideoH264SequenceParameterSetVui,
+}
+#[test]
+fn bindgen_test_layout_StdVideoH264SequenceParameterSet() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoH264SequenceParameterSet> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH264SequenceParameterSet>(),
+ 88usize,
+ concat!("Size of: ", stringify!(StdVideoH264SequenceParameterSet))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH264SequenceParameterSet>(),
+ 8usize,
+ concat!(
+ "Alignment of ",
+ stringify!(StdVideoH264SequenceParameterSet)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(flags)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).profile_idc) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(profile_idc)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).level_idc) as usize - ptr as usize },
+ 8usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(level_idc)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).chroma_format_idc) as usize - ptr as usize },
+ 12usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(chroma_format_idc)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).seq_parameter_set_id) as usize - ptr as usize },
+ 16usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(seq_parameter_set_id)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).bit_depth_luma_minus8) as usize - ptr as usize },
+ 17usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(bit_depth_luma_minus8)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).bit_depth_chroma_minus8) as usize - ptr as usize },
+ 18usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(bit_depth_chroma_minus8)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).log2_max_frame_num_minus4) as usize - ptr as usize },
+ 19usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(log2_max_frame_num_minus4)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pic_order_cnt_type) as usize - ptr as usize },
+ 20usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(pic_order_cnt_type)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).offset_for_non_ref_pic) as usize - ptr as usize },
+ 24usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(offset_for_non_ref_pic)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).offset_for_top_to_bottom_field) as usize - ptr as usize
+ },
+ 28usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(offset_for_top_to_bottom_field)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).log2_max_pic_order_cnt_lsb_minus4) as usize - ptr as usize
+ },
+ 32usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(log2_max_pic_order_cnt_lsb_minus4)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).num_ref_frames_in_pic_order_cnt_cycle) as usize
+ - ptr as usize
+ },
+ 33usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(num_ref_frames_in_pic_order_cnt_cycle)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).max_num_ref_frames) as usize - ptr as usize },
+ 34usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(max_num_ref_frames)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize },
+ 35usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(reserved1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pic_width_in_mbs_minus1) as usize - ptr as usize },
+ 36usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(pic_width_in_mbs_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).pic_height_in_map_units_minus1) as usize - ptr as usize
+ },
+ 40usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(pic_height_in_map_units_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).frame_crop_left_offset) as usize - ptr as usize },
+ 44usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(frame_crop_left_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).frame_crop_right_offset) as usize - ptr as usize },
+ 48usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(frame_crop_right_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).frame_crop_top_offset) as usize - ptr as usize },
+ 52usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(frame_crop_top_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).frame_crop_bottom_offset) as usize - ptr as usize },
+ 56usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(frame_crop_bottom_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).reserved2) as usize - ptr as usize },
+ 60usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(reserved2)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pOffsetForRefFrame) as usize - ptr as usize },
+ 64usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(pOffsetForRefFrame)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pScalingLists) as usize - ptr as usize },
+ 72usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(pScalingLists)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pSequenceParameterSetVui) as usize - ptr as usize },
+ 80usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264SequenceParameterSet),
+ "::",
+ stringify!(pSequenceParameterSetVui)
+ )
+ );
+}
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH264PpsFlags {
+ pub _bitfield_align_1: [u8; 0],
+ pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
+ pub __bindgen_padding_0: [u8; 3usize],
+}
+#[test]
+fn bindgen_test_layout_StdVideoH264PpsFlags() {
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH264PpsFlags>(),
+ 4usize,
+ concat!("Size of: ", stringify!(StdVideoH264PpsFlags))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH264PpsFlags>(),
+ 4usize,
+ concat!("Alignment of ", stringify!(StdVideoH264PpsFlags))
+ );
+}
+impl StdVideoH264PpsFlags {
+ #[inline]
+ pub fn transform_8x8_mode_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_transform_8x8_mode_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(0usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn redundant_pic_cnt_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_redundant_pic_cnt_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(1usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn constrained_intra_pred_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_constrained_intra_pred_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(2usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn deblocking_filter_control_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_deblocking_filter_control_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(3usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn weighted_pred_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_weighted_pred_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(4usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn bottom_field_pic_order_in_frame_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_bottom_field_pic_order_in_frame_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(5usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn entropy_coding_mode_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_entropy_coding_mode_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(6usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn pic_scaling_matrix_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_pic_scaling_matrix_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(7usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn new_bitfield_1(
+ transform_8x8_mode_flag: u32,
+ redundant_pic_cnt_present_flag: u32,
+ constrained_intra_pred_flag: u32,
+ deblocking_filter_control_present_flag: u32,
+ weighted_pred_flag: u32,
+ bottom_field_pic_order_in_frame_present_flag: u32,
+ entropy_coding_mode_flag: u32,
+ pic_scaling_matrix_present_flag: u32,
+ ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
+ let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
+ __bindgen_bitfield_unit.set(0usize, 1u8, {
+ let transform_8x8_mode_flag: u32 =
+ unsafe { ::std::mem::transmute(transform_8x8_mode_flag) };
+ transform_8x8_mode_flag as u64
+ });
+ __bindgen_bitfield_unit.set(1usize, 1u8, {
+ let redundant_pic_cnt_present_flag: u32 =
+ unsafe { ::std::mem::transmute(redundant_pic_cnt_present_flag) };
+ redundant_pic_cnt_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(2usize, 1u8, {
+ let constrained_intra_pred_flag: u32 =
+ unsafe { ::std::mem::transmute(constrained_intra_pred_flag) };
+ constrained_intra_pred_flag as u64
+ });
+ __bindgen_bitfield_unit.set(3usize, 1u8, {
+ let deblocking_filter_control_present_flag: u32 =
+ unsafe { ::std::mem::transmute(deblocking_filter_control_present_flag) };
+ deblocking_filter_control_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(4usize, 1u8, {
+ let weighted_pred_flag: u32 = unsafe { ::std::mem::transmute(weighted_pred_flag) };
+ weighted_pred_flag as u64
+ });
+ __bindgen_bitfield_unit.set(5usize, 1u8, {
+ let bottom_field_pic_order_in_frame_present_flag: u32 =
+ unsafe { ::std::mem::transmute(bottom_field_pic_order_in_frame_present_flag) };
+ bottom_field_pic_order_in_frame_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(6usize, 1u8, {
+ let entropy_coding_mode_flag: u32 =
+ unsafe { ::std::mem::transmute(entropy_coding_mode_flag) };
+ entropy_coding_mode_flag as u64
+ });
+ __bindgen_bitfield_unit.set(7usize, 1u8, {
+ let pic_scaling_matrix_present_flag: u32 =
+ unsafe { ::std::mem::transmute(pic_scaling_matrix_present_flag) };
+ pic_scaling_matrix_present_flag as u64
+ });
+ __bindgen_bitfield_unit
+ }
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH264PictureParameterSet {
+ pub flags: StdVideoH264PpsFlags,
+ pub seq_parameter_set_id: u8,
+ pub pic_parameter_set_id: u8,
+ pub num_ref_idx_l0_default_active_minus1: u8,
+ pub num_ref_idx_l1_default_active_minus1: u8,
+ pub weighted_bipred_idc: StdVideoH264WeightedBipredIdc,
+ pub pic_init_qp_minus26: i8,
+ pub pic_init_qs_minus26: i8,
+ pub chroma_qp_index_offset: i8,
+ pub second_chroma_qp_index_offset: i8,
+ pub pScalingLists: *const StdVideoH264ScalingLists,
+}
+#[test]
+fn bindgen_test_layout_StdVideoH264PictureParameterSet() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoH264PictureParameterSet> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH264PictureParameterSet>(),
+ 24usize,
+ concat!("Size of: ", stringify!(StdVideoH264PictureParameterSet))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH264PictureParameterSet>(),
+ 8usize,
+ concat!("Alignment of ", stringify!(StdVideoH264PictureParameterSet))
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264PictureParameterSet),
+ "::",
+ stringify!(flags)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).seq_parameter_set_id) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264PictureParameterSet),
+ "::",
+ stringify!(seq_parameter_set_id)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pic_parameter_set_id) as usize - ptr as usize },
+ 5usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264PictureParameterSet),
+ "::",
+ stringify!(pic_parameter_set_id)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).num_ref_idx_l0_default_active_minus1) as usize
+ - ptr as usize
+ },
+ 6usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264PictureParameterSet),
+ "::",
+ stringify!(num_ref_idx_l0_default_active_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).num_ref_idx_l1_default_active_minus1) as usize
+ - ptr as usize
+ },
+ 7usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264PictureParameterSet),
+ "::",
+ stringify!(num_ref_idx_l1_default_active_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).weighted_bipred_idc) as usize - ptr as usize },
+ 8usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264PictureParameterSet),
+ "::",
+ stringify!(weighted_bipred_idc)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pic_init_qp_minus26) as usize - ptr as usize },
+ 12usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264PictureParameterSet),
+ "::",
+ stringify!(pic_init_qp_minus26)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pic_init_qs_minus26) as usize - ptr as usize },
+ 13usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264PictureParameterSet),
+ "::",
+ stringify!(pic_init_qs_minus26)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).chroma_qp_index_offset) as usize - ptr as usize },
+ 14usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264PictureParameterSet),
+ "::",
+ stringify!(chroma_qp_index_offset)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).second_chroma_qp_index_offset) as usize - ptr as usize
+ },
+ 15usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264PictureParameterSet),
+ "::",
+ stringify!(second_chroma_qp_index_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pScalingLists) as usize - ptr as usize },
+ 16usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH264PictureParameterSet),
+ "::",
+ stringify!(pScalingLists)
+ )
+ );
+}
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoDecodeH264PictureInfoFlags {
+ pub _bitfield_align_1: [u8; 0],
+ pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
+ pub __bindgen_padding_0: [u8; 3usize],
+}
+#[test]
+fn bindgen_test_layout_StdVideoDecodeH264PictureInfoFlags() {
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoDecodeH264PictureInfoFlags>(),
+ 4usize,
+ concat!("Size of: ", stringify!(StdVideoDecodeH264PictureInfoFlags))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoDecodeH264PictureInfoFlags>(),
+ 4usize,
+ concat!(
+ "Alignment of ",
+ stringify!(StdVideoDecodeH264PictureInfoFlags)
+ )
+ );
+}
+impl StdVideoDecodeH264PictureInfoFlags {
+ #[inline]
+ pub fn field_pic_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_field_pic_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(0usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn is_intra(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_is_intra(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(1usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn IdrPicFlag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_IdrPicFlag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(2usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn bottom_field_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_bottom_field_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(3usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn is_reference(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_is_reference(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(4usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn complementary_field_pair(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_complementary_field_pair(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(5usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn new_bitfield_1(
+ field_pic_flag: u32,
+ is_intra: u32,
+ IdrPicFlag: u32,
+ bottom_field_flag: u32,
+ is_reference: u32,
+ complementary_field_pair: u32,
+ ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
+ let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
+ __bindgen_bitfield_unit.set(0usize, 1u8, {
+ let field_pic_flag: u32 = unsafe { ::std::mem::transmute(field_pic_flag) };
+ field_pic_flag as u64
+ });
+ __bindgen_bitfield_unit.set(1usize, 1u8, {
+ let is_intra: u32 = unsafe { ::std::mem::transmute(is_intra) };
+ is_intra as u64
+ });
+ __bindgen_bitfield_unit.set(2usize, 1u8, {
+ let IdrPicFlag: u32 = unsafe { ::std::mem::transmute(IdrPicFlag) };
+ IdrPicFlag as u64
+ });
+ __bindgen_bitfield_unit.set(3usize, 1u8, {
+ let bottom_field_flag: u32 = unsafe { ::std::mem::transmute(bottom_field_flag) };
+ bottom_field_flag as u64
+ });
+ __bindgen_bitfield_unit.set(4usize, 1u8, {
+ let is_reference: u32 = unsafe { ::std::mem::transmute(is_reference) };
+ is_reference as u64
+ });
+ __bindgen_bitfield_unit.set(5usize, 1u8, {
+ let complementary_field_pair: u32 =
+ unsafe { ::std::mem::transmute(complementary_field_pair) };
+ complementary_field_pair as u64
+ });
+ __bindgen_bitfield_unit
+ }
+}
// Picture-level parameters for H.264 decode; bindgen-generated mirror of the
// C struct `StdVideoDecodeH264PictureInfo` from the Vulkan Video std headers.
// Field names intentionally keep the C header's spelling (e.g. `PicOrderCnt`).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct StdVideoDecodeH264PictureInfo {
    pub flags: StdVideoDecodeH264PictureInfoFlags,
    pub seq_parameter_set_id: u8,
    pub pic_parameter_set_id: u8,
    // reserved1/reserved2 pad the struct; the header reserves them — do not use.
    pub reserved1: u8,
    pub reserved2: u8,
    pub frame_num: u16,
    pub idr_pic_id: u16,
    pub PicOrderCnt: [i32; 2usize],
}
// Bindgen-generated layout test: asserts that the Rust struct's size,
// alignment, and every field offset match the C ABI of the original header.
#[test]
fn bindgen_test_layout_StdVideoDecodeH264PictureInfo() {
    const UNINIT: ::std::mem::MaybeUninit<StdVideoDecodeH264PictureInfo> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<StdVideoDecodeH264PictureInfo>(),
        20usize,
        concat!("Size of: ", stringify!(StdVideoDecodeH264PictureInfo))
    );
    assert_eq!(
        ::std::mem::align_of::<StdVideoDecodeH264PictureInfo>(),
        4usize,
        concat!("Alignment of ", stringify!(StdVideoDecodeH264PictureInfo))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoDecodeH264PictureInfo),
            "::",
            stringify!(flags)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).seq_parameter_set_id) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoDecodeH264PictureInfo),
            "::",
            stringify!(seq_parameter_set_id)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pic_parameter_set_id) as usize - ptr as usize },
        5usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoDecodeH264PictureInfo),
            "::",
            stringify!(pic_parameter_set_id)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize },
        6usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoDecodeH264PictureInfo),
            "::",
            stringify!(reserved1)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).reserved2) as usize - ptr as usize },
        7usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoDecodeH264PictureInfo),
            "::",
            stringify!(reserved2)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).frame_num) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoDecodeH264PictureInfo),
            "::",
            stringify!(frame_num)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).idr_pic_id) as usize - ptr as usize },
        10usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoDecodeH264PictureInfo),
            "::",
            stringify!(idr_pic_id)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).PicOrderCnt) as usize - ptr as usize },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoDecodeH264PictureInfo),
            "::",
            stringify!(PicOrderCnt)
        )
    );
}
// Bit flags for an H.264 decode reference picture, packed by bindgen into a
// 1-byte bitfield unit padded out to 4 bytes to match the C header's layout.
// Accessors for the individual bits live in the `impl` block below this test.
#[repr(C)]
#[repr(align(4))]
#[derive(Debug, Copy, Clone)]
pub struct StdVideoDecodeH264ReferenceInfoFlags {
    pub _bitfield_align_1: [u8; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
    pub __bindgen_padding_0: [u8; 3usize],
}
// Bindgen-generated layout test: asserts C-ABI size and alignment.
#[test]
fn bindgen_test_layout_StdVideoDecodeH264ReferenceInfoFlags() {
    assert_eq!(
        ::std::mem::size_of::<StdVideoDecodeH264ReferenceInfoFlags>(),
        4usize,
        concat!(
            "Size of: ",
            stringify!(StdVideoDecodeH264ReferenceInfoFlags)
        )
    );
    assert_eq!(
        ::std::mem::align_of::<StdVideoDecodeH264ReferenceInfoFlags>(),
        4usize,
        concat!(
            "Alignment of ",
            stringify!(StdVideoDecodeH264ReferenceInfoFlags)
        )
    );
}
// Bindgen-generated accessors for the packed bitfield. Bit layout within
// `_bitfield_1` (each field is 1 bit wide):
//   bit 0: top_field_flag
//   bit 1: bottom_field_flag
//   bit 2: used_for_long_term_reference
//   bit 3: is_non_existing
// Getters return the raw bit as u32 (0 or 1); setters store the low bit of
// `val`. `new_bitfield_1` builds a unit with all four bits in one call.
impl StdVideoDecodeH264ReferenceInfoFlags {
    #[inline]
    pub fn top_field_flag(&self) -> u32 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
    }
    #[inline]
    pub fn set_top_field_flag(&mut self, val: u32) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(0usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub fn bottom_field_flag(&self) -> u32 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
    }
    #[inline]
    pub fn set_bottom_field_flag(&mut self, val: u32) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(1usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub fn used_for_long_term_reference(&self) -> u32 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) }
    }
    #[inline]
    pub fn set_used_for_long_term_reference(&mut self, val: u32) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(2usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub fn is_non_existing(&self) -> u32 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) }
    }
    #[inline]
    pub fn set_is_non_existing(&mut self, val: u32) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(3usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub fn new_bitfield_1(
        top_field_flag: u32,
        bottom_field_flag: u32,
        used_for_long_term_reference: u32,
        is_non_existing: u32,
    ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
        __bindgen_bitfield_unit.set(0usize, 1u8, {
            let top_field_flag: u32 = unsafe { ::std::mem::transmute(top_field_flag) };
            top_field_flag as u64
        });
        __bindgen_bitfield_unit.set(1usize, 1u8, {
            let bottom_field_flag: u32 = unsafe { ::std::mem::transmute(bottom_field_flag) };
            bottom_field_flag as u64
        });
        __bindgen_bitfield_unit.set(2usize, 1u8, {
            let used_for_long_term_reference: u32 =
                unsafe { ::std::mem::transmute(used_for_long_term_reference) };
            used_for_long_term_reference as u64
        });
        __bindgen_bitfield_unit.set(3usize, 1u8, {
            let is_non_existing: u32 = unsafe { ::std::mem::transmute(is_non_existing) };
            is_non_existing as u64
        });
        __bindgen_bitfield_unit
    }
}
// Reference-picture description for H.264 decode; bindgen-generated mirror of
// the C struct `StdVideoDecodeH264ReferenceInfo`. `FrameNum`/`PicOrderCnt`
// keep the header's capitalized spelling.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct StdVideoDecodeH264ReferenceInfo {
    pub flags: StdVideoDecodeH264ReferenceInfoFlags,
    pub FrameNum: u16,
    // Reserved by the header; do not use.
    pub reserved: u16,
    pub PicOrderCnt: [i32; 2usize],
}
// Bindgen-generated layout test: asserts C-ABI size, alignment, and offsets.
#[test]
fn bindgen_test_layout_StdVideoDecodeH264ReferenceInfo() {
    const UNINIT: ::std::mem::MaybeUninit<StdVideoDecodeH264ReferenceInfo> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<StdVideoDecodeH264ReferenceInfo>(),
        16usize,
        concat!("Size of: ", stringify!(StdVideoDecodeH264ReferenceInfo))
    );
    assert_eq!(
        ::std::mem::align_of::<StdVideoDecodeH264ReferenceInfo>(),
        4usize,
        concat!("Alignment of ", stringify!(StdVideoDecodeH264ReferenceInfo))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoDecodeH264ReferenceInfo),
            "::",
            stringify!(flags)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).FrameNum) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoDecodeH264ReferenceInfo),
            "::",
            stringify!(FrameNum)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).reserved) as usize - ptr as usize },
        6usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoDecodeH264ReferenceInfo),
            "::",
            stringify!(reserved)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).PicOrderCnt) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoDecodeH264ReferenceInfo),
            "::",
            stringify!(PicOrderCnt)
        )
    );
}
// H.265 enumerations, emitted by bindgen as `pub const` values plus a
// `c_uint` type alias per enum (C-enum style, not Rust enums, so any u32 is
// representable). The `_INVALID` member of each enum is i32::MAX (0x7FFFFFFF),
// matching the C headers' sentinel.

// StdVideoH265ChromaFormatIdc: chroma subsampling (chroma_format_idc) values.
pub const StdVideoH265ChromaFormatIdc_STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME:
    StdVideoH265ChromaFormatIdc = 0;
pub const StdVideoH265ChromaFormatIdc_STD_VIDEO_H265_CHROMA_FORMAT_IDC_420:
    StdVideoH265ChromaFormatIdc = 1;
pub const StdVideoH265ChromaFormatIdc_STD_VIDEO_H265_CHROMA_FORMAT_IDC_422:
    StdVideoH265ChromaFormatIdc = 2;
pub const StdVideoH265ChromaFormatIdc_STD_VIDEO_H265_CHROMA_FORMAT_IDC_444:
    StdVideoH265ChromaFormatIdc = 3;
pub const StdVideoH265ChromaFormatIdc_STD_VIDEO_H265_CHROMA_FORMAT_IDC_INVALID:
    StdVideoH265ChromaFormatIdc = 2147483647;
pub type StdVideoH265ChromaFormatIdc = ::std::os::raw::c_uint;
// StdVideoH265ProfileIdc: H.265 profile identifiers (values are non-contiguous
// by design; they follow the header, e.g. SCC extensions = 9).
pub const StdVideoH265ProfileIdc_STD_VIDEO_H265_PROFILE_IDC_MAIN: StdVideoH265ProfileIdc = 1;
pub const StdVideoH265ProfileIdc_STD_VIDEO_H265_PROFILE_IDC_MAIN_10: StdVideoH265ProfileIdc = 2;
pub const StdVideoH265ProfileIdc_STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE:
    StdVideoH265ProfileIdc = 3;
pub const StdVideoH265ProfileIdc_STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS:
    StdVideoH265ProfileIdc = 4;
pub const StdVideoH265ProfileIdc_STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS: StdVideoH265ProfileIdc =
    9;
pub const StdVideoH265ProfileIdc_STD_VIDEO_H265_PROFILE_IDC_INVALID: StdVideoH265ProfileIdc =
    2147483647;
pub type StdVideoH265ProfileIdc = ::std::os::raw::c_uint;
// StdVideoH265LevelIdc: H.265 level identifiers, 0-based ordinals (not the
// raw level_idc values from the bitstream).
pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_1_0: StdVideoH265LevelIdc = 0;
pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_2_0: StdVideoH265LevelIdc = 1;
pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_2_1: StdVideoH265LevelIdc = 2;
pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_3_0: StdVideoH265LevelIdc = 3;
pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_3_1: StdVideoH265LevelIdc = 4;
pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_4_0: StdVideoH265LevelIdc = 5;
pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_4_1: StdVideoH265LevelIdc = 6;
pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_5_0: StdVideoH265LevelIdc = 7;
pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_5_1: StdVideoH265LevelIdc = 8;
pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_5_2: StdVideoH265LevelIdc = 9;
pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_6_0: StdVideoH265LevelIdc = 10;
pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_6_1: StdVideoH265LevelIdc = 11;
pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_6_2: StdVideoH265LevelIdc = 12;
pub const StdVideoH265LevelIdc_STD_VIDEO_H265_LEVEL_IDC_INVALID: StdVideoH265LevelIdc = 2147483647;
pub type StdVideoH265LevelIdc = ::std::os::raw::c_uint;
// StdVideoH265SliceType: slice_type values (B=0, P=1, I=2 per the header).
pub const StdVideoH265SliceType_STD_VIDEO_H265_SLICE_TYPE_B: StdVideoH265SliceType = 0;
pub const StdVideoH265SliceType_STD_VIDEO_H265_SLICE_TYPE_P: StdVideoH265SliceType = 1;
pub const StdVideoH265SliceType_STD_VIDEO_H265_SLICE_TYPE_I: StdVideoH265SliceType = 2;
pub const StdVideoH265SliceType_STD_VIDEO_H265_SLICE_TYPE_INVALID: StdVideoH265SliceType =
    2147483647;
pub type StdVideoH265SliceType = ::std::os::raw::c_uint;
// StdVideoH265PictureType: picture types; note P/B ordering differs from
// StdVideoH265SliceType (P=0, B=1 here).
pub const StdVideoH265PictureType_STD_VIDEO_H265_PICTURE_TYPE_P: StdVideoH265PictureType = 0;
pub const StdVideoH265PictureType_STD_VIDEO_H265_PICTURE_TYPE_B: StdVideoH265PictureType = 1;
pub const StdVideoH265PictureType_STD_VIDEO_H265_PICTURE_TYPE_I: StdVideoH265PictureType = 2;
pub const StdVideoH265PictureType_STD_VIDEO_H265_PICTURE_TYPE_IDR: StdVideoH265PictureType = 3;
pub const StdVideoH265PictureType_STD_VIDEO_H265_PICTURE_TYPE_INVALID: StdVideoH265PictureType =
    2147483647;
pub type StdVideoH265PictureType = ::std::os::raw::c_uint;
// StdVideoH265AspectRatioIdc: VUI aspect_ratio_idc table; 255 = EXTENDED_SAR
// (explicit sar_width/sar_height in the bitstream).
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED:
    StdVideoH265AspectRatioIdc = 0;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE:
    StdVideoH265AspectRatioIdc = 1;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11:
    StdVideoH265AspectRatioIdc = 2;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11:
    StdVideoH265AspectRatioIdc = 3;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11:
    StdVideoH265AspectRatioIdc = 4;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33:
    StdVideoH265AspectRatioIdc = 5;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11:
    StdVideoH265AspectRatioIdc = 6;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11:
    StdVideoH265AspectRatioIdc = 7;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11:
    StdVideoH265AspectRatioIdc = 8;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33:
    StdVideoH265AspectRatioIdc = 9;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11:
    StdVideoH265AspectRatioIdc = 10;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11:
    StdVideoH265AspectRatioIdc = 11;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33:
    StdVideoH265AspectRatioIdc = 12;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99:
    StdVideoH265AspectRatioIdc = 13;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3:
    StdVideoH265AspectRatioIdc = 14;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2:
    StdVideoH265AspectRatioIdc = 15;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1:
    StdVideoH265AspectRatioIdc = 16;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR:
    StdVideoH265AspectRatioIdc = 255;
pub const StdVideoH265AspectRatioIdc_STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID:
    StdVideoH265AspectRatioIdc = 2147483647;
pub type StdVideoH265AspectRatioIdc = ::std::os::raw::c_uint;
// H.265 decoded-picture-buffer management parameters; bindgen-generated
// mirror of the C struct. The 7-element arrays are indexed per sub-layer
// (the header sizes them with STD_VIDEO_H265_SUBLAYERS_LIST_SIZE = 7 —
// NOTE(review): constant name assumed from the header, confirm).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct StdVideoH265DecPicBufMgr {
    pub max_latency_increase_plus1: [u32; 7usize],
    pub max_dec_pic_buffering_minus1: [u8; 7usize],
    pub max_num_reorder_pics: [u8; 7usize],
}
// Bindgen-generated layout test: asserts C-ABI size, alignment, and offsets.
#[test]
fn bindgen_test_layout_StdVideoH265DecPicBufMgr() {
    const UNINIT: ::std::mem::MaybeUninit<StdVideoH265DecPicBufMgr> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<StdVideoH265DecPicBufMgr>(),
        44usize,
        concat!("Size of: ", stringify!(StdVideoH265DecPicBufMgr))
    );
    assert_eq!(
        ::std::mem::align_of::<StdVideoH265DecPicBufMgr>(),
        4usize,
        concat!("Alignment of ", stringify!(StdVideoH265DecPicBufMgr))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).max_latency_increase_plus1) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265DecPicBufMgr),
            "::",
            stringify!(max_latency_increase_plus1)
        )
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).max_dec_pic_buffering_minus1) as usize - ptr as usize
        },
        28usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265DecPicBufMgr),
            "::",
            stringify!(max_dec_pic_buffering_minus1)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).max_num_reorder_pics) as usize - ptr as usize },
        35usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265DecPicBufMgr),
            "::",
            stringify!(max_num_reorder_pics)
        )
    );
}
// Per-sub-layer HRD (hypothetical reference decoder) parameters; bindgen-
// generated mirror of the C struct. The 32-element arrays are indexed per
// CPB; `cbr_flag` packs one bit per CPB into a single u32 (per the header —
// NOTE(review): bit-packing semantics assumed, confirm against the header).
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct StdVideoH265SubLayerHrdParameters {
    pub bit_rate_value_minus1: [u32; 32usize],
    pub cpb_size_value_minus1: [u32; 32usize],
    pub cpb_size_du_value_minus1: [u32; 32usize],
    pub bit_rate_du_value_minus1: [u32; 32usize],
    pub cbr_flag: u32,
}
// Bindgen-generated layout test: asserts C-ABI size, alignment, and offsets.
#[test]
fn bindgen_test_layout_StdVideoH265SubLayerHrdParameters() {
    const UNINIT: ::std::mem::MaybeUninit<StdVideoH265SubLayerHrdParameters> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<StdVideoH265SubLayerHrdParameters>(),
        516usize,
        concat!("Size of: ", stringify!(StdVideoH265SubLayerHrdParameters))
    );
    assert_eq!(
        ::std::mem::align_of::<StdVideoH265SubLayerHrdParameters>(),
        4usize,
        concat!(
            "Alignment of ",
            stringify!(StdVideoH265SubLayerHrdParameters)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).bit_rate_value_minus1) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265SubLayerHrdParameters),
            "::",
            stringify!(bit_rate_value_minus1)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).cpb_size_value_minus1) as usize - ptr as usize },
        128usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265SubLayerHrdParameters),
            "::",
            stringify!(cpb_size_value_minus1)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).cpb_size_du_value_minus1) as usize - ptr as usize },
        256usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265SubLayerHrdParameters),
            "::",
            stringify!(cpb_size_du_value_minus1)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).bit_rate_du_value_minus1) as usize - ptr as usize },
        384usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265SubLayerHrdParameters),
            "::",
            stringify!(bit_rate_du_value_minus1)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).cbr_flag) as usize - ptr as usize },
        512usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265SubLayerHrdParameters),
            "::",
            stringify!(cbr_flag)
        )
    );
}
// H.265 HRD flag bitfield; bindgen packs the C bitfields into a 4-byte unit
// (no trailing padding needed here, unlike the 1-byte flag structs above).
// Accessors live in the `impl` block below this test.
#[repr(C)]
#[repr(align(4))]
#[derive(Debug, Copy, Clone)]
pub struct StdVideoH265HrdFlags {
    pub _bitfield_align_1: [u8; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>,
}
// Bindgen-generated layout test: asserts C-ABI size and alignment.
#[test]
fn bindgen_test_layout_StdVideoH265HrdFlags() {
    assert_eq!(
        ::std::mem::size_of::<StdVideoH265HrdFlags>(),
        4usize,
        concat!("Size of: ", stringify!(StdVideoH265HrdFlags))
    );
    assert_eq!(
        ::std::mem::align_of::<StdVideoH265HrdFlags>(),
        4usize,
        concat!("Alignment of ", stringify!(StdVideoH265HrdFlags))
    );
}
// Bindgen-generated accessors for the packed bitfield. Bit layout within
// `_bitfield_1`:
//   bit  0      (1 bit):  nal_hrd_parameters_present_flag
//   bit  1      (1 bit):  vcl_hrd_parameters_present_flag
//   bit  2      (1 bit):  sub_pic_hrd_params_present_flag
//   bit  3      (1 bit):  sub_pic_cpb_params_in_pic_timing_sei_flag
//   bits 4..12  (8 bits): fixed_pic_rate_general_flag
//   bits 12..20 (8 bits): fixed_pic_rate_within_cvs_flag
//   bits 20..28 (8 bits): low_delay_hrd_flag
// The three 8-bit members presumably carry one bit per sub-layer —
// NOTE(review): confirm against the Vulkan Video H.265 std header.
impl StdVideoH265HrdFlags {
    #[inline]
    pub fn nal_hrd_parameters_present_flag(&self) -> u32 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
    }
    #[inline]
    pub fn set_nal_hrd_parameters_present_flag(&mut self, val: u32) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(0usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub fn vcl_hrd_parameters_present_flag(&self) -> u32 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
    }
    #[inline]
    pub fn set_vcl_hrd_parameters_present_flag(&mut self, val: u32) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(1usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub fn sub_pic_hrd_params_present_flag(&self) -> u32 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) }
    }
    #[inline]
    pub fn set_sub_pic_hrd_params_present_flag(&mut self, val: u32) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(2usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub fn sub_pic_cpb_params_in_pic_timing_sei_flag(&self) -> u32 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) }
    }
    #[inline]
    pub fn set_sub_pic_cpb_params_in_pic_timing_sei_flag(&mut self, val: u32) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(3usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub fn fixed_pic_rate_general_flag(&self) -> u32 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(4usize, 8u8) as u32) }
    }
    #[inline]
    pub fn set_fixed_pic_rate_general_flag(&mut self, val: u32) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(4usize, 8u8, val as u64)
        }
    }
    #[inline]
    pub fn fixed_pic_rate_within_cvs_flag(&self) -> u32 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(12usize, 8u8) as u32) }
    }
    #[inline]
    pub fn set_fixed_pic_rate_within_cvs_flag(&mut self, val: u32) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(12usize, 8u8, val as u64)
        }
    }
    #[inline]
    pub fn low_delay_hrd_flag(&self) -> u32 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(20usize, 8u8) as u32) }
    }
    #[inline]
    pub fn set_low_delay_hrd_flag(&mut self, val: u32) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(20usize, 8u8, val as u64)
        }
    }
    #[inline]
    pub fn new_bitfield_1(
        nal_hrd_parameters_present_flag: u32,
        vcl_hrd_parameters_present_flag: u32,
        sub_pic_hrd_params_present_flag: u32,
        sub_pic_cpb_params_in_pic_timing_sei_flag: u32,
        fixed_pic_rate_general_flag: u32,
        fixed_pic_rate_within_cvs_flag: u32,
        low_delay_hrd_flag: u32,
    ) -> __BindgenBitfieldUnit<[u8; 4usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default();
        __bindgen_bitfield_unit.set(0usize, 1u8, {
            let nal_hrd_parameters_present_flag: u32 =
                unsafe { ::std::mem::transmute(nal_hrd_parameters_present_flag) };
            nal_hrd_parameters_present_flag as u64
        });
        __bindgen_bitfield_unit.set(1usize, 1u8, {
            let vcl_hrd_parameters_present_flag: u32 =
                unsafe { ::std::mem::transmute(vcl_hrd_parameters_present_flag) };
            vcl_hrd_parameters_present_flag as u64
        });
        __bindgen_bitfield_unit.set(2usize, 1u8, {
            let sub_pic_hrd_params_present_flag: u32 =
                unsafe { ::std::mem::transmute(sub_pic_hrd_params_present_flag) };
            sub_pic_hrd_params_present_flag as u64
        });
        __bindgen_bitfield_unit.set(3usize, 1u8, {
            let sub_pic_cpb_params_in_pic_timing_sei_flag: u32 =
                unsafe { ::std::mem::transmute(sub_pic_cpb_params_in_pic_timing_sei_flag) };
            sub_pic_cpb_params_in_pic_timing_sei_flag as u64
        });
        __bindgen_bitfield_unit.set(4usize, 8u8, {
            let fixed_pic_rate_general_flag: u32 =
                unsafe { ::std::mem::transmute(fixed_pic_rate_general_flag) };
            fixed_pic_rate_general_flag as u64
        });
        __bindgen_bitfield_unit.set(12usize, 8u8, {
            let fixed_pic_rate_within_cvs_flag: u32 =
                unsafe { ::std::mem::transmute(fixed_pic_rate_within_cvs_flag) };
            fixed_pic_rate_within_cvs_flag as u64
        });
        __bindgen_bitfield_unit.set(20usize, 8u8, {
            let low_delay_hrd_flag: u32 = unsafe { ::std::mem::transmute(low_delay_hrd_flag) };
            low_delay_hrd_flag as u64
        });
        __bindgen_bitfield_unit
    }
}
// Top-level H.265 HRD parameter set; bindgen-generated mirror of the C
// struct. The two raw pointers reference caller-owned arrays of per-sub-layer
// HRD parameters (NAL and VCL variants); lifetime/validity is the caller's
// responsibility, as with all raw-pointer fields in these bindings.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
pub struct StdVideoH265HrdParameters {
    pub flags: StdVideoH265HrdFlags,
    pub tick_divisor_minus2: u8,
    pub du_cpb_removal_delay_increment_length_minus1: u8,
    pub dpb_output_delay_du_length_minus1: u8,
    pub bit_rate_scale: u8,
    pub cpb_size_scale: u8,
    pub cpb_size_du_scale: u8,
    pub initial_cpb_removal_delay_length_minus1: u8,
    pub au_cpb_removal_delay_length_minus1: u8,
    pub dpb_output_delay_length_minus1: u8,
    pub cpb_cnt_minus1: [u8; 7usize],
    pub elemental_duration_in_tc_minus1: [u16; 7usize],
    // Reserved by the header; do not use.
    pub reserved: [u16; 3usize],
    pub pSubLayerHrdParametersNal: *const StdVideoH265SubLayerHrdParameters,
    pub pSubLayerHrdParametersVcl: *const StdVideoH265SubLayerHrdParameters,
}
// Bindgen-generated layout test: asserts C-ABI size, alignment, and offsets
// (8-byte alignment here comes from the pointer members).
#[test]
fn bindgen_test_layout_StdVideoH265HrdParameters() {
    const UNINIT: ::std::mem::MaybeUninit<StdVideoH265HrdParameters> =
        ::std::mem::MaybeUninit::uninit();
    let ptr = UNINIT.as_ptr();
    assert_eq!(
        ::std::mem::size_of::<StdVideoH265HrdParameters>(),
        56usize,
        concat!("Size of: ", stringify!(StdVideoH265HrdParameters))
    );
    assert_eq!(
        ::std::mem::align_of::<StdVideoH265HrdParameters>(),
        8usize,
        concat!("Alignment of ", stringify!(StdVideoH265HrdParameters))
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
        0usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265HrdParameters),
            "::",
            stringify!(flags)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).tick_divisor_minus2) as usize - ptr as usize },
        4usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265HrdParameters),
            "::",
            stringify!(tick_divisor_minus2)
        )
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).du_cpb_removal_delay_increment_length_minus1) as usize
                - ptr as usize
        },
        5usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265HrdParameters),
            "::",
            stringify!(du_cpb_removal_delay_increment_length_minus1)
        )
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).dpb_output_delay_du_length_minus1) as usize - ptr as usize
        },
        6usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265HrdParameters),
            "::",
            stringify!(dpb_output_delay_du_length_minus1)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).bit_rate_scale) as usize - ptr as usize },
        7usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265HrdParameters),
            "::",
            stringify!(bit_rate_scale)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).cpb_size_scale) as usize - ptr as usize },
        8usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265HrdParameters),
            "::",
            stringify!(cpb_size_scale)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).cpb_size_du_scale) as usize - ptr as usize },
        9usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265HrdParameters),
            "::",
            stringify!(cpb_size_du_scale)
        )
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).initial_cpb_removal_delay_length_minus1) as usize
                - ptr as usize
        },
        10usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265HrdParameters),
            "::",
            stringify!(initial_cpb_removal_delay_length_minus1)
        )
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).au_cpb_removal_delay_length_minus1) as usize - ptr as usize
        },
        11usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265HrdParameters),
            "::",
            stringify!(au_cpb_removal_delay_length_minus1)
        )
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).dpb_output_delay_length_minus1) as usize - ptr as usize
        },
        12usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265HrdParameters),
            "::",
            stringify!(dpb_output_delay_length_minus1)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).cpb_cnt_minus1) as usize - ptr as usize },
        13usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265HrdParameters),
            "::",
            stringify!(cpb_cnt_minus1)
        )
    );
    assert_eq!(
        unsafe {
            ::std::ptr::addr_of!((*ptr).elemental_duration_in_tc_minus1) as usize - ptr as usize
        },
        20usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265HrdParameters),
            "::",
            stringify!(elemental_duration_in_tc_minus1)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).reserved) as usize - ptr as usize },
        34usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265HrdParameters),
            "::",
            stringify!(reserved)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pSubLayerHrdParametersNal) as usize - ptr as usize },
        40usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265HrdParameters),
            "::",
            stringify!(pSubLayerHrdParametersNal)
        )
    );
    assert_eq!(
        unsafe { ::std::ptr::addr_of!((*ptr).pSubLayerHrdParametersVcl) as usize - ptr as usize },
        48usize,
        concat!(
            "Offset of field: ",
            stringify!(StdVideoH265HrdParameters),
            "::",
            stringify!(pSubLayerHrdParametersVcl)
        )
    );
}
// H.265 video parameter set (VPS) flag bitfield; bindgen packs the C
// bitfields into one byte and pads to 4. Accessors live in the `impl` block
// below this test.
#[repr(C)]
#[repr(align(4))]
#[derive(Debug, Copy, Clone)]
pub struct StdVideoH265VpsFlags {
    pub _bitfield_align_1: [u8; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
    pub __bindgen_padding_0: [u8; 3usize],
}
// Bindgen-generated layout test: asserts C-ABI size and alignment.
#[test]
fn bindgen_test_layout_StdVideoH265VpsFlags() {
    assert_eq!(
        ::std::mem::size_of::<StdVideoH265VpsFlags>(),
        4usize,
        concat!("Size of: ", stringify!(StdVideoH265VpsFlags))
    );
    assert_eq!(
        ::std::mem::align_of::<StdVideoH265VpsFlags>(),
        4usize,
        concat!("Alignment of ", stringify!(StdVideoH265VpsFlags))
    );
}
// Bindgen-generated accessors for the packed bitfield. Bit layout within
// `_bitfield_1` (each field is 1 bit wide):
//   bit 0: vps_temporal_id_nesting_flag
//   bit 1: vps_sub_layer_ordering_info_present_flag
//   bit 2: vps_timing_info_present_flag
//   bit 3: vps_poc_proportional_to_timing_flag
// Getters return the raw bit as u32 (0 or 1); setters store the low bit of
// `val`. `new_bitfield_1` builds a unit with all four bits in one call.
impl StdVideoH265VpsFlags {
    #[inline]
    pub fn vps_temporal_id_nesting_flag(&self) -> u32 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
    }
    #[inline]
    pub fn set_vps_temporal_id_nesting_flag(&mut self, val: u32) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(0usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub fn vps_sub_layer_ordering_info_present_flag(&self) -> u32 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
    }
    #[inline]
    pub fn set_vps_sub_layer_ordering_info_present_flag(&mut self, val: u32) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(1usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub fn vps_timing_info_present_flag(&self) -> u32 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) }
    }
    #[inline]
    pub fn set_vps_timing_info_present_flag(&mut self, val: u32) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(2usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub fn vps_poc_proportional_to_timing_flag(&self) -> u32 {
        unsafe { ::std::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) }
    }
    #[inline]
    pub fn set_vps_poc_proportional_to_timing_flag(&mut self, val: u32) {
        unsafe {
            let val: u32 = ::std::mem::transmute(val);
            self._bitfield_1.set(3usize, 1u8, val as u64)
        }
    }
    #[inline]
    pub fn new_bitfield_1(
        vps_temporal_id_nesting_flag: u32,
        vps_sub_layer_ordering_info_present_flag: u32,
        vps_timing_info_present_flag: u32,
        vps_poc_proportional_to_timing_flag: u32,
    ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
        __bindgen_bitfield_unit.set(0usize, 1u8, {
            let vps_temporal_id_nesting_flag: u32 =
                unsafe { ::std::mem::transmute(vps_temporal_id_nesting_flag) };
            vps_temporal_id_nesting_flag as u64
        });
        __bindgen_bitfield_unit.set(1usize, 1u8, {
            let vps_sub_layer_ordering_info_present_flag: u32 =
                unsafe { ::std::mem::transmute(vps_sub_layer_ordering_info_present_flag) };
            vps_sub_layer_ordering_info_present_flag as u64
        });
        __bindgen_bitfield_unit.set(2usize, 1u8, {
            let vps_timing_info_present_flag: u32 =
                unsafe { ::std::mem::transmute(vps_timing_info_present_flag) };
            vps_timing_info_present_flag as u64
        });
        __bindgen_bitfield_unit.set(3usize, 1u8, {
            let vps_poc_proportional_to_timing_flag: u32 =
                unsafe { ::std::mem::transmute(vps_poc_proportional_to_timing_flag) };
            vps_poc_proportional_to_timing_flag as u64
        });
        __bindgen_bitfield_unit
    }
}
// H.265 profile/tier/level flag bitfield; bindgen packs the C bitfields into
// one byte and pads to 4. Accessors are in the `impl` block that follows.
#[repr(C)]
#[repr(align(4))]
#[derive(Debug, Copy, Clone)]
pub struct StdVideoH265ProfileTierLevelFlags {
    pub _bitfield_align_1: [u8; 0],
    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
    pub __bindgen_padding_0: [u8; 3usize],
}
// Bindgen-generated layout test: asserts C-ABI size and alignment.
#[test]
fn bindgen_test_layout_StdVideoH265ProfileTierLevelFlags() {
    assert_eq!(
        ::std::mem::size_of::<StdVideoH265ProfileTierLevelFlags>(),
        4usize,
        concat!("Size of: ", stringify!(StdVideoH265ProfileTierLevelFlags))
    );
    assert_eq!(
        ::std::mem::align_of::<StdVideoH265ProfileTierLevelFlags>(),
        4usize,
        concat!(
            "Alignment of ",
            stringify!(StdVideoH265ProfileTierLevelFlags)
        )
    );
}
+impl StdVideoH265ProfileTierLevelFlags {
+ #[inline]
+ pub fn general_tier_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_general_tier_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(0usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn general_progressive_source_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_general_progressive_source_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(1usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn general_interlaced_source_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_general_interlaced_source_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(2usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn general_non_packed_constraint_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_general_non_packed_constraint_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(3usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn general_frame_only_constraint_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_general_frame_only_constraint_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(4usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn new_bitfield_1(
+ general_tier_flag: u32,
+ general_progressive_source_flag: u32,
+ general_interlaced_source_flag: u32,
+ general_non_packed_constraint_flag: u32,
+ general_frame_only_constraint_flag: u32,
+ ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
+ let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
+ __bindgen_bitfield_unit.set(0usize, 1u8, {
+ let general_tier_flag: u32 = unsafe { ::std::mem::transmute(general_tier_flag) };
+ general_tier_flag as u64
+ });
+ __bindgen_bitfield_unit.set(1usize, 1u8, {
+ let general_progressive_source_flag: u32 =
+ unsafe { ::std::mem::transmute(general_progressive_source_flag) };
+ general_progressive_source_flag as u64
+ });
+ __bindgen_bitfield_unit.set(2usize, 1u8, {
+ let general_interlaced_source_flag: u32 =
+ unsafe { ::std::mem::transmute(general_interlaced_source_flag) };
+ general_interlaced_source_flag as u64
+ });
+ __bindgen_bitfield_unit.set(3usize, 1u8, {
+ let general_non_packed_constraint_flag: u32 =
+ unsafe { ::std::mem::transmute(general_non_packed_constraint_flag) };
+ general_non_packed_constraint_flag as u64
+ });
+ __bindgen_bitfield_unit.set(4usize, 1u8, {
+ let general_frame_only_constraint_flag: u32 =
+ unsafe { ::std::mem::transmute(general_frame_only_constraint_flag) };
+ general_frame_only_constraint_flag as u64
+ });
+ __bindgen_bitfield_unit
+ }
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH265ProfileTierLevel {
+ pub flags: StdVideoH265ProfileTierLevelFlags,
+ pub general_profile_idc: StdVideoH265ProfileIdc,
+ pub general_level_idc: StdVideoH265LevelIdc,
+}
+#[test]
+fn bindgen_test_layout_StdVideoH265ProfileTierLevel() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoH265ProfileTierLevel> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH265ProfileTierLevel>(),
+ 12usize,
+ concat!("Size of: ", stringify!(StdVideoH265ProfileTierLevel))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH265ProfileTierLevel>(),
+ 4usize,
+ concat!("Alignment of ", stringify!(StdVideoH265ProfileTierLevel))
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265ProfileTierLevel),
+ "::",
+ stringify!(flags)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).general_profile_idc) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265ProfileTierLevel),
+ "::",
+ stringify!(general_profile_idc)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).general_level_idc) as usize - ptr as usize },
+ 8usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265ProfileTierLevel),
+ "::",
+ stringify!(general_level_idc)
+ )
+ );
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH265VideoParameterSet {
+ pub flags: StdVideoH265VpsFlags,
+ pub vps_video_parameter_set_id: u8,
+ pub vps_max_sub_layers_minus1: u8,
+ pub reserved1: u8,
+ pub reserved2: u8,
+ pub vps_num_units_in_tick: u32,
+ pub vps_time_scale: u32,
+ pub vps_num_ticks_poc_diff_one_minus1: u32,
+ pub reserved3: u32,
+ pub pDecPicBufMgr: *const StdVideoH265DecPicBufMgr,
+ pub pHrdParameters: *const StdVideoH265HrdParameters,
+ pub pProfileTierLevel: *const StdVideoH265ProfileTierLevel,
+}
+#[test]
+fn bindgen_test_layout_StdVideoH265VideoParameterSet() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoH265VideoParameterSet> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH265VideoParameterSet>(),
+ 48usize,
+ concat!("Size of: ", stringify!(StdVideoH265VideoParameterSet))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH265VideoParameterSet>(),
+ 8usize,
+ concat!("Alignment of ", stringify!(StdVideoH265VideoParameterSet))
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265VideoParameterSet),
+ "::",
+ stringify!(flags)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).vps_video_parameter_set_id) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265VideoParameterSet),
+ "::",
+ stringify!(vps_video_parameter_set_id)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).vps_max_sub_layers_minus1) as usize - ptr as usize },
+ 5usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265VideoParameterSet),
+ "::",
+ stringify!(vps_max_sub_layers_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize },
+ 6usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265VideoParameterSet),
+ "::",
+ stringify!(reserved1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).reserved2) as usize - ptr as usize },
+ 7usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265VideoParameterSet),
+ "::",
+ stringify!(reserved2)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).vps_num_units_in_tick) as usize - ptr as usize },
+ 8usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265VideoParameterSet),
+ "::",
+ stringify!(vps_num_units_in_tick)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).vps_time_scale) as usize - ptr as usize },
+ 12usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265VideoParameterSet),
+ "::",
+ stringify!(vps_time_scale)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).vps_num_ticks_poc_diff_one_minus1) as usize - ptr as usize
+ },
+ 16usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265VideoParameterSet),
+ "::",
+ stringify!(vps_num_ticks_poc_diff_one_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).reserved3) as usize - ptr as usize },
+ 20usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265VideoParameterSet),
+ "::",
+ stringify!(reserved3)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pDecPicBufMgr) as usize - ptr as usize },
+ 24usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265VideoParameterSet),
+ "::",
+ stringify!(pDecPicBufMgr)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pHrdParameters) as usize - ptr as usize },
+ 32usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265VideoParameterSet),
+ "::",
+ stringify!(pHrdParameters)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pProfileTierLevel) as usize - ptr as usize },
+ 40usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265VideoParameterSet),
+ "::",
+ stringify!(pProfileTierLevel)
+ )
+ );
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH265ScalingLists {
+ pub ScalingList4x4: [[u8; 16usize]; 6usize],
+ pub ScalingList8x8: [[u8; 64usize]; 6usize],
+ pub ScalingList16x16: [[u8; 64usize]; 6usize],
+ pub ScalingList32x32: [[u8; 64usize]; 2usize],
+ pub ScalingListDCCoef16x16: [u8; 6usize],
+ pub ScalingListDCCoef32x32: [u8; 2usize],
+}
+#[test]
+fn bindgen_test_layout_StdVideoH265ScalingLists() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoH265ScalingLists> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH265ScalingLists>(),
+ 1000usize,
+ concat!("Size of: ", stringify!(StdVideoH265ScalingLists))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH265ScalingLists>(),
+ 1usize,
+ concat!("Alignment of ", stringify!(StdVideoH265ScalingLists))
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).ScalingList4x4) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265ScalingLists),
+ "::",
+ stringify!(ScalingList4x4)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).ScalingList8x8) as usize - ptr as usize },
+ 96usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265ScalingLists),
+ "::",
+ stringify!(ScalingList8x8)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).ScalingList16x16) as usize - ptr as usize },
+ 480usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265ScalingLists),
+ "::",
+ stringify!(ScalingList16x16)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).ScalingList32x32) as usize - ptr as usize },
+ 864usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265ScalingLists),
+ "::",
+ stringify!(ScalingList32x32)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).ScalingListDCCoef16x16) as usize - ptr as usize },
+ 992usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265ScalingLists),
+ "::",
+ stringify!(ScalingListDCCoef16x16)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).ScalingListDCCoef32x32) as usize - ptr as usize },
+ 998usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265ScalingLists),
+ "::",
+ stringify!(ScalingListDCCoef32x32)
+ )
+ );
+}
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH265SpsVuiFlags {
+ pub _bitfield_align_1: [u8; 0],
+ pub _bitfield_1: __BindgenBitfieldUnit<[u8; 3usize]>,
+ pub __bindgen_padding_0: u8,
+}
+#[test]
+fn bindgen_test_layout_StdVideoH265SpsVuiFlags() {
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH265SpsVuiFlags>(),
+ 4usize,
+ concat!("Size of: ", stringify!(StdVideoH265SpsVuiFlags))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH265SpsVuiFlags>(),
+ 4usize,
+ concat!("Alignment of ", stringify!(StdVideoH265SpsVuiFlags))
+ );
+}
+impl StdVideoH265SpsVuiFlags {
+ #[inline]
+ pub fn aspect_ratio_info_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_aspect_ratio_info_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(0usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn overscan_info_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_overscan_info_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(1usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn overscan_appropriate_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_overscan_appropriate_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(2usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn video_signal_type_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_video_signal_type_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(3usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn video_full_range_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_video_full_range_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(4usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn colour_description_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_colour_description_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(5usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn chroma_loc_info_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_chroma_loc_info_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(6usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn neutral_chroma_indication_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_neutral_chroma_indication_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(7usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn field_seq_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_field_seq_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(8usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn frame_field_info_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(9usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_frame_field_info_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(9usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn default_display_window_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(10usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_default_display_window_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(10usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn vui_timing_info_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(11usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_vui_timing_info_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(11usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn vui_poc_proportional_to_timing_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(12usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_vui_poc_proportional_to_timing_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(12usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn vui_hrd_parameters_present_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(13usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_vui_hrd_parameters_present_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(13usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn bitstream_restriction_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(14usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_bitstream_restriction_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(14usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn tiles_fixed_structure_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(15usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_tiles_fixed_structure_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(15usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn motion_vectors_over_pic_boundaries_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(16usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_motion_vectors_over_pic_boundaries_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(16usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn restricted_ref_pic_lists_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(17usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_restricted_ref_pic_lists_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(17usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn new_bitfield_1(
+ aspect_ratio_info_present_flag: u32,
+ overscan_info_present_flag: u32,
+ overscan_appropriate_flag: u32,
+ video_signal_type_present_flag: u32,
+ video_full_range_flag: u32,
+ colour_description_present_flag: u32,
+ chroma_loc_info_present_flag: u32,
+ neutral_chroma_indication_flag: u32,
+ field_seq_flag: u32,
+ frame_field_info_present_flag: u32,
+ default_display_window_flag: u32,
+ vui_timing_info_present_flag: u32,
+ vui_poc_proportional_to_timing_flag: u32,
+ vui_hrd_parameters_present_flag: u32,
+ bitstream_restriction_flag: u32,
+ tiles_fixed_structure_flag: u32,
+ motion_vectors_over_pic_boundaries_flag: u32,
+ restricted_ref_pic_lists_flag: u32,
+ ) -> __BindgenBitfieldUnit<[u8; 3usize]> {
+ let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 3usize]> = Default::default();
+ __bindgen_bitfield_unit.set(0usize, 1u8, {
+ let aspect_ratio_info_present_flag: u32 =
+ unsafe { ::std::mem::transmute(aspect_ratio_info_present_flag) };
+ aspect_ratio_info_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(1usize, 1u8, {
+ let overscan_info_present_flag: u32 =
+ unsafe { ::std::mem::transmute(overscan_info_present_flag) };
+ overscan_info_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(2usize, 1u8, {
+ let overscan_appropriate_flag: u32 =
+ unsafe { ::std::mem::transmute(overscan_appropriate_flag) };
+ overscan_appropriate_flag as u64
+ });
+ __bindgen_bitfield_unit.set(3usize, 1u8, {
+ let video_signal_type_present_flag: u32 =
+ unsafe { ::std::mem::transmute(video_signal_type_present_flag) };
+ video_signal_type_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(4usize, 1u8, {
+ let video_full_range_flag: u32 =
+ unsafe { ::std::mem::transmute(video_full_range_flag) };
+ video_full_range_flag as u64
+ });
+ __bindgen_bitfield_unit.set(5usize, 1u8, {
+ let colour_description_present_flag: u32 =
+ unsafe { ::std::mem::transmute(colour_description_present_flag) };
+ colour_description_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(6usize, 1u8, {
+ let chroma_loc_info_present_flag: u32 =
+ unsafe { ::std::mem::transmute(chroma_loc_info_present_flag) };
+ chroma_loc_info_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(7usize, 1u8, {
+ let neutral_chroma_indication_flag: u32 =
+ unsafe { ::std::mem::transmute(neutral_chroma_indication_flag) };
+ neutral_chroma_indication_flag as u64
+ });
+ __bindgen_bitfield_unit.set(8usize, 1u8, {
+ let field_seq_flag: u32 = unsafe { ::std::mem::transmute(field_seq_flag) };
+ field_seq_flag as u64
+ });
+ __bindgen_bitfield_unit.set(9usize, 1u8, {
+ let frame_field_info_present_flag: u32 =
+ unsafe { ::std::mem::transmute(frame_field_info_present_flag) };
+ frame_field_info_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(10usize, 1u8, {
+ let default_display_window_flag: u32 =
+ unsafe { ::std::mem::transmute(default_display_window_flag) };
+ default_display_window_flag as u64
+ });
+ __bindgen_bitfield_unit.set(11usize, 1u8, {
+ let vui_timing_info_present_flag: u32 =
+ unsafe { ::std::mem::transmute(vui_timing_info_present_flag) };
+ vui_timing_info_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(12usize, 1u8, {
+ let vui_poc_proportional_to_timing_flag: u32 =
+ unsafe { ::std::mem::transmute(vui_poc_proportional_to_timing_flag) };
+ vui_poc_proportional_to_timing_flag as u64
+ });
+ __bindgen_bitfield_unit.set(13usize, 1u8, {
+ let vui_hrd_parameters_present_flag: u32 =
+ unsafe { ::std::mem::transmute(vui_hrd_parameters_present_flag) };
+ vui_hrd_parameters_present_flag as u64
+ });
+ __bindgen_bitfield_unit.set(14usize, 1u8, {
+ let bitstream_restriction_flag: u32 =
+ unsafe { ::std::mem::transmute(bitstream_restriction_flag) };
+ bitstream_restriction_flag as u64
+ });
+ __bindgen_bitfield_unit.set(15usize, 1u8, {
+ let tiles_fixed_structure_flag: u32 =
+ unsafe { ::std::mem::transmute(tiles_fixed_structure_flag) };
+ tiles_fixed_structure_flag as u64
+ });
+ __bindgen_bitfield_unit.set(16usize, 1u8, {
+ let motion_vectors_over_pic_boundaries_flag: u32 =
+ unsafe { ::std::mem::transmute(motion_vectors_over_pic_boundaries_flag) };
+ motion_vectors_over_pic_boundaries_flag as u64
+ });
+ __bindgen_bitfield_unit.set(17usize, 1u8, {
+ let restricted_ref_pic_lists_flag: u32 =
+ unsafe { ::std::mem::transmute(restricted_ref_pic_lists_flag) };
+ restricted_ref_pic_lists_flag as u64
+ });
+ __bindgen_bitfield_unit
+ }
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH265SequenceParameterSetVui {
+ pub flags: StdVideoH265SpsVuiFlags,
+ pub aspect_ratio_idc: StdVideoH265AspectRatioIdc,
+ pub sar_width: u16,
+ pub sar_height: u16,
+ pub video_format: u8,
+ pub colour_primaries: u8,
+ pub transfer_characteristics: u8,
+ pub matrix_coeffs: u8,
+ pub chroma_sample_loc_type_top_field: u8,
+ pub chroma_sample_loc_type_bottom_field: u8,
+ pub reserved1: u8,
+ pub reserved2: u8,
+ pub def_disp_win_left_offset: u16,
+ pub def_disp_win_right_offset: u16,
+ pub def_disp_win_top_offset: u16,
+ pub def_disp_win_bottom_offset: u16,
+ pub vui_num_units_in_tick: u32,
+ pub vui_time_scale: u32,
+ pub vui_num_ticks_poc_diff_one_minus1: u32,
+ pub min_spatial_segmentation_idc: u16,
+ pub reserved3: u16,
+ pub max_bytes_per_pic_denom: u8,
+ pub max_bits_per_min_cu_denom: u8,
+ pub log2_max_mv_length_horizontal: u8,
+ pub log2_max_mv_length_vertical: u8,
+ pub pHrdParameters: *const StdVideoH265HrdParameters,
+}
+#[test]
+fn bindgen_test_layout_StdVideoH265SequenceParameterSetVui() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoH265SequenceParameterSetVui> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH265SequenceParameterSetVui>(),
+ 56usize,
+ concat!("Size of: ", stringify!(StdVideoH265SequenceParameterSetVui))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH265SequenceParameterSetVui>(),
+ 8usize,
+ concat!(
+ "Alignment of ",
+ stringify!(StdVideoH265SequenceParameterSetVui)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(flags)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).aspect_ratio_idc) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(aspect_ratio_idc)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).sar_width) as usize - ptr as usize },
+ 8usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(sar_width)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).sar_height) as usize - ptr as usize },
+ 10usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(sar_height)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).video_format) as usize - ptr as usize },
+ 12usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(video_format)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).colour_primaries) as usize - ptr as usize },
+ 13usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(colour_primaries)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).transfer_characteristics) as usize - ptr as usize },
+ 14usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(transfer_characteristics)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).matrix_coeffs) as usize - ptr as usize },
+ 15usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(matrix_coeffs)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).chroma_sample_loc_type_top_field) as usize - ptr as usize
+ },
+ 16usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(chroma_sample_loc_type_top_field)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).chroma_sample_loc_type_bottom_field) as usize - ptr as usize
+ },
+ 17usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(chroma_sample_loc_type_bottom_field)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize },
+ 18usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(reserved1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).reserved2) as usize - ptr as usize },
+ 19usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(reserved2)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).def_disp_win_left_offset) as usize - ptr as usize },
+ 20usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(def_disp_win_left_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).def_disp_win_right_offset) as usize - ptr as usize },
+ 22usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(def_disp_win_right_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).def_disp_win_top_offset) as usize - ptr as usize },
+ 24usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(def_disp_win_top_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).def_disp_win_bottom_offset) as usize - ptr as usize },
+ 26usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(def_disp_win_bottom_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).vui_num_units_in_tick) as usize - ptr as usize },
+ 28usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(vui_num_units_in_tick)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).vui_time_scale) as usize - ptr as usize },
+ 32usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(vui_time_scale)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).vui_num_ticks_poc_diff_one_minus1) as usize - ptr as usize
+ },
+ 36usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(vui_num_ticks_poc_diff_one_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).min_spatial_segmentation_idc) as usize - ptr as usize
+ },
+ 40usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(min_spatial_segmentation_idc)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).reserved3) as usize - ptr as usize },
+ 42usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(reserved3)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).max_bytes_per_pic_denom) as usize - ptr as usize },
+ 44usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(max_bytes_per_pic_denom)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).max_bits_per_min_cu_denom) as usize - ptr as usize },
+ 45usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(max_bits_per_min_cu_denom)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).log2_max_mv_length_horizontal) as usize - ptr as usize
+ },
+ 46usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(log2_max_mv_length_horizontal)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).log2_max_mv_length_vertical) as usize - ptr as usize },
+ 47usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(log2_max_mv_length_vertical)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pHrdParameters) as usize - ptr as usize },
+ 48usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSetVui),
+ "::",
+ stringify!(pHrdParameters)
+ )
+ );
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH265PredictorPaletteEntries {
+ pub PredictorPaletteEntries: [[u16; 128usize]; 3usize],
+}
+#[test]
+fn bindgen_test_layout_StdVideoH265PredictorPaletteEntries() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoH265PredictorPaletteEntries> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH265PredictorPaletteEntries>(),
+ 768usize,
+ concat!("Size of: ", stringify!(StdVideoH265PredictorPaletteEntries))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH265PredictorPaletteEntries>(),
+ 2usize,
+ concat!(
+ "Alignment of ",
+ stringify!(StdVideoH265PredictorPaletteEntries)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).PredictorPaletteEntries) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PredictorPaletteEntries),
+ "::",
+ stringify!(PredictorPaletteEntries)
+ )
+ );
+}
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH265SpsFlags {
+ pub _bitfield_align_1: [u8; 0],
+ pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>,
+}
+#[test]
+fn bindgen_test_layout_StdVideoH265SpsFlags() {
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH265SpsFlags>(),
+ 4usize,
+ concat!("Size of: ", stringify!(StdVideoH265SpsFlags))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH265SpsFlags>(),
+ 4usize,
+ concat!("Alignment of ", stringify!(StdVideoH265SpsFlags))
+ );
+}
+// bindgen-emitted accessors for the C bitfield members of
+// StdVideoH265SpsFlags. Each flag occupies exactly one bit of
+// `_bitfield_1`; the bit index is the first argument of `.get()`/`.set()`
+// (bits 0..=29, in H.265 SPS declaration order). Getters widen the bit to
+// a u32; setters store only the low bit of `val`. `new_bitfield_1` packs
+// all 30 flags into a fresh 4-byte unit for use in struct initializers.
+// Generated code — do not hand-edit; regenerate with bindgen instead.
+impl StdVideoH265SpsFlags {
+    #[inline]
+    pub fn sps_temporal_id_nesting_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_sps_temporal_id_nesting_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(0usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn separate_colour_plane_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_separate_colour_plane_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(1usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn conformance_window_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_conformance_window_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(2usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn sps_sub_layer_ordering_info_present_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_sps_sub_layer_ordering_info_present_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(3usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn scaling_list_enabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_scaling_list_enabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(4usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn sps_scaling_list_data_present_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_sps_scaling_list_data_present_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(5usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn amp_enabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_amp_enabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(6usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn sample_adaptive_offset_enabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_sample_adaptive_offset_enabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(7usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn pcm_enabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_pcm_enabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(8usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn pcm_loop_filter_disabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(9usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_pcm_loop_filter_disabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(9usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn long_term_ref_pics_present_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(10usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_long_term_ref_pics_present_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(10usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn sps_temporal_mvp_enabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(11usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_sps_temporal_mvp_enabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(11usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn strong_intra_smoothing_enabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(12usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_strong_intra_smoothing_enabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(12usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn vui_parameters_present_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(13usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_vui_parameters_present_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(13usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn sps_extension_present_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(14usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_sps_extension_present_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(14usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn sps_range_extension_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(15usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_sps_range_extension_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(15usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn transform_skip_rotation_enabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(16usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_transform_skip_rotation_enabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(16usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn transform_skip_context_enabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(17usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_transform_skip_context_enabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(17usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn implicit_rdpcm_enabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(18usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_implicit_rdpcm_enabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(18usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn explicit_rdpcm_enabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(19usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_explicit_rdpcm_enabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(19usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn extended_precision_processing_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(20usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_extended_precision_processing_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(20usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn intra_smoothing_disabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(21usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_intra_smoothing_disabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(21usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn high_precision_offsets_enabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(22usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_high_precision_offsets_enabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(22usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn persistent_rice_adaptation_enabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(23usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_persistent_rice_adaptation_enabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(23usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn cabac_bypass_alignment_enabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(24usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_cabac_bypass_alignment_enabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(24usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn sps_scc_extension_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(25usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_sps_scc_extension_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(25usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn sps_curr_pic_ref_enabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(26usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_sps_curr_pic_ref_enabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(26usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn palette_mode_enabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(27usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_palette_mode_enabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(27usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn sps_palette_predictor_initializers_present_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(28usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_sps_palette_predictor_initializers_present_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(28usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn intra_boundary_filtering_disabled_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(29usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_intra_boundary_filtering_disabled_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(29usize, 1u8, val as u64)
+        }
+    }
+    // Packs the 30 one-bit flag values (parameters in bit order 0..=29)
+    // into a single 4-byte bitfield unit; only the low bit of each
+    // argument is stored.
+    #[inline]
+    pub fn new_bitfield_1(
+        sps_temporal_id_nesting_flag: u32,
+        separate_colour_plane_flag: u32,
+        conformance_window_flag: u32,
+        sps_sub_layer_ordering_info_present_flag: u32,
+        scaling_list_enabled_flag: u32,
+        sps_scaling_list_data_present_flag: u32,
+        amp_enabled_flag: u32,
+        sample_adaptive_offset_enabled_flag: u32,
+        pcm_enabled_flag: u32,
+        pcm_loop_filter_disabled_flag: u32,
+        long_term_ref_pics_present_flag: u32,
+        sps_temporal_mvp_enabled_flag: u32,
+        strong_intra_smoothing_enabled_flag: u32,
+        vui_parameters_present_flag: u32,
+        sps_extension_present_flag: u32,
+        sps_range_extension_flag: u32,
+        transform_skip_rotation_enabled_flag: u32,
+        transform_skip_context_enabled_flag: u32,
+        implicit_rdpcm_enabled_flag: u32,
+        explicit_rdpcm_enabled_flag: u32,
+        extended_precision_processing_flag: u32,
+        intra_smoothing_disabled_flag: u32,
+        high_precision_offsets_enabled_flag: u32,
+        persistent_rice_adaptation_enabled_flag: u32,
+        cabac_bypass_alignment_enabled_flag: u32,
+        sps_scc_extension_flag: u32,
+        sps_curr_pic_ref_enabled_flag: u32,
+        palette_mode_enabled_flag: u32,
+        sps_palette_predictor_initializers_present_flag: u32,
+        intra_boundary_filtering_disabled_flag: u32,
+    ) -> __BindgenBitfieldUnit<[u8; 4usize]> {
+        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default();
+        __bindgen_bitfield_unit.set(0usize, 1u8, {
+            let sps_temporal_id_nesting_flag: u32 =
+                unsafe { ::std::mem::transmute(sps_temporal_id_nesting_flag) };
+            sps_temporal_id_nesting_flag as u64
+        });
+        __bindgen_bitfield_unit.set(1usize, 1u8, {
+            let separate_colour_plane_flag: u32 =
+                unsafe { ::std::mem::transmute(separate_colour_plane_flag) };
+            separate_colour_plane_flag as u64
+        });
+        __bindgen_bitfield_unit.set(2usize, 1u8, {
+            let conformance_window_flag: u32 =
+                unsafe { ::std::mem::transmute(conformance_window_flag) };
+            conformance_window_flag as u64
+        });
+        __bindgen_bitfield_unit.set(3usize, 1u8, {
+            let sps_sub_layer_ordering_info_present_flag: u32 =
+                unsafe { ::std::mem::transmute(sps_sub_layer_ordering_info_present_flag) };
+            sps_sub_layer_ordering_info_present_flag as u64
+        });
+        __bindgen_bitfield_unit.set(4usize, 1u8, {
+            let scaling_list_enabled_flag: u32 =
+                unsafe { ::std::mem::transmute(scaling_list_enabled_flag) };
+            scaling_list_enabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(5usize, 1u8, {
+            let sps_scaling_list_data_present_flag: u32 =
+                unsafe { ::std::mem::transmute(sps_scaling_list_data_present_flag) };
+            sps_scaling_list_data_present_flag as u64
+        });
+        __bindgen_bitfield_unit.set(6usize, 1u8, {
+            let amp_enabled_flag: u32 = unsafe { ::std::mem::transmute(amp_enabled_flag) };
+            amp_enabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(7usize, 1u8, {
+            let sample_adaptive_offset_enabled_flag: u32 =
+                unsafe { ::std::mem::transmute(sample_adaptive_offset_enabled_flag) };
+            sample_adaptive_offset_enabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(8usize, 1u8, {
+            let pcm_enabled_flag: u32 = unsafe { ::std::mem::transmute(pcm_enabled_flag) };
+            pcm_enabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(9usize, 1u8, {
+            let pcm_loop_filter_disabled_flag: u32 =
+                unsafe { ::std::mem::transmute(pcm_loop_filter_disabled_flag) };
+            pcm_loop_filter_disabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(10usize, 1u8, {
+            let long_term_ref_pics_present_flag: u32 =
+                unsafe { ::std::mem::transmute(long_term_ref_pics_present_flag) };
+            long_term_ref_pics_present_flag as u64
+        });
+        __bindgen_bitfield_unit.set(11usize, 1u8, {
+            let sps_temporal_mvp_enabled_flag: u32 =
+                unsafe { ::std::mem::transmute(sps_temporal_mvp_enabled_flag) };
+            sps_temporal_mvp_enabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(12usize, 1u8, {
+            let strong_intra_smoothing_enabled_flag: u32 =
+                unsafe { ::std::mem::transmute(strong_intra_smoothing_enabled_flag) };
+            strong_intra_smoothing_enabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(13usize, 1u8, {
+            let vui_parameters_present_flag: u32 =
+                unsafe { ::std::mem::transmute(vui_parameters_present_flag) };
+            vui_parameters_present_flag as u64
+        });
+        __bindgen_bitfield_unit.set(14usize, 1u8, {
+            let sps_extension_present_flag: u32 =
+                unsafe { ::std::mem::transmute(sps_extension_present_flag) };
+            sps_extension_present_flag as u64
+        });
+        __bindgen_bitfield_unit.set(15usize, 1u8, {
+            let sps_range_extension_flag: u32 =
+                unsafe { ::std::mem::transmute(sps_range_extension_flag) };
+            sps_range_extension_flag as u64
+        });
+        __bindgen_bitfield_unit.set(16usize, 1u8, {
+            let transform_skip_rotation_enabled_flag: u32 =
+                unsafe { ::std::mem::transmute(transform_skip_rotation_enabled_flag) };
+            transform_skip_rotation_enabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(17usize, 1u8, {
+            let transform_skip_context_enabled_flag: u32 =
+                unsafe { ::std::mem::transmute(transform_skip_context_enabled_flag) };
+            transform_skip_context_enabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(18usize, 1u8, {
+            let implicit_rdpcm_enabled_flag: u32 =
+                unsafe { ::std::mem::transmute(implicit_rdpcm_enabled_flag) };
+            implicit_rdpcm_enabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(19usize, 1u8, {
+            let explicit_rdpcm_enabled_flag: u32 =
+                unsafe { ::std::mem::transmute(explicit_rdpcm_enabled_flag) };
+            explicit_rdpcm_enabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(20usize, 1u8, {
+            let extended_precision_processing_flag: u32 =
+                unsafe { ::std::mem::transmute(extended_precision_processing_flag) };
+            extended_precision_processing_flag as u64
+        });
+        __bindgen_bitfield_unit.set(21usize, 1u8, {
+            let intra_smoothing_disabled_flag: u32 =
+                unsafe { ::std::mem::transmute(intra_smoothing_disabled_flag) };
+            intra_smoothing_disabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(22usize, 1u8, {
+            let high_precision_offsets_enabled_flag: u32 =
+                unsafe { ::std::mem::transmute(high_precision_offsets_enabled_flag) };
+            high_precision_offsets_enabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(23usize, 1u8, {
+            let persistent_rice_adaptation_enabled_flag: u32 =
+                unsafe { ::std::mem::transmute(persistent_rice_adaptation_enabled_flag) };
+            persistent_rice_adaptation_enabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(24usize, 1u8, {
+            let cabac_bypass_alignment_enabled_flag: u32 =
+                unsafe { ::std::mem::transmute(cabac_bypass_alignment_enabled_flag) };
+            cabac_bypass_alignment_enabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(25usize, 1u8, {
+            let sps_scc_extension_flag: u32 =
+                unsafe { ::std::mem::transmute(sps_scc_extension_flag) };
+            sps_scc_extension_flag as u64
+        });
+        __bindgen_bitfield_unit.set(26usize, 1u8, {
+            let sps_curr_pic_ref_enabled_flag: u32 =
+                unsafe { ::std::mem::transmute(sps_curr_pic_ref_enabled_flag) };
+            sps_curr_pic_ref_enabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(27usize, 1u8, {
+            let palette_mode_enabled_flag: u32 =
+                unsafe { ::std::mem::transmute(palette_mode_enabled_flag) };
+            palette_mode_enabled_flag as u64
+        });
+        __bindgen_bitfield_unit.set(28usize, 1u8, {
+            let sps_palette_predictor_initializers_present_flag: u32 =
+                unsafe { ::std::mem::transmute(sps_palette_predictor_initializers_present_flag) };
+            sps_palette_predictor_initializers_present_flag as u64
+        });
+        __bindgen_bitfield_unit.set(29usize, 1u8, {
+            let intra_boundary_filtering_disabled_flag: u32 =
+                unsafe { ::std::mem::transmute(intra_boundary_filtering_disabled_flag) };
+            intra_boundary_filtering_disabled_flag as u64
+        });
+        __bindgen_bitfield_unit
+    }
+}
+// bindgen mapping of the C StdVideoH265ShortTermRefPicSetFlags bitfield
+// struct (Vulkan video std headers). Only 2 bits are used; the explicit
+// trailing padding plus repr(align(4)) reproduce the C layout of 4 bytes.
+// Generated code — do not hand-edit; regenerate with bindgen instead.
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH265ShortTermRefPicSetFlags {
+    pub _bitfield_align_1: [u8; 0],
+    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
+    pub __bindgen_padding_0: [u8; 3usize],
+}
+// bindgen-emitted layout check: asserts the 4-byte size/alignment of
+// StdVideoH265ShortTermRefPicSetFlags matches the C definition.
+// Generated code — do not hand-edit.
+#[test]
+fn bindgen_test_layout_StdVideoH265ShortTermRefPicSetFlags() {
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoH265ShortTermRefPicSetFlags>(),
+        4usize,
+        concat!("Size of: ", stringify!(StdVideoH265ShortTermRefPicSetFlags))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoH265ShortTermRefPicSetFlags>(),
+        4usize,
+        concat!(
+            "Alignment of ",
+            stringify!(StdVideoH265ShortTermRefPicSetFlags)
+        )
+    );
+}
+// bindgen-emitted accessors for the two one-bit flags of
+// StdVideoH265ShortTermRefPicSetFlags (bit 0: inter_ref_pic_set_prediction_flag,
+// bit 1: delta_rps_sign). Setters store only the low bit of `val`.
+// Generated code — do not hand-edit; regenerate with bindgen instead.
+impl StdVideoH265ShortTermRefPicSetFlags {
+    #[inline]
+    pub fn inter_ref_pic_set_prediction_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_inter_ref_pic_set_prediction_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(0usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn delta_rps_sign(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_delta_rps_sign(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(1usize, 1u8, val as u64)
+        }
+    }
+    // Packs both flag values into a fresh 1-byte bitfield unit.
+    #[inline]
+    pub fn new_bitfield_1(
+        inter_ref_pic_set_prediction_flag: u32,
+        delta_rps_sign: u32,
+    ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
+        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
+        __bindgen_bitfield_unit.set(0usize, 1u8, {
+            let inter_ref_pic_set_prediction_flag: u32 =
+                unsafe { ::std::mem::transmute(inter_ref_pic_set_prediction_flag) };
+            inter_ref_pic_set_prediction_flag as u64
+        });
+        __bindgen_bitfield_unit.set(1usize, 1u8, {
+            let delta_rps_sign: u32 = unsafe { ::std::mem::transmute(delta_rps_sign) };
+            delta_rps_sign as u64
+        });
+        __bindgen_bitfield_unit
+    }
+}
+// bindgen mapping of the C StdVideoH265ShortTermRefPicSet struct (Vulkan
+// video std headers): an H.265 short-term reference picture set. Field
+// names/types mirror the C header one-for-one; the companion
+// bindgen_test_layout_* test pins the expected offsets. Generated code —
+// do not hand-edit; regenerate with bindgen instead.
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH265ShortTermRefPicSet {
+    pub flags: StdVideoH265ShortTermRefPicSetFlags,
+    pub delta_idx_minus1: u32,
+    pub use_delta_flag: u16,
+    pub abs_delta_rps_minus1: u16,
+    pub used_by_curr_pic_flag: u16,
+    pub used_by_curr_pic_s0_flag: u16,
+    pub used_by_curr_pic_s1_flag: u16,
+    pub reserved1: u16,
+    pub reserved2: u8,
+    pub reserved3: u8,
+    pub num_negative_pics: u8,
+    pub num_positive_pics: u8,
+    pub delta_poc_s0_minus1: [u16; 16usize],
+    pub delta_poc_s1_minus1: [u16; 16usize],
+}
+// bindgen-emitted layout check for StdVideoH265ShortTermRefPicSet: pins
+// the struct's size (88), alignment (4), and every field offset to the C
+// ABI. Uses MaybeUninit + addr_of! so no value is ever constructed or
+// read. Generated code — do not hand-edit.
+#[test]
+fn bindgen_test_layout_StdVideoH265ShortTermRefPicSet() {
+    const UNINIT: ::std::mem::MaybeUninit<StdVideoH265ShortTermRefPicSet> =
+        ::std::mem::MaybeUninit::uninit();
+    let ptr = UNINIT.as_ptr();
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoH265ShortTermRefPicSet>(),
+        88usize,
+        concat!("Size of: ", stringify!(StdVideoH265ShortTermRefPicSet))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoH265ShortTermRefPicSet>(),
+        4usize,
+        concat!("Alignment of ", stringify!(StdVideoH265ShortTermRefPicSet))
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+        0usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265ShortTermRefPicSet),
+            "::",
+            stringify!(flags)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).delta_idx_minus1) as usize - ptr as usize },
+        4usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265ShortTermRefPicSet),
+            "::",
+            stringify!(delta_idx_minus1)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).use_delta_flag) as usize - ptr as usize },
+        8usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265ShortTermRefPicSet),
+            "::",
+            stringify!(use_delta_flag)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).abs_delta_rps_minus1) as usize - ptr as usize },
+        10usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265ShortTermRefPicSet),
+            "::",
+            stringify!(abs_delta_rps_minus1)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).used_by_curr_pic_flag) as usize - ptr as usize },
+        12usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265ShortTermRefPicSet),
+            "::",
+            stringify!(used_by_curr_pic_flag)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).used_by_curr_pic_s0_flag) as usize - ptr as usize },
+        14usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265ShortTermRefPicSet),
+            "::",
+            stringify!(used_by_curr_pic_s0_flag)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).used_by_curr_pic_s1_flag) as usize - ptr as usize },
+        16usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265ShortTermRefPicSet),
+            "::",
+            stringify!(used_by_curr_pic_s1_flag)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize },
+        18usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265ShortTermRefPicSet),
+            "::",
+            stringify!(reserved1)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).reserved2) as usize - ptr as usize },
+        20usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265ShortTermRefPicSet),
+            "::",
+            stringify!(reserved2)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).reserved3) as usize - ptr as usize },
+        21usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265ShortTermRefPicSet),
+            "::",
+            stringify!(reserved3)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).num_negative_pics) as usize - ptr as usize },
+        22usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265ShortTermRefPicSet),
+            "::",
+            stringify!(num_negative_pics)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).num_positive_pics) as usize - ptr as usize },
+        23usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265ShortTermRefPicSet),
+            "::",
+            stringify!(num_positive_pics)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).delta_poc_s0_minus1) as usize - ptr as usize },
+        24usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265ShortTermRefPicSet),
+            "::",
+            stringify!(delta_poc_s0_minus1)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).delta_poc_s1_minus1) as usize - ptr as usize },
+        56usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265ShortTermRefPicSet),
+            "::",
+            stringify!(delta_poc_s1_minus1)
+        )
+    );
+}
+// bindgen mapping of the C StdVideoH265LongTermRefPicsSps struct (Vulkan
+// video std headers): SPS-level long-term reference picture data.
+// Generated code — do not hand-edit; regenerate with bindgen instead.
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH265LongTermRefPicsSps {
+    pub used_by_curr_pic_lt_sps_flag: u32,
+    pub lt_ref_pic_poc_lsb_sps: [u32; 32usize],
+}
+// bindgen-emitted layout check for StdVideoH265LongTermRefPicsSps: pins
+// size (132), alignment (4), and field offsets to the C ABI. Generated
+// code — do not hand-edit.
+#[test]
+fn bindgen_test_layout_StdVideoH265LongTermRefPicsSps() {
+    const UNINIT: ::std::mem::MaybeUninit<StdVideoH265LongTermRefPicsSps> =
+        ::std::mem::MaybeUninit::uninit();
+    let ptr = UNINIT.as_ptr();
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoH265LongTermRefPicsSps>(),
+        132usize,
+        concat!("Size of: ", stringify!(StdVideoH265LongTermRefPicsSps))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoH265LongTermRefPicsSps>(),
+        4usize,
+        concat!("Alignment of ", stringify!(StdVideoH265LongTermRefPicsSps))
+    );
+    assert_eq!(
+        unsafe {
+            ::std::ptr::addr_of!((*ptr).used_by_curr_pic_lt_sps_flag) as usize - ptr as usize
+        },
+        0usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265LongTermRefPicsSps),
+            "::",
+            stringify!(used_by_curr_pic_lt_sps_flag)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).lt_ref_pic_poc_lsb_sps) as usize - ptr as usize },
+        4usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoH265LongTermRefPicsSps),
+            "::",
+            stringify!(lt_ref_pic_poc_lsb_sps)
+        )
+    );
+}
+// bindgen mapping of the C StdVideoH265SequenceParameterSet struct
+// (Vulkan video std headers): an H.265 SPS. The trailing `p*` members
+// are raw const pointers into caller-owned data — no lifetime or
+// validity is tracked on the Rust side; callers must keep the pointees
+// alive while this struct is in use. Generated code — do not hand-edit;
+// regenerate with bindgen instead.
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH265SequenceParameterSet {
+    pub flags: StdVideoH265SpsFlags,
+    pub chroma_format_idc: StdVideoH265ChromaFormatIdc,
+    pub pic_width_in_luma_samples: u32,
+    pub pic_height_in_luma_samples: u32,
+    pub sps_video_parameter_set_id: u8,
+    pub sps_max_sub_layers_minus1: u8,
+    pub sps_seq_parameter_set_id: u8,
+    pub bit_depth_luma_minus8: u8,
+    pub bit_depth_chroma_minus8: u8,
+    pub log2_max_pic_order_cnt_lsb_minus4: u8,
+    pub log2_min_luma_coding_block_size_minus3: u8,
+    pub log2_diff_max_min_luma_coding_block_size: u8,
+    pub log2_min_luma_transform_block_size_minus2: u8,
+    pub log2_diff_max_min_luma_transform_block_size: u8,
+    pub max_transform_hierarchy_depth_inter: u8,
+    pub max_transform_hierarchy_depth_intra: u8,
+    pub num_short_term_ref_pic_sets: u8,
+    pub num_long_term_ref_pics_sps: u8,
+    pub pcm_sample_bit_depth_luma_minus1: u8,
+    pub pcm_sample_bit_depth_chroma_minus1: u8,
+    pub log2_min_pcm_luma_coding_block_size_minus3: u8,
+    pub log2_diff_max_min_pcm_luma_coding_block_size: u8,
+    pub reserved1: u8,
+    pub reserved2: u8,
+    pub palette_max_size: u8,
+    pub delta_palette_max_predictor_size: u8,
+    pub motion_vector_resolution_control_idc: u8,
+    pub sps_num_palette_predictor_initializers_minus1: u8,
+    pub conf_win_left_offset: u32,
+    pub conf_win_right_offset: u32,
+    pub conf_win_top_offset: u32,
+    pub conf_win_bottom_offset: u32,
+    pub pProfileTierLevel: *const StdVideoH265ProfileTierLevel,
+    pub pDecPicBufMgr: *const StdVideoH265DecPicBufMgr,
+    pub pScalingLists: *const StdVideoH265ScalingLists,
+    pub pShortTermRefPicSet: *const StdVideoH265ShortTermRefPicSet,
+    pub pLongTermRefPicsSps: *const StdVideoH265LongTermRefPicsSps,
+    pub pSequenceParameterSetVui: *const StdVideoH265SequenceParameterSetVui,
+    pub pPredictorPaletteEntries: *const StdVideoH265PredictorPaletteEntries,
+}
+#[test]
+fn bindgen_test_layout_StdVideoH265SequenceParameterSet() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoH265SequenceParameterSet> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH265SequenceParameterSet>(),
+ 112usize,
+ concat!("Size of: ", stringify!(StdVideoH265SequenceParameterSet))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH265SequenceParameterSet>(),
+ 8usize,
+ concat!(
+ "Alignment of ",
+ stringify!(StdVideoH265SequenceParameterSet)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(flags)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).chroma_format_idc) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(chroma_format_idc)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pic_width_in_luma_samples) as usize - ptr as usize },
+ 8usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(pic_width_in_luma_samples)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pic_height_in_luma_samples) as usize - ptr as usize },
+ 12usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(pic_height_in_luma_samples)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).sps_video_parameter_set_id) as usize - ptr as usize },
+ 16usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(sps_video_parameter_set_id)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).sps_max_sub_layers_minus1) as usize - ptr as usize },
+ 17usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(sps_max_sub_layers_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).sps_seq_parameter_set_id) as usize - ptr as usize },
+ 18usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(sps_seq_parameter_set_id)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).bit_depth_luma_minus8) as usize - ptr as usize },
+ 19usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(bit_depth_luma_minus8)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).bit_depth_chroma_minus8) as usize - ptr as usize },
+ 20usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(bit_depth_chroma_minus8)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).log2_max_pic_order_cnt_lsb_minus4) as usize - ptr as usize
+ },
+ 21usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(log2_max_pic_order_cnt_lsb_minus4)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).log2_min_luma_coding_block_size_minus3) as usize
+ - ptr as usize
+ },
+ 22usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(log2_min_luma_coding_block_size_minus3)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).log2_diff_max_min_luma_coding_block_size) as usize
+ - ptr as usize
+ },
+ 23usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(log2_diff_max_min_luma_coding_block_size)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).log2_min_luma_transform_block_size_minus2) as usize
+ - ptr as usize
+ },
+ 24usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(log2_min_luma_transform_block_size_minus2)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).log2_diff_max_min_luma_transform_block_size) as usize
+ - ptr as usize
+ },
+ 25usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(log2_diff_max_min_luma_transform_block_size)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).max_transform_hierarchy_depth_inter) as usize - ptr as usize
+ },
+ 26usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(max_transform_hierarchy_depth_inter)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).max_transform_hierarchy_depth_intra) as usize - ptr as usize
+ },
+ 27usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(max_transform_hierarchy_depth_intra)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).num_short_term_ref_pic_sets) as usize - ptr as usize },
+ 28usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(num_short_term_ref_pic_sets)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).num_long_term_ref_pics_sps) as usize - ptr as usize },
+ 29usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(num_long_term_ref_pics_sps)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).pcm_sample_bit_depth_luma_minus1) as usize - ptr as usize
+ },
+ 30usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(pcm_sample_bit_depth_luma_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).pcm_sample_bit_depth_chroma_minus1) as usize - ptr as usize
+ },
+ 31usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(pcm_sample_bit_depth_chroma_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).log2_min_pcm_luma_coding_block_size_minus3) as usize
+ - ptr as usize
+ },
+ 32usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(log2_min_pcm_luma_coding_block_size_minus3)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).log2_diff_max_min_pcm_luma_coding_block_size) as usize
+ - ptr as usize
+ },
+ 33usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(log2_diff_max_min_pcm_luma_coding_block_size)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize },
+ 34usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(reserved1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).reserved2) as usize - ptr as usize },
+ 35usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(reserved2)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).palette_max_size) as usize - ptr as usize },
+ 36usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(palette_max_size)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).delta_palette_max_predictor_size) as usize - ptr as usize
+ },
+ 37usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(delta_palette_max_predictor_size)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).motion_vector_resolution_control_idc) as usize
+ - ptr as usize
+ },
+ 38usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(motion_vector_resolution_control_idc)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).sps_num_palette_predictor_initializers_minus1) as usize
+ - ptr as usize
+ },
+ 39usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(sps_num_palette_predictor_initializers_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).conf_win_left_offset) as usize - ptr as usize },
+ 40usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(conf_win_left_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).conf_win_right_offset) as usize - ptr as usize },
+ 44usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(conf_win_right_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).conf_win_top_offset) as usize - ptr as usize },
+ 48usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(conf_win_top_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).conf_win_bottom_offset) as usize - ptr as usize },
+ 52usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(conf_win_bottom_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pProfileTierLevel) as usize - ptr as usize },
+ 56usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(pProfileTierLevel)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pDecPicBufMgr) as usize - ptr as usize },
+ 64usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(pDecPicBufMgr)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pScalingLists) as usize - ptr as usize },
+ 72usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(pScalingLists)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pShortTermRefPicSet) as usize - ptr as usize },
+ 80usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(pShortTermRefPicSet)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pLongTermRefPicsSps) as usize - ptr as usize },
+ 88usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(pLongTermRefPicsSps)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pSequenceParameterSetVui) as usize - ptr as usize },
+ 96usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(pSequenceParameterSetVui)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pPredictorPaletteEntries) as usize - ptr as usize },
+ 104usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265SequenceParameterSet),
+ "::",
+ stringify!(pPredictorPaletteEntries)
+ )
+ );
+}
+/// Bitfield flags of an H.265 picture parameter set (PPS), mirroring the
+/// `StdVideoH265PpsFlags` struct from the Vulkan video std headers.
+///
+/// bindgen-generated: the individual one-bit flags are packed into
+/// `_bitfield_1` and exposed through the getter/setter methods on the
+/// accompanying `impl` block rather than as named fields.
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH265PpsFlags {
+    // Zero-sized marker bindgen emits to pin the bitfield storage's alignment.
+    pub _bitfield_align_1: [u8; 0],
+    // Raw 4-byte storage holding the packed one-bit flags.
+    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 4usize]>,
+}
+#[test]
+fn bindgen_test_layout_StdVideoH265PpsFlags() {
+    // Layout check generated by bindgen: the Rust type must occupy exactly
+    // the 4 bytes (at 4-byte alignment) that the C header declares.
+    let size = ::std::mem::size_of::<StdVideoH265PpsFlags>();
+    let align = ::std::mem::align_of::<StdVideoH265PpsFlags>();
+    assert_eq!(
+        size,
+        4usize,
+        concat!("Size of: ", stringify!(StdVideoH265PpsFlags))
+    );
+    assert_eq!(
+        align,
+        4usize,
+        concat!("Alignment of ", stringify!(StdVideoH265PpsFlags))
+    );
+}
+impl StdVideoH265PpsFlags {
+    // Accessors for the one-bit flags packed into `_bitfield_1`.
+    //
+    // Each getter reads the flag's single bit and widens it to `u32`; each
+    // setter narrows the given `u32` into that bit. The original generated
+    // code wrapped these in identity `transmute::<u32, u32>` calls, which
+    // are no-ops and are omitted here; `__BindgenBitfieldUnit::get`/`set`
+    // are safe methods, so no `unsafe` blocks are needed.
+    #[inline]
+    pub fn dependent_slice_segments_enabled_flag(&self) -> u32 {
+        self._bitfield_1.get(0usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_dependent_slice_segments_enabled_flag(&mut self, val: u32) {
+        self._bitfield_1.set(0usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn output_flag_present_flag(&self) -> u32 {
+        self._bitfield_1.get(1usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_output_flag_present_flag(&mut self, val: u32) {
+        self._bitfield_1.set(1usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn sign_data_hiding_enabled_flag(&self) -> u32 {
+        self._bitfield_1.get(2usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_sign_data_hiding_enabled_flag(&mut self, val: u32) {
+        self._bitfield_1.set(2usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn cabac_init_present_flag(&self) -> u32 {
+        self._bitfield_1.get(3usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_cabac_init_present_flag(&mut self, val: u32) {
+        self._bitfield_1.set(3usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn constrained_intra_pred_flag(&self) -> u32 {
+        self._bitfield_1.get(4usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_constrained_intra_pred_flag(&mut self, val: u32) {
+        self._bitfield_1.set(4usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn transform_skip_enabled_flag(&self) -> u32 {
+        self._bitfield_1.get(5usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_transform_skip_enabled_flag(&mut self, val: u32) {
+        self._bitfield_1.set(5usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn cu_qp_delta_enabled_flag(&self) -> u32 {
+        self._bitfield_1.get(6usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_cu_qp_delta_enabled_flag(&mut self, val: u32) {
+        self._bitfield_1.set(6usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn pps_slice_chroma_qp_offsets_present_flag(&self) -> u32 {
+        self._bitfield_1.get(7usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_pps_slice_chroma_qp_offsets_present_flag(&mut self, val: u32) {
+        self._bitfield_1.set(7usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn weighted_pred_flag(&self) -> u32 {
+        self._bitfield_1.get(8usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_weighted_pred_flag(&mut self, val: u32) {
+        self._bitfield_1.set(8usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn weighted_bipred_flag(&self) -> u32 {
+        self._bitfield_1.get(9usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_weighted_bipred_flag(&mut self, val: u32) {
+        self._bitfield_1.set(9usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn transquant_bypass_enabled_flag(&self) -> u32 {
+        self._bitfield_1.get(10usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_transquant_bypass_enabled_flag(&mut self, val: u32) {
+        self._bitfield_1.set(10usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn tiles_enabled_flag(&self) -> u32 {
+        self._bitfield_1.get(11usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_tiles_enabled_flag(&mut self, val: u32) {
+        self._bitfield_1.set(11usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn entropy_coding_sync_enabled_flag(&self) -> u32 {
+        self._bitfield_1.get(12usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_entropy_coding_sync_enabled_flag(&mut self, val: u32) {
+        self._bitfield_1.set(12usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn uniform_spacing_flag(&self) -> u32 {
+        self._bitfield_1.get(13usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_uniform_spacing_flag(&mut self, val: u32) {
+        self._bitfield_1.set(13usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn loop_filter_across_tiles_enabled_flag(&self) -> u32 {
+        self._bitfield_1.get(14usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_loop_filter_across_tiles_enabled_flag(&mut self, val: u32) {
+        self._bitfield_1.set(14usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn pps_loop_filter_across_slices_enabled_flag(&self) -> u32 {
+        self._bitfield_1.get(15usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_pps_loop_filter_across_slices_enabled_flag(&mut self, val: u32) {
+        self._bitfield_1.set(15usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn deblocking_filter_control_present_flag(&self) -> u32 {
+        self._bitfield_1.get(16usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_deblocking_filter_control_present_flag(&mut self, val: u32) {
+        self._bitfield_1.set(16usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn deblocking_filter_override_enabled_flag(&self) -> u32 {
+        self._bitfield_1.get(17usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_deblocking_filter_override_enabled_flag(&mut self, val: u32) {
+        self._bitfield_1.set(17usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn pps_deblocking_filter_disabled_flag(&self) -> u32 {
+        self._bitfield_1.get(18usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_pps_deblocking_filter_disabled_flag(&mut self, val: u32) {
+        self._bitfield_1.set(18usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn pps_scaling_list_data_present_flag(&self) -> u32 {
+        self._bitfield_1.get(19usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_pps_scaling_list_data_present_flag(&mut self, val: u32) {
+        self._bitfield_1.set(19usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn lists_modification_present_flag(&self) -> u32 {
+        self._bitfield_1.get(20usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_lists_modification_present_flag(&mut self, val: u32) {
+        self._bitfield_1.set(20usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn slice_segment_header_extension_present_flag(&self) -> u32 {
+        self._bitfield_1.get(21usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_slice_segment_header_extension_present_flag(&mut self, val: u32) {
+        self._bitfield_1.set(21usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn pps_extension_present_flag(&self) -> u32 {
+        self._bitfield_1.get(22usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_pps_extension_present_flag(&mut self, val: u32) {
+        self._bitfield_1.set(22usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn cross_component_prediction_enabled_flag(&self) -> u32 {
+        self._bitfield_1.get(23usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_cross_component_prediction_enabled_flag(&mut self, val: u32) {
+        self._bitfield_1.set(23usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn chroma_qp_offset_list_enabled_flag(&self) -> u32 {
+        self._bitfield_1.get(24usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_chroma_qp_offset_list_enabled_flag(&mut self, val: u32) {
+        self._bitfield_1.set(24usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn pps_curr_pic_ref_enabled_flag(&self) -> u32 {
+        self._bitfield_1.get(25usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_pps_curr_pic_ref_enabled_flag(&mut self, val: u32) {
+        self._bitfield_1.set(25usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn residual_adaptive_colour_transform_enabled_flag(&self) -> u32 {
+        self._bitfield_1.get(26usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_residual_adaptive_colour_transform_enabled_flag(&mut self, val: u32) {
+        self._bitfield_1.set(26usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn pps_slice_act_qp_offsets_present_flag(&self) -> u32 {
+        self._bitfield_1.get(27usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_pps_slice_act_qp_offsets_present_flag(&mut self, val: u32) {
+        self._bitfield_1.set(27usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn pps_palette_predictor_initializers_present_flag(&self) -> u32 {
+        self._bitfield_1.get(28usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_pps_palette_predictor_initializers_present_flag(&mut self, val: u32) {
+        self._bitfield_1.set(28usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn monochrome_palette_flag(&self) -> u32 {
+        self._bitfield_1.get(29usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_monochrome_palette_flag(&mut self, val: u32) {
+        self._bitfield_1.set(29usize, 1u8, val as u64)
+    }
+    #[inline]
+    pub fn pps_range_extension_flag(&self) -> u32 {
+        self._bitfield_1.get(30usize, 1u8) as u32
+    }
+    #[inline]
+    pub fn set_pps_range_extension_flag(&mut self, val: u32) {
+        self._bitfield_1.set(30usize, 1u8, val as u64)
+    }
+    /// Packs the individual flag values (bits 0..=30, in declaration order)
+    /// into a fresh 4-byte bitfield storage unit.
+    #[inline]
+    pub fn new_bitfield_1(
+        dependent_slice_segments_enabled_flag: u32,
+        output_flag_present_flag: u32,
+        sign_data_hiding_enabled_flag: u32,
+        cabac_init_present_flag: u32,
+        constrained_intra_pred_flag: u32,
+        transform_skip_enabled_flag: u32,
+        cu_qp_delta_enabled_flag: u32,
+        pps_slice_chroma_qp_offsets_present_flag: u32,
+        weighted_pred_flag: u32,
+        weighted_bipred_flag: u32,
+        transquant_bypass_enabled_flag: u32,
+        tiles_enabled_flag: u32,
+        entropy_coding_sync_enabled_flag: u32,
+        uniform_spacing_flag: u32,
+        loop_filter_across_tiles_enabled_flag: u32,
+        pps_loop_filter_across_slices_enabled_flag: u32,
+        deblocking_filter_control_present_flag: u32,
+        deblocking_filter_override_enabled_flag: u32,
+        pps_deblocking_filter_disabled_flag: u32,
+        pps_scaling_list_data_present_flag: u32,
+        lists_modification_present_flag: u32,
+        slice_segment_header_extension_present_flag: u32,
+        pps_extension_present_flag: u32,
+        cross_component_prediction_enabled_flag: u32,
+        chroma_qp_offset_list_enabled_flag: u32,
+        pps_curr_pic_ref_enabled_flag: u32,
+        residual_adaptive_colour_transform_enabled_flag: u32,
+        pps_slice_act_qp_offsets_present_flag: u32,
+        pps_palette_predictor_initializers_present_flag: u32,
+        monochrome_palette_flag: u32,
+        pps_range_extension_flag: u32,
+    ) -> __BindgenBitfieldUnit<[u8; 4usize]> {
+        let mut unit: __BindgenBitfieldUnit<[u8; 4usize]> = Default::default();
+        unit.set(0usize, 1u8, dependent_slice_segments_enabled_flag as u64);
+        unit.set(1usize, 1u8, output_flag_present_flag as u64);
+        unit.set(2usize, 1u8, sign_data_hiding_enabled_flag as u64);
+        unit.set(3usize, 1u8, cabac_init_present_flag as u64);
+        unit.set(4usize, 1u8, constrained_intra_pred_flag as u64);
+        unit.set(5usize, 1u8, transform_skip_enabled_flag as u64);
+        unit.set(6usize, 1u8, cu_qp_delta_enabled_flag as u64);
+        unit.set(7usize, 1u8, pps_slice_chroma_qp_offsets_present_flag as u64);
+        unit.set(8usize, 1u8, weighted_pred_flag as u64);
+        unit.set(9usize, 1u8, weighted_bipred_flag as u64);
+        unit.set(10usize, 1u8, transquant_bypass_enabled_flag as u64);
+        unit.set(11usize, 1u8, tiles_enabled_flag as u64);
+        unit.set(12usize, 1u8, entropy_coding_sync_enabled_flag as u64);
+        unit.set(13usize, 1u8, uniform_spacing_flag as u64);
+        unit.set(14usize, 1u8, loop_filter_across_tiles_enabled_flag as u64);
+        unit.set(15usize, 1u8, pps_loop_filter_across_slices_enabled_flag as u64);
+        unit.set(16usize, 1u8, deblocking_filter_control_present_flag as u64);
+        unit.set(17usize, 1u8, deblocking_filter_override_enabled_flag as u64);
+        unit.set(18usize, 1u8, pps_deblocking_filter_disabled_flag as u64);
+        unit.set(19usize, 1u8, pps_scaling_list_data_present_flag as u64);
+        unit.set(20usize, 1u8, lists_modification_present_flag as u64);
+        unit.set(21usize, 1u8, slice_segment_header_extension_present_flag as u64);
+        unit.set(22usize, 1u8, pps_extension_present_flag as u64);
+        unit.set(23usize, 1u8, cross_component_prediction_enabled_flag as u64);
+        unit.set(24usize, 1u8, chroma_qp_offset_list_enabled_flag as u64);
+        unit.set(25usize, 1u8, pps_curr_pic_ref_enabled_flag as u64);
+        unit.set(26usize, 1u8, residual_adaptive_colour_transform_enabled_flag as u64);
+        unit.set(27usize, 1u8, pps_slice_act_qp_offsets_present_flag as u64);
+        unit.set(28usize, 1u8, pps_palette_predictor_initializers_present_flag as u64);
+        unit.set(29usize, 1u8, monochrome_palette_flag as u64);
+        unit.set(30usize, 1u8, pps_range_extension_flag as u64);
+        unit
+    }
+}
+/// H.265 picture parameter set (PPS), mirroring the
+/// `StdVideoH265PictureParameterSet` struct from the Vulkan video std
+/// headers. bindgen-generated; field names follow the H.265 specification's
+/// syntax element names.
+///
+/// NOTE(review): the `reserved*` fields presumably exist for padding/future
+/// use and should stay zeroed — confirm against the C header.
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoH265PictureParameterSet {
+    pub flags: StdVideoH265PpsFlags,
+    pub pps_pic_parameter_set_id: u8,
+    pub pps_seq_parameter_set_id: u8,
+    pub sps_video_parameter_set_id: u8,
+    pub num_extra_slice_header_bits: u8,
+    pub num_ref_idx_l0_default_active_minus1: u8,
+    pub num_ref_idx_l1_default_active_minus1: u8,
+    pub init_qp_minus26: i8,
+    pub diff_cu_qp_delta_depth: u8,
+    pub pps_cb_qp_offset: i8,
+    pub pps_cr_qp_offset: i8,
+    pub pps_beta_offset_div2: i8,
+    pub pps_tc_offset_div2: i8,
+    pub log2_parallel_merge_level_minus2: u8,
+    pub log2_max_transform_skip_block_size_minus2: u8,
+    pub diff_cu_chroma_qp_offset_depth: u8,
+    pub chroma_qp_offset_list_len_minus1: u8,
+    pub cb_qp_offset_list: [i8; 6usize],
+    pub cr_qp_offset_list: [i8; 6usize],
+    pub log2_sao_offset_scale_luma: u8,
+    pub log2_sao_offset_scale_chroma: u8,
+    pub pps_act_y_qp_offset_plus5: i8,
+    pub pps_act_cb_qp_offset_plus5: i8,
+    pub pps_act_cr_qp_offset_plus3: i8,
+    pub pps_num_palette_predictor_initializers: u8,
+    pub luma_bit_depth_entry_minus8: u8,
+    pub chroma_bit_depth_entry_minus8: u8,
+    pub num_tile_columns_minus1: u8,
+    pub num_tile_rows_minus1: u8,
+    pub reserved1: u8,
+    pub reserved2: u8,
+    pub column_width_minus1: [u16; 19usize],
+    pub row_height_minus1: [u16; 21usize],
+    pub reserved3: u32,
+    // Raw pointers into caller-owned data; lifetime and nullability are not
+    // enforced by this binding.
+    pub pScalingLists: *const StdVideoH265ScalingLists,
+    pub pPredictorPaletteEntries: *const StdVideoH265PredictorPaletteEntries,
+}
+#[test]
+fn bindgen_test_layout_StdVideoH265PictureParameterSet() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoH265PictureParameterSet> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoH265PictureParameterSet>(),
+ 144usize,
+ concat!("Size of: ", stringify!(StdVideoH265PictureParameterSet))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoH265PictureParameterSet>(),
+ 8usize,
+ concat!("Alignment of ", stringify!(StdVideoH265PictureParameterSet))
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(flags)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pps_pic_parameter_set_id) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(pps_pic_parameter_set_id)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pps_seq_parameter_set_id) as usize - ptr as usize },
+ 5usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(pps_seq_parameter_set_id)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).sps_video_parameter_set_id) as usize - ptr as usize },
+ 6usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(sps_video_parameter_set_id)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).num_extra_slice_header_bits) as usize - ptr as usize },
+ 7usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(num_extra_slice_header_bits)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).num_ref_idx_l0_default_active_minus1) as usize
+ - ptr as usize
+ },
+ 8usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(num_ref_idx_l0_default_active_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).num_ref_idx_l1_default_active_minus1) as usize
+ - ptr as usize
+ },
+ 9usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(num_ref_idx_l1_default_active_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).init_qp_minus26) as usize - ptr as usize },
+ 10usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(init_qp_minus26)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).diff_cu_qp_delta_depth) as usize - ptr as usize },
+ 11usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(diff_cu_qp_delta_depth)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pps_cb_qp_offset) as usize - ptr as usize },
+ 12usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(pps_cb_qp_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pps_cr_qp_offset) as usize - ptr as usize },
+ 13usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(pps_cr_qp_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pps_beta_offset_div2) as usize - ptr as usize },
+ 14usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(pps_beta_offset_div2)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pps_tc_offset_div2) as usize - ptr as usize },
+ 15usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(pps_tc_offset_div2)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).log2_parallel_merge_level_minus2) as usize - ptr as usize
+ },
+ 16usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(log2_parallel_merge_level_minus2)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).log2_max_transform_skip_block_size_minus2) as usize
+ - ptr as usize
+ },
+ 17usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(log2_max_transform_skip_block_size_minus2)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).diff_cu_chroma_qp_offset_depth) as usize - ptr as usize
+ },
+ 18usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(diff_cu_chroma_qp_offset_depth)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).chroma_qp_offset_list_len_minus1) as usize - ptr as usize
+ },
+ 19usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(chroma_qp_offset_list_len_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).cb_qp_offset_list) as usize - ptr as usize },
+ 20usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(cb_qp_offset_list)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).cr_qp_offset_list) as usize - ptr as usize },
+ 26usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(cr_qp_offset_list)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).log2_sao_offset_scale_luma) as usize - ptr as usize },
+ 32usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(log2_sao_offset_scale_luma)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).log2_sao_offset_scale_chroma) as usize - ptr as usize
+ },
+ 33usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(log2_sao_offset_scale_chroma)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pps_act_y_qp_offset_plus5) as usize - ptr as usize },
+ 34usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(pps_act_y_qp_offset_plus5)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pps_act_cb_qp_offset_plus5) as usize - ptr as usize },
+ 35usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(pps_act_cb_qp_offset_plus5)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pps_act_cr_qp_offset_plus3) as usize - ptr as usize },
+ 36usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(pps_act_cr_qp_offset_plus3)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).pps_num_palette_predictor_initializers) as usize
+ - ptr as usize
+ },
+ 37usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(pps_num_palette_predictor_initializers)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).luma_bit_depth_entry_minus8) as usize - ptr as usize },
+ 38usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(luma_bit_depth_entry_minus8)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).chroma_bit_depth_entry_minus8) as usize - ptr as usize
+ },
+ 39usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(chroma_bit_depth_entry_minus8)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).num_tile_columns_minus1) as usize - ptr as usize },
+ 40usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(num_tile_columns_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).num_tile_rows_minus1) as usize - ptr as usize },
+ 41usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(num_tile_rows_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).reserved1) as usize - ptr as usize },
+ 42usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(reserved1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).reserved2) as usize - ptr as usize },
+ 43usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(reserved2)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).column_width_minus1) as usize - ptr as usize },
+ 44usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(column_width_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).row_height_minus1) as usize - ptr as usize },
+ 82usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(row_height_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).reserved3) as usize - ptr as usize },
+ 124usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(reserved3)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pScalingLists) as usize - ptr as usize },
+ 128usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(pScalingLists)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pPredictorPaletteEntries) as usize - ptr as usize },
+ 136usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoH265PictureParameterSet),
+ "::",
+ stringify!(pPredictorPaletteEntries)
+ )
+ );
+}
+// NOTE(review): bindgen-generated C-layout mirror of the H.265 decode picture
+// flags bitfield struct. All flag bits live in the single-byte bitfield unit;
+// the zero-sized align marker and 3-byte padding are ABI-critical — do not
+// hand-edit (regenerate with bindgen instead).
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoDecodeH265PictureInfoFlags {
+    pub _bitfield_align_1: [u8; 0],
+    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
+    pub __bindgen_padding_0: [u8; 3usize],
+}
+// bindgen-emitted layout regression test: asserts the Rust struct keeps the
+// C header's size (4) and alignment (4) so FFI use stays sound.
+#[test]
+fn bindgen_test_layout_StdVideoDecodeH265PictureInfoFlags() {
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoDecodeH265PictureInfoFlags>(),
+        4usize,
+        concat!("Size of: ", stringify!(StdVideoDecodeH265PictureInfoFlags))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoDecodeH265PictureInfoFlags>(),
+        4usize,
+        concat!(
+            "Alignment of ",
+            stringify!(StdVideoDecodeH265PictureInfoFlags)
+        )
+    );
+}
+// bindgen-generated bit accessors: each flag is one bit of the 1-byte bitfield
+// unit, in declaration order (bit 0 = IrapPicFlag .. bit 3 =
+// short_term_ref_pic_set_sps_flag). The transmutes are u32 -> u32 identities
+// emitted mechanically by bindgen.
+impl StdVideoDecodeH265PictureInfoFlags {
+    #[inline]
+    pub fn IrapPicFlag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_IrapPicFlag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(0usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn IdrPicFlag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_IdrPicFlag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(1usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn IsReference(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_IsReference(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(2usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn short_term_ref_pic_set_sps_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_short_term_ref_pic_set_sps_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(3usize, 1u8, val as u64)
+        }
+    }
+    // Builds a fully-initialized bitfield unit from the individual flag values;
+    // used by generated constructors instead of setting bits one at a time.
+    #[inline]
+    pub fn new_bitfield_1(
+        IrapPicFlag: u32,
+        IdrPicFlag: u32,
+        IsReference: u32,
+        short_term_ref_pic_set_sps_flag: u32,
+    ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
+        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
+        __bindgen_bitfield_unit.set(0usize, 1u8, {
+            let IrapPicFlag: u32 = unsafe { ::std::mem::transmute(IrapPicFlag) };
+            IrapPicFlag as u64
+        });
+        __bindgen_bitfield_unit.set(1usize, 1u8, {
+            let IdrPicFlag: u32 = unsafe { ::std::mem::transmute(IdrPicFlag) };
+            IdrPicFlag as u64
+        });
+        __bindgen_bitfield_unit.set(2usize, 1u8, {
+            let IsReference: u32 = unsafe { ::std::mem::transmute(IsReference) };
+            IsReference as u64
+        });
+        __bindgen_bitfield_unit.set(3usize, 1u8, {
+            let short_term_ref_pic_set_sps_flag: u32 =
+                unsafe { ::std::mem::transmute(short_term_ref_pic_set_sps_flag) };
+            short_term_ref_pic_set_sps_flag as u64
+        });
+        __bindgen_bitfield_unit
+    }
+}
+// bindgen-generated C-layout mirror of the per-picture H.265 decode info
+// struct; field order and exact widths (u8/u16/i32 and the three 8-entry
+// reference-picture-set arrays) define the FFI layout — do not hand-edit.
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoDecodeH265PictureInfo {
+    pub flags: StdVideoDecodeH265PictureInfoFlags,
+    pub sps_video_parameter_set_id: u8,
+    pub pps_seq_parameter_set_id: u8,
+    pub pps_pic_parameter_set_id: u8,
+    pub NumDeltaPocsOfRefRpsIdx: u8,
+    pub PicOrderCntVal: i32,
+    pub NumBitsForSTRefPicSetInSlice: u16,
+    pub reserved: u16,
+    pub RefPicSetStCurrBefore: [u8; 8usize],
+    pub RefPicSetStCurrAfter: [u8; 8usize],
+    pub RefPicSetLtCurr: [u8; 8usize],
+}
+// bindgen-emitted layout regression test: checks total size/alignment and the
+// byte offset of every field against the C header. Offsets are computed from
+// an uninitialized value via addr_of!, which never reads the memory.
+#[test]
+fn bindgen_test_layout_StdVideoDecodeH265PictureInfo() {
+    const UNINIT: ::std::mem::MaybeUninit<StdVideoDecodeH265PictureInfo> =
+        ::std::mem::MaybeUninit::uninit();
+    let ptr = UNINIT.as_ptr();
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoDecodeH265PictureInfo>(),
+        40usize,
+        concat!("Size of: ", stringify!(StdVideoDecodeH265PictureInfo))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoDecodeH265PictureInfo>(),
+        4usize,
+        concat!("Alignment of ", stringify!(StdVideoDecodeH265PictureInfo))
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+        0usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoDecodeH265PictureInfo),
+            "::",
+            stringify!(flags)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).sps_video_parameter_set_id) as usize - ptr as usize },
+        4usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoDecodeH265PictureInfo),
+            "::",
+            stringify!(sps_video_parameter_set_id)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).pps_seq_parameter_set_id) as usize - ptr as usize },
+        5usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoDecodeH265PictureInfo),
+            "::",
+            stringify!(pps_seq_parameter_set_id)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).pps_pic_parameter_set_id) as usize - ptr as usize },
+        6usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoDecodeH265PictureInfo),
+            "::",
+            stringify!(pps_pic_parameter_set_id)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).NumDeltaPocsOfRefRpsIdx) as usize - ptr as usize },
+        7usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoDecodeH265PictureInfo),
+            "::",
+            stringify!(NumDeltaPocsOfRefRpsIdx)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).PicOrderCntVal) as usize - ptr as usize },
+        8usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoDecodeH265PictureInfo),
+            "::",
+            stringify!(PicOrderCntVal)
+        )
+    );
+    assert_eq!(
+        unsafe {
+            ::std::ptr::addr_of!((*ptr).NumBitsForSTRefPicSetInSlice) as usize - ptr as usize
+        },
+        12usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoDecodeH265PictureInfo),
+            "::",
+            stringify!(NumBitsForSTRefPicSetInSlice)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).reserved) as usize - ptr as usize },
+        14usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoDecodeH265PictureInfo),
+            "::",
+            stringify!(reserved)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).RefPicSetStCurrBefore) as usize - ptr as usize },
+        16usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoDecodeH265PictureInfo),
+            "::",
+            stringify!(RefPicSetStCurrBefore)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).RefPicSetStCurrAfter) as usize - ptr as usize },
+        24usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoDecodeH265PictureInfo),
+            "::",
+            stringify!(RefPicSetStCurrAfter)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).RefPicSetLtCurr) as usize - ptr as usize },
+        32usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoDecodeH265PictureInfo),
+            "::",
+            stringify!(RefPicSetLtCurr)
+        )
+    );
+}
+// bindgen-generated C-layout bitfield struct for H.265 decode reference flags
+// (2 bits used, 3 bytes padding). Layout is ABI-critical — do not hand-edit.
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoDecodeH265ReferenceInfoFlags {
+    pub _bitfield_align_1: [u8; 0],
+    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
+    pub __bindgen_padding_0: [u8; 3usize],
+}
+// bindgen-emitted layout regression test (size 4, align 4).
+#[test]
+fn bindgen_test_layout_StdVideoDecodeH265ReferenceInfoFlags() {
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoDecodeH265ReferenceInfoFlags>(),
+        4usize,
+        concat!(
+            "Size of: ",
+            stringify!(StdVideoDecodeH265ReferenceInfoFlags)
+        )
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoDecodeH265ReferenceInfoFlags>(),
+        4usize,
+        concat!(
+            "Alignment of ",
+            stringify!(StdVideoDecodeH265ReferenceInfoFlags)
+        )
+    );
+}
+// bindgen-generated bit accessors: bit 0 = used_for_long_term_reference,
+// bit 1 = unused_for_reference.
+impl StdVideoDecodeH265ReferenceInfoFlags {
+    #[inline]
+    pub fn used_for_long_term_reference(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_used_for_long_term_reference(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(0usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn unused_for_reference(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_unused_for_reference(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(1usize, 1u8, val as u64)
+        }
+    }
+    // Builds a fully-initialized bitfield unit from the individual flags.
+    #[inline]
+    pub fn new_bitfield_1(
+        used_for_long_term_reference: u32,
+        unused_for_reference: u32,
+    ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
+        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
+        __bindgen_bitfield_unit.set(0usize, 1u8, {
+            let used_for_long_term_reference: u32 =
+                unsafe { ::std::mem::transmute(used_for_long_term_reference) };
+            used_for_long_term_reference as u64
+        });
+        __bindgen_bitfield_unit.set(1usize, 1u8, {
+            let unused_for_reference: u32 = unsafe { ::std::mem::transmute(unused_for_reference) };
+            unused_for_reference as u64
+        });
+        __bindgen_bitfield_unit
+    }
+}
+// bindgen-generated C-layout struct: per-reference-picture decode info
+// (flags + picture order count). Do not hand-edit.
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoDecodeH265ReferenceInfo {
+    pub flags: StdVideoDecodeH265ReferenceInfoFlags,
+    pub PicOrderCntVal: i32,
+}
+// bindgen-emitted layout regression test (size 8, align 4, field offsets 0/4).
+#[test]
+fn bindgen_test_layout_StdVideoDecodeH265ReferenceInfo() {
+    const UNINIT: ::std::mem::MaybeUninit<StdVideoDecodeH265ReferenceInfo> =
+        ::std::mem::MaybeUninit::uninit();
+    let ptr = UNINIT.as_ptr();
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoDecodeH265ReferenceInfo>(),
+        8usize,
+        concat!("Size of: ", stringify!(StdVideoDecodeH265ReferenceInfo))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoDecodeH265ReferenceInfo>(),
+        4usize,
+        concat!("Alignment of ", stringify!(StdVideoDecodeH265ReferenceInfo))
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+        0usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoDecodeH265ReferenceInfo),
+            "::",
+            stringify!(flags)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).PicOrderCntVal) as usize - ptr as usize },
+        4usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoDecodeH265ReferenceInfo),
+            "::",
+            stringify!(PicOrderCntVal)
+        )
+    );
+}
+// bindgen-generated C-layout struct: H.264 encode weight-table presence flags.
+// Unlike the 1-bit bitfield structs above, the C header declares these as
+// full 32-bit fields. Do not hand-edit.
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH264WeightTableFlags {
+    pub luma_weight_l0_flag: u32,
+    pub chroma_weight_l0_flag: u32,
+    pub luma_weight_l1_flag: u32,
+    pub chroma_weight_l1_flag: u32,
+}
+// bindgen-emitted layout regression test (size 16, align 4, offsets 0/4/8/12).
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH264WeightTableFlags() {
+    const UNINIT: ::std::mem::MaybeUninit<StdVideoEncodeH264WeightTableFlags> =
+        ::std::mem::MaybeUninit::uninit();
+    let ptr = UNINIT.as_ptr();
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoEncodeH264WeightTableFlags>(),
+        16usize,
+        concat!("Size of: ", stringify!(StdVideoEncodeH264WeightTableFlags))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoEncodeH264WeightTableFlags>(),
+        4usize,
+        concat!(
+            "Alignment of ",
+            stringify!(StdVideoEncodeH264WeightTableFlags)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).luma_weight_l0_flag) as usize - ptr as usize },
+        0usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264WeightTableFlags),
+            "::",
+            stringify!(luma_weight_l0_flag)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).chroma_weight_l0_flag) as usize - ptr as usize },
+        4usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264WeightTableFlags),
+            "::",
+            stringify!(chroma_weight_l0_flag)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).luma_weight_l1_flag) as usize - ptr as usize },
+        8usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264WeightTableFlags),
+            "::",
+            stringify!(luma_weight_l1_flag)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).chroma_weight_l1_flag) as usize - ptr as usize },
+        12usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264WeightTableFlags),
+            "::",
+            stringify!(chroma_weight_l1_flag)
+        )
+    );
+}
+// bindgen-generated C-layout struct: H.264 prediction weight table for both
+// reference lists (l0/l1); the 32-entry arrays are indexed by reference index,
+// the chroma arrays additionally by the 2 chroma components. Do not hand-edit.
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH264WeightTable {
+    pub flags: StdVideoEncodeH264WeightTableFlags,
+    pub luma_log2_weight_denom: u8,
+    pub chroma_log2_weight_denom: u8,
+    pub luma_weight_l0: [i8; 32usize],
+    pub luma_offset_l0: [i8; 32usize],
+    pub chroma_weight_l0: [[i8; 2usize]; 32usize],
+    pub chroma_offset_l0: [[i8; 2usize]; 32usize],
+    pub luma_weight_l1: [i8; 32usize],
+    pub luma_offset_l1: [i8; 32usize],
+    pub chroma_weight_l1: [[i8; 2usize]; 32usize],
+    pub chroma_offset_l1: [[i8; 2usize]; 32usize],
+}
+// bindgen-emitted layout regression test (size 404, align 4) — the odd byte
+// offsets come from the packed u8/i8 arrays following the 16-byte flags.
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH264WeightTable() {
+    const UNINIT: ::std::mem::MaybeUninit<StdVideoEncodeH264WeightTable> =
+        ::std::mem::MaybeUninit::uninit();
+    let ptr = UNINIT.as_ptr();
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoEncodeH264WeightTable>(),
+        404usize,
+        concat!("Size of: ", stringify!(StdVideoEncodeH264WeightTable))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoEncodeH264WeightTable>(),
+        4usize,
+        concat!("Alignment of ", stringify!(StdVideoEncodeH264WeightTable))
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+        0usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264WeightTable),
+            "::",
+            stringify!(flags)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).luma_log2_weight_denom) as usize - ptr as usize },
+        16usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264WeightTable),
+            "::",
+            stringify!(luma_log2_weight_denom)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).chroma_log2_weight_denom) as usize - ptr as usize },
+        17usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264WeightTable),
+            "::",
+            stringify!(chroma_log2_weight_denom)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).luma_weight_l0) as usize - ptr as usize },
+        18usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264WeightTable),
+            "::",
+            stringify!(luma_weight_l0)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).luma_offset_l0) as usize - ptr as usize },
+        50usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264WeightTable),
+            "::",
+            stringify!(luma_offset_l0)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).chroma_weight_l0) as usize - ptr as usize },
+        82usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264WeightTable),
+            "::",
+            stringify!(chroma_weight_l0)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).chroma_offset_l0) as usize - ptr as usize },
+        146usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264WeightTable),
+            "::",
+            stringify!(chroma_offset_l0)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).luma_weight_l1) as usize - ptr as usize },
+        210usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264WeightTable),
+            "::",
+            stringify!(luma_weight_l1)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).luma_offset_l1) as usize - ptr as usize },
+        242usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264WeightTable),
+            "::",
+            stringify!(luma_offset_l1)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).chroma_weight_l1) as usize - ptr as usize },
+        274usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264WeightTable),
+            "::",
+            stringify!(chroma_weight_l1)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).chroma_offset_l1) as usize - ptr as usize },
+        338usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264WeightTable),
+            "::",
+            stringify!(chroma_offset_l1)
+        )
+    );
+}
+// bindgen-generated C-layout bitfield struct for H.264 encode slice-header
+// flags (5 bits used, 3 bytes padding). Do not hand-edit.
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH264SliceHeaderFlags {
+    pub _bitfield_align_1: [u8; 0],
+    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
+    pub __bindgen_padding_0: [u8; 3usize],
+}
+// bindgen-emitted layout regression test (size 4, align 4).
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH264SliceHeaderFlags() {
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoEncodeH264SliceHeaderFlags>(),
+        4usize,
+        concat!("Size of: ", stringify!(StdVideoEncodeH264SliceHeaderFlags))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoEncodeH264SliceHeaderFlags>(),
+        4usize,
+        concat!(
+            "Alignment of ",
+            stringify!(StdVideoEncodeH264SliceHeaderFlags)
+        )
+    );
+}
+// bindgen-generated bit accessors, bits in declaration order:
+// 0 = direct_spatial_mv_pred_flag, 1 = num_ref_idx_active_override_flag,
+// 2 = no_output_of_prior_pics_flag, 3 = adaptive_ref_pic_marking_mode_flag,
+// 4 = no_prior_references_available_flag.
+impl StdVideoEncodeH264SliceHeaderFlags {
+    #[inline]
+    pub fn direct_spatial_mv_pred_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_direct_spatial_mv_pred_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(0usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn num_ref_idx_active_override_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_num_ref_idx_active_override_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(1usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn no_output_of_prior_pics_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_no_output_of_prior_pics_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(2usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn adaptive_ref_pic_marking_mode_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_adaptive_ref_pic_marking_mode_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(3usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn no_prior_references_available_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_no_prior_references_available_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(4usize, 1u8, val as u64)
+        }
+    }
+    // Builds a fully-initialized bitfield unit from the individual flags.
+    #[inline]
+    pub fn new_bitfield_1(
+        direct_spatial_mv_pred_flag: u32,
+        num_ref_idx_active_override_flag: u32,
+        no_output_of_prior_pics_flag: u32,
+        adaptive_ref_pic_marking_mode_flag: u32,
+        no_prior_references_available_flag: u32,
+    ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
+        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
+        __bindgen_bitfield_unit.set(0usize, 1u8, {
+            let direct_spatial_mv_pred_flag: u32 =
+                unsafe { ::std::mem::transmute(direct_spatial_mv_pred_flag) };
+            direct_spatial_mv_pred_flag as u64
+        });
+        __bindgen_bitfield_unit.set(1usize, 1u8, {
+            let num_ref_idx_active_override_flag: u32 =
+                unsafe { ::std::mem::transmute(num_ref_idx_active_override_flag) };
+            num_ref_idx_active_override_flag as u64
+        });
+        __bindgen_bitfield_unit.set(2usize, 1u8, {
+            let no_output_of_prior_pics_flag: u32 =
+                unsafe { ::std::mem::transmute(no_output_of_prior_pics_flag) };
+            no_output_of_prior_pics_flag as u64
+        });
+        __bindgen_bitfield_unit.set(3usize, 1u8, {
+            let adaptive_ref_pic_marking_mode_flag: u32 =
+                unsafe { ::std::mem::transmute(adaptive_ref_pic_marking_mode_flag) };
+            adaptive_ref_pic_marking_mode_flag as u64
+        });
+        __bindgen_bitfield_unit.set(4usize, 1u8, {
+            let no_prior_references_available_flag: u32 =
+                unsafe { ::std::mem::transmute(no_prior_references_available_flag) };
+            no_prior_references_available_flag as u64
+        });
+        __bindgen_bitfield_unit
+    }
+}
+// bindgen-generated C-layout bitfield struct for H.264 encode picture flags
+// (3 bits used, 3 bytes padding). Do not hand-edit.
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH264PictureInfoFlags {
+    pub _bitfield_align_1: [u8; 0],
+    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
+    pub __bindgen_padding_0: [u8; 3usize],
+}
+// bindgen-emitted layout regression test (size 4, align 4).
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH264PictureInfoFlags() {
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoEncodeH264PictureInfoFlags>(),
+        4usize,
+        concat!("Size of: ", stringify!(StdVideoEncodeH264PictureInfoFlags))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoEncodeH264PictureInfoFlags>(),
+        4usize,
+        concat!(
+            "Alignment of ",
+            stringify!(StdVideoEncodeH264PictureInfoFlags)
+        )
+    );
+}
+// bindgen-generated bit accessors: bit 0 = idr_flag, bit 1 = is_reference_flag,
+// bit 2 = used_for_long_term_reference.
+impl StdVideoEncodeH264PictureInfoFlags {
+    #[inline]
+    pub fn idr_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_idr_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(0usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn is_reference_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_is_reference_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(1usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn used_for_long_term_reference(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_used_for_long_term_reference(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(2usize, 1u8, val as u64)
+        }
+    }
+    // Builds a fully-initialized bitfield unit from the individual flags.
+    #[inline]
+    pub fn new_bitfield_1(
+        idr_flag: u32,
+        is_reference_flag: u32,
+        used_for_long_term_reference: u32,
+    ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
+        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
+        __bindgen_bitfield_unit.set(0usize, 1u8, {
+            let idr_flag: u32 = unsafe { ::std::mem::transmute(idr_flag) };
+            idr_flag as u64
+        });
+        __bindgen_bitfield_unit.set(1usize, 1u8, {
+            let is_reference_flag: u32 = unsafe { ::std::mem::transmute(is_reference_flag) };
+            is_reference_flag as u64
+        });
+        __bindgen_bitfield_unit.set(2usize, 1u8, {
+            let used_for_long_term_reference: u32 =
+                unsafe { ::std::mem::transmute(used_for_long_term_reference) };
+            used_for_long_term_reference as u64
+        });
+        __bindgen_bitfield_unit
+    }
+}
+// bindgen-generated C-layout bitfield struct for H.264 encode reference flags
+// (1 bit used, 3 bytes padding). Do not hand-edit.
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH264ReferenceInfoFlags {
+    pub _bitfield_align_1: [u8; 0],
+    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
+    pub __bindgen_padding_0: [u8; 3usize],
+}
+// bindgen-emitted layout regression test (size 4, align 4).
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH264ReferenceInfoFlags() {
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoEncodeH264ReferenceInfoFlags>(),
+        4usize,
+        concat!(
+            "Size of: ",
+            stringify!(StdVideoEncodeH264ReferenceInfoFlags)
+        )
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoEncodeH264ReferenceInfoFlags>(),
+        4usize,
+        concat!(
+            "Alignment of ",
+            stringify!(StdVideoEncodeH264ReferenceInfoFlags)
+        )
+    );
+}
+// bindgen-generated bit accessor: bit 0 = used_for_long_term_reference.
+impl StdVideoEncodeH264ReferenceInfoFlags {
+    #[inline]
+    pub fn used_for_long_term_reference(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_used_for_long_term_reference(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(0usize, 1u8, val as u64)
+        }
+    }
+    // Builds a fully-initialized bitfield unit from the single flag.
+    #[inline]
+    pub fn new_bitfield_1(
+        used_for_long_term_reference: u32,
+    ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
+        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
+        __bindgen_bitfield_unit.set(0usize, 1u8, {
+            let used_for_long_term_reference: u32 =
+                unsafe { ::std::mem::transmute(used_for_long_term_reference) };
+            used_for_long_term_reference as u64
+        });
+        __bindgen_bitfield_unit
+    }
+}
+// bindgen-generated C-layout bitfield struct for H.264 encode reference-list
+// management flags (2 bits used, 3 bytes padding). Do not hand-edit.
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH264RefMgmtFlags {
+    pub _bitfield_align_1: [u8; 0],
+    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>,
+    pub __bindgen_padding_0: [u8; 3usize],
+}
+// bindgen-emitted layout regression test (size 4, align 4).
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH264RefMgmtFlags() {
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoEncodeH264RefMgmtFlags>(),
+        4usize,
+        concat!("Size of: ", stringify!(StdVideoEncodeH264RefMgmtFlags))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoEncodeH264RefMgmtFlags>(),
+        4usize,
+        concat!("Alignment of ", stringify!(StdVideoEncodeH264RefMgmtFlags))
+    );
+}
+// bindgen-generated bit accessors: bit 0 = ref_pic_list_modification_l0_flag,
+// bit 1 = ref_pic_list_modification_l1_flag.
+impl StdVideoEncodeH264RefMgmtFlags {
+    #[inline]
+    pub fn ref_pic_list_modification_l0_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_ref_pic_list_modification_l0_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(0usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn ref_pic_list_modification_l1_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_ref_pic_list_modification_l1_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(1usize, 1u8, val as u64)
+        }
+    }
+    // Builds a fully-initialized bitfield unit from the individual flags.
+    #[inline]
+    pub fn new_bitfield_1(
+        ref_pic_list_modification_l0_flag: u32,
+        ref_pic_list_modification_l1_flag: u32,
+    ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
+        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
+        __bindgen_bitfield_unit.set(0usize, 1u8, {
+            let ref_pic_list_modification_l0_flag: u32 =
+                unsafe { ::std::mem::transmute(ref_pic_list_modification_l0_flag) };
+            ref_pic_list_modification_l0_flag as u64
+        });
+        __bindgen_bitfield_unit.set(1usize, 1u8, {
+            let ref_pic_list_modification_l1_flag: u32 =
+                unsafe { ::std::mem::transmute(ref_pic_list_modification_l1_flag) };
+            ref_pic_list_modification_l1_flag as u64
+        });
+        __bindgen_bitfield_unit
+    }
+}
+// bindgen-generated C-layout struct: one H.264 reference-picture-list
+// modification entry (idc selects which of the two u16 payloads applies).
+// Do not hand-edit.
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH264RefListModEntry {
+    pub modification_of_pic_nums_idc: StdVideoH264ModificationOfPicNumsIdc,
+    pub abs_diff_pic_num_minus1: u16,
+    pub long_term_pic_num: u16,
+}
+// bindgen-emitted layout regression test (size 8, align 4, offsets 0/4/6).
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH264RefListModEntry() {
+    const UNINIT: ::std::mem::MaybeUninit<StdVideoEncodeH264RefListModEntry> =
+        ::std::mem::MaybeUninit::uninit();
+    let ptr = UNINIT.as_ptr();
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoEncodeH264RefListModEntry>(),
+        8usize,
+        concat!("Size of: ", stringify!(StdVideoEncodeH264RefListModEntry))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoEncodeH264RefListModEntry>(),
+        4usize,
+        concat!(
+            "Alignment of ",
+            stringify!(StdVideoEncodeH264RefListModEntry)
+        )
+    );
+    assert_eq!(
+        unsafe {
+            ::std::ptr::addr_of!((*ptr).modification_of_pic_nums_idc) as usize - ptr as usize
+        },
+        0usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264RefListModEntry),
+            "::",
+            stringify!(modification_of_pic_nums_idc)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).abs_diff_pic_num_minus1) as usize - ptr as usize },
+        4usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264RefListModEntry),
+            "::",
+            stringify!(abs_diff_pic_num_minus1)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).long_term_pic_num) as usize - ptr as usize },
+        6usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264RefListModEntry),
+            "::",
+            stringify!(long_term_pic_num)
+        )
+    );
+}
+// bindgen-generated C-layout struct: one H.264 memory-management control
+// operation; which u16 payload is meaningful depends on `operation`.
+// Do not hand-edit.
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH264RefPicMarkingEntry {
+    pub operation: StdVideoH264MemMgmtControlOp,
+    pub difference_of_pic_nums_minus1: u16,
+    pub long_term_pic_num: u16,
+    pub long_term_frame_idx: u16,
+    pub max_long_term_frame_idx_plus1: u16,
+}
+// bindgen-emitted layout regression test (size 12, align 4, offsets
+// 0/4/6/8/10).
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH264RefPicMarkingEntry() {
+    const UNINIT: ::std::mem::MaybeUninit<StdVideoEncodeH264RefPicMarkingEntry> =
+        ::std::mem::MaybeUninit::uninit();
+    let ptr = UNINIT.as_ptr();
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoEncodeH264RefPicMarkingEntry>(),
+        12usize,
+        concat!(
+            "Size of: ",
+            stringify!(StdVideoEncodeH264RefPicMarkingEntry)
+        )
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoEncodeH264RefPicMarkingEntry>(),
+        4usize,
+        concat!(
+            "Alignment of ",
+            stringify!(StdVideoEncodeH264RefPicMarkingEntry)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).operation) as usize - ptr as usize },
+        0usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264RefPicMarkingEntry),
+            "::",
+            stringify!(operation)
+        )
+    );
+    assert_eq!(
+        unsafe {
+            ::std::ptr::addr_of!((*ptr).difference_of_pic_nums_minus1) as usize - ptr as usize
+        },
+        4usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264RefPicMarkingEntry),
+            "::",
+            stringify!(difference_of_pic_nums_minus1)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).long_term_pic_num) as usize - ptr as usize },
+        6usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264RefPicMarkingEntry),
+            "::",
+            stringify!(long_term_pic_num)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).long_term_frame_idx) as usize - ptr as usize },
+        8usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264RefPicMarkingEntry),
+            "::",
+            stringify!(long_term_frame_idx)
+        )
+    );
+    assert_eq!(
+        unsafe {
+            ::std::ptr::addr_of!((*ptr).max_long_term_frame_idx_plus1) as usize - ptr as usize
+        },
+        10usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH264RefPicMarkingEntry),
+            "::",
+            stringify!(max_long_term_frame_idx_plus1)
+        )
+    );
+}
+// bindgen-generated C-layout struct bundling the reference-list-modification
+// and reference-picture-marking operation arrays. Each count (u8) describes
+// the length of the raw pointer array that follows it; interleaving counts
+// and 8-byte pointers is what produces the padded 48-byte layout asserted by
+// the test below. Callers must keep the pointed-to arrays alive for the
+// duration of FFI use. Do not hand-edit.
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH264RefMemMgmtCtrlOperations {
+    pub flags: StdVideoEncodeH264RefMgmtFlags,
+    pub refList0ModOpCount: u8,
+    pub pRefList0ModOperations: *const StdVideoEncodeH264RefListModEntry,
+    pub refList1ModOpCount: u8,
+    pub pRefList1ModOperations: *const StdVideoEncodeH264RefListModEntry,
+    pub refPicMarkingOpCount: u8,
+    pub pRefPicMarkingOperations: *const StdVideoEncodeH264RefPicMarkingEntry,
+}
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH264RefMemMgmtCtrlOperations() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoEncodeH264RefMemMgmtCtrlOperations> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoEncodeH264RefMemMgmtCtrlOperations>(),
+ 48usize,
+ concat!(
+ "Size of: ",
+ stringify!(StdVideoEncodeH264RefMemMgmtCtrlOperations)
+ )
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoEncodeH264RefMemMgmtCtrlOperations>(),
+ 8usize,
+ concat!(
+ "Alignment of ",
+ stringify!(StdVideoEncodeH264RefMemMgmtCtrlOperations)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264RefMemMgmtCtrlOperations),
+ "::",
+ stringify!(flags)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).refList0ModOpCount) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264RefMemMgmtCtrlOperations),
+ "::",
+ stringify!(refList0ModOpCount)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pRefList0ModOperations) as usize - ptr as usize },
+ 8usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264RefMemMgmtCtrlOperations),
+ "::",
+ stringify!(pRefList0ModOperations)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).refList1ModOpCount) as usize - ptr as usize },
+ 16usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264RefMemMgmtCtrlOperations),
+ "::",
+ stringify!(refList1ModOpCount)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pRefList1ModOperations) as usize - ptr as usize },
+ 24usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264RefMemMgmtCtrlOperations),
+ "::",
+ stringify!(pRefList1ModOperations)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).refPicMarkingOpCount) as usize - ptr as usize },
+ 32usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264RefMemMgmtCtrlOperations),
+ "::",
+ stringify!(refPicMarkingOpCount)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pRefPicMarkingOperations) as usize - ptr as usize },
+ 40usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264RefMemMgmtCtrlOperations),
+ "::",
+ stringify!(pRefPicMarkingOperations)
+ )
+ );
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH264PictureInfo {
+ pub flags: StdVideoEncodeH264PictureInfoFlags,
+ pub seq_parameter_set_id: u8,
+ pub pic_parameter_set_id: u8,
+ pub pictureType: StdVideoH264PictureType,
+ pub frame_num: u32,
+ pub PicOrderCnt: i32,
+}
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH264PictureInfo() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoEncodeH264PictureInfo> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoEncodeH264PictureInfo>(),
+ 20usize,
+ concat!("Size of: ", stringify!(StdVideoEncodeH264PictureInfo))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoEncodeH264PictureInfo>(),
+ 4usize,
+ concat!("Alignment of ", stringify!(StdVideoEncodeH264PictureInfo))
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264PictureInfo),
+ "::",
+ stringify!(flags)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).seq_parameter_set_id) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264PictureInfo),
+ "::",
+ stringify!(seq_parameter_set_id)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pic_parameter_set_id) as usize - ptr as usize },
+ 5usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264PictureInfo),
+ "::",
+ stringify!(pic_parameter_set_id)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pictureType) as usize - ptr as usize },
+ 8usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264PictureInfo),
+ "::",
+ stringify!(pictureType)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).frame_num) as usize - ptr as usize },
+ 12usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264PictureInfo),
+ "::",
+ stringify!(frame_num)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).PicOrderCnt) as usize - ptr as usize },
+ 16usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264PictureInfo),
+ "::",
+ stringify!(PicOrderCnt)
+ )
+ );
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH264ReferenceInfo {
+ pub flags: StdVideoEncodeH264ReferenceInfoFlags,
+ pub FrameNum: u32,
+ pub PicOrderCnt: i32,
+ pub long_term_pic_num: u16,
+ pub long_term_frame_idx: u16,
+}
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH264ReferenceInfo() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoEncodeH264ReferenceInfo> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoEncodeH264ReferenceInfo>(),
+ 16usize,
+ concat!("Size of: ", stringify!(StdVideoEncodeH264ReferenceInfo))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoEncodeH264ReferenceInfo>(),
+ 4usize,
+ concat!("Alignment of ", stringify!(StdVideoEncodeH264ReferenceInfo))
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264ReferenceInfo),
+ "::",
+ stringify!(flags)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).FrameNum) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264ReferenceInfo),
+ "::",
+ stringify!(FrameNum)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).PicOrderCnt) as usize - ptr as usize },
+ 8usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264ReferenceInfo),
+ "::",
+ stringify!(PicOrderCnt)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).long_term_pic_num) as usize - ptr as usize },
+ 12usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264ReferenceInfo),
+ "::",
+ stringify!(long_term_pic_num)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).long_term_frame_idx) as usize - ptr as usize },
+ 14usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264ReferenceInfo),
+ "::",
+ stringify!(long_term_frame_idx)
+ )
+ );
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH264SliceHeader {
+ pub flags: StdVideoEncodeH264SliceHeaderFlags,
+ pub first_mb_in_slice: u32,
+ pub slice_type: StdVideoH264SliceType,
+ pub idr_pic_id: u16,
+ pub num_ref_idx_l0_active_minus1: u8,
+ pub num_ref_idx_l1_active_minus1: u8,
+ pub cabac_init_idc: StdVideoH264CabacInitIdc,
+ pub disable_deblocking_filter_idc: StdVideoH264DisableDeblockingFilterIdc,
+ pub slice_alpha_c0_offset_div2: i8,
+ pub slice_beta_offset_div2: i8,
+ pub pWeightTable: *const StdVideoEncodeH264WeightTable,
+}
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH264SliceHeader() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoEncodeH264SliceHeader> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoEncodeH264SliceHeader>(),
+ 40usize,
+ concat!("Size of: ", stringify!(StdVideoEncodeH264SliceHeader))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoEncodeH264SliceHeader>(),
+ 8usize,
+ concat!("Alignment of ", stringify!(StdVideoEncodeH264SliceHeader))
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264SliceHeader),
+ "::",
+ stringify!(flags)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).first_mb_in_slice) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264SliceHeader),
+ "::",
+ stringify!(first_mb_in_slice)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).slice_type) as usize - ptr as usize },
+ 8usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264SliceHeader),
+ "::",
+ stringify!(slice_type)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).idr_pic_id) as usize - ptr as usize },
+ 12usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264SliceHeader),
+ "::",
+ stringify!(idr_pic_id)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).num_ref_idx_l0_active_minus1) as usize - ptr as usize
+ },
+ 14usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264SliceHeader),
+ "::",
+ stringify!(num_ref_idx_l0_active_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).num_ref_idx_l1_active_minus1) as usize - ptr as usize
+ },
+ 15usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264SliceHeader),
+ "::",
+ stringify!(num_ref_idx_l1_active_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).cabac_init_idc) as usize - ptr as usize },
+ 16usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264SliceHeader),
+ "::",
+ stringify!(cabac_init_idc)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).disable_deblocking_filter_idc) as usize - ptr as usize
+ },
+ 20usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264SliceHeader),
+ "::",
+ stringify!(disable_deblocking_filter_idc)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).slice_alpha_c0_offset_div2) as usize - ptr as usize },
+ 24usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264SliceHeader),
+ "::",
+ stringify!(slice_alpha_c0_offset_div2)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).slice_beta_offset_div2) as usize - ptr as usize },
+ 25usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264SliceHeader),
+ "::",
+ stringify!(slice_beta_offset_div2)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pWeightTable) as usize - ptr as usize },
+ 32usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH264SliceHeader),
+ "::",
+ stringify!(pWeightTable)
+ )
+ );
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH265WeightTableFlags {
+ pub luma_weight_l0_flag: u16,
+ pub chroma_weight_l0_flag: u16,
+ pub luma_weight_l1_flag: u16,
+ pub chroma_weight_l1_flag: u16,
+}
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH265WeightTableFlags() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoEncodeH265WeightTableFlags> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoEncodeH265WeightTableFlags>(),
+ 8usize,
+ concat!("Size of: ", stringify!(StdVideoEncodeH265WeightTableFlags))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoEncodeH265WeightTableFlags>(),
+ 2usize,
+ concat!(
+ "Alignment of ",
+ stringify!(StdVideoEncodeH265WeightTableFlags)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).luma_weight_l0_flag) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265WeightTableFlags),
+ "::",
+ stringify!(luma_weight_l0_flag)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).chroma_weight_l0_flag) as usize - ptr as usize },
+ 2usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265WeightTableFlags),
+ "::",
+ stringify!(chroma_weight_l0_flag)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).luma_weight_l1_flag) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265WeightTableFlags),
+ "::",
+ stringify!(luma_weight_l1_flag)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).chroma_weight_l1_flag) as usize - ptr as usize },
+ 6usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265WeightTableFlags),
+ "::",
+ stringify!(chroma_weight_l1_flag)
+ )
+ );
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH265WeightTable {
+ pub flags: StdVideoEncodeH265WeightTableFlags,
+ pub luma_log2_weight_denom: u8,
+ pub delta_chroma_log2_weight_denom: i8,
+ pub delta_luma_weight_l0: [i8; 15usize],
+ pub luma_offset_l0: [i8; 15usize],
+ pub delta_chroma_weight_l0: [[i8; 2usize]; 15usize],
+ pub delta_chroma_offset_l0: [[i8; 2usize]; 15usize],
+ pub delta_luma_weight_l1: [i8; 15usize],
+ pub luma_offset_l1: [i8; 15usize],
+ pub delta_chroma_weight_l1: [[i8; 2usize]; 15usize],
+ pub delta_chroma_offset_l1: [[i8; 2usize]; 15usize],
+}
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH265WeightTable() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoEncodeH265WeightTable> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoEncodeH265WeightTable>(),
+ 190usize,
+ concat!("Size of: ", stringify!(StdVideoEncodeH265WeightTable))
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoEncodeH265WeightTable>(),
+ 2usize,
+ concat!("Alignment of ", stringify!(StdVideoEncodeH265WeightTable))
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265WeightTable),
+ "::",
+ stringify!(flags)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).luma_log2_weight_denom) as usize - ptr as usize },
+ 8usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265WeightTable),
+ "::",
+ stringify!(luma_log2_weight_denom)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).delta_chroma_log2_weight_denom) as usize - ptr as usize
+ },
+ 9usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265WeightTable),
+ "::",
+ stringify!(delta_chroma_log2_weight_denom)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).delta_luma_weight_l0) as usize - ptr as usize },
+ 10usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265WeightTable),
+ "::",
+ stringify!(delta_luma_weight_l0)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).luma_offset_l0) as usize - ptr as usize },
+ 25usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265WeightTable),
+ "::",
+ stringify!(luma_offset_l0)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).delta_chroma_weight_l0) as usize - ptr as usize },
+ 40usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265WeightTable),
+ "::",
+ stringify!(delta_chroma_weight_l0)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).delta_chroma_offset_l0) as usize - ptr as usize },
+ 70usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265WeightTable),
+ "::",
+ stringify!(delta_chroma_offset_l0)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).delta_luma_weight_l1) as usize - ptr as usize },
+ 100usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265WeightTable),
+ "::",
+ stringify!(delta_luma_weight_l1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).luma_offset_l1) as usize - ptr as usize },
+ 115usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265WeightTable),
+ "::",
+ stringify!(luma_offset_l1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).delta_chroma_weight_l1) as usize - ptr as usize },
+ 130usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265WeightTable),
+ "::",
+ stringify!(delta_chroma_weight_l1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).delta_chroma_offset_l1) as usize - ptr as usize },
+ 160usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265WeightTable),
+ "::",
+ stringify!(delta_chroma_offset_l1)
+ )
+ );
+}
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH265SliceSegmentHeaderFlags {
+ pub _bitfield_align_1: [u8; 0],
+ pub _bitfield_1: __BindgenBitfieldUnit<[u8; 2usize]>,
+ pub __bindgen_padding_0: u16,
+}
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH265SliceSegmentHeaderFlags() {
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoEncodeH265SliceSegmentHeaderFlags>(),
+ 4usize,
+ concat!(
+ "Size of: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeaderFlags)
+ )
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoEncodeH265SliceSegmentHeaderFlags>(),
+ 4usize,
+ concat!(
+ "Alignment of ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeaderFlags)
+ )
+ );
+}
+impl StdVideoEncodeH265SliceSegmentHeaderFlags {
+ #[inline]
+ pub fn first_slice_segment_in_pic_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_first_slice_segment_in_pic_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(0usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn no_output_of_prior_pics_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_no_output_of_prior_pics_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(1usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn dependent_slice_segment_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_dependent_slice_segment_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(2usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn pic_output_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_pic_output_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(3usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn short_term_ref_pic_set_sps_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_short_term_ref_pic_set_sps_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(4usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn slice_temporal_mvp_enable_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(5usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_slice_temporal_mvp_enable_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(5usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn slice_sao_luma_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(6usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_slice_sao_luma_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(6usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn slice_sao_chroma_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(7usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_slice_sao_chroma_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(7usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn num_ref_idx_active_override_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(8usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_num_ref_idx_active_override_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(8usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn mvd_l1_zero_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(9usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_mvd_l1_zero_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(9usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn cabac_init_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(10usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_cabac_init_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(10usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn cu_chroma_qp_offset_enabled_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(11usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_cu_chroma_qp_offset_enabled_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(11usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn deblocking_filter_override_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(12usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_deblocking_filter_override_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(12usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn slice_deblocking_filter_disabled_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(13usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_slice_deblocking_filter_disabled_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(13usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn collocated_from_l0_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(14usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_collocated_from_l0_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(14usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn slice_loop_filter_across_slices_enabled_flag(&self) -> u32 {
+ unsafe { ::std::mem::transmute(self._bitfield_1.get(15usize, 1u8) as u32) }
+ }
+ #[inline]
+ pub fn set_slice_loop_filter_across_slices_enabled_flag(&mut self, val: u32) {
+ unsafe {
+ let val: u32 = ::std::mem::transmute(val);
+ self._bitfield_1.set(15usize, 1u8, val as u64)
+ }
+ }
+ #[inline]
+ pub fn new_bitfield_1(
+ first_slice_segment_in_pic_flag: u32,
+ no_output_of_prior_pics_flag: u32,
+ dependent_slice_segment_flag: u32,
+ pic_output_flag: u32,
+ short_term_ref_pic_set_sps_flag: u32,
+ slice_temporal_mvp_enable_flag: u32,
+ slice_sao_luma_flag: u32,
+ slice_sao_chroma_flag: u32,
+ num_ref_idx_active_override_flag: u32,
+ mvd_l1_zero_flag: u32,
+ cabac_init_flag: u32,
+ cu_chroma_qp_offset_enabled_flag: u32,
+ deblocking_filter_override_flag: u32,
+ slice_deblocking_filter_disabled_flag: u32,
+ collocated_from_l0_flag: u32,
+ slice_loop_filter_across_slices_enabled_flag: u32,
+ ) -> __BindgenBitfieldUnit<[u8; 2usize]> {
+ let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 2usize]> = Default::default();
+ __bindgen_bitfield_unit.set(0usize, 1u8, {
+ let first_slice_segment_in_pic_flag: u32 =
+ unsafe { ::std::mem::transmute(first_slice_segment_in_pic_flag) };
+ first_slice_segment_in_pic_flag as u64
+ });
+ __bindgen_bitfield_unit.set(1usize, 1u8, {
+ let no_output_of_prior_pics_flag: u32 =
+ unsafe { ::std::mem::transmute(no_output_of_prior_pics_flag) };
+ no_output_of_prior_pics_flag as u64
+ });
+ __bindgen_bitfield_unit.set(2usize, 1u8, {
+ let dependent_slice_segment_flag: u32 =
+ unsafe { ::std::mem::transmute(dependent_slice_segment_flag) };
+ dependent_slice_segment_flag as u64
+ });
+ __bindgen_bitfield_unit.set(3usize, 1u8, {
+ let pic_output_flag: u32 = unsafe { ::std::mem::transmute(pic_output_flag) };
+ pic_output_flag as u64
+ });
+ __bindgen_bitfield_unit.set(4usize, 1u8, {
+ let short_term_ref_pic_set_sps_flag: u32 =
+ unsafe { ::std::mem::transmute(short_term_ref_pic_set_sps_flag) };
+ short_term_ref_pic_set_sps_flag as u64
+ });
+ __bindgen_bitfield_unit.set(5usize, 1u8, {
+ let slice_temporal_mvp_enable_flag: u32 =
+ unsafe { ::std::mem::transmute(slice_temporal_mvp_enable_flag) };
+ slice_temporal_mvp_enable_flag as u64
+ });
+ __bindgen_bitfield_unit.set(6usize, 1u8, {
+ let slice_sao_luma_flag: u32 = unsafe { ::std::mem::transmute(slice_sao_luma_flag) };
+ slice_sao_luma_flag as u64
+ });
+ __bindgen_bitfield_unit.set(7usize, 1u8, {
+ let slice_sao_chroma_flag: u32 =
+ unsafe { ::std::mem::transmute(slice_sao_chroma_flag) };
+ slice_sao_chroma_flag as u64
+ });
+ __bindgen_bitfield_unit.set(8usize, 1u8, {
+ let num_ref_idx_active_override_flag: u32 =
+ unsafe { ::std::mem::transmute(num_ref_idx_active_override_flag) };
+ num_ref_idx_active_override_flag as u64
+ });
+ __bindgen_bitfield_unit.set(9usize, 1u8, {
+ let mvd_l1_zero_flag: u32 = unsafe { ::std::mem::transmute(mvd_l1_zero_flag) };
+ mvd_l1_zero_flag as u64
+ });
+ __bindgen_bitfield_unit.set(10usize, 1u8, {
+ let cabac_init_flag: u32 = unsafe { ::std::mem::transmute(cabac_init_flag) };
+ cabac_init_flag as u64
+ });
+ __bindgen_bitfield_unit.set(11usize, 1u8, {
+ let cu_chroma_qp_offset_enabled_flag: u32 =
+ unsafe { ::std::mem::transmute(cu_chroma_qp_offset_enabled_flag) };
+ cu_chroma_qp_offset_enabled_flag as u64
+ });
+ __bindgen_bitfield_unit.set(12usize, 1u8, {
+ let deblocking_filter_override_flag: u32 =
+ unsafe { ::std::mem::transmute(deblocking_filter_override_flag) };
+ deblocking_filter_override_flag as u64
+ });
+ __bindgen_bitfield_unit.set(13usize, 1u8, {
+ let slice_deblocking_filter_disabled_flag: u32 =
+ unsafe { ::std::mem::transmute(slice_deblocking_filter_disabled_flag) };
+ slice_deblocking_filter_disabled_flag as u64
+ });
+ __bindgen_bitfield_unit.set(14usize, 1u8, {
+ let collocated_from_l0_flag: u32 =
+ unsafe { ::std::mem::transmute(collocated_from_l0_flag) };
+ collocated_from_l0_flag as u64
+ });
+ __bindgen_bitfield_unit.set(15usize, 1u8, {
+ let slice_loop_filter_across_slices_enabled_flag: u32 =
+ unsafe { ::std::mem::transmute(slice_loop_filter_across_slices_enabled_flag) };
+ slice_loop_filter_across_slices_enabled_flag as u64
+ });
+ __bindgen_bitfield_unit
+ }
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH265SliceSegmentLongTermRefPics {
+ pub num_long_term_sps: u8,
+ pub num_long_term_pics: u8,
+ pub lt_idx_sps: [u8; 32usize],
+ pub poc_lsb_lt: [u8; 16usize],
+ pub used_by_curr_pic_lt_flag: u16,
+ pub delta_poc_msb_present_flag: [u8; 48usize],
+ pub delta_poc_msb_cycle_lt: [u8; 48usize],
+}
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH265SliceSegmentLongTermRefPics() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoEncodeH265SliceSegmentLongTermRefPics> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoEncodeH265SliceSegmentLongTermRefPics>(),
+ 148usize,
+ concat!(
+ "Size of: ",
+ stringify!(StdVideoEncodeH265SliceSegmentLongTermRefPics)
+ )
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoEncodeH265SliceSegmentLongTermRefPics>(),
+ 2usize,
+ concat!(
+ "Alignment of ",
+ stringify!(StdVideoEncodeH265SliceSegmentLongTermRefPics)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).num_long_term_sps) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentLongTermRefPics),
+ "::",
+ stringify!(num_long_term_sps)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).num_long_term_pics) as usize - ptr as usize },
+ 1usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentLongTermRefPics),
+ "::",
+ stringify!(num_long_term_pics)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).lt_idx_sps) as usize - ptr as usize },
+ 2usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentLongTermRefPics),
+ "::",
+ stringify!(lt_idx_sps)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).poc_lsb_lt) as usize - ptr as usize },
+ 34usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentLongTermRefPics),
+ "::",
+ stringify!(poc_lsb_lt)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).used_by_curr_pic_lt_flag) as usize - ptr as usize },
+ 50usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentLongTermRefPics),
+ "::",
+ stringify!(used_by_curr_pic_lt_flag)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).delta_poc_msb_present_flag) as usize - ptr as usize },
+ 52usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentLongTermRefPics),
+ "::",
+ stringify!(delta_poc_msb_present_flag)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).delta_poc_msb_cycle_lt) as usize - ptr as usize },
+ 100usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentLongTermRefPics),
+ "::",
+ stringify!(delta_poc_msb_cycle_lt)
+ )
+ );
+}
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH265SliceSegmentHeader {
+ pub flags: StdVideoEncodeH265SliceSegmentHeaderFlags,
+ pub slice_type: StdVideoH265SliceType,
+ pub slice_segment_address: u32,
+ pub short_term_ref_pic_set_idx: u8,
+ pub collocated_ref_idx: u8,
+ pub num_ref_idx_l0_active_minus1: u8,
+ pub num_ref_idx_l1_active_minus1: u8,
+ pub MaxNumMergeCand: u8,
+ pub slice_cb_qp_offset: i8,
+ pub slice_cr_qp_offset: i8,
+ pub slice_beta_offset_div2: i8,
+ pub slice_tc_offset_div2: i8,
+ pub slice_act_y_qp_offset: i8,
+ pub slice_act_cb_qp_offset: i8,
+ pub slice_act_cr_qp_offset: i8,
+ pub pShortTermRefPicSet: *const StdVideoH265ShortTermRefPicSet,
+ pub pLongTermRefPics: *const StdVideoEncodeH265SliceSegmentLongTermRefPics,
+ pub pWeightTable: *const StdVideoEncodeH265WeightTable,
+}
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH265SliceSegmentHeader() {
+ const UNINIT: ::std::mem::MaybeUninit<StdVideoEncodeH265SliceSegmentHeader> =
+ ::std::mem::MaybeUninit::uninit();
+ let ptr = UNINIT.as_ptr();
+ assert_eq!(
+ ::std::mem::size_of::<StdVideoEncodeH265SliceSegmentHeader>(),
+ 48usize,
+ concat!(
+ "Size of: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader)
+ )
+ );
+ assert_eq!(
+ ::std::mem::align_of::<StdVideoEncodeH265SliceSegmentHeader>(),
+ 8usize,
+ concat!(
+ "Alignment of ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+ 0usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(flags)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).slice_type) as usize - ptr as usize },
+ 4usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(slice_type)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).slice_segment_address) as usize - ptr as usize },
+ 8usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(slice_segment_address)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).short_term_ref_pic_set_idx) as usize - ptr as usize },
+ 12usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(short_term_ref_pic_set_idx)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).collocated_ref_idx) as usize - ptr as usize },
+ 13usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(collocated_ref_idx)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).num_ref_idx_l0_active_minus1) as usize - ptr as usize
+ },
+ 14usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(num_ref_idx_l0_active_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe {
+ ::std::ptr::addr_of!((*ptr).num_ref_idx_l1_active_minus1) as usize - ptr as usize
+ },
+ 15usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(num_ref_idx_l1_active_minus1)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).MaxNumMergeCand) as usize - ptr as usize },
+ 16usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(MaxNumMergeCand)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).slice_cb_qp_offset) as usize - ptr as usize },
+ 17usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(slice_cb_qp_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).slice_cr_qp_offset) as usize - ptr as usize },
+ 18usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(slice_cr_qp_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).slice_beta_offset_div2) as usize - ptr as usize },
+ 19usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(slice_beta_offset_div2)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).slice_tc_offset_div2) as usize - ptr as usize },
+ 20usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(slice_tc_offset_div2)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).slice_act_y_qp_offset) as usize - ptr as usize },
+ 21usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(slice_act_y_qp_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).slice_act_cb_qp_offset) as usize - ptr as usize },
+ 22usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(slice_act_cb_qp_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).slice_act_cr_qp_offset) as usize - ptr as usize },
+ 23usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(slice_act_cr_qp_offset)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pShortTermRefPicSet) as usize - ptr as usize },
+ 24usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(pShortTermRefPicSet)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pLongTermRefPics) as usize - ptr as usize },
+ 32usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(pLongTermRefPics)
+ )
+ );
+ assert_eq!(
+ unsafe { ::std::ptr::addr_of!((*ptr).pWeightTable) as usize - ptr as usize },
+ 40usize,
+ concat!(
+ "Offset of field: ",
+ stringify!(StdVideoEncodeH265SliceSegmentHeader),
+ "::",
+ stringify!(pWeightTable)
+ )
+ );
+}
+/// bindgen-generated mirror of the C bitfield struct
+/// `StdVideoEncodeH265ReferenceModificationFlags` (Vulkan Video H.265 encode std header).
+/// Two 1-bit flags are packed into `_bitfield_1`; use the accessor methods on the
+/// `impl` block below rather than touching the raw bytes.
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH265ReferenceModificationFlags {
+    pub _bitfield_align_1: [u8; 0], // zero-sized alignment marker emitted by bindgen
+    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>, // raw storage for the bit flags
+    pub __bindgen_padding_0: [u8; 3usize], // explicit tail padding up to the 4-byte size
+}
+// bindgen-generated layout test: pins the size and alignment of the Rust struct to
+// the values of the C definition at generation time. A failure here means the
+// binding no longer matches the header it was generated from.
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH265ReferenceModificationFlags() {
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoEncodeH265ReferenceModificationFlags>(),
+        4usize,
+        concat!(
+            "Size of: ",
+            stringify!(StdVideoEncodeH265ReferenceModificationFlags)
+        )
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoEncodeH265ReferenceModificationFlags>(),
+        4usize,
+        concat!(
+            "Alignment of ",
+            stringify!(StdVideoEncodeH265ReferenceModificationFlags)
+        )
+    );
+}
+// bindgen-generated accessors for the packed bit flags. Each getter/setter pair
+// addresses a fixed (offset, width) slot inside `_bitfield_1`; all flags are 1 bit
+// wide, matching the `uint32_t name : 1` members of the C struct.
+impl StdVideoEncodeH265ReferenceModificationFlags {
+    /// Bit 0 of the bitfield unit.
+    #[inline]
+    pub fn ref_pic_list_modification_flag_l0(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_ref_pic_list_modification_flag_l0(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(0usize, 1u8, val as u64)
+        }
+    }
+    /// Bit 1 of the bitfield unit.
+    #[inline]
+    pub fn ref_pic_list_modification_flag_l1(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_ref_pic_list_modification_flag_l1(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(1usize, 1u8, val as u64)
+        }
+    }
+    /// Builds a bitfield unit with both flags set at once; values beyond the
+    /// 1-bit width are truncated by `set`.
+    #[inline]
+    pub fn new_bitfield_1(
+        ref_pic_list_modification_flag_l0: u32,
+        ref_pic_list_modification_flag_l1: u32,
+    ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
+        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
+        __bindgen_bitfield_unit.set(0usize, 1u8, {
+            let ref_pic_list_modification_flag_l0: u32 =
+                unsafe { ::std::mem::transmute(ref_pic_list_modification_flag_l0) };
+            ref_pic_list_modification_flag_l0 as u64
+        });
+        __bindgen_bitfield_unit.set(1usize, 1u8, {
+            let ref_pic_list_modification_flag_l1: u32 =
+                unsafe { ::std::mem::transmute(ref_pic_list_modification_flag_l1) };
+            ref_pic_list_modification_flag_l1 as u64
+        });
+        __bindgen_bitfield_unit
+    }
+}
+/// bindgen-generated mirror of the C struct `StdVideoEncodeH265ReferenceModifications`.
+/// The `u8` counts give the number of entries in the corresponding modification
+/// lists pointed to by the `*const u8` fields (pointer validity/length is the
+/// caller's responsibility, as in the C API).
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH265ReferenceModifications {
+    pub flags: StdVideoEncodeH265ReferenceModificationFlags,
+    pub referenceList0ModificationsCount: u8,
+    pub pReferenceList0Modifications: *const u8,
+    pub referenceList1ModificationsCount: u8,
+    pub pReferenceList1Modifications: *const u8,
+}
+// bindgen-generated layout test: pins size, alignment and every field offset to the
+// values computed from the C definition at generation time. The asserted 8-byte
+// alignment and pointer offsets (8/24) encode a 64-bit-pointer target.
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH265ReferenceModifications() {
+    // MaybeUninit gives a valid-to-point-at allocation without constructing a value.
+    const UNINIT: ::std::mem::MaybeUninit<StdVideoEncodeH265ReferenceModifications> =
+        ::std::mem::MaybeUninit::uninit();
+    let ptr = UNINIT.as_ptr();
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoEncodeH265ReferenceModifications>(),
+        32usize,
+        concat!(
+            "Size of: ",
+            stringify!(StdVideoEncodeH265ReferenceModifications)
+        )
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoEncodeH265ReferenceModifications>(),
+        8usize,
+        concat!(
+            "Alignment of ",
+            stringify!(StdVideoEncodeH265ReferenceModifications)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+        0usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH265ReferenceModifications),
+            "::",
+            stringify!(flags)
+        )
+    );
+    assert_eq!(
+        unsafe {
+            ::std::ptr::addr_of!((*ptr).referenceList0ModificationsCount) as usize - ptr as usize
+        },
+        4usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH265ReferenceModifications),
+            "::",
+            stringify!(referenceList0ModificationsCount)
+        )
+    );
+    assert_eq!(
+        unsafe {
+            ::std::ptr::addr_of!((*ptr).pReferenceList0Modifications) as usize - ptr as usize
+        },
+        8usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH265ReferenceModifications),
+            "::",
+            stringify!(pReferenceList0Modifications)
+        )
+    );
+    assert_eq!(
+        unsafe {
+            ::std::ptr::addr_of!((*ptr).referenceList1ModificationsCount) as usize - ptr as usize
+        },
+        16usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH265ReferenceModifications),
+            "::",
+            stringify!(referenceList1ModificationsCount)
+        )
+    );
+    assert_eq!(
+        unsafe {
+            ::std::ptr::addr_of!((*ptr).pReferenceList1Modifications) as usize - ptr as usize
+        },
+        24usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH265ReferenceModifications),
+            "::",
+            stringify!(pReferenceList1Modifications)
+        )
+    );
+}
+/// bindgen-generated mirror of the C bitfield struct `StdVideoEncodeH265PictureInfoFlags`.
+/// Five 1-bit flags are packed into `_bitfield_1`; use the accessor methods on the
+/// `impl` block below.
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH265PictureInfoFlags {
+    pub _bitfield_align_1: [u8; 0], // zero-sized alignment marker emitted by bindgen
+    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>, // raw storage for the bit flags
+    pub __bindgen_padding_0: [u8; 3usize], // explicit tail padding up to the 4-byte size
+}
+// bindgen-generated layout test: pins size and alignment to the C definition's values.
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH265PictureInfoFlags() {
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoEncodeH265PictureInfoFlags>(),
+        4usize,
+        concat!("Size of: ", stringify!(StdVideoEncodeH265PictureInfoFlags))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoEncodeH265PictureInfoFlags>(),
+        4usize,
+        concat!(
+            "Alignment of ",
+            stringify!(StdVideoEncodeH265PictureInfoFlags)
+        )
+    );
+}
+// bindgen-generated accessors for the packed bit flags. Bits are assigned in
+// declaration order of the C bitfield: bit 0 = is_reference_flag,
+// bit 1 = IrapPicFlag, bit 2 = long_term_flag, bit 3 = discardable_flag,
+// bit 4 = cross_layer_bla_flag.
+impl StdVideoEncodeH265PictureInfoFlags {
+    #[inline]
+    pub fn is_reference_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_is_reference_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(0usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn IrapPicFlag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_IrapPicFlag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(1usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn long_term_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(2usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_long_term_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(2usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn discardable_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(3usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_discardable_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(3usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn cross_layer_bla_flag(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(4usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_cross_layer_bla_flag(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(4usize, 1u8, val as u64)
+        }
+    }
+    /// Builds a bitfield unit with all five flags set at once; values beyond the
+    /// 1-bit width are truncated by `set`.
+    #[inline]
+    pub fn new_bitfield_1(
+        is_reference_flag: u32,
+        IrapPicFlag: u32,
+        long_term_flag: u32,
+        discardable_flag: u32,
+        cross_layer_bla_flag: u32,
+    ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
+        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
+        __bindgen_bitfield_unit.set(0usize, 1u8, {
+            let is_reference_flag: u32 = unsafe { ::std::mem::transmute(is_reference_flag) };
+            is_reference_flag as u64
+        });
+        __bindgen_bitfield_unit.set(1usize, 1u8, {
+            let IrapPicFlag: u32 = unsafe { ::std::mem::transmute(IrapPicFlag) };
+            IrapPicFlag as u64
+        });
+        __bindgen_bitfield_unit.set(2usize, 1u8, {
+            let long_term_flag: u32 = unsafe { ::std::mem::transmute(long_term_flag) };
+            long_term_flag as u64
+        });
+        __bindgen_bitfield_unit.set(3usize, 1u8, {
+            let discardable_flag: u32 = unsafe { ::std::mem::transmute(discardable_flag) };
+            discardable_flag as u64
+        });
+        __bindgen_bitfield_unit.set(4usize, 1u8, {
+            let cross_layer_bla_flag: u32 = unsafe { ::std::mem::transmute(cross_layer_bla_flag) };
+            cross_layer_bla_flag as u64
+        });
+        __bindgen_bitfield_unit
+    }
+}
+/// bindgen-generated mirror of the C struct `StdVideoEncodeH265PictureInfo`
+/// (per-picture parameters for H.265 encode). Field names keep the C/spec
+/// spelling, hence the mixed-case identifiers.
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH265PictureInfo {
+    pub flags: StdVideoEncodeH265PictureInfoFlags,
+    pub PictureType: StdVideoH265PictureType,
+    pub sps_video_parameter_set_id: u8,
+    pub pps_seq_parameter_set_id: u8,
+    pub pps_pic_parameter_set_id: u8,
+    pub PicOrderCntVal: i32,
+    pub TemporalId: u8,
+}
+// bindgen-generated layout test: pins size (20), alignment (4) and every field
+// offset to the values computed from the C definition at generation time.
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH265PictureInfo() {
+    // MaybeUninit gives a valid-to-point-at allocation without constructing a value.
+    const UNINIT: ::std::mem::MaybeUninit<StdVideoEncodeH265PictureInfo> =
+        ::std::mem::MaybeUninit::uninit();
+    let ptr = UNINIT.as_ptr();
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoEncodeH265PictureInfo>(),
+        20usize,
+        concat!("Size of: ", stringify!(StdVideoEncodeH265PictureInfo))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoEncodeH265PictureInfo>(),
+        4usize,
+        concat!("Alignment of ", stringify!(StdVideoEncodeH265PictureInfo))
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+        0usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH265PictureInfo),
+            "::",
+            stringify!(flags)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).PictureType) as usize - ptr as usize },
+        4usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH265PictureInfo),
+            "::",
+            stringify!(PictureType)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).sps_video_parameter_set_id) as usize - ptr as usize },
+        8usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH265PictureInfo),
+            "::",
+            stringify!(sps_video_parameter_set_id)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).pps_seq_parameter_set_id) as usize - ptr as usize },
+        9usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH265PictureInfo),
+            "::",
+            stringify!(pps_seq_parameter_set_id)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).pps_pic_parameter_set_id) as usize - ptr as usize },
+        10usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH265PictureInfo),
+            "::",
+            stringify!(pps_pic_parameter_set_id)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).PicOrderCntVal) as usize - ptr as usize },
+        12usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH265PictureInfo),
+            "::",
+            stringify!(PicOrderCntVal)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).TemporalId) as usize - ptr as usize },
+        16usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH265PictureInfo),
+            "::",
+            stringify!(TemporalId)
+        )
+    );
+}
+/// bindgen-generated mirror of the C bitfield struct `StdVideoEncodeH265ReferenceInfoFlags`.
+/// Two 1-bit flags are packed into `_bitfield_1`; use the accessor methods on the
+/// `impl` block below.
+#[repr(C)]
+#[repr(align(4))]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH265ReferenceInfoFlags {
+    pub _bitfield_align_1: [u8; 0], // zero-sized alignment marker emitted by bindgen
+    pub _bitfield_1: __BindgenBitfieldUnit<[u8; 1usize]>, // raw storage for the bit flags
+    pub __bindgen_padding_0: [u8; 3usize], // explicit tail padding up to the 4-byte size
+}
+// bindgen-generated layout test: pins size and alignment to the C definition's values.
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH265ReferenceInfoFlags() {
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoEncodeH265ReferenceInfoFlags>(),
+        4usize,
+        concat!(
+            "Size of: ",
+            stringify!(StdVideoEncodeH265ReferenceInfoFlags)
+        )
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoEncodeH265ReferenceInfoFlags>(),
+        4usize,
+        concat!(
+            "Alignment of ",
+            stringify!(StdVideoEncodeH265ReferenceInfoFlags)
+        )
+    );
+}
+// bindgen-generated accessors for the packed bit flags:
+// bit 0 = used_for_long_term_reference, bit 1 = unused_for_reference.
+impl StdVideoEncodeH265ReferenceInfoFlags {
+    #[inline]
+    pub fn used_for_long_term_reference(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(0usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_used_for_long_term_reference(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(0usize, 1u8, val as u64)
+        }
+    }
+    #[inline]
+    pub fn unused_for_reference(&self) -> u32 {
+        unsafe { ::std::mem::transmute(self._bitfield_1.get(1usize, 1u8) as u32) }
+    }
+    #[inline]
+    pub fn set_unused_for_reference(&mut self, val: u32) {
+        unsafe {
+            let val: u32 = ::std::mem::transmute(val);
+            self._bitfield_1.set(1usize, 1u8, val as u64)
+        }
+    }
+    /// Builds a bitfield unit with both flags set at once; values beyond the
+    /// 1-bit width are truncated by `set`.
+    #[inline]
+    pub fn new_bitfield_1(
+        used_for_long_term_reference: u32,
+        unused_for_reference: u32,
+    ) -> __BindgenBitfieldUnit<[u8; 1usize]> {
+        let mut __bindgen_bitfield_unit: __BindgenBitfieldUnit<[u8; 1usize]> = Default::default();
+        __bindgen_bitfield_unit.set(0usize, 1u8, {
+            let used_for_long_term_reference: u32 =
+                unsafe { ::std::mem::transmute(used_for_long_term_reference) };
+            used_for_long_term_reference as u64
+        });
+        __bindgen_bitfield_unit.set(1usize, 1u8, {
+            let unused_for_reference: u32 = unsafe { ::std::mem::transmute(unused_for_reference) };
+            unused_for_reference as u64
+        });
+        __bindgen_bitfield_unit
+    }
+}
+/// bindgen-generated mirror of the C struct `StdVideoEncodeH265ReferenceInfo`
+/// (per-reference-picture state for H.265 encode). Field names keep the C/spec
+/// spelling.
+#[repr(C)]
+#[derive(Debug, Copy, Clone)]
+pub struct StdVideoEncodeH265ReferenceInfo {
+    pub flags: StdVideoEncodeH265ReferenceInfoFlags,
+    pub PicOrderCntVal: i32,
+    pub TemporalId: u8,
+}
+// bindgen-generated layout test: pins size (12), alignment (4) and field offsets
+// to the values computed from the C definition at generation time.
+#[test]
+fn bindgen_test_layout_StdVideoEncodeH265ReferenceInfo() {
+    // MaybeUninit gives a valid-to-point-at allocation without constructing a value.
+    const UNINIT: ::std::mem::MaybeUninit<StdVideoEncodeH265ReferenceInfo> =
+        ::std::mem::MaybeUninit::uninit();
+    let ptr = UNINIT.as_ptr();
+    assert_eq!(
+        ::std::mem::size_of::<StdVideoEncodeH265ReferenceInfo>(),
+        12usize,
+        concat!("Size of: ", stringify!(StdVideoEncodeH265ReferenceInfo))
+    );
+    assert_eq!(
+        ::std::mem::align_of::<StdVideoEncodeH265ReferenceInfo>(),
+        4usize,
+        concat!("Alignment of ", stringify!(StdVideoEncodeH265ReferenceInfo))
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).flags) as usize - ptr as usize },
+        0usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH265ReferenceInfo),
+            "::",
+            stringify!(flags)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).PicOrderCntVal) as usize - ptr as usize },
+        4usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH265ReferenceInfo),
+            "::",
+            stringify!(PicOrderCntVal)
+        )
+    );
+    assert_eq!(
+        unsafe { ::std::ptr::addr_of!((*ptr).TemporalId) as usize - ptr as usize },
+        8usize,
+        concat!(
+            "Offset of field: ",
+            stringify!(StdVideoEncodeH265ReferenceInfo),
+            "::",
+            stringify!(TemporalId)
+        )
+    );
+}
diff --git a/third_party/rust/ash/src/vk/platform_types.rs b/third_party/rust/ash/src/vk/platform_types.rs
new file mode 100644
index 0000000000..4d6ffe0597
--- /dev/null
+++ b/third_party/rust/ash/src/vk/platform_types.rs
@@ -0,0 +1,41 @@
+#![allow(non_camel_case_types)]
+
+use std::os::raw::*;
+// X11 (Xlib / XRandR) handles; `Display` is treated as opaque, hence the raw pointer.
+pub type RROutput = c_ulong;
+pub type VisualID = c_uint;
+pub type Display = *const c_void;
+pub type Window = c_ulong;
+// XCB
+pub type xcb_connection_t = c_void;
+pub type xcb_window_t = u32;
+pub type xcb_visualid_t = u32;
+// Mir (legacy display server), opaque handles
+pub type MirConnection = *const c_void;
+pub type MirSurface = *const c_void;
+// Win32 window-system handles, treated as opaque pointers
+pub type HINSTANCE = *const c_void;
+pub type HWND = *const c_void;
+// Wayland
+pub type wl_display = c_void;
+pub type wl_surface = c_void;
+// Additional Win32 types
+pub type HANDLE = *mut c_void;
+pub type HMONITOR = HANDLE;
+pub type DWORD = c_ulong;
+pub type LPCWSTR = *const u16;
+// Fuchsia (Zircon) handle
+pub type zx_handle_t = u32;
+// QNX Screen, opaque
+pub type _screen_context = c_void;
+pub type _screen_window = c_void;
+// Win32 SECURITY_ATTRIBUTES, opaque here (only ever passed by pointer)
+pub type SECURITY_ATTRIBUTES = c_void;
+// Opaque types
+pub type ANativeWindow = c_void;
+pub type AHardwareBuffer = c_void;
+pub type CAMetalLayer = c_void;
+// This definition is behind an NDA with a best effort guess from
+// https://github.com/google/gapid/commit/22aafebec4638c6aaa77667096bca30f6e842d95#diff-ab3ab4a7d89b4fc8a344ff4e9332865f268ea1669ee379c1b516a954ecc2e7a6R20-R21
+pub type GgpStreamDescriptor = u32;
+pub type GgpFrameToken = u64;
+// DirectFB, opaque
+pub type IDirectFB = c_void;
+pub type IDirectFBSurface = c_void;
+// Apple IOSurface and Metal objects, modelled as raw pointers to opaque types
+pub type __IOSurface = c_void;
+pub type IOSurfaceRef = *mut __IOSurface;
+pub type MTLBuffer_id = *mut c_void;
+pub type MTLCommandQueue_id = *mut c_void;
+pub type MTLDevice_id = *mut c_void;
+pub type MTLSharedEvent_id = *mut c_void;
+pub type MTLTexture_id = *mut c_void;
diff --git a/third_party/rust/ash/src/vk/prelude.rs b/third_party/rust/ash/src/vk/prelude.rs
new file mode 100644
index 0000000000..cb2f17718c
--- /dev/null
+++ b/third_party/rust/ash/src/vk/prelude.rs
@@ -0,0 +1,61 @@
+use crate::vk;
+
+/// Holds 24 bits in the least significant bits of memory,
+/// and 8 bits in the most significant bits of that memory,
+/// occupying a single [`u32`] in total. This is commonly used in
+/// [acceleration structure instances] such as
+/// [`vk::AccelerationStructureInstanceKHR`],
+/// [`vk::AccelerationStructureSRTMotionInstanceNV`] and
+/// [`vk::AccelerationStructureMatrixMotionInstanceNV`].
+///
+/// [acceleration structure instances]: https://www.khronos.org/registry/vulkan/specs/1.3-extensions/man/html/VkAccelerationStructureInstanceKHR.html#_description
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
+#[repr(transparent)]
+pub struct Packed24_8(u32);
+
+impl Packed24_8 {
+    /// Packs `low_24` into the lower 24 bits and `high_8` into the upper 8 bits
+    /// of a single [`u32`]. Bits of `low_24` above the 24th are silently
+    /// discarded by the `& 0x00ff_ffff` mask.
+    pub fn new(low_24: u32, high_8: u8) -> Self {
+        Self((low_24 & 0x00ff_ffff) | (u32::from(high_8) << 24))
+    }
+
+    /// Extracts the least-significant 24 bits (3 bytes) of this integer
+    pub fn low_24(&self) -> u32 {
+        self.0 & 0xffffff
+    }
+
+    /// Extracts the most significant 8 bits (single byte) of this integer
+    pub fn high_8(&self) -> u8 {
+        (self.0 >> 24) as u8
+    }
+}
+
+impl vk::ColorComponentFlags {
+    /// Contraction of [`R`][Self::R] | [`G`][Self::G] | [`B`][Self::B] | [`A`][Self::A],
+    /// i.e. a write mask enabling all four color components.
+    pub const RGBA: Self = Self(Self::R.0 | Self::G.0 | Self::B.0 | Self::A.0);
+}
+
+/// Promotes a 2D extent to 3D, fixing `depth` to `1`.
+impl From<vk::Extent2D> for vk::Extent3D {
+    fn from(value: vk::Extent2D) -> Self {
+        Self {
+            width: value.width,
+            height: value.height,
+            depth: 1,
+        }
+    }
+}
+
+/// Builds a rectangle covering the whole extent, anchored at the default
+/// (zero) offset.
+impl From<vk::Extent2D> for vk::Rect2D {
+    fn from(extent: vk::Extent2D) -> Self {
+        Self {
+            offset: Default::default(),
+            extent,
+        }
+    }
+}
+
+/// Structures implementing this trait are layout-compatible with [`vk::BaseInStructure`] and
+/// [`vk::BaseOutStructure`]. Such structures have an `s_type` field indicating its type, which
+/// must always match the value of [`TaggedStructure::STRUCTURE_TYPE`].
+///
+/// # Safety
+///
+/// Implementors assert the layout compatibility and `s_type` invariant described
+/// above; unsafe code may rely on it when reinterpreting structure pointers.
+pub unsafe trait TaggedStructure {
+    const STRUCTURE_TYPE: vk::StructureType;
+}