#![warn(clippy::all)]
extern crate libc;
#[cfg(any(
feature = "VK_KHR_win32_surface", feature = "VK_KHR_external_memory_win32",
feature = "VK_KHR_external_semaphore_win32", feature = "VK_KHR_external_fence_win32",
feature = "VK_NV_external_memory_win32"
))]
extern crate winapi;
#[cfg(any(feature = "VK_KHR_xlib_surface", feature = "VK_EXT_acquire_xlib_display"))]
extern crate x11;
#[cfg(feature = "VK_KHR_xcb_surface")]
extern crate xcb;
#[cfg(feature = "VK_KHR_wayland_surface")]
extern crate wayland_client;
#[cfg(feature = "VK_KHR_android_surface")]
extern crate android;
#[cfg(feature = "DynamicLoaded")]
extern crate libloading;
#[macro_use]
pub mod vk;
use vk::*;
#[cfg(feature = "Implements")]
mod vkresolve;
#[cfg(feature = "Implements")]
pub use vkresolve::{Resolver, ResolverInterface};
use std::error::Error;
#[cfg(feature = "Implements")] mod fnconv;
/// Crate-common result type; the error side carries the raw `VkResult` code.
pub type Result<T> = std::result::Result<T, VkResultBox>;
/// Converts a raw Vulkan return code into the crate-common `Result`.
pub trait VkResultHandler
{
/// `Ok(())` on `VK_SUCCESS`, otherwise `Err` wrapping the code in `VkResultBox`.
fn into_result(self) -> Result<()>;
}
impl VkResultHandler for VkResult
{
    /// Maps `VK_SUCCESS` to `Ok(())`; every other code becomes `Err(VkResultBox(code))`.
    fn into_result(self) -> Result<()>
    {
        match self
        {
            VK_SUCCESS => Ok(()),
            other => Err(VkResultBox(other))
        }
    }
}
/// Newtype wrapper around a raw `VkResult`, used as this crate's error type;
/// implements `Error`, `Debug` and `Display` with the spec's descriptions.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct VkResultBox(pub VkResult);
impl Error for VkResultBox
{
    /// Returns the human-readable description of the wrapped result code,
    /// following the wording of the Vulkan specification.
    fn description(&self) -> &str
    {
        match self.0
        {
            // Success / status codes
            VK_SUCCESS => "Command successfully completed", VK_NOT_READY => "A fence or query has not yet completed",
            VK_TIMEOUT => "A wait operation has not completed in the specified time", VK_EVENT_SET => "An event is signaled",
            VK_EVENT_RESET => "An event is unsignaled", VK_INCOMPLETE => "A return array was too small for the result",
            #[cfg(feature = "VK_KHR_swapchain")]
            VK_SUBOPTIMAL_KHR => "Sub-optimal swapchain",
            // Core error codes
            VK_ERROR_OUT_OF_HOST_MEMORY => "A host memory allocation has failed",
            VK_ERROR_OUT_OF_DEVICE_MEMORY => "A device memory allocation has failed",
            VK_ERROR_INITIALIZATION_FAILED => "Initialization of an object could not be completed for implementation-specific reasons",
            VK_ERROR_DEVICE_LOST => "The logical or physical device has been lost",
            VK_ERROR_MEMORY_MAP_FAILED => "Mapping of a memory object has failed",
            // FIX: "is not presented" -> "is not present" (matches the Vulkan spec wording)
            VK_ERROR_LAYER_NOT_PRESENT => "A requested layer is not present or could not be loaded",
            VK_ERROR_EXTENSION_NOT_PRESENT => "A requested extension is not supported",
            VK_ERROR_FEATURE_NOT_PRESENT => "A requested feature is not supported",
            VK_ERROR_INCOMPATIBLE_DRIVER => "The requested version of Vulkan is not supported by the driver or is otherwise incompatible for implementation-specific reasons",
            VK_ERROR_TOO_MANY_OBJECTS => "Too many objects of the type have already been created",
            VK_ERROR_FORMAT_NOT_SUPPORTED => "A requested format is not supported on this device",
            VK_ERROR_FRAGMENTED_POOL => "A pool allocation has failed due to fragmentation of the pool's memory",
            // Extension error codes (only compiled in when the feature is enabled)
            #[cfg(feature = "VK_KHR_surface")]
            VK_ERROR_SURFACE_LOST_KHR => "Surface lost",
            #[cfg(feature = "VK_KHR_surface")]
            VK_ERROR_NATIVE_WINDOW_IN_USE_KHR => "Native window is in use",
            #[cfg(feature = "VK_KHR_swapchain")]
            VK_ERROR_OUT_OF_DATE_KHR => "Out of date",
            #[cfg(feature = "VK_KHR_display_swapchain")]
            VK_ERROR_INCOMPATIBLE_DISPLAY_KHR => "The display used by a swapchain does not use the same presentable image layout",
            #[cfg(feature = "VK_EXT_debug_report")]
            VK_ERROR_VALIDATION_FAILED_EXT => "Validation failed",
            #[cfg(feature = "VK_NV_glsl_shader")]
            VK_ERROR_INVALID_SHADER_NV => "Invalid GLSL shader",
            #[cfg(feature = "VK_KHR_maintenance1")]
            VK_ERROR_OUT_OF_POOL_MEMORY_KHR => "A pool memory allocation has failed",
            // FIX: "ths specified type" -> "the specified type"
            #[cfg(feature = "VK_KHR_external_memory_capabilities")]
            VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR => "An external handle is not a valid handle of the specified type",
            _ => "Unknown or extension-specific error"
        }
    }
}
impl std::fmt::Debug for VkResultBox
{
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result
{
write!(fmt, "[{:?}] {}", self.0, self.description())
}
}
/// `Display` shares the `Debug` representation (`[<code>] <description>`).
impl std::fmt::Display for VkResultBox
{
fn fmt(&self, fmt: &mut std::fmt::Formatter) -> std::fmt::Result { std::fmt::Debug::fmt(self, fmt) }
}
/// Common interface for objects that wrap a raw Vulkan handle.
pub trait VkHandle
{
/// The raw Vulkan handle type (e.g. `VkQueryPool`).
type Handle;
/// Returns the underlying raw handle value.
fn native_ptr(&self) -> Self::Handle;
}
/// An object that belongs to a logical `Device`.
pub trait DeviceChild
{
/// Returns the `Device` this object is associated with.
fn device(&self) -> &Device;
}
impl<'h, H: VkHandle + ?Sized + 'h> VkHandle for Option<&'h H>
{
    type Handle = <H as VkHandle>::Handle;
    /// `Some` forwards to the wrapped reference; `None` yields an all-zero
    /// handle value.
    fn native_ptr(&self) -> Self::Handle
    {
        match *self
        {
            Some(handle) => handle.native_ptr(),
            // NOTE(review): assumes a zeroed `Handle` is a valid "null handle"
            // bit pattern — true for raw Vulkan handles; confirm for any other impl.
            None => unsafe { std::mem::zeroed() }
        }
    }
}
#[cfg(feature = "Implements")]
/// Implements `Drop` for device-child handle tuples: for each listed type `$t`,
/// the destructor calls the resolver function `$d` (e.g. `destroy_query_pool`)
/// with the parent device handle (`self.1`), the raw object handle (`self.0`)
/// and a null allocation callback.
macro_rules! DeviceChildCommonDrop
{
{ for $($t: ty [$d: ident]),* } =>
{
$(
impl Drop for $t { fn drop(&mut self) { unsafe { Resolver::get().$d(self.1.native_ptr(), self.0, ::std::ptr::null()) }; } }
)*
}
}
/// A half-open numeric range `begin .. end`, implemented both for plain
/// `Range`s and for single values (where `v` means `v .. v + 1`).
pub trait AnalogNumRange<T>
{
// begin: inclusive start / end: exclusive end
fn begin(&self) -> T; fn end(&self) -> T;
/// Number of elements covered: `end - begin`.
fn count(&self) -> T where T: ::std::ops::Sub<T, Output = T> + Copy
{
self.end() - self.begin()
}
}
/// A bare value `v` denotes the single-element range `v .. v + 1`.
impl<T> AnalogNumRange<T> for T where T: std::ops::Add<u32, Output = T> + Copy
{
    fn begin(&self) -> T
    {
        *self
    }
    fn end(&self) -> T
    {
        *self + 1
    }
}
/// A `Range` maps directly: `start` is the inclusive begin, `end` the exclusive end.
impl<T> AnalogNumRange<T> for std::ops::Range<T> where T: Copy
{
    fn begin(&self) -> T
    {
        self.start
    }
    fn end(&self) -> T
    {
        self.end
    }
}
/// Convenience re-exports of this crate's commonly used traits.
pub mod traits
{
pub use super::{VkResultBox, VkHandle, DeviceChild, ClearColorValue, ImageSize, AnalogNumRange};
// These traits only exist when the "Implements" feature is enabled.
#[cfg(feature = "Implements")]
pub use super::{MemoryBound, Status, Waitable};
}
// Strongly-typed dimension helpers. All are #[repr(C)] so they can be
// reinterpreted as the matching raw Vk structs / fixed-size arrays.
// CONSISTENCY FIX: the 2/3/4-dimensional variants previously lacked the
// `Copy, Hash, PartialOrd, Ord` derives that the 1-dimensional ones carry;
// adding them is backward-compatible for all existing callers.
/// 1D extent (width).
#[repr(C)] #[derive(Debug, Clone, PartialEq, Eq, Copy, Hash, PartialOrd, Ord)]
pub struct Extent1D(pub u32);
/// 2D extent (width, height).
#[repr(C)] #[derive(Debug, Clone, PartialEq, Eq, Copy, Hash, PartialOrd, Ord)]
pub struct Extent2D(pub u32, pub u32);
/// 3D extent (width, height, depth).
#[repr(C)] #[derive(Debug, Clone, PartialEq, Eq, Copy, Hash, PartialOrd, Ord)]
pub struct Extent3D(pub u32, pub u32, pub u32);
/// 4D extent.
#[repr(C)] #[derive(Debug, Clone, PartialEq, Eq, Copy, Hash, PartialOrd, Ord)]
pub struct Extent4D(pub u32, pub u32, pub u32, pub u32);
/// 1D offset (x).
#[repr(C)] #[derive(Debug, Clone, PartialEq, Eq, Copy, Hash, PartialOrd, Ord)]
pub struct Offset1D(pub i32);
/// 2D offset (x, y).
#[repr(C)] #[derive(Debug, Clone, PartialEq, Eq, Copy, Hash, PartialOrd, Ord)]
pub struct Offset2D(pub i32, pub i32);
/// 3D offset (x, y, z).
#[repr(C)] #[derive(Debug, Clone, PartialEq, Eq, Copy, Hash, PartialOrd, Ord)]
pub struct Offset3D(pub i32, pub i32, pub i32);
/// 4D offset.
#[repr(C)] #[derive(Debug, Clone, PartialEq, Eq, Copy, Hash, PartialOrd, Ord)]
pub struct Offset4D(pub i32, pub i32, pub i32, pub i32);
// Field-by-field conversions between the crate's dimension types and the raw Vk structs.
impl From<Extent2D> for VkExtent2D { fn from(v: Extent2D) -> Self { VkExtent2D { width: v.0, height: v.1 } } }
impl From<Extent3D> for VkExtent3D { fn from(v: Extent3D) -> Self { VkExtent3D { width: v.0, height: v.1, depth: v.2 } } }
impl From<Offset2D> for VkOffset2D { fn from(v: Offset2D) -> Self { VkOffset2D { x: v.0, y: v.1 } } }
impl From<Offset3D> for VkOffset3D { fn from(v: Offset3D) -> Self { VkOffset3D { x: v.0, y: v.1, z: v.2 } } }
impl From<VkExtent2D> for Extent2D { fn from(v: VkExtent2D) -> Self { Extent2D(v.width, v.height) } }
impl From<VkExtent3D> for Extent3D { fn from(v: VkExtent3D) -> Self { Extent3D(v.width, v.height, v.depth) } }
impl From<VkOffset2D> for Offset2D { fn from(v: VkOffset2D) -> Self { Offset2D(v.x, v.y) } }
impl From<VkOffset3D> for Offset3D { fn from(v: VkOffset3D) -> Self { Offset3D(v.x, v.y, v.z) } }
// Dimension promotion: missing extent axes are filled with 1,
// missing offset axes with 0 (the origin).
impl From<Extent1D> for Extent2D { fn from(v: Extent1D) -> Self { Extent2D(v.0, 1) } }
impl From<Extent1D> for Extent3D { fn from(v: Extent1D) -> Self { Extent3D(v.0, 1, 1) } }
impl From<Extent2D> for Extent3D { fn from(v: Extent2D) -> Self { Extent3D(v.0, v.1, 1) } }
impl From<Offset1D> for Offset2D { fn from(v: Offset1D) -> Self { Offset2D(v.0, 0) } }
impl From<Offset1D> for Offset3D { fn from(v: Offset1D) -> Self { Offset3D(v.0, 0, 0) } }
impl From<Offset2D> for Offset3D { fn from(v: Offset2D) -> Self { Offset3D(v.0, v.1, 0) } }
// Borrow the single component of the 1D types directly.
impl AsRef<u32> for Extent1D { fn as_ref(&self) -> &u32 { &self.0 } }
impl AsRef<i32> for Offset1D { fn as_ref(&self) -> &i32 { &self.0 } }
/// Implements `AsRef<$b> for $a` by raw pointer reinterpretation.
/// SAFETY: each use must pair types with identical size and layout; all uses
/// below pair `#[repr(C)]` tuple structs with the matching raw Vk struct or a
/// fixed-size array of the same component type.
macro_rules! CoordinateAsRefUnsafe
{
($a: ty, $b: ty) =>
{
impl AsRef<$b> for $a
{
fn as_ref(&self) -> &$b { unsafe { &*(self as *const $a as *const $b) } }
}
}
}
// Reinterpret-borrows to the corresponding raw Vk structs...
CoordinateAsRefUnsafe!(Extent2D, VkExtent2D);
CoordinateAsRefUnsafe!(Extent3D, VkExtent3D);
CoordinateAsRefUnsafe!(Offset2D, VkOffset2D);
CoordinateAsRefUnsafe!(Offset3D, VkOffset3D);
// ...and to plain fixed-size arrays of the component type.
CoordinateAsRefUnsafe!(Extent1D, [u32; 1]);
CoordinateAsRefUnsafe!(Extent2D, [u32; 2]);
CoordinateAsRefUnsafe!(Extent3D, [u32; 3]);
CoordinateAsRefUnsafe!(Extent4D, [u32; 4]);
CoordinateAsRefUnsafe!(Offset1D, [i32; 1]);
CoordinateAsRefUnsafe!(Offset2D, [i32; 2]);
CoordinateAsRefUnsafe!(Offset3D, [i32; 3]);
CoordinateAsRefUnsafe!(Offset4D, [i32; 4]);
// Dimension demotion by borrowing: view the leading components of a
// higher-dimensional value as a lower-dimensional one. Sound because all of
// these types are #[repr(C)], so e.g. Extent3D is a layout prefix of Extent4D.
impl AsRef<Extent3D> for Extent4D { fn as_ref(&self) -> &Extent3D { unsafe { &*(self.as_ref() as *const [u32; 4] as *const Extent3D) } } }
impl AsRef<Extent2D> for Extent4D { fn as_ref(&self) -> &Extent2D { unsafe { &*(self.as_ref() as *const [u32; 4] as *const Extent2D) } } }
impl AsRef<Extent1D> for Extent4D { fn as_ref(&self) -> &Extent1D { unsafe { &*(self.as_ref() as *const [u32; 4] as *const Extent1D) } } }
impl AsRef<Extent2D> for Extent3D { fn as_ref(&self) -> &Extent2D { unsafe { &*(self.as_ref() as *const [u32; 3] as *const Extent2D) } } }
impl AsRef<Extent1D> for Extent3D { fn as_ref(&self) -> &Extent1D { unsafe { &*(self.as_ref() as *const [u32; 3] as *const Extent1D) } } }
impl AsRef<Extent1D> for Extent2D { fn as_ref(&self) -> &Extent1D { unsafe { &*(self.as_ref() as *const [u32; 2] as *const Extent1D) } } }
impl AsRef<Offset3D> for Offset4D { fn as_ref(&self) -> &Offset3D { unsafe { &*(self.as_ref() as *const [i32; 4] as *const Offset3D) } } }
impl AsRef<Offset2D> for Offset4D { fn as_ref(&self) -> &Offset2D { unsafe { &*(self.as_ref() as *const [i32; 4] as *const Offset2D) } } }
impl AsRef<Offset1D> for Offset4D { fn as_ref(&self) -> &Offset1D { unsafe { &*(self.as_ref() as *const [i32; 4] as *const Offset1D) } } }
impl AsRef<Offset2D> for Offset3D { fn as_ref(&self) -> &Offset2D { unsafe { &*(self.as_ref() as *const [i32; 3] as *const Offset2D) } } }
impl AsRef<Offset1D> for Offset3D { fn as_ref(&self) -> &Offset1D { unsafe { &*(self.as_ref() as *const [i32; 3] as *const Offset1D) } } }
impl AsRef<Offset1D> for Offset2D { fn as_ref(&self) -> &Offset1D { unsafe { &*(self.as_ref() as *const [i32; 2] as *const Offset1D) } } }
// Identity AsRef impls so generic code can accept `impl AsRef<ExtentND>` uniformly.
impl AsRef<Extent4D> for Extent4D { fn as_ref(&self) -> &Self { self } }
impl AsRef<Extent3D> for Extent3D { fn as_ref(&self) -> &Self { self } }
impl AsRef<Extent2D> for Extent2D { fn as_ref(&self) -> &Self { self } }
impl AsRef<Extent1D> for Extent1D { fn as_ref(&self) -> &Self { self } }
impl AsRef<Offset4D> for Offset4D { fn as_ref(&self) -> &Self { self } }
impl AsRef<Offset3D> for Offset3D { fn as_ref(&self) -> &Self { self } }
impl AsRef<Offset2D> for Offset2D { fn as_ref(&self) -> &Self { self } }
impl AsRef<Offset1D> for Offset1D { fn as_ref(&self) -> &Self { self } }
impl From<Extent2D> for VkRect2D
{
fn from(e: Extent2D) -> Self
{
VkRect2D { offset: VkOffset2D { x: 0, y: 0 }, extent: VkExtent2D { width: e.0, height: e.1 } }
}
}
impl From<VkViewport> for VkRect2D
{
/// Converts the viewport's rectangle (x/y/width/height) into a `VkRect2D`,
/// discarding the depth range.
/// NOTE(review): the `as _` casts truncate fractional viewport coordinates
/// toward zero — confirm this rounding is intended.
fn from(vp: VkViewport) -> Self
{
VkRect2D
{
offset: VkOffset2D { x: vp.x as _, y: vp.y as _ },
extent: VkExtent2D { width: vp.width as _, height: vp.height as _ }
}
}
}
/// Transparent newtype over `VkViewport` providing builder-style setters.
#[repr(transparent)]
#[derive(Clone, Debug, PartialEq)]
pub struct Viewport(VkViewport);
impl From<VkViewport> for Viewport
{
fn from(v: VkViewport) -> Self { Viewport(v) }
}
impl Viewport
{
pub fn into_inner(self) -> VkViewport { self.0 }
pub fn from_rect_with_depth_range(rect: &VkRect2D, depth_range: std::ops::Range<f32>) -> Self
{
VkViewport
{
x: rect.offset.x as _, y: rect.offset.y as _,
width: rect.extent.width as _, height: rect.extent.height as _,
minDepth: depth_range.start, maxDepth: depth_range.end
}.into()
}
pub fn set_offset(&mut self, offset: &VkOffset2D) -> &mut Self
{
self.0.x = offset.x as _;
self.0.y = offset.y as _;
self
}
pub fn set_extent(&mut self, extent: &VkExtent2D) -> &mut Self
{
self.0.width = extent.width as _;
self.0.height = extent.height as _;
self
}
pub fn set_depth_range(&mut self, range: std::ops::Range<f32>) -> &mut Self
{
self.0.minDepth = range.start;
self.0.maxDepth = range.end;
self
}
}
mod base; pub use base::*;
mod device; pub use device::*;
mod sync; pub use sync::*;
pub mod resources; pub use resources::*;
#[macro_use]
mod descriptor; pub use descriptor::*;
mod framebuffer; pub use framebuffer::*;
mod shading; pub use shading::*;
mod command; pub use command::*;
#[cfg(feature = "Presentation")] mod surface;
#[cfg(feature = "Presentation")] pub use surface::*;
#[cfg(feature = "VK_EXT_debug_report")] mod debug;
#[cfg(feature = "VK_EXT_debug_report")] pub use debug::*;
#[cfg(feature = "Implements")]
/// Allocates a `Vec<T>` of length `n` whose contents are left uninitialized.
/// # Safety
/// Reading any element before it has been written is undefined behavior; the
/// caller must fully overwrite the buffer (typically by handing it to a
/// Vulkan query/enumeration call) before use.
pub(self) unsafe fn preserve<T>(n: usize) -> Vec<T> { let mut v = Vec::with_capacity(n); v.set_len(n); v }
/// A Vulkan query pool handle paired with the `Device` that owns it.
pub struct QueryPool(VkQueryPool, Device);
impl VkHandle for QueryPool { type Handle = VkQueryPool; fn native_ptr(&self) -> VkQueryPool { self.0 } }
#[cfg(feature = "Implements")]
impl QueryPool
{
/// Creates a query pool with `count` queries of kind `qtype` on `device`.
/// On failure, returns the error code reported by the create call.
pub fn new(device: &Device, qtype: QueryType, count: u32) -> Result<Self>
{
// Pipeline-statistics queries carry their flag mask; the other kinds pass 0.
let (qtype, stats) = match qtype
{
QueryType::Occlusion => (VK_QUERY_TYPE_OCCLUSION, 0),
QueryType::PipelineStatistics(f) => (VK_QUERY_TYPE_PIPELINE_STATISTICS, f.0),
QueryType::Timestamp => (VK_QUERY_TYPE_TIMESTAMP, 0)
};
let cinfo = VkQueryPoolCreateInfo { queryType: qtype, queryCount: count, pipelineStatistics: stats, .. Default::default() };
let mut h = VK_NULL_HANDLE as _;
unsafe { Resolver::get().create_query_pool(device.native_ptr(), &cinfo, std::ptr::null(), &mut h) }
.into_result().map(|_| QueryPool(h, device.clone()))
}
/// Fetches the results of `query_range` as one `u64` per query.
/// `VK_QUERY_RESULT_64_BIT` is ORed into `flags` automatically.
pub fn results64(&self, query_range: std::ops::Range<u32>, flags: QueryResultFlags) -> Result<Vec<u64>>
{
// Buffer is intentionally left uninitialized; the call below fills it.
let mut v = Vec::with_capacity(query_range.len()); unsafe { v.set_len(query_range.len()) };
unsafe { Resolver::get().get_query_pool_results(self.1.native_ptr(), self.0, query_range.start, query_range.len() as _,
8 * query_range.len(), v.as_mut_ptr() as *mut _, 8, flags.0 | VK_QUERY_RESULT_64_BIT) }
.into_result().map(|_| v)
}
/// Fetches the results of `query_range` as one `u32` per query.
pub fn results32(&self, query_range: std::ops::Range<u32>, flags: QueryResultFlags) -> Result<Vec<u32>>
{
// Buffer is intentionally left uninitialized; the call below fills it.
let mut v = Vec::with_capacity(query_range.len()); unsafe { v.set_len(query_range.len()) };
unsafe { Resolver::get().get_query_pool_results(self.1.native_ptr(), self.0, query_range.start, query_range.len() as _,
4 * query_range.len(), v.as_mut_ptr() as *mut _, 4, flags.0) }.into_result().map(|_| v)
}
}
// Destroys the pool via the resolver's destroy_query_pool when dropped.
#[cfg(feature = "Implements")] DeviceChildCommonDrop!{ for QueryPool[destroy_query_pool] }
/// The kind of query a `QueryPool` records.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum QueryType
{
/// Occlusion query.
Occlusion,
/// Pipeline-statistics query counting the stages selected by the flags.
PipelineStatistics(QueryPipelineStatisticFlags),
/// Timestamp query.
Timestamp
}
/// Bitmask wrapper over `VkQueryPipelineStatisticFlags`, selecting which
/// pipeline statistics a query counts.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct QueryPipelineStatisticFlags(pub VkQueryPipelineStatisticFlags);
impl QueryPipelineStatisticFlags
{
pub const INPUT_ASSEMBLY_VERTICES: Self = QueryPipelineStatisticFlags(VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT);
pub const INPUT_ASSEMBLY_PRIMITIVES: Self = QueryPipelineStatisticFlags(VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT);
pub const VERTEX_SHADER_INVOCATIONS: Self = QueryPipelineStatisticFlags(VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT);
pub const GEOMETRY_SHADER_INVOCATIONS: Self = QueryPipelineStatisticFlags(VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT);
pub const GEOMETRY_SHADER_PRIMITIVES: Self = QueryPipelineStatisticFlags(VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT);
pub const CLIPPING_INVOCATIONS: Self = QueryPipelineStatisticFlags(VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT);
pub const CLIPPING_PRIMITIVES: Self = QueryPipelineStatisticFlags(VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT);
pub const FRAGMENT_SHADER_INVOCATIONS: Self = QueryPipelineStatisticFlags(VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT);
pub const TESSELLATION_CONTROL_SHADER_PATCHES: Self = QueryPipelineStatisticFlags(VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT);
pub const TESSELLATION_EVALUATION_SHADER_INVOCATIONS: Self = QueryPipelineStatisticFlags(VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT);
pub const COMPUTE_SHADER_INVOCATIONS: Self = QueryPipelineStatisticFlags(VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT);
pub fn input_assembly_vertices(self) -> Self { QueryPipelineStatisticFlags(self.0 | Self::INPUT_ASSEMBLY_VERTICES.0) }
pub fn input_assembly_primitives(self) -> Self { QueryPipelineStatisticFlags(self.0 | Self::INPUT_ASSEMBLY_PRIMITIVES.0) }
pub fn vertex_shader_invocations(self) -> Self { QueryPipelineStatisticFlags(self.0 | Self::VERTEX_SHADER_INVOCATIONS.0) }
pub fn geometry_shader_invocations(self) -> Self { QueryPipelineStatisticFlags(self.0 | Self::GEOMETRY_SHADER_INVOCATIONS.0) }
pub fn geometry_shader_primitives(self) -> Self { QueryPipelineStatisticFlags(self.0 | Self::GEOMETRY_SHADER_PRIMITIVES.0) }
pub fn clipping_invocations(self) -> Self { QueryPipelineStatisticFlags(self.0 | Self::CLIPPING_INVOCATIONS.0) }
pub fn clipping_primitives(self) -> Self { QueryPipelineStatisticFlags(self.0 | Self::CLIPPING_PRIMITIVES.0) }
pub fn fragment_shader_invocations(self) -> Self { QueryPipelineStatisticFlags(self.0 | Self::FRAGMENT_SHADER_INVOCATIONS.0) }
pub fn tessellation_control_shader_patches(self) -> Self { QueryPipelineStatisticFlags(self.0 | Self::TESSELLATION_CONTROL_SHADER_PATCHES.0) }
pub fn tessellation_evaluation_shader_invocations(self) -> Self { QueryPipelineStatisticFlags(self.0 | Self::TESSELLATION_EVALUATION_SHADER_INVOCATIONS.0) }
pub fn compute_shader_invocations(self) -> Self { QueryPipelineStatisticFlags(self.0 | Self::COMPUTE_SHADER_INVOCATIONS.0) }
}
/// Bitmask wrapper over `VkQueryResultFlags`, controlling how query results
/// are returned.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct QueryResultFlags(pub VkQueryResultFlags);
impl QueryResultFlags
{
pub const EMPTY: Self = QueryResultFlags(0);
pub const WAIT: Self = QueryResultFlags(VK_QUERY_RESULT_WAIT_BIT);
pub const WITH_AVAILABILITY: Self = QueryResultFlags(VK_QUERY_RESULT_WITH_AVAILABILITY_BIT);
pub const PARTIAL: Self = QueryResultFlags(VK_QUERY_RESULT_PARTIAL_BIT);
pub fn wait(self) -> Self { QueryResultFlags(self.0 | Self::WAIT.0) }
pub fn with_availability(self) -> Self { QueryResultFlags(self.0 | Self::WITH_AVAILABILITY.0) }
pub fn partial(self) -> Self { QueryResultFlags(self.0 | Self::PARTIAL.0) }
}
/// Static metadata queries over a pixel format.
pub trait PixelFormat
{
/// Total bits per texel (0 for formats the table does not cover).
fn bit_width(self) -> usize;
/// The set of components the format stores.
fn components(self) -> FormatComponents;
/// The numeric interpretation of the stored components.
fn element_type(self) -> ElementType;
}
impl PixelFormat for vk::VkFormat
{
/// Bits per texel for plain (uncompressed) formats; unlisted formats
/// (including all block-compressed ones) report 0.
fn bit_width(self) -> usize
{
match self
{
VK_FORMAT_R4G4_UNORM_PACK8 | VK_FORMAT_R8_UNORM | VK_FORMAT_R8_SNORM |
VK_FORMAT_R8_USCALED | VK_FORMAT_R8_SSCALED | VK_FORMAT_R8_UINT | VK_FORMAT_R8_SINT |
VK_FORMAT_R8_SRGB | VK_FORMAT_S8_UINT => 8,
VK_FORMAT_R4G4B4A4_UNORM_PACK16 | VK_FORMAT_B4G4R4A4_UNORM_PACK16 |
VK_FORMAT_R5G6B5_UNORM_PACK16 | VK_FORMAT_B5G6R5_UNORM_PACK16 |
VK_FORMAT_R5G5B5A1_UNORM_PACK16 | VK_FORMAT_B5G5R5A1_UNORM_PACK16 |
VK_FORMAT_A1R5G5B5_UNORM_PACK16 | VK_FORMAT_R8G8_UNORM | VK_FORMAT_R8G8_SNORM |
VK_FORMAT_R8G8_USCALED | VK_FORMAT_R8G8_SSCALED | VK_FORMAT_R8G8_UINT |
VK_FORMAT_R8G8_SINT | VK_FORMAT_R8G8_SRGB |
VK_FORMAT_R16_UNORM | VK_FORMAT_R16_SNORM | VK_FORMAT_R16_USCALED | VK_FORMAT_R16_SSCALED |
VK_FORMAT_R16_UINT | VK_FORMAT_R16_SINT | VK_FORMAT_R16_SFLOAT |
VK_FORMAT_D16_UNORM => 16,
VK_FORMAT_R8G8B8_UNORM | VK_FORMAT_R8G8B8_SNORM | VK_FORMAT_R8G8B8_USCALED |
VK_FORMAT_R8G8B8_SSCALED | VK_FORMAT_R8G8B8_UINT | VK_FORMAT_R8G8B8_SINT |
VK_FORMAT_R8G8B8_SRGB | VK_FORMAT_B8G8R8_UNORM | VK_FORMAT_B8G8R8_SNORM |
VK_FORMAT_B8G8R8_USCALED | VK_FORMAT_B8G8R8_SSCALED | VK_FORMAT_B8G8R8_UINT |
VK_FORMAT_B8G8R8_SINT | VK_FORMAT_B8G8R8_SRGB | VK_FORMAT_D16_UNORM_S8_UINT => 24,
VK_FORMAT_R8G8B8A8_UNORM | VK_FORMAT_R8G8B8A8_SNORM | VK_FORMAT_R8G8B8A8_USCALED |
VK_FORMAT_R8G8B8A8_SSCALED | VK_FORMAT_R8G8B8A8_UINT | VK_FORMAT_R8G8B8A8_SINT |
VK_FORMAT_R8G8B8A8_SRGB | VK_FORMAT_B8G8R8A8_UNORM | VK_FORMAT_B8G8R8A8_SNORM |
VK_FORMAT_B8G8R8A8_USCALED | VK_FORMAT_B8G8R8A8_SSCALED | VK_FORMAT_B8G8R8A8_UINT |
VK_FORMAT_B8G8R8A8_SINT | VK_FORMAT_B8G8R8A8_SRGB | VK_FORMAT_A8B8G8R8_UNORM_PACK32 |
VK_FORMAT_A8B8G8R8_SNORM_PACK32 | VK_FORMAT_A8B8G8R8_USCALED_PACK32 | VK_FORMAT_A8B8G8R8_SSCALED_PACK32 |
VK_FORMAT_A8B8G8R8_UINT_PACK32 | VK_FORMAT_A8B8G8R8_SINT_PACK32 | VK_FORMAT_A8B8G8R8_SRGB_PACK32 |
VK_FORMAT_A2R10G10B10_UNORM_PACK32 | VK_FORMAT_A2R10G10B10_SNORM_PACK32 | VK_FORMAT_A2R10G10B10_USCALED_PACK32 |
VK_FORMAT_A2R10G10B10_SSCALED_PACK32 | VK_FORMAT_A2R10G10B10_UINT_PACK32 | VK_FORMAT_A2R10G10B10_SINT_PACK32 |
VK_FORMAT_R16G16_UNORM | VK_FORMAT_R16G16_SNORM | VK_FORMAT_R16G16_USCALED | VK_FORMAT_R16G16_SSCALED |
VK_FORMAT_R16G16_UINT | VK_FORMAT_R16G16_SINT | VK_FORMAT_R16G16_SFLOAT |
VK_FORMAT_R32_UINT | VK_FORMAT_R32_SINT | VK_FORMAT_R32_SFLOAT |
VK_FORMAT_B10G11R11_UFLOAT_PACK32 | VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 | VK_FORMAT_X8_D24_UNORM_PACK32 | VK_FORMAT_D32_SFLOAT |
VK_FORMAT_D24_UNORM_S8_UINT => 32,
VK_FORMAT_D32_SFLOAT_S8_UINT => 40,
VK_FORMAT_R16G16B16_UNORM | VK_FORMAT_R16G16B16_SNORM | VK_FORMAT_R16G16B16_USCALED | VK_FORMAT_R16G16B16_SSCALED |
VK_FORMAT_R16G16B16_UINT | VK_FORMAT_R16G16B16_SINT | VK_FORMAT_R16G16B16_SFLOAT => 48,
VK_FORMAT_R16G16B16A16_UNORM | VK_FORMAT_R16G16B16A16_SNORM | VK_FORMAT_R16G16B16A16_USCALED | VK_FORMAT_R16G16B16A16_SSCALED |
VK_FORMAT_R16G16B16A16_UINT | VK_FORMAT_R16G16B16A16_SINT | VK_FORMAT_R16G16B16A16_SFLOAT |
VK_FORMAT_R32G32_UINT | VK_FORMAT_R32G32_SINT | VK_FORMAT_R32G32_SFLOAT |
VK_FORMAT_R64_UINT | VK_FORMAT_R64_SINT | VK_FORMAT_R64_SFLOAT => 64,
VK_FORMAT_R32G32B32_UINT | VK_FORMAT_R32G32B32_SINT | VK_FORMAT_R32G32B32_SFLOAT => 96,
VK_FORMAT_R32G32B32A32_UINT | VK_FORMAT_R32G32B32A32_SINT | VK_FORMAT_R32G32B32A32_SFLOAT |
VK_FORMAT_R64G64_UINT | VK_FORMAT_R64G64_SINT | VK_FORMAT_R64G64_SFLOAT => 128,
VK_FORMAT_R64G64B64_UINT | VK_FORMAT_R64G64B64_SINT | VK_FORMAT_R64G64B64_SFLOAT => 192,
VK_FORMAT_R64G64B64A64_UINT | VK_FORMAT_R64G64B64A64_SINT | VK_FORMAT_R64G64B64A64_SFLOAT => 256,
_ => 0
}
}
/// Component set stored by the format; unlisted formats fall through to
/// `FormatComponents::Compressed`.
fn components(self) -> FormatComponents
{
match self
{
VK_FORMAT_UNDEFINED => FormatComponents::Undefined,
VK_FORMAT_R8_UNORM | VK_FORMAT_R8_SNORM | VK_FORMAT_R8_USCALED | VK_FORMAT_R8_SSCALED |
VK_FORMAT_R8_UINT | VK_FORMAT_R8_SINT | VK_FORMAT_R8_SRGB | VK_FORMAT_R16_UNORM | VK_FORMAT_R16_SNORM |
VK_FORMAT_R16_USCALED | VK_FORMAT_R16_SSCALED | VK_FORMAT_R16_UINT | VK_FORMAT_R16_SINT | VK_FORMAT_R16_SFLOAT |
VK_FORMAT_R32_UINT | VK_FORMAT_R32_SINT | VK_FORMAT_R32_SFLOAT | VK_FORMAT_R64_UINT | VK_FORMAT_R64_SINT | VK_FORMAT_R64_SFLOAT => FormatComponents::R,
VK_FORMAT_R4G4_UNORM_PACK8 | VK_FORMAT_R8G8_UNORM | VK_FORMAT_R8G8_SNORM | VK_FORMAT_R8G8_USCALED | VK_FORMAT_R8G8_SSCALED |
VK_FORMAT_R8G8_UINT | VK_FORMAT_R8G8_SINT | VK_FORMAT_R8G8_SRGB | VK_FORMAT_R16G16_UNORM | VK_FORMAT_R16G16_SNORM | VK_FORMAT_R16G16_USCALED |
VK_FORMAT_R16G16_SSCALED | VK_FORMAT_R16G16_UINT | VK_FORMAT_R16G16_SINT | VK_FORMAT_R16G16_SFLOAT | VK_FORMAT_R32G32_UINT |
VK_FORMAT_R32G32_SINT | VK_FORMAT_R32G32_SFLOAT | VK_FORMAT_R64G64_UINT | VK_FORMAT_R64G64_SINT | VK_FORMAT_R64G64_SFLOAT => FormatComponents::RG,
VK_FORMAT_R5G6B5_UNORM_PACK16 | VK_FORMAT_B5G6R5_UNORM_PACK16 |
VK_FORMAT_R8G8B8_UNORM | VK_FORMAT_R8G8B8_SNORM | VK_FORMAT_R8G8B8_USCALED | VK_FORMAT_R8G8B8_SSCALED |
VK_FORMAT_R8G8B8_UINT | VK_FORMAT_R8G8B8_SINT | VK_FORMAT_R8G8B8_SRGB | VK_FORMAT_B8G8R8_UNORM | VK_FORMAT_B8G8R8_SNORM |
VK_FORMAT_B8G8R8_USCALED | VK_FORMAT_B8G8R8_SSCALED | VK_FORMAT_B8G8R8_UINT | VK_FORMAT_B8G8R8_SINT | VK_FORMAT_B8G8R8_SRGB |
VK_FORMAT_R16G16B16_UNORM | VK_FORMAT_R16G16B16_SNORM | VK_FORMAT_R16G16B16_USCALED | VK_FORMAT_R16G16B16_SSCALED | VK_FORMAT_R16G16B16_UINT |
VK_FORMAT_R16G16B16_SINT | VK_FORMAT_R16G16B16_SFLOAT | VK_FORMAT_R32G32B32_UINT | VK_FORMAT_R32G32B32_SINT | VK_FORMAT_R32G32B32_SFLOAT |
VK_FORMAT_R64G64B64_UINT | VK_FORMAT_R64G64B64_SINT | VK_FORMAT_R64G64B64_SFLOAT | VK_FORMAT_B10G11R11_UFLOAT_PACK32 => FormatComponents::RGB,
VK_FORMAT_R4G4B4A4_UNORM_PACK16 | VK_FORMAT_B4G4R4A4_UNORM_PACK16 |
VK_FORMAT_R5G5B5A1_UNORM_PACK16 | VK_FORMAT_B5G5R5A1_UNORM_PACK16 | VK_FORMAT_A1R5G5B5_UNORM_PACK16 |
VK_FORMAT_R8G8B8A8_UNORM | VK_FORMAT_R8G8B8A8_SNORM | VK_FORMAT_R8G8B8A8_USCALED | VK_FORMAT_R8G8B8A8_SSCALED |
VK_FORMAT_R8G8B8A8_UINT | VK_FORMAT_R8G8B8A8_SINT | VK_FORMAT_R8G8B8A8_SRGB | VK_FORMAT_B8G8R8A8_UNORM | VK_FORMAT_B8G8R8A8_SNORM |
VK_FORMAT_B8G8R8A8_USCALED | VK_FORMAT_B8G8R8A8_SSCALED | VK_FORMAT_B8G8R8A8_UINT | VK_FORMAT_B8G8R8A8_SINT | VK_FORMAT_B8G8R8A8_SRGB |
VK_FORMAT_A8B8G8R8_UNORM_PACK32 | VK_FORMAT_A8B8G8R8_SNORM_PACK32 | VK_FORMAT_A8B8G8R8_USCALED_PACK32 | VK_FORMAT_A8B8G8R8_SSCALED_PACK32 |
VK_FORMAT_A8B8G8R8_UINT_PACK32 | VK_FORMAT_A8B8G8R8_SINT_PACK32 | VK_FORMAT_A8B8G8R8_SRGB_PACK32 |
VK_FORMAT_A2R10G10B10_UNORM_PACK32 | VK_FORMAT_A2R10G10B10_SNORM_PACK32 | VK_FORMAT_A2R10G10B10_USCALED_PACK32 | VK_FORMAT_A2R10G10B10_SSCALED_PACK32 |
VK_FORMAT_A2R10G10B10_UINT_PACK32 | VK_FORMAT_A2R10G10B10_SINT_PACK32 | VK_FORMAT_A2B10G10R10_UNORM_PACK32 | VK_FORMAT_A2B10G10R10_SNORM_PACK32 |
VK_FORMAT_A2B10G10R10_USCALED_PACK32 | VK_FORMAT_A2B10G10R10_SSCALED_PACK32 | VK_FORMAT_A2B10G10R10_UINT_PACK32 | VK_FORMAT_A2B10G10R10_SINT_PACK32 |
VK_FORMAT_R16G16B16A16_UNORM | VK_FORMAT_R16G16B16A16_SNORM | VK_FORMAT_R16G16B16A16_USCALED | VK_FORMAT_R16G16B16A16_SSCALED | VK_FORMAT_R16G16B16A16_UINT |
VK_FORMAT_R16G16B16A16_SINT | VK_FORMAT_R16G16B16A16_SFLOAT | VK_FORMAT_R32G32B32A32_UINT | VK_FORMAT_R32G32B32A32_SINT | VK_FORMAT_R32G32B32A32_SFLOAT |
VK_FORMAT_R64G64B64A64_SINT | VK_FORMAT_R64G64B64A64_UINT | VK_FORMAT_R64G64B64A64_SFLOAT => FormatComponents::RGBA,
VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 => FormatComponents::EBGR,
VK_FORMAT_D16_UNORM | VK_FORMAT_X8_D24_UNORM_PACK32 | VK_FORMAT_D32_SFLOAT => FormatComponents::D,
VK_FORMAT_S8_UINT => FormatComponents::S,
VK_FORMAT_D16_UNORM_S8_UINT | VK_FORMAT_D24_UNORM_S8_UINT | VK_FORMAT_D32_SFLOAT_S8_UINT => FormatComponents::DS,
_ => FormatComponents::Compressed
}
}
/// Numeric interpretation of the components; unlisted formats (e.g. combined
/// depth/stencil, which mixes interpretations) fall through to
/// `ElementType::Compound`.
#[allow(non_upper_case_globals)]
fn element_type(self) -> ElementType
{
match self
{
VK_FORMAT_UNDEFINED => ElementType::Undefined,
VK_FORMAT_R4G4_UNORM_PACK8 | VK_FORMAT_R4G4B4A4_UNORM_PACK16 | VK_FORMAT_B4G4R4A4_UNORM_PACK16 | VK_FORMAT_R5G6B5_UNORM_PACK16 |
VK_FORMAT_B5G6R5_UNORM_PACK16 | VK_FORMAT_R5G5B5A1_UNORM_PACK16 | VK_FORMAT_B5G5R5A1_UNORM_PACK16 | VK_FORMAT_A1R5G5B5_UNORM_PACK16 |
VK_FORMAT_R8_UNORM | VK_FORMAT_R8G8_UNORM | VK_FORMAT_R8G8B8_UNORM | VK_FORMAT_B8G8R8_UNORM | VK_FORMAT_R8G8B8A8_UNORM | VK_FORMAT_B8G8R8A8_UNORM |
VK_FORMAT_A8B8G8R8_UNORM_PACK32 | VK_FORMAT_A2R10G10B10_UNORM_PACK32 | VK_FORMAT_A2B10G10R10_UNORM_PACK32 |
VK_FORMAT_R16G16B16_UNORM | VK_FORMAT_D16_UNORM | VK_FORMAT_X8_D24_UNORM_PACK32 | VK_FORMAT_BC1_RGB_UNORM_BLOCK |
VK_FORMAT_BC1_RGBA_UNORM_BLOCK | VK_FORMAT_BC2_UNORM_BLOCK | VK_FORMAT_BC3_UNORM_BLOCK | VK_FORMAT_BC4_UNORM_BLOCK | VK_FORMAT_BC5_UNORM_BLOCK |
VK_FORMAT_BC7_UNORM_BLOCK | VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK | VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK | VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK |
VK_FORMAT_EAC_R11_UNORM_BLOCK | VK_FORMAT_EAC_R11G11_UNORM_BLOCK | VK_FORMAT_ASTC_4x4_UNORM_BLOCK |
VK_FORMAT_ASTC_5x4_UNORM_BLOCK | VK_FORMAT_ASTC_5x5_UNORM_BLOCK | VK_FORMAT_ASTC_6x5_UNORM_BLOCK | VK_FORMAT_ASTC_6x6_UNORM_BLOCK |
VK_FORMAT_ASTC_8x5_UNORM_BLOCK | VK_FORMAT_ASTC_8x6_UNORM_BLOCK | VK_FORMAT_ASTC_8x8_UNORM_BLOCK | VK_FORMAT_ASTC_10x5_UNORM_BLOCK |
VK_FORMAT_ASTC_10x6_UNORM_BLOCK | VK_FORMAT_ASTC_10x8_UNORM_BLOCK | VK_FORMAT_ASTC_10x10_UNORM_BLOCK | VK_FORMAT_ASTC_12x10_UNORM_BLOCK |
VK_FORMAT_ASTC_12x12_UNORM_BLOCK | VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG | VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG | VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG |
VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG => ElementType::UNORM,
VK_FORMAT_R8_SNORM | VK_FORMAT_R8G8_SNORM | VK_FORMAT_R8G8B8_SNORM | VK_FORMAT_B8G8R8_SNORM | VK_FORMAT_R8G8B8A8_SNORM | VK_FORMAT_B8G8R8A8_SNORM | VK_FORMAT_A8B8G8R8_SNORM_PACK32 |
VK_FORMAT_A2R10G10B10_SNORM_PACK32 | VK_FORMAT_A2B10G10R10_SNORM_PACK32 | VK_FORMAT_R16_SNORM | VK_FORMAT_R16G16_SNORM | VK_FORMAT_R16G16B16_SNORM | VK_FORMAT_R16G16B16A16_SNORM |
VK_FORMAT_BC4_SNORM_BLOCK | VK_FORMAT_BC5_SNORM_BLOCK | VK_FORMAT_EAC_R11_SNORM_BLOCK | VK_FORMAT_EAC_R11G11_SNORM_BLOCK => ElementType::SNORM,
VK_FORMAT_R8_USCALED | VK_FORMAT_R8G8_USCALED | VK_FORMAT_R8G8B8_USCALED | VK_FORMAT_B8G8R8_USCALED | VK_FORMAT_R8G8B8A8_USCALED | VK_FORMAT_B8G8R8A8_USCALED |
VK_FORMAT_A8B8G8R8_USCALED_PACK32 | VK_FORMAT_A2R10G10B10_USCALED_PACK32 | VK_FORMAT_A2B10G10R10_USCALED_PACK32 | VK_FORMAT_R16_USCALED |
VK_FORMAT_R16G16_USCALED | VK_FORMAT_R16G16B16_USCALED | VK_FORMAT_R16G16B16A16_USCALED => ElementType::USCALED,
VK_FORMAT_R8_SSCALED | VK_FORMAT_R8G8_SSCALED | VK_FORMAT_R8G8B8_SSCALED | VK_FORMAT_B8G8R8_SSCALED | VK_FORMAT_R8G8B8A8_SSCALED | VK_FORMAT_B8G8R8A8_SSCALED |
VK_FORMAT_A8B8G8R8_SSCALED_PACK32 | VK_FORMAT_A2R10G10B10_SSCALED_PACK32 | VK_FORMAT_A2B10G10R10_SSCALED_PACK32 | VK_FORMAT_R16_SSCALED | VK_FORMAT_R16G16_SSCALED |
VK_FORMAT_R16G16B16_SSCALED | VK_FORMAT_R16G16B16A16_SSCALED => ElementType::SSCALED,
VK_FORMAT_R8_UINT | VK_FORMAT_R8G8_UINT | VK_FORMAT_R8G8B8_UINT | VK_FORMAT_B8G8R8_UINT | VK_FORMAT_R8G8B8A8_UINT | VK_FORMAT_B8G8R8A8_UINT | VK_FORMAT_A8B8G8R8_UINT_PACK32 |
VK_FORMAT_A2R10G10B10_UINT_PACK32 | VK_FORMAT_A2B10G10R10_UINT_PACK32 | VK_FORMAT_R16_UINT | VK_FORMAT_R16G16_UINT | VK_FORMAT_R16G16B16_UINT | VK_FORMAT_R16G16B16A16_UINT |
VK_FORMAT_R32_UINT | VK_FORMAT_R32G32_UINT | VK_FORMAT_R32G32B32_UINT | VK_FORMAT_R32G32B32A32_UINT | VK_FORMAT_R64_UINT | VK_FORMAT_R64G64_UINT | VK_FORMAT_R64G64B64_UINT |
VK_FORMAT_R64G64B64A64_UINT | VK_FORMAT_S8_UINT => ElementType::UINT,
VK_FORMAT_R8_SINT | VK_FORMAT_R8G8_SINT | VK_FORMAT_R8G8B8_SINT | VK_FORMAT_B8G8R8_SINT | VK_FORMAT_R8G8B8A8_SINT | VK_FORMAT_B8G8R8A8_SINT | VK_FORMAT_A8B8G8R8_SINT_PACK32 |
VK_FORMAT_A2R10G10B10_SINT_PACK32 | VK_FORMAT_A2B10G10R10_SINT_PACK32 | VK_FORMAT_R16_SINT | VK_FORMAT_R16G16_SINT | VK_FORMAT_R16G16B16_SINT | VK_FORMAT_R16G16B16A16_SINT |
VK_FORMAT_R32_SINT | VK_FORMAT_R32G32_SINT | VK_FORMAT_R32G32B32_SINT | VK_FORMAT_R32G32B32A32_SINT | VK_FORMAT_R64_SINT | VK_FORMAT_R64G64_SINT | VK_FORMAT_R64G64B64_SINT |
VK_FORMAT_R64G64B64A64_SINT => ElementType::SINT,
VK_FORMAT_R8_SRGB | VK_FORMAT_R8G8_SRGB | VK_FORMAT_R8G8B8_SRGB | VK_FORMAT_B8G8R8_SRGB | VK_FORMAT_R8G8B8A8_SRGB | VK_FORMAT_B8G8R8A8_SRGB |
VK_FORMAT_A8B8G8R8_SRGB_PACK32 | VK_FORMAT_BC1_RGB_SRGB_BLOCK | VK_FORMAT_BC1_RGBA_SRGB_BLOCK | VK_FORMAT_BC2_SRGB_BLOCK | VK_FORMAT_BC3_SRGB_BLOCK |
VK_FORMAT_BC7_SRGB_BLOCK | VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK | VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK | VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK | VK_FORMAT_ASTC_4x4_SRGB_BLOCK |
VK_FORMAT_ASTC_5x4_SRGB_BLOCK | VK_FORMAT_ASTC_5x5_SRGB_BLOCK | VK_FORMAT_ASTC_6x5_SRGB_BLOCK | VK_FORMAT_ASTC_6x6_SRGB_BLOCK |
VK_FORMAT_ASTC_8x5_SRGB_BLOCK | VK_FORMAT_ASTC_8x6_SRGB_BLOCK | VK_FORMAT_ASTC_8x8_SRGB_BLOCK | VK_FORMAT_ASTC_10x5_SRGB_BLOCK |
VK_FORMAT_ASTC_10x6_SRGB_BLOCK | VK_FORMAT_ASTC_10x8_SRGB_BLOCK | VK_FORMAT_ASTC_10x10_SRGB_BLOCK | VK_FORMAT_ASTC_12x10_SRGB_BLOCK |
VK_FORMAT_ASTC_12x12_SRGB_BLOCK | VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG | VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG | VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG |
VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG => ElementType::SRGB,
VK_FORMAT_R16_SFLOAT | VK_FORMAT_R16G16_SFLOAT | VK_FORMAT_R16G16B16_SFLOAT | VK_FORMAT_R16G16B16A16_SFLOAT | VK_FORMAT_R32_SFLOAT |
VK_FORMAT_R32G32_SFLOAT | VK_FORMAT_R32G32B32_SFLOAT | VK_FORMAT_R32G32B32A32_SFLOAT | VK_FORMAT_R64_SFLOAT | VK_FORMAT_R64G64_SFLOAT |
VK_FORMAT_R64G64B64_SFLOAT | VK_FORMAT_R64G64B64A64_SFLOAT | VK_FORMAT_D32_SFLOAT | VK_FORMAT_BC6H_SFLOAT_BLOCK => ElementType::SFLOAT,
VK_FORMAT_B10G11R11_UFLOAT_PACK32 | VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 | VK_FORMAT_BC6H_UFLOAT_BLOCK => ElementType::UFLOAT,
_ => ElementType::Compound
}
}
}
/// Fluent filter over a `VkFormat`: each check either passes the value
/// through or collapses it to `VK_FORMAT_UNDEFINED`.
#[derive(Clone, Copy, PartialEq, Eq)]
pub struct FormatQuery(pub vk::VkFormat);
impl FormatQuery
{
    // Internal: keep `self` when `pass` holds, otherwise collapse to UNDEFINED.
    fn filtered(self, pass: bool) -> Self
    {
        if pass { self } else { FormatQuery(VK_FORMAT_UNDEFINED) }
    }
    /// Passes only when the format's bit width equals `w`.
    pub fn eq_bit_width(self, w: usize) -> Self { self.filtered(self.0.bit_width() == w) }
    /// Passes only when the format's component set contains `c`.
    pub fn has_components(self, c: FormatComponents) -> Self { self.filtered(c.satisfy(self.0)) }
    /// Passes only when the format's component set is exactly `c`.
    pub fn is_component_of(self, c: FormatComponents) -> Self { self.filtered(c.satisfy_eq(self.0)) }
    /// Passes only when the format's element type is `e`.
    pub fn has_element_of(self, e: ElementType) -> Self { self.filtered(self.0.element_type() == e) }
    /// Whether the query still carries a defined format.
    pub fn passed(self) -> bool { self.0 != VK_FORMAT_UNDEFINED }
    /// The sRGB counterpart of the 8-bit UNORM color formats listed below.
    pub fn srgb(self) -> Option<VkFormat>
    {
        match self.0
        {
            VK_FORMAT_R8_UNORM => Some(VK_FORMAT_R8_SRGB),
            VK_FORMAT_R8G8_UNORM => Some(VK_FORMAT_R8G8_SRGB),
            VK_FORMAT_R8G8B8_UNORM => Some(VK_FORMAT_R8G8B8_SRGB),
            VK_FORMAT_B8G8R8_UNORM => Some(VK_FORMAT_B8G8R8_SRGB),
            VK_FORMAT_R8G8B8A8_UNORM => Some(VK_FORMAT_R8G8B8A8_SRGB),
            VK_FORMAT_B8G8R8A8_UNORM => Some(VK_FORMAT_B8G8R8A8_SRGB),
            VK_FORMAT_A8B8G8R8_UNORM_PACK32 => Some(VK_FORMAT_A8B8G8R8_SRGB_PACK32),
            _ => None
        }
    }
    /// The UNORM counterpart of the 8-bit sRGB/UNORM color formats listed below.
    pub fn unorm(self) -> Option<VkFormat>
    {
        match self.0
        {
            VK_FORMAT_R8_SRGB | VK_FORMAT_R8_UNORM => Some(VK_FORMAT_R8_UNORM),
            VK_FORMAT_R8G8_SRGB | VK_FORMAT_R8G8_UNORM => Some(VK_FORMAT_R8G8_UNORM),
            VK_FORMAT_R8G8B8_SRGB | VK_FORMAT_R8G8B8_UNORM => Some(VK_FORMAT_R8G8B8_UNORM),
            VK_FORMAT_B8G8R8_SRGB | VK_FORMAT_B8G8R8_UNORM => Some(VK_FORMAT_B8G8R8_UNORM),
            VK_FORMAT_R8G8B8A8_SRGB | VK_FORMAT_R8G8B8A8_UNORM => Some(VK_FORMAT_R8G8B8A8_UNORM),
            VK_FORMAT_B8G8R8A8_SRGB | VK_FORMAT_B8G8R8A8_UNORM => Some(VK_FORMAT_B8G8R8A8_UNORM),
            VK_FORMAT_A8B8G8R8_SRGB_PACK32 | VK_FORMAT_A8B8G8R8_UNORM_PACK32 => Some(VK_FORMAT_A8B8G8R8_UNORM_PACK32),
            _ => None
        }
    }
}
/// Reusable format predicate; each criterion is optional and unset criteria
/// always pass.
#[derive(Clone)]
pub struct FormatQueryPred
{
// `None` means "don't care" for the corresponding criterion.
bit_width: Option<usize>, req_components: Option<FormatComponents>, req_elements_of: Option<ElementType>
}
impl Default for FormatQueryPred
{
fn default() -> Self
{
FormatQueryPred { bit_width: None, req_components: None, req_elements_of: None }
}
}
impl FormatQueryPred
{
    /// Requires the format's bit width to equal `b`.
    pub fn bit(&mut self, b: usize) -> &mut Self
    {
        self.bit_width = Some(b);
        self
    }
    /// Requires the format's component set to contain `c`.
    pub fn components(&mut self, c: FormatComponents) -> &mut Self
    {
        self.req_components = Some(c);
        self
    }
    /// Requires the format's element type to equal `e`.
    pub fn elements(&mut self, e: ElementType) -> &mut Self
    {
        self.req_elements_of = Some(e);
        self
    }
    /// Tests `f` against every criterion that has been set; unset criteria pass.
    pub fn satisfy(&self, f: vk::VkFormat) -> bool
    {
        let bits_ok = self.bit_width.map_or(true, |b| f.bit_width() == b);
        let comps_ok = self.req_components.map_or(true, |c| c.satisfy(f));
        let elems_ok = self.req_elements_of.map_or(true, |e| f.element_type() == e);
        bits_ok && comps_ok && elems_ok
    }
}
/// The set of components a format stores (color channels, depth/stencil, or
/// a block-compressed payload).
/// CONSISTENCY FIX: `Debug` added to match the other public enums/flag types
/// in this module (`QueryType`, `QueryResultFlags`, ...); backward-compatible.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum FormatComponents
{
    Undefined, R, RG, RGB, RGBA, EBGR, D, S, DS, Compressed
}
/// The numeric interpretation of a format's components; `Compound` covers
/// formats mixing several interpretations.
/// CONSISTENCY FIX: `Debug` added to match the other public enums/flag types
/// in this module; backward-compatible.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ElementType
{
    Undefined, UNORM, SNORM, UINT, SINT, SFLOAT, UFLOAT, SRGB, USCALED, SSCALED,
    Compound
}
impl FormatComponents
{
    /// Whether a component set `o` contains at least the components denoted by
    /// `self`: `R` is contained in `RG`/`RGB`/`RGBA`, `D` and `S` in `DS`, and
    /// every other kind only matches itself exactly.
    pub fn has(self, o: Self) -> bool
    {
        use self::FormatComponents::*;
        match self
        {
            R => matches!(o, R | RG | RGB | RGBA),
            RG => matches!(o, RG | RGB | RGBA),
            RGB => matches!(o, RGB | RGBA),
            D => matches!(o, D | DS),
            S => matches!(o, S | DS),
            t => t == o
        }
    }
    /// True when `f`'s component set contains `self`.
    pub fn satisfy(self, f: vk::VkFormat) -> bool { self.has(f.components()) }
    /// True when `f`'s component set is exactly `self`.
    pub fn satisfy_eq(self, f: vk::VkFormat) -> bool { f.components() == self }
}