use smallvec::SmallVec;
use std::cmp;
use std::error;
use std::fmt;
use std::mem;
use std::ops;
use std::ptr;
use std::sync::Arc;
use std::vec::IntoIter as VecIntoIter;
use buffer::BufferAccess;
use buffer::BufferInner;
use buffer::BufferView;
use descriptor::descriptor::DescriptorType;
use descriptor::descriptor_set::UnsafeDescriptorSetLayout;
use device::Device;
use device::DeviceOwned;
use image::ImageViewAccess;
use sampler::Sampler;
use OomError;
use VulkanObject;
use check_errors;
use vk;
/// A pool from which descriptor sets can be allocated.
///
/// Marked `unsafe` because implementations must guarantee that the allocated
/// sets stay valid for as long as the returned `Alloc` objects are alive.
pub unsafe trait DescriptorPool: DeviceOwned {
    /// Object that represents a single allocation and keeps the set alive.
    type Alloc: DescriptorPoolAlloc;
    /// Allocates one descriptor set matching `layout`.
    fn alloc(&mut self, layout: &UnsafeDescriptorSetLayout) -> Result<Self::Alloc, OomError>;
}
/// A single descriptor set allocated from a `DescriptorPool`.
pub trait DescriptorPoolAlloc {
    /// Returns the underlying descriptor set.
    fn inner(&self) -> &UnsafeDescriptorSet;
    /// Returns the underlying descriptor set, mutably.
    fn inner_mut(&mut self) -> &mut UnsafeDescriptorSet;
}
/// Generates the `DescriptorsCount` struct with one `u32` field per identifier
/// passed to the macro, together with field-wise comparison and arithmetic
/// operators (`+`, `-`, `* u32`, and their assign forms).
macro_rules! descriptors_count {
    ($($name:ident,)+) => (
        /// Number of available descriptors of each type.
        #[derive(Debug, Copy, Clone)]
        pub struct DescriptorsCount {
            $(
                pub $name: u32,
            )+
        }

        impl DescriptorsCount {
            /// Returns a `DescriptorsCount` with every field set to 0.
            #[inline]
            pub fn zero() -> DescriptorsCount {
                DescriptorsCount {
                    $(
                        $name: 0,
                    )+
                }
            }

            /// Increments by 1 the field that corresponds to `ty`.
            #[inline]
            pub fn add_one(&mut self, ty: DescriptorType) {
                match ty {
                    DescriptorType::Sampler => self.sampler += 1,
                    DescriptorType::CombinedImageSampler => self.combined_image_sampler += 1,
                    DescriptorType::SampledImage => self.sampled_image += 1,
                    DescriptorType::StorageImage => self.storage_image += 1,
                    DescriptorType::UniformTexelBuffer => self.uniform_texel_buffer += 1,
                    DescriptorType::StorageTexelBuffer => self.storage_texel_buffer += 1,
                    DescriptorType::UniformBuffer => self.uniform_buffer += 1,
                    DescriptorType::StorageBuffer => self.storage_buffer += 1,
                    DescriptorType::UniformBufferDynamic => self.uniform_buffer_dynamic += 1,
                    DescriptorType::StorageBufferDynamic => self.storage_buffer_dynamic += 1,
                    DescriptorType::InputAttachment => self.input_attachment += 1,
                };
            }
        }

        impl cmp::PartialEq for DescriptorsCount {
            #[inline]
            fn eq(&self, other: &DescriptorsCount) -> bool {
                // Equal iff `partial_cmp` yields `Equal`, i.e. all fields are equal.
                self.partial_cmp(other) == Some(cmp::Ordering::Equal)
            }
        }

        impl cmp::Eq for DescriptorsCount {
        }

        impl cmp::PartialOrd for DescriptorsCount {
            // `Greater`/`Less` require *every* field to be strictly greater/less;
            // any mixed comparison yields `None`.
            fn partial_cmp(&self, other: &DescriptorsCount) -> Option<cmp::Ordering> {
                if $(self.$name > other.$name)&&+ {
                    Some(cmp::Ordering::Greater)
                } else if $(self.$name < other.$name)&&+ {
                    Some(cmp::Ordering::Less)
                } else if $(self.$name == other.$name)&&+ {
                    Some(cmp::Ordering::Equal)
                } else {
                    None
                }
            }

            // NOTE(review): `le`/`ge` are overridden to mean "every field <=/>="
            // (a subset/superset test), so they can return `true` even when
            // `partial_cmp` returns `None`. This appears deliberate (checking
            // whether a count fits in a pool) but diverges from the default
            // `PartialOrd` contract — confirm before relying on consistency.
            fn le(&self, other: &DescriptorsCount) -> bool {
                $(self.$name <= other.$name)&&+
            }

            fn ge(&self, other: &DescriptorsCount) -> bool {
                $(self.$name >= other.$name)&&+
            }
        }

        impl ops::Sub for DescriptorsCount {
            type Output = DescriptorsCount;

            // Field-wise subtraction; underflow panics in debug builds.
            #[inline]
            fn sub(self, rhs: DescriptorsCount) -> DescriptorsCount {
                DescriptorsCount {
                    $(
                        $name: self.$name - rhs.$name,
                    )+
                }
            }
        }

        impl ops::SubAssign for DescriptorsCount {
            #[inline]
            fn sub_assign(&mut self, rhs: DescriptorsCount) {
                $(
                    self.$name -= rhs.$name;
                )+
            }
        }

        impl ops::Add for DescriptorsCount {
            type Output = DescriptorsCount;

            // Field-wise addition.
            #[inline]
            fn add(self, rhs: DescriptorsCount) -> DescriptorsCount {
                DescriptorsCount {
                    $(
                        $name: self.$name + rhs.$name,
                    )+
                }
            }
        }

        impl ops::AddAssign for DescriptorsCount {
            #[inline]
            fn add_assign(&mut self, rhs: DescriptorsCount) {
                $(
                    self.$name += rhs.$name;
                )+
            }
        }

        impl ops::Mul<u32> for DescriptorsCount {
            type Output = DescriptorsCount;

            // Scales every field by the same factor.
            #[inline]
            fn mul(self, rhs: u32) -> DescriptorsCount {
                DescriptorsCount {
                    $(
                        $name: self.$name * rhs,
                    )+
                }
            }
        }

        impl ops::MulAssign<u32> for DescriptorsCount {
            #[inline]
            fn mul_assign(&mut self, rhs: u32) {
                $(
                    self.$name *= rhs;
                )+
            }
        }
    );
}
// Instantiates `DescriptorsCount` with one field per Vulkan descriptor type.
// The field order here also drives the `partial_cmp`/arithmetic expansions above.
descriptors_count! {
    uniform_buffer,
    storage_buffer,
    uniform_buffer_dynamic,
    storage_buffer_dynamic,
    uniform_texel_buffer,
    storage_texel_buffer,
    sampled_image,
    storage_image,
    sampler,
    combined_image_sampler,
    input_attachment,
}
/// Low-level wrapper around a raw `VkDescriptorPool`.
///
/// "Unsafe" because it performs no synchronization and no tracking of the sets
/// allocated from it; the destructor destroys the pool unconditionally.
pub struct UnsafeDescriptorPool {
    // Raw Vulkan handle, destroyed in `Drop`.
    pool: vk::DescriptorPool,
    // Device that owns the pool; kept alive for the pool's whole lifetime.
    device: Arc<Device>,
}
impl UnsafeDescriptorPool {
    /// Initializes a new pool on `device`.
    ///
    /// `count` is the total number of descriptors of each type that the pool can
    /// hold, and `max_sets` the maximum number of sets that can be allocated from
    /// it. If `free_descriptor_set_bit` is true, individual sets may later be
    /// returned with `free`; otherwise only `reset` reclaims them.
    ///
    /// # Panics
    ///
    /// - Panics if `max_sets` is 0.
    /// - Panics if every field of `count` is 0.
    pub fn new(device: Arc<Device>, count: &DescriptorsCount, max_sets: u32,
               free_descriptor_set_bit: bool)
               -> Result<UnsafeDescriptorPool, OomError> {
        let vk = device.pointers();

        assert_ne!(max_sets, 0, "The maximum number of sets can't be 0");

        // Build one `VkDescriptorPoolSize` per descriptor type whose count is
        // non-zero; zero-sized entries are skipped.
        let mut pool_sizes: SmallVec<[_; 10]> = SmallVec::new();

        macro_rules! elem {
            ($field:ident, $ty:expr) => (
                if count.$field >= 1 {
                    pool_sizes.push(vk::DescriptorPoolSize {
                        ty: $ty,
                        descriptorCount: count.$field,
                    });
                }
            );
        }

        elem!(uniform_buffer, vk::DESCRIPTOR_TYPE_UNIFORM_BUFFER);
        elem!(storage_buffer, vk::DESCRIPTOR_TYPE_STORAGE_BUFFER);
        elem!(uniform_buffer_dynamic, vk::DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);
        elem!(storage_buffer_dynamic, vk::DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
        elem!(uniform_texel_buffer, vk::DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER);
        elem!(storage_texel_buffer, vk::DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
        elem!(sampled_image, vk::DESCRIPTOR_TYPE_SAMPLED_IMAGE);
        elem!(storage_image, vk::DESCRIPTOR_TYPE_STORAGE_IMAGE);
        elem!(sampler, vk::DESCRIPTOR_TYPE_SAMPLER);
        elem!(combined_image_sampler, vk::DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
        elem!(input_attachment, vk::DESCRIPTOR_TYPE_INPUT_ATTACHMENT);

        assert!(!pool_sizes.is_empty(),
                "All the descriptors count of a pool are 0");

        let pool = unsafe {
            let infos = vk::DescriptorPoolCreateInfo {
                sType: vk::STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
                pNext: ptr::null(),
                flags: if free_descriptor_set_bit {
                    vk::DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT
                } else {
                    0
                },
                maxSets: max_sets,
                poolSizeCount: pool_sizes.len() as u32,
                pPoolSizes: pool_sizes.as_ptr(),
            };

            // Fix: use `mem::zeroed()` instead of `mem::uninitialized()`. Producing
            // an uninitialized integer handle is undefined behavior; a zeroed
            // handle is a valid bit pattern that `vkCreateDescriptorPool`
            // overwrites before we ever read it (on error, `?` returns early).
            let mut output = mem::zeroed();
            check_errors(vk.CreateDescriptorPool(device.internal_object(),
                                                 &infos,
                                                 ptr::null(),
                                                 &mut output))?;
            output
        };

        Ok(UnsafeDescriptorPool {
               pool: pool,
               device: device.clone(),
           })
    }

    /// Allocates one descriptor set per layout in `layouts`.
    ///
    /// # Safety
    ///
    /// The caller must keep the pool alive while the returned sets are in use and
    /// must not exceed the `max_sets` / descriptor counts the pool was built with.
    ///
    /// # Panics
    ///
    /// Panics if one of the layouts was created from a different device than the
    /// pool's.
    #[inline]
    pub unsafe fn alloc<'l, I>(&mut self, layouts: I)
                               -> Result<UnsafeDescriptorPoolAllocIter, DescriptorPoolAllocError>
        where I: IntoIterator<Item = &'l UnsafeDescriptorSetLayout>
    {
        // Collect the raw layout handles, checking device ownership on the way.
        let layouts: SmallVec<[_; 8]> = layouts
            .into_iter()
            .map(|l| {
                     assert_eq!(self.device.internal_object(),
                                l.device().internal_object(),
                                "Tried to allocate from a pool with a set layout of a different \
                                 device");
                     l.internal_object()
                 })
            .collect();

        self.alloc_impl(&layouts)
    }

    // Non-generic part of `alloc`, kept separate to avoid monomorphization bloat.
    unsafe fn alloc_impl(&mut self, layouts: &SmallVec<[vk::DescriptorSetLayout; 8]>)
                         -> Result<UnsafeDescriptorPoolAllocIter, DescriptorPoolAllocError> {
        let num = layouts.len();

        // Vulkan forbids `descriptorSetCount == 0`; short-circuit with an empty
        // iterator instead of calling into the driver.
        if num == 0 {
            return Ok(UnsafeDescriptorPoolAllocIter { sets: vec![].into_iter() });
        }

        let infos = vk::DescriptorSetAllocateInfo {
            sType: vk::STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
            pNext: ptr::null(),
            descriptorPool: self.pool,
            descriptorSetCount: layouts.len() as u32,
            pSetLayouts: layouts.as_ptr(),
        };

        let mut output = Vec::with_capacity(num);

        let vk = self.device.pointers();
        let ret =
            vk.AllocateDescriptorSets(self.device.internal_object(), &infos, output.as_mut_ptr());

        // The return code is interpreted by hand (instead of `check_errors`)
        // because these codes map to dedicated error variants.
        match ret {
            vk::ERROR_OUT_OF_HOST_MEMORY => {
                return Err(DescriptorPoolAllocError::OutOfHostMemory);
            },
            vk::ERROR_OUT_OF_DEVICE_MEMORY => {
                return Err(DescriptorPoolAllocError::OutOfDeviceMemory);
            },
            vk::ERROR_OUT_OF_POOL_MEMORY_KHR => {
                return Err(DescriptorPoolAllocError::OutOfPoolMemory);
            },
            c if (c as i32) < 0 => {
                // NOTE(review): every other negative code is folded into
                // `FragmentedPool` — confirm this is the only remaining error the
                // spec allows `vkAllocateDescriptorSets` to return here.
                return Err(DescriptorPoolAllocError::FragmentedPool);
            },
            _ => (),
        };

        // Success: the driver wrote exactly `num` handles into `output`.
        output.set_len(num);
        Ok(UnsafeDescriptorPoolAllocIter { sets: output.into_iter() })
    }

    /// Frees some descriptor sets.
    ///
    /// # Safety
    ///
    /// The sets must have been allocated from this pool, must not be in use, and
    /// the pool must have been created with `free_descriptor_set_bit` set.
    #[inline]
    pub unsafe fn free<I>(&mut self, descriptor_sets: I) -> Result<(), OomError>
        where I: IntoIterator<Item = UnsafeDescriptorSet>
    {
        let sets: SmallVec<[_; 8]> = descriptor_sets.into_iter().map(|s| s.set).collect();
        if !sets.is_empty() {
            self.free_impl(&sets)
        } else {
            // Freeing nothing is a no-op; don't pass an empty list to the driver.
            Ok(())
        }
    }

    // Non-generic part of `free`.
    unsafe fn free_impl(&mut self, sets: &SmallVec<[vk::DescriptorSet; 8]>)
                        -> Result<(), OomError> {
        let vk = self.device.pointers();
        check_errors(vk.FreeDescriptorSets(self.device.internal_object(),
                                           self.pool,
                                           sets.len() as u32,
                                           sets.as_ptr()))?;
        Ok(())
    }

    /// Resets the pool, implicitly freeing every set allocated from it.
    ///
    /// # Safety
    ///
    /// Sets previously allocated from this pool must no longer be used.
    pub unsafe fn reset(&mut self) -> Result<(), OomError> {
        let vk = self.device.pointers();
        check_errors(vk.ResetDescriptorPool(self.device.internal_object(),
                                            self.pool,
                                            0 /* reserved, must be 0 */))?;
        Ok(())
    }
}
unsafe impl DeviceOwned for UnsafeDescriptorPool {
    /// Returns the device this pool was created from.
    #[inline]
    fn device(&self) -> &Arc<Device> {
        &self.device
    }
}
impl fmt::Debug for UnsafeDescriptorPool {
    // Debug output exposing the raw Vulkan handle and the owning device.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        let mut repr = fmt.debug_struct("UnsafeDescriptorPool");
        repr.field("raw", &self.pool);
        repr.field("device", &self.device);
        repr.finish()
    }
}
impl Drop for UnsafeDescriptorPool {
    // Destroys the raw pool. No tracking is done: any set still allocated from
    // this pool becomes invalid, which is the caller's responsibility (the type
    // is "unsafe" for exactly this reason).
    #[inline]
    fn drop(&mut self) {
        unsafe {
            let vk = self.device.pointers();
            vk.DestroyDescriptorPool(self.device.internal_object(), self.pool, ptr::null());
        }
    }
}
/// Error that can be returned when allocating descriptor sets from a pool.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum DescriptorPoolAllocError {
    /// There is no memory available on the host (ie. the CPU, RAM, etc.).
    OutOfHostMemory,
    /// There is no memory available on the device (ie. video memory).
    OutOfDeviceMemory,
    /// Allocation has failed because the pool is too fragmented.
    FragmentedPool,
    /// There is no more space available in the descriptor pool.
    OutOfPoolMemory,
}
impl error::Error for DescriptorPoolAllocError {
    /// Static description string for each error variant.
    #[inline]
    fn description(&self) -> &str {
        match *self {
            DescriptorPoolAllocError::OutOfHostMemory => "no memory available on the host",
            DescriptorPoolAllocError::OutOfDeviceMemory =>
                "no memory available on the graphical device",
            DescriptorPoolAllocError::FragmentedPool =>
                "allocation has failed because the pool is too fragmented",
            DescriptorPoolAllocError::OutOfPoolMemory =>
                "there is no more space available in the descriptor pool",
        }
    }
}
impl fmt::Display for DescriptorPoolAllocError {
#[inline]
fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(fmt, "{}", error::Error::description(self))
}
}
/// Iterator over the descriptor sets produced by `UnsafeDescriptorPool::alloc`.
#[derive(Debug)]
pub struct UnsafeDescriptorPoolAllocIter {
    // Raw handles returned by `vkAllocateDescriptorSets`, wrapped lazily.
    sets: VecIntoIter<vk::DescriptorSet>,
}
impl Iterator for UnsafeDescriptorPoolAllocIter {
    type Item = UnsafeDescriptorSet;

    /// Wraps the next raw handle, if any, into an `UnsafeDescriptorSet`.
    #[inline]
    fn next(&mut self) -> Option<UnsafeDescriptorSet> {
        match self.sets.next() {
            Some(raw) => Some(UnsafeDescriptorSet { set: raw }),
            None => None,
        }
    }

    /// Forwards the exact size of the underlying `Vec` iterator.
    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.sets.size_hint()
    }
}
// The inner `Vec` iterator knows its exact length, so this is sound.
impl ExactSizeIterator for UnsafeDescriptorPoolAllocIter {
}
/// Low-level wrapper around a raw `VkDescriptorSet`.
///
/// Performs no synchronization and does not keep the pool or the resources it
/// points to alive; that is the caller's responsibility.
pub struct UnsafeDescriptorSet {
    // Raw Vulkan handle; ownership stays with the pool it came from.
    set: vk::DescriptorSet,
}
impl UnsafeDescriptorSet {
    // Writes descriptors into this set by building one `VkWriteDescriptorSet` per
    // element of `writes` and submitting them in a single
    // `vkUpdateDescriptorSets` call.
    //
    // The work is done in two passes because `VkWriteDescriptorSet` stores raw
    // pointers into the image-info / buffer-info / texel-buffer-view arrays, and
    // those arrays may reallocate while being filled. Pass 1 pushes the info
    // structs and records *indices*; pass 2 converts the indices into pointers
    // once the arrays are final.
    //
    // # Safety (from the `unsafe fn` contract)
    //
    // The caller must ensure the writes match the set's layout and that the set
    // is not in use — no checks are performed here.
    pub unsafe fn write<I>(&mut self, device: &Device, writes: I)
        where I: Iterator<Item = DescriptorWrite>
    {
        let vk = device.pointers();

        // Info arrays that the raw writes will point into.
        let mut buffer_descriptors: SmallVec<[_; 64]> = SmallVec::new();
        let mut image_descriptors: SmallVec<[_; 64]> = SmallVec::new();
        let mut buffer_views_descriptors: SmallVec<[_; 64]> = SmallVec::new();

        // One entry per write: the raw struct plus the starting index (if any) of
        // its elements inside each of the three info arrays above.
        let mut raw_writes: SmallVec<[_; 64]> = SmallVec::new();
        let mut raw_writes_img_infos: SmallVec<[_; 64]> = SmallVec::new();
        let mut raw_writes_buf_infos: SmallVec<[_; 64]> = SmallVec::new();
        let mut raw_writes_buf_view_infos: SmallVec<[_; 64]> = SmallVec::new();

        for indiv_write in writes {
            // `ty()` and the dispatch below both read `inner[0]`, so the write
            // must contain at least one element.
            debug_assert!(!indiv_write.inner.is_empty());

            // The three info pointers are left null here and filled in pass 2.
            raw_writes.push(vk::WriteDescriptorSet {
                sType: vk::STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
                pNext: ptr::null(),
                dstSet: self.set,
                dstBinding: indiv_write.binding,
                dstArrayElement: indiv_write.first_array_element,
                descriptorCount: indiv_write.inner.len() as u32,
                descriptorType: indiv_write.ty() as u32,
                pImageInfo: ptr::null(),
                pBufferInfo: ptr::null(),
                pTexelBufferView: ptr::null(),
            });

            // Record which info array this write uses and where its first element
            // will land. The first element decides for the whole write; elements
            // of a single write are assumed to share one descriptor type.
            match indiv_write.inner[0] {
                DescriptorWriteInner::Sampler(_) |
                DescriptorWriteInner::CombinedImageSampler(_, _, _) |
                DescriptorWriteInner::SampledImage(_, _) |
                DescriptorWriteInner::StorageImage(_, _) |
                DescriptorWriteInner::InputAttachment(_, _) => {
                    raw_writes_img_infos.push(Some(image_descriptors.len()));
                    raw_writes_buf_infos.push(None);
                    raw_writes_buf_view_infos.push(None);
                },
                DescriptorWriteInner::UniformBuffer(_, _, _) |
                DescriptorWriteInner::StorageBuffer(_, _, _) |
                DescriptorWriteInner::DynamicUniformBuffer(_, _, _) |
                DescriptorWriteInner::DynamicStorageBuffer(_, _, _) => {
                    raw_writes_img_infos.push(None);
                    raw_writes_buf_infos.push(Some(buffer_descriptors.len()));
                    raw_writes_buf_view_infos.push(None);
                },
                DescriptorWriteInner::UniformTexelBuffer(_) |
                DescriptorWriteInner::StorageTexelBuffer(_) => {
                    raw_writes_img_infos.push(None);
                    raw_writes_buf_infos.push(None);
                    raw_writes_buf_view_infos.push(Some(buffer_views_descriptors.len()));
                },
            }

            // Push the actual info structs for every element of this write.
            for elem in indiv_write.inner.iter() {
                match *elem {
                    DescriptorWriteInner::UniformBuffer(buffer, offset, size) |
                    DescriptorWriteInner::DynamicUniformBuffer(buffer, offset, size) => {
                        buffer_descriptors.push(vk::DescriptorBufferInfo {
                            buffer: buffer,
                            offset: offset as u64,
                            range: size as u64,
                        });
                    },
                    DescriptorWriteInner::StorageBuffer(buffer, offset, size) |
                    DescriptorWriteInner::DynamicStorageBuffer(buffer, offset, size) => {
                        buffer_descriptors.push(vk::DescriptorBufferInfo {
                            buffer: buffer,
                            offset: offset as u64,
                            range: size as u64,
                        });
                    },
                    DescriptorWriteInner::Sampler(sampler) => {
                        image_descriptors.push(vk::DescriptorImageInfo {
                            sampler: sampler,
                            // View and layout are unused for pure samplers.
                            imageView: 0,
                            imageLayout: 0,
                        });
                    },
                    DescriptorWriteInner::CombinedImageSampler(sampler, view, layout) => {
                        image_descriptors.push(vk::DescriptorImageInfo {
                            sampler: sampler,
                            imageView: view,
                            imageLayout: layout,
                        });
                    },
                    DescriptorWriteInner::StorageImage(view, layout) => {
                        image_descriptors.push(vk::DescriptorImageInfo {
                            // Sampler is unused for storage images.
                            sampler: 0,
                            imageView: view,
                            imageLayout: layout,
                        });
                    },
                    DescriptorWriteInner::SampledImage(view, layout) => {
                        image_descriptors.push(vk::DescriptorImageInfo {
                            sampler: 0,
                            imageView: view,
                            imageLayout: layout,
                        });
                    },
                    DescriptorWriteInner::InputAttachment(view, layout) => {
                        image_descriptors.push(vk::DescriptorImageInfo {
                            sampler: 0,
                            imageView: view,
                            imageLayout: layout,
                        });
                    },
                    DescriptorWriteInner::UniformTexelBuffer(view) |
                    DescriptorWriteInner::StorageTexelBuffer(view) => {
                        buffer_views_descriptors.push(view);
                    },
                }
            }
        }

        // Pass 2: the info arrays are complete and will no longer move, so the
        // recorded indices can now be turned into stable pointers.
        for (i, write) in raw_writes.iter_mut().enumerate() {
            write.pImageInfo = match raw_writes_img_infos[i] {
                Some(off) => image_descriptors.as_ptr().offset(off as isize),
                None => ptr::null(),
            };
            write.pBufferInfo = match raw_writes_buf_infos[i] {
                Some(off) => buffer_descriptors.as_ptr().offset(off as isize),
                None => ptr::null(),
            };
            write.pTexelBufferView = match raw_writes_buf_view_infos[i] {
                Some(off) => buffer_views_descriptors.as_ptr().offset(off as isize),
                None => ptr::null(),
            };
        }

        // Skip the driver call entirely when there is nothing to write.
        if !raw_writes.is_empty() {
            vk.UpdateDescriptorSets(device.internal_object(),
                                    raw_writes.len() as u32,
                                    raw_writes.as_ptr(),
                                    0,
                                    ptr::null());
        }
    }
}
unsafe impl VulkanObject for UnsafeDescriptorSet {
    type Object = vk::DescriptorSet;

    // Object type reported to the debug-report extension.
    const TYPE: vk::DebugReportObjectTypeEXT = vk::DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT;

    /// Returns the raw Vulkan handle of this descriptor set.
    #[inline]
    fn internal_object(&self) -> vk::DescriptorSet {
        self.set
    }
}
impl fmt::Debug for UnsafeDescriptorSet {
    // Prints the raw handle wrapped in an identifying marker.
    fn fmt(&self, fmt: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        fmt.write_fmt(format_args!("<Vulkan descriptor set {:?}>", self.set))
    }
}
/// Represents a single write operation to the binding of a descriptor set.
pub struct DescriptorWrite {
    // Binding within the set that is written.
    binding: u32,
    // First array element of the binding that is written.
    first_array_element: u32,
    // One element per consecutive array slot; all elements share one
    // descriptor type (see `ty`, which inspects only `inner[0]`).
    inner: SmallVec<[DescriptorWriteInner; 1]>,
}
// Raw payload of a descriptor write: one variant per Vulkan descriptor type,
// holding the raw handles (plus offset/size in bytes for buffers, and the
// image layout as a raw `vk::ImageLayout` for images).
#[derive(Debug, Clone)]
enum DescriptorWriteInner {
    Sampler(vk::Sampler),
    StorageImage(vk::ImageView, vk::ImageLayout),
    SampledImage(vk::ImageView, vk::ImageLayout),
    CombinedImageSampler(vk::Sampler, vk::ImageView, vk::ImageLayout),
    UniformTexelBuffer(vk::BufferView),
    StorageTexelBuffer(vk::BufferView),
    UniformBuffer(vk::Buffer, usize, usize),
    StorageBuffer(vk::Buffer, usize, usize),
    DynamicUniformBuffer(vk::Buffer, usize, usize),
    DynamicStorageBuffer(vk::Buffer, usize, usize),
    InputAttachment(vk::ImageView, vk::ImageLayout),
}
// Convenience macro building a one-element `SmallVec`.
// NOTE(review): if the `smallvec` crate's own `smallvec!` macro is ever
// imported into this file, this local definition would shadow it — confirm
// naming is intentional.
macro_rules! smallvec {
    ($elem:expr) => ({ let mut s = SmallVec::new(); s.push($elem); s });
}
impl DescriptorWrite {
    /// Builds a write for a storage-image descriptor at `binding`/`array_element`.
    #[inline]
    pub fn storage_image<I>(binding: u32, array_element: u32, image: &I) -> DescriptorWrite
        where I: ImageViewAccess
    {
        DescriptorWrite {
            binding: binding,
            first_array_element: array_element,
            inner: smallvec!({
                // The view itself reports the layout it expects for storage use.
                let layout = image.descriptor_set_storage_image_layout() as u32;
                DescriptorWriteInner::StorageImage(image.inner().internal_object(), layout)
            }),
        }
    }

    /// Builds a write for a standalone sampler descriptor.
    #[inline]
    pub fn sampler(binding: u32, array_element: u32, sampler: &Arc<Sampler>) -> DescriptorWrite {
        DescriptorWrite {
            binding: binding,
            first_array_element: array_element,
            inner: smallvec!(DescriptorWriteInner::Sampler(sampler.internal_object())),
        }
    }

    /// Builds a write for a sampled-image descriptor.
    #[inline]
    pub fn sampled_image<I>(binding: u32, array_element: u32, image: &I) -> DescriptorWrite
        where I: ImageViewAccess
    {
        DescriptorWrite {
            binding: binding,
            first_array_element: array_element,
            inner: smallvec!({
                let layout = image.descriptor_set_sampled_image_layout() as u32;
                DescriptorWriteInner::SampledImage(image.inner().internal_object(), layout)
            }),
        }
    }

    /// Builds a write for a combined image + sampler descriptor.
    #[inline]
    pub fn combined_image_sampler<I>(binding: u32, array_element: u32, sampler: &Arc<Sampler>,
                                     image: &I)
                                     -> DescriptorWrite
        where I: ImageViewAccess
    {
        DescriptorWrite {
            binding: binding,
            first_array_element: array_element,
            inner: smallvec!({
                let layout = image.descriptor_set_combined_image_sampler_layout() as u32;
                DescriptorWriteInner::CombinedImageSampler(sampler.internal_object(),
                                                           image.inner().internal_object(),
                                                           layout)
            }),
        }
    }

    /// Builds a write for a uniform texel buffer descriptor.
    ///
    /// # Panics
    ///
    /// Panics if the view doesn't allow uniform-texel-buffer usage.
    // NOTE(review): the `'a` lifetime parameter is unused; removing it would be a
    // (minor) breaking change, so it is only flagged here.
    #[inline]
    pub fn uniform_texel_buffer<'a, F, B>(binding: u32, array_element: u32, view: &BufferView<F, B>)
                                          -> DescriptorWrite
        where B: BufferAccess
    {
        assert!(view.uniform_texel_buffer());

        DescriptorWrite {
            binding: binding,
            first_array_element: array_element,
            inner: smallvec!(DescriptorWriteInner::UniformTexelBuffer(view.internal_object())),
        }
    }

    /// Builds a write for a storage texel buffer descriptor.
    ///
    /// # Panics
    ///
    /// Panics if the view doesn't allow storage-texel-buffer usage.
    // NOTE(review): unused `'a` lifetime parameter, same as above.
    #[inline]
    pub fn storage_texel_buffer<'a, F, B>(binding: u32, array_element: u32, view: &BufferView<F, B>)
                                          -> DescriptorWrite
        where B: BufferAccess
    {
        assert!(view.storage_texel_buffer());

        DescriptorWrite {
            binding: binding,
            first_array_element: array_element,
            inner: smallvec!(DescriptorWriteInner::StorageTexelBuffer(view.internal_object())),
        }
    }

    /// Builds a write for a uniform buffer descriptor.
    ///
    /// # Safety
    ///
    /// Alignment of the offset and the size limit are only checked with
    /// `debug_assert`; in release builds the caller must uphold them.
    #[inline]
    pub unsafe fn uniform_buffer<B>(binding: u32, array_element: u32, buffer: &B) -> DescriptorWrite
        where B: BufferAccess
    {
        let size = buffer.size();
        // `buffer` is shadowed by the raw inner buffer; `offset` is the byte
        // offset of this sub-buffer inside it.
        let BufferInner { buffer, offset } = buffer.inner();

        // Debug-only sanity checks against the device limits.
        debug_assert_eq!(offset %
                             buffer.device().physical_device().limits()
                                   .min_uniform_buffer_offset_alignment() as usize,
                         0);
        debug_assert!(size <=
                          buffer.device().physical_device().limits()
                                .max_uniform_buffer_range() as usize);

        DescriptorWrite {
            binding: binding,
            first_array_element: array_element,
            inner: smallvec!({
                DescriptorWriteInner::UniformBuffer(buffer.internal_object(), offset, size)
            }),
        }
    }

    /// Builds a write for a storage buffer descriptor.
    ///
    /// # Safety
    ///
    /// Same debug-only alignment/size checks as `uniform_buffer`, but against
    /// the storage-buffer limits.
    #[inline]
    pub unsafe fn storage_buffer<B>(binding: u32, array_element: u32, buffer: &B) -> DescriptorWrite
        where B: BufferAccess
    {
        let size = buffer.size();
        let BufferInner { buffer, offset } = buffer.inner();

        debug_assert_eq!(offset %
                             buffer.device().physical_device().limits()
                                   .min_storage_buffer_offset_alignment() as usize,
                         0);
        debug_assert!(size <=
                          buffer.device().physical_device().limits()
                                .max_storage_buffer_range() as usize);

        DescriptorWrite {
            binding: binding,
            first_array_element: array_element,
            inner: smallvec!({
                DescriptorWriteInner::StorageBuffer(buffer.internal_object(), offset, size)
            }),
        }
    }

    /// Builds a write for a dynamic uniform buffer descriptor.
    ///
    /// # Safety
    ///
    /// Same debug-only checks as `uniform_buffer`.
    #[inline]
    pub unsafe fn dynamic_uniform_buffer<B>(binding: u32, array_element: u32, buffer: &B)
                                            -> DescriptorWrite
        where B: BufferAccess
    {
        let size = buffer.size();
        let BufferInner { buffer, offset } = buffer.inner();

        debug_assert_eq!(offset %
                             buffer.device().physical_device().limits()
                                   .min_uniform_buffer_offset_alignment() as usize,
                         0);
        debug_assert!(size <=
                          buffer.device().physical_device().limits()
                                .max_uniform_buffer_range() as usize);

        DescriptorWrite {
            binding: binding,
            first_array_element: array_element,
            inner: smallvec!(DescriptorWriteInner::DynamicUniformBuffer(buffer.internal_object(),
                                                                        offset,
                                                                        size)),
        }
    }

    /// Builds a write for a dynamic storage buffer descriptor.
    ///
    /// # Safety
    ///
    /// Same debug-only checks as `storage_buffer`.
    #[inline]
    pub unsafe fn dynamic_storage_buffer<B>(binding: u32, array_element: u32, buffer: &B)
                                            -> DescriptorWrite
        where B: BufferAccess
    {
        let size = buffer.size();
        let BufferInner { buffer, offset } = buffer.inner();

        debug_assert_eq!(offset %
                             buffer.device().physical_device().limits()
                                   .min_storage_buffer_offset_alignment() as usize,
                         0);
        debug_assert!(size <=
                          buffer.device().physical_device().limits()
                                .max_storage_buffer_range() as usize);

        DescriptorWrite {
            binding: binding,
            first_array_element: array_element,
            inner: smallvec!(DescriptorWriteInner::DynamicStorageBuffer(buffer.internal_object(),
                                                                        offset,
                                                                        size)),
        }
    }

    /// Builds a write for an input-attachment descriptor.
    #[inline]
    pub fn input_attachment<I>(binding: u32, array_element: u32, image: &I) -> DescriptorWrite
        where I: ImageViewAccess
    {
        DescriptorWrite {
            binding: binding,
            first_array_element: array_element,
            inner: smallvec!({
                let layout = image.descriptor_set_input_attachment_layout() as u32;
                DescriptorWriteInner::InputAttachment(image.inner().internal_object(), layout)
            }),
        }
    }

    /// Returns the type of the descriptor written by this object.
    ///
    /// Derived from the first element only: all elements of a write are assumed
    /// to share the same descriptor type.
    #[inline]
    pub fn ty(&self) -> DescriptorType {
        match self.inner[0] {
            DescriptorWriteInner::Sampler(_) => DescriptorType::Sampler,
            DescriptorWriteInner::CombinedImageSampler(_, _, _) =>
                DescriptorType::CombinedImageSampler,
            DescriptorWriteInner::SampledImage(_, _) => DescriptorType::SampledImage,
            DescriptorWriteInner::StorageImage(_, _) => DescriptorType::StorageImage,
            DescriptorWriteInner::UniformTexelBuffer(_) => DescriptorType::UniformTexelBuffer,
            DescriptorWriteInner::StorageTexelBuffer(_) => DescriptorType::StorageTexelBuffer,
            DescriptorWriteInner::UniformBuffer(_, _, _) => DescriptorType::UniformBuffer,
            DescriptorWriteInner::StorageBuffer(_, _, _) => DescriptorType::StorageBuffer,
            DescriptorWriteInner::DynamicUniformBuffer(_, _, _) =>
                DescriptorType::UniformBufferDynamic,
            DescriptorWriteInner::DynamicStorageBuffer(_, _, _) =>
                DescriptorType::StorageBufferDynamic,
            DescriptorWriteInner::InputAttachment(_, _) => DescriptorType::InputAttachment,
        }
    }
}
#[cfg(test)]
mod tests {
    use descriptor::descriptor::DescriptorBufferDesc;
    use descriptor::descriptor::DescriptorDesc;
    use descriptor::descriptor::DescriptorDescTy;
    use descriptor::descriptor::ShaderStages;
    use descriptor::descriptor_set::DescriptorsCount;
    use descriptor::descriptor_set::UnsafeDescriptorPool;
    use descriptor::descriptor_set::UnsafeDescriptorSetLayout;
    use std::iter;

    // Creating a pool with at least one non-zero count succeeds.
    #[test]
    fn pool_create() {
        let (device, _) = gfx_dev_and_queue!();
        let desc = DescriptorsCount {
            uniform_buffer: 1,
            ..DescriptorsCount::zero()
        };

        let _ = UnsafeDescriptorPool::new(device, &desc, 10, false).unwrap();
    }

    // `max_sets == 0` must trigger the assertion in `new`.
    #[test]
    fn zero_max_set() {
        let (device, _) = gfx_dev_and_queue!();
        let desc = DescriptorsCount {
            uniform_buffer: 1,
            ..DescriptorsCount::zero()
        };

        assert_should_panic!("The maximum number of sets can't be 0", {
            let _ = UnsafeDescriptorPool::new(device, &desc, 0, false);
        });
    }

    // An all-zero `DescriptorsCount` must trigger the assertion in `new`.
    #[test]
    fn zero_descriptors() {
        let (device, _) = gfx_dev_and_queue!();
        assert_should_panic!("All the descriptors count of a pool are 0", {
            let _ = UnsafeDescriptorPool::new(device, &DescriptorsCount::zero(), 10, false);
        });
    }

    // Allocating one set from a matching layout yields exactly one set.
    #[test]
    fn basic_alloc() {
        let (device, _) = gfx_dev_and_queue!();
        let layout = DescriptorDesc {
            ty: DescriptorDescTy::Buffer(DescriptorBufferDesc {
                dynamic: Some(false),
                storage: false,
            }),
            array_count: 1,
            stages: ShaderStages::all_graphics(),
            readonly: true,
        };

        let set_layout = UnsafeDescriptorSetLayout::new(device.clone(), iter::once(Some(layout)))
            .unwrap();

        let desc = DescriptorsCount {
            uniform_buffer: 10,
            ..DescriptorsCount::zero()
        };

        let mut pool = UnsafeDescriptorPool::new(device, &desc, 10, false).unwrap();
        unsafe {
            let sets = pool.alloc(iter::once(&set_layout)).unwrap();
            assert_eq!(sets.count(), 1);
        }
    }

    // Allocating with a layout from another device must panic in `alloc`.
    #[test]
    fn alloc_diff_device() {
        let (device1, _) = gfx_dev_and_queue!();
        let (device2, _) = gfx_dev_and_queue!();
        let layout = DescriptorDesc {
            ty: DescriptorDescTy::Buffer(DescriptorBufferDesc {
                dynamic: Some(false),
                storage: false,
            }),
            array_count: 1,
            stages: ShaderStages::all_graphics(),
            readonly: true,
        };

        let set_layout = UnsafeDescriptorSetLayout::new(device1, iter::once(Some(layout))).unwrap();

        let desc = DescriptorsCount {
            uniform_buffer: 10,
            ..DescriptorsCount::zero()
        };

        assert_should_panic!("Tried to allocate from a pool with a set layout \
                              of a different device",
                             {
                                 let mut pool =
                                     UnsafeDescriptorPool::new(device2, &desc, 10, false).unwrap();

                                 unsafe {
                                     let _ = pool.alloc(iter::once(&set_layout));
                                 }
                             });
    }

    // Allocating zero sets short-circuits and yields an empty iterator.
    #[test]
    fn alloc_zero() {
        let (device, _) = gfx_dev_and_queue!();
        let desc = DescriptorsCount {
            uniform_buffer: 1,
            ..DescriptorsCount::zero()
        };

        let mut pool = UnsafeDescriptorPool::new(device, &desc, 1, false).unwrap();
        unsafe {
            let sets = pool.alloc(iter::empty()).unwrap();
            assert_eq!(sets.count(), 0);
        }
    }
}