Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor some descriptor set/allocation things #2357

Merged
merged 3 commits into from
Oct 13, 2023
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Pool allocate validation
  • Loading branch information
Rua committed Oct 10, 2023
commit 091f3aa99f6c0978dd4034d1747860a217cd8364
140 changes: 69 additions & 71 deletions vulkano/src/descriptor_set/allocator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ use super::{
sys::UnsafeDescriptorSet,
};
use crate::{
descriptor_set::layout::{DescriptorSetLayoutCreateFlags, DescriptorType},
descriptor_set::layout::DescriptorType,
device::{Device, DeviceOwned},
instance::InstanceOwnedDebugWrapper,
Validated, VulkanError,
Expand Down Expand Up @@ -60,7 +60,7 @@ pub unsafe trait DescriptorSetAllocator: DeviceOwned {
&self,
layout: &Arc<DescriptorSetLayout>,
variable_descriptor_count: u32,
) -> Result<Self::Alloc, VulkanError>;
) -> Result<Self::Alloc, Validated<VulkanError>>;
}

/// An allocated descriptor set.
Expand All @@ -70,6 +70,9 @@ pub trait DescriptorSetAlloc: Send + Sync {

/// Returns the inner unsafe descriptor set object.
fn inner_mut(&mut self) -> &mut UnsafeDescriptorSet;

/// Returns the descriptor pool that the descriptor set was allocated from.
fn pool(&self) -> &DescriptorPool;
}

/// Standard implementation of a descriptor set allocator.
Expand Down Expand Up @@ -142,38 +145,15 @@ unsafe impl DescriptorSetAllocator for StandardDescriptorSetAllocator {
type Alloc = StandardDescriptorSetAlloc;

/// Allocates a descriptor set.
///
/// # Panics
///
/// - Panics if the provided `layout` is for push descriptors rather than regular descriptor
/// sets.
/// - Panics if the provided `variable_descriptor_count` is greater than the maximum number of
/// variable count descriptors in the set.
#[inline]
fn allocate(
&self,
layout: &Arc<DescriptorSetLayout>,
variable_descriptor_count: u32,
) -> Result<StandardDescriptorSetAlloc, VulkanError> {
assert!(
!layout
.flags()
.intersects(DescriptorSetLayoutCreateFlags::PUSH_DESCRIPTOR),
"the provided descriptor set layout is for push descriptors, and cannot be used to \
build a descriptor set object",
);

) -> Result<StandardDescriptorSetAlloc, Validated<VulkanError>> {
let max_count = layout.variable_descriptor_count();

assert!(
variable_descriptor_count <= max_count,
"the provided variable_descriptor_count ({}) is greater than the maximum number of \
variable count descriptors in the set ({})",
variable_descriptor_count,
max_count,
);

let pools = self.pools.get_or(Default::default);

let entry = unsafe { &mut *pools.get() }.get_or_try_insert(layout.id(), || {
if max_count == 0 {
FixedEntry::new(layout.clone()).map(Entry::Fixed)
Expand All @@ -197,7 +177,7 @@ unsafe impl<T: DescriptorSetAllocator> DescriptorSetAllocator for Arc<T> {
&self,
layout: &Arc<DescriptorSetLayout>,
variable_descriptor_count: u32,
) -> Result<Self::Alloc, VulkanError> {
) -> Result<Self::Alloc, Validated<VulkanError>> {
(**self).allocate(layout, variable_descriptor_count)
}
}
Expand All @@ -221,15 +201,15 @@ struct FixedEntry {
}

impl FixedEntry {
fn new(layout: Arc<DescriptorSetLayout>) -> Result<Self, VulkanError> {
/// Creates an entry that hands out fixed-size descriptor sets for the given
/// layout, eagerly creating the first backing pool with `MAX_SETS` sets.
fn new(layout: Arc<DescriptorSetLayout>) -> Result<Self, Validated<VulkanError>> {
    let pool = FixedPool::new(&layout, MAX_SETS)?;

    Ok(FixedEntry {
        pool,
        set_count: MAX_SETS,
        layout,
    })
}

fn allocate(&mut self) -> Result<StandardDescriptorSetAlloc, VulkanError> {
fn allocate(&mut self) -> Result<StandardDescriptorSetAlloc, Validated<VulkanError>> {
let inner = if let Some(inner) = self.pool.reserve.pop() {
inner
} else {
Expand All @@ -250,14 +230,17 @@ impl FixedEntry {
struct FixedPool {
// The actual Vulkan descriptor pool. This field isn't actually used anywhere, but we need to
// keep the pool alive in order to keep the descriptor sets valid.
_inner: DescriptorPool,
inner: DescriptorPool,
// List of descriptor sets. When `alloc` is called, a descriptor will be extracted from this
// list. When a `SingleLayoutPoolAlloc` is dropped, its descriptor set is put back in this list.
reserve: ArrayQueue<UnsafeDescriptorSet>,
}

impl FixedPool {
fn new(layout: &Arc<DescriptorSetLayout>, set_count: usize) -> Result<Arc<Self>, VulkanError> {
fn new(
layout: &Arc<DescriptorSetLayout>,
set_count: usize,
) -> Result<Arc<Self>, Validated<VulkanError>> {
let inner = DescriptorPool::new(
layout.device().clone(),
DescriptorPoolCreateInfo {
Expand All @@ -275,28 +258,28 @@ impl FixedPool {
)
.map_err(Validated::unwrap)?;

let allocate_infos = (0..set_count).map(|_| DescriptorSetAllocateInfo {
layout,
variable_descriptor_count: 0,
});
let allocate_infos = (0..set_count).map(|_| DescriptorSetAllocateInfo::new(layout));

let allocs = unsafe {
inner
.allocate_descriptor_sets(allocate_infos)
.map_err(|err| match err {
VulkanError::OutOfHostMemory | VulkanError::OutOfDeviceMemory => err,
VulkanError::FragmentedPool => {
// This can't happen as we don't free individual sets.
unreachable!();
}
VulkanError::OutOfPoolMemory => {
// We created the pool with an exact size.
unreachable!();
}
_ => {
// Shouldn't ever be returned.
unreachable!();
}
Validated::ValidationError(_) => err,
Validated::Error(vk_err) => match vk_err {
VulkanError::OutOfHostMemory | VulkanError::OutOfDeviceMemory => err,
VulkanError::FragmentedPool => {
// This can't happen as we don't free individual sets.
unreachable!();
}
VulkanError::OutOfPoolMemory => {
// We created the pool with an exact size.
unreachable!();
}
_ => {
// Shouldn't ever be returned.
unreachable!();
}
},
})?
};

Expand All @@ -305,10 +288,7 @@ impl FixedPool {
let _ = reserve.push(alloc);
}

Ok(Arc::new(FixedPool {
_inner: inner,
reserve,
}))
Ok(Arc::new(FixedPool { inner, reserve }))
}
}

Expand All @@ -326,7 +306,7 @@ struct VariableEntry {
}

impl VariableEntry {
fn new(layout: Arc<DescriptorSetLayout>) -> Result<Self, VulkanError> {
fn new(layout: Arc<DescriptorSetLayout>) -> Result<Self, Validated<VulkanError>> {
let reserve = Arc::new(ArrayQueue::new(MAX_POOLS));

Ok(VariableEntry {
Expand All @@ -340,7 +320,7 @@ impl VariableEntry {
fn allocate(
&mut self,
variable_descriptor_count: u32,
) -> Result<StandardDescriptorSetAlloc, VulkanError> {
) -> Result<StandardDescriptorSetAlloc, Validated<VulkanError>> {
if self.allocations >= MAX_SETS {
self.pool = if let Some(inner) = self.reserve.pop() {
Arc::new(VariablePool {
Expand All @@ -354,28 +334,31 @@ impl VariableEntry {
}

let allocate_info = DescriptorSetAllocateInfo {
layout: &self.layout,
variable_descriptor_count,
..DescriptorSetAllocateInfo::new(&self.layout)
};

let mut sets = unsafe {
self.pool
.inner
.allocate_descriptor_sets([allocate_info])
.map_err(|err| match err {
VulkanError::OutOfHostMemory | VulkanError::OutOfDeviceMemory => err,
VulkanError::FragmentedPool => {
// This can't happen as we don't free individual sets.
unreachable!();
}
VulkanError::OutOfPoolMemory => {
// We created the pool to fit the maximum variable descriptor count.
unreachable!();
}
_ => {
// Shouldn't ever be returned.
unreachable!();
}
Validated::ValidationError(_) => err,
Validated::Error(vk_err) => match vk_err {
VulkanError::OutOfHostMemory | VulkanError::OutOfDeviceMemory => err,
VulkanError::FragmentedPool => {
// This can't happen as we don't free individual sets.
unreachable!();
}
VulkanError::OutOfPoolMemory => {
// We created the pool to fit the maximum variable descriptor count.
unreachable!();
}
_ => {
// Shouldn't ever be returned.
unreachable!();
}
},
})?
};
self.allocations += 1;
Expand Down Expand Up @@ -457,6 +440,16 @@ enum AllocParent {
Variable(Arc<VariablePool>),
}

impl AllocParent {
#[inline]
fn pool(&self) -> &DescriptorPool {
match self {
Self::Fixed(pool) => &pool.inner,
Self::Variable(pool) => &pool.inner,
}
}
}

// This is needed because of the blanket impl of `Send` on `Arc<T>`, which requires that `T` is
// `Send + Sync`. `FixedPool` and `VariablePool` are `Send + !Sync` because `DescriptorPool` is
// `!Sync`. That's fine however because we never access the `DescriptorPool` concurrently.
Expand All @@ -473,6 +466,11 @@ impl DescriptorSetAlloc for StandardDescriptorSetAlloc {
fn inner_mut(&mut self) -> &mut UnsafeDescriptorSet {
&mut self.inner
}

/// Returns the descriptor pool this set was allocated from, delegating to
/// the parent pool variant that owns the set.
#[inline]
fn pool(&self) -> &DescriptorPool {
self.parent.pool()
}
}

impl Drop for StandardDescriptorSetAlloc {
Expand Down Expand Up @@ -567,15 +565,15 @@ mod tests {

let pool1 =
if let AllocParent::Fixed(pool) = &allocator.allocate(&layout, 0).unwrap().parent {
pool._inner.handle()
pool.inner.handle()
} else {
unreachable!()
};

thread::spawn(move || {
let pool2 =
if let AllocParent::Fixed(pool) = &allocator.allocate(&layout, 0).unwrap().parent {
pool._inner.handle()
pool.inner.handle()
} else {
unreachable!()
};
Expand Down
Loading