Commit

Merge branch 'master' into suballocation
marc0246 committed Aug 28, 2023
2 parents 4fc7f76 + e1acf31 commit bd2869d
Showing 7 changed files with 95 additions and 25 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -157,6 +157,7 @@ Changes to the `khr_display` extension:
- Added `MemoryMapInfo`, `MemoryUnmapInfo`, `MappingState` and `MappedMemoryRange`.
- Added `ShaderModule::single_entry_point()` which may replace `entry_point("main")` calls in common setups.
- Added `ShaderModule::single_entry_point_of_execution`.
- Added `GenericMemoryAllocatorCreateInfo::memory_type_bits` and `AllocationCreateInfo::memory_type_bits`.

### Bugs fixed

2 changes: 1 addition & 1 deletion vulkano/src/acceleration_structure.rs
@@ -1056,7 +1056,7 @@ impl AccelerationStructureGeometryTrianglesData {
.filter(|&c| c != 0)
.min()
.unwrap() as u32;
let smallest_component_bytes = (smallest_component_bits + 7) & !7;
let smallest_component_bytes = ((smallest_component_bits + 7) & !7) / 8;

if vertex_stride % smallest_component_bytes != 0 {
return Err(Box::new(ValidationError {
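
The hunk above (and the two identical ones in vulkano/src/command_buffer/commands/acceleration_structure.rs further down) fixes a bits-versus-bytes mix-up: the old expression rounded the smallest vertex component size up to a whole byte but left the result in bits, so the vertex stride check used a value eight times too large. A minimal standalone sketch of the corrected conversion; the example inputs (a 16-bit and a packed 10-bit component) are hypothetical:

// Round a component size in bits up to whole bytes, then convert to bytes.
// The previous code stopped before the division by 8.
fn smallest_component_bytes(smallest_component_bits: u32) -> u32 {
    ((smallest_component_bits + 7) & !7) / 8
}

fn main() {
    assert_eq!(smallest_component_bytes(16), 2); // e.g. a 16-bit vertex component
    assert_eq!(smallest_component_bytes(10), 2); // e.g. a packed 10-bit component
    // The old expression returned 16 in both cases, so a stride check like
    // `vertex_stride % smallest_component_bytes != 0` was stricter than intended.
}
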
8 changes: 3 additions & 5 deletions vulkano/src/buffer/subbuffer.rs
@@ -129,11 +129,9 @@ impl<T: ?Sized> Subbuffer<T> {
/// [`MappingState::slice`]: crate::memory::MappingState::slice
pub fn mapped_slice(&self) -> Result<NonNull<[u8]>, HostAccessError> {
match self.buffer().memory() {
BufferMemory::Normal(a) => {
let opt = a.mapped_slice(self.range());

BufferMemory::Normal(allocation) => {
// SAFETY: `self.range()` is in bounds of the allocation.
unsafe { opt.unwrap_unchecked() }
unsafe { allocation.mapped_slice_unchecked(self.range()) }
}
BufferMemory::Sparse => unreachable!(),
}
@@ -510,7 +508,7 @@ impl<T> Subbuffer<[T]> {

#[cfg_attr(not(feature = "document_unchecked"), doc(hidden))]
pub unsafe fn slice_unchecked(mut self, range: impl RangeBounds<DeviceSize>) -> Subbuffer<[T]> {
let Range { start, end } = memory::range(range, ..self.len()).unwrap_unchecked();
let Range { start, end } = memory::range_unchecked(range, ..self.len());

self.offset += start * size_of::<T>() as DeviceSize;
self.size = (end - start) * size_of::<T>() as DeviceSize;
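
For context, a hedged sketch of how the safe entry point `Subbuffer::mapped_slice` (which the first hunk above now routes through the new `mapped_slice_unchecked`) might be consumed. The helper name is hypothetical, and the sketch assumes the subbuffer lives in host-mappable memory with no concurrent device access:

use vulkano::buffer::Subbuffer;

// Hypothetical helper: peek at the first byte of a host-mapped subbuffer.
fn first_byte(subbuffer: &Subbuffer<[u8]>) -> Option<u8> {
    // `mapped_slice` errors if the memory is not host-mapped or the subbuffer's
    // range falls outside the mapped region.
    let bytes = subbuffer.mapped_slice().ok()?;
    // SAFETY (assumed for this sketch): the device is not writing to this range.
    unsafe { bytes.as_ref().first().copied() }
}
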
4 changes: 2 additions & 2 deletions vulkano/src/command_buffer/commands/acceleration_structure.rs
@@ -1050,7 +1050,7 @@ where
.filter(|&c| c != 0)
.min()
.unwrap() as u32;
let smallest_component_bytes = (smallest_component_bits + 7) & !7;
let smallest_component_bytes = ((smallest_component_bits + 7) & !7) / 8;

if vertex_data.device_address().unwrap().get() % smallest_component_bytes as u64
!= 0
@@ -1837,7 +1837,7 @@ where
.filter(|&c| c != 0)
.min()
.unwrap() as u32;
let smallest_component_bytes = (smallest_component_bits + 7) & !7;
let smallest_component_bytes = ((smallest_component_bits + 7) & !7) / 8;

if vertex_data.device_address().unwrap().get() % smallest_component_bytes as u64
!= 0
64 changes: 47 additions & 17 deletions vulkano/src/memory/allocator/mod.rs
@@ -612,6 +612,12 @@ pub struct AllocationCreateInfo {
/// The default value is [`MemoryTypeFilter::PREFER_DEVICE`].
pub memory_type_filter: MemoryTypeFilter,

/// Allows you to further constrain the possible choices of memory types: only the memory type
/// indices whose corresponding bit is set to 1 are allowed.
///
/// The default value is [`u32::MAX`].
pub memory_type_bits: u32,

/// How eager the allocator should be to allocate [`DeviceMemory`].
///
/// The default value is [`MemoryAllocatePreference::Unknown`].
@@ -625,6 +631,7 @@ impl Default for AllocationCreateInfo {
fn default() -> Self {
AllocationCreateInfo {
memory_type_filter: MemoryTypeFilter::PREFER_DEVICE,
memory_type_bits: u32::MAX,
allocate_preference: MemoryAllocatePreference::Unknown,
_ne: crate::NonExhaustive(()),
}
@@ -734,13 +741,32 @@ pub type StandardMemoryAllocator = GenericMemoryAllocator<Arc<FreeListAllocator>
impl StandardMemoryAllocator {
/// Creates a new `StandardMemoryAllocator` with default configuration.
pub fn new_default(device: Arc<Device>) -> Self {
let memory_types = &device.physical_device().memory_properties().memory_types;

let mut memory_type_bits = u32::MAX;

for (index, MemoryType { property_flags, .. }) in memory_types.iter().enumerate() {
if property_flags.intersects(
MemoryPropertyFlags::LAZILY_ALLOCATED
| MemoryPropertyFlags::PROTECTED
| MemoryPropertyFlags::DEVICE_COHERENT
| MemoryPropertyFlags::RDMA_CAPABLE,
) {
// VUID-VkMemoryAllocateInfo-memoryTypeIndex-01872
// VUID-vkAllocateMemory-deviceCoherentMemory-02790
// Lazily allocated memory would just cause problems for suballocation in general.
memory_type_bits &= !(1 << index);
}
}

#[allow(clippy::erasing_op, clippy::identity_op)]
let create_info = GenericMemoryAllocatorCreateInfo {
#[rustfmt::skip]
block_sizes: &[
(0 * B, 64 * M),
(1 * G, 256 * M),
],
memory_type_bits,
..Default::default()
};

@@ -859,6 +885,7 @@ impl<S: Suballocator> GenericMemoryAllocator<S> {
) -> Self {
let GenericMemoryAllocatorCreateInfo {
block_sizes,
memory_type_bits,
allocation_type,
dedicated_allocation,
export_handle_types,
@@ -872,6 +899,7 @@ impl<S: Suballocator> GenericMemoryAllocator<S> {
} = device.physical_device().memory_properties();

let mut pools = ArrayVec::new(memory_types.len(), [Self::EMPTY_POOL; MAX_MEMORY_TYPES]);

for (i, memory_type) in memory_types.iter().enumerate() {
pools[i].memory_type = ash::vk::MemoryType {
property_flags: memory_type.property_flags.into(),
@@ -913,22 +941,6 @@ impl<S: Suballocator> GenericMemoryAllocator<S> {
device_address &=
device.api_version() >= Version::V1_1 || device.enabled_extensions().khr_device_group;

let mut memory_type_bits = u32::MAX;
for (index, MemoryType { property_flags, .. }) in memory_types.iter().enumerate() {
if property_flags.intersects(
MemoryPropertyFlags::LAZILY_ALLOCATED
| MemoryPropertyFlags::PROTECTED
| MemoryPropertyFlags::DEVICE_COHERENT
| MemoryPropertyFlags::DEVICE_UNCACHED
| MemoryPropertyFlags::RDMA_CAPABLE,
) {
// VUID-VkMemoryAllocateInfo-memoryTypeIndex-01872
// VUID-vkAllocateMemory-deviceCoherentMemory-02790
// Lazily allocated memory would just cause problems for suballocation in general.
memory_type_bits &= !(1 << index);
}
}

let flags = if device_address {
MemoryAllocateFlags::DEVICE_ADDRESS
} else {
@@ -1236,8 +1248,12 @@ unsafe impl<S: Suballocator> MemoryAllocator for GenericMemoryAllocator<S> {
requires_dedicated_allocation,
} = requirements;

memory_type_bits &= self.memory_type_bits;
memory_type_bits &= create_info.memory_type_bits;

let AllocationCreateInfo {
memory_type_filter,
memory_type_bits: _,
allocate_preference,
_ne: _,
} = create_info;
@@ -1249,7 +1265,6 @@ unsafe impl<S: Suballocator> MemoryAllocator for GenericMemoryAllocator<S> {
};

let size = layout.size();
memory_type_bits &= self.memory_type_bits;

let mut memory_type_index = self
.find_memory_type_index(memory_type_bits, memory_type_filter)
@@ -1485,6 +1500,19 @@ pub struct GenericMemoryAllocatorCreateInfo<'b, 'e> {
/// The default value is `&[]`, which must be overridden.
pub block_sizes: &'b [(Threshold, BlockSize)],

/// Lets you configure the allocator's global mask of memory type indices. Only memory type
/// indices whose corresponding bit is set will be allocated from when calling [`allocate`];
/// otherwise [`MemoryAllocatorError::FindMemoryType`] is returned.
///
/// You may use this to disallow problematic memory types, for instance ones with the
/// [`PROTECTED`] flag, or any other flags you don't want.
///
/// The default value is [`u32::MAX`].
///
/// [`allocate`]: struct.GenericMemoryAllocator.html#method.allocate
/// [`PROTECTED`]: MemoryPropertyFlags::PROTECTED
pub memory_type_bits: u32,

/// The allocation type that should be used for root allocations.
///
/// You only need to worry about this if you're using [`PoolAllocator`] as the suballocator, as
@@ -1563,6 +1591,7 @@ impl GenericMemoryAllocatorCreateInfo<'_, '_> {
pub(crate) fn validate(&self, device: &Device) -> Result<(), Box<ValidationError>> {
let &Self {
block_sizes,
memory_type_bits: _,
allocation_type: _,
dedicated_allocation: _,
export_handle_types,
@@ -1621,6 +1650,7 @@ impl Default for GenericMemoryAllocatorCreateInfo<'_, '_> {
fn default() -> Self {
GenericMemoryAllocatorCreateInfo {
block_sizes: &[],
memory_type_bits: u32::MAX,
allocation_type: AllocationType::Unknown,
dedicated_allocation: true,
export_handle_types: &[],
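
A hedged usage sketch of the two new fields added in this file: `StandardMemoryAllocator::new_default` now applies a global memory-type mask, and `AllocationCreateInfo::memory_type_bits` narrows an individual request further (the `allocate` hunk above ANDs both masks with the requirements' own mask). The per-allocation mask value and the helper names are hypothetical:

use std::sync::Arc;
use vulkano::device::Device;
use vulkano::memory::allocator::{
    AllocationCreateInfo, MemoryTypeFilter, StandardMemoryAllocator,
};

// The default constructor already masks out LAZILY_ALLOCATED, PROTECTED,
// DEVICE_COHERENT and RDMA_CAPABLE memory types via
// `GenericMemoryAllocatorCreateInfo::memory_type_bits`, as shown above.
fn make_allocator(device: Arc<Device>) -> StandardMemoryAllocator {
    StandardMemoryAllocator::new_default(device)
}

// Per-allocation mask: additionally restrict this request to memory type
// indices 0 and 2 (hypothetical indices for illustration).
fn create_info() -> AllocationCreateInfo {
    AllocationCreateInfo {
        memory_type_filter: MemoryTypeFilter::PREFER_DEVICE,
        memory_type_bits: (1 << 0) | (1 << 2),
        ..Default::default()
    }
}
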
17 changes: 17 additions & 0 deletions vulkano/src/memory/allocator/suballocator.rs
@@ -152,6 +152,23 @@ impl MemoryAlloc {
Some(res)
}

#[cfg_attr(not(feature = "document_unchecked"), doc(hidden))]
#[inline]
pub unsafe fn mapped_slice_unchecked(
&self,
range: impl RangeBounds<DeviceSize>,
) -> Result<NonNull<[u8]>, HostAccessError> {
let mut range = memory::range_unchecked(range, ..self.size());
range.start += self.offset();
range.end += self.offset();

if let Some(state) = self.device_memory().mapping_state() {
state.slice(range).ok_or(HostAccessError::OutOfMappedRange)
} else {
Err(HostAccessError::NotHostMapped)
}
}

pub(crate) fn atom_size(&self) -> Option<DeviceAlignment> {
self.atom_size
}
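
The new `mapped_slice_unchecked` translates a range given relative to the suballocation into one relative to the `DeviceMemory` mapping by adding the suballocation's offset before checking it against the mapping state. A tiny illustration of that shift; the function name and numbers are hypothetical:

use std::ops::Range;

// Shift an allocation-relative byte range by the allocation's offset within
// its DeviceMemory block, mirroring the arithmetic in mapped_slice_unchecked.
fn to_memory_relative(range: Range<u64>, allocation_offset: u64) -> Range<u64> {
    (range.start + allocation_offset)..(range.end + allocation_offset)
}

fn main() {
    // A 16-byte read at the start of an allocation placed at offset 256.
    assert_eq!(to_memory_relative(0..16, 256), 256..272);
}
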
24 changes: 24 additions & 0 deletions vulkano/src/memory/mod.rs
@@ -590,3 +590,27 @@ pub(crate) fn range(

(start <= end && end <= len).then_some(Range { start, end })
}

/// Converts a `RangeBounds` into a `Range` without doing any bounds checking.
pub(crate) fn range_unchecked(
range: impl RangeBounds<DeviceSize>,
bounds: RangeTo<DeviceSize>,
) -> Range<DeviceSize> {
let len = bounds.end;

let start = match range.start_bound() {
Bound::Included(&start) => start,
Bound::Excluded(start) => start + 1,
Bound::Unbounded => 0,
};

let end = match range.end_bound() {
Bound::Included(end) => end + 1,
Bound::Excluded(&end) => end,
Bound::Unbounded => len,
};

debug_assert!(start <= end && end <= len);

Range { start, end }
}
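
Unlike the checked `range` above, which returns `None` when the resolved range is out of bounds, `range_unchecked` only debug-asserts. Both helpers are `pub(crate)`, so the standalone mirror below (using plain `u64` in place of `DeviceSize`) is illustrative only:

use std::ops::{Bound, Range, RangeBounds, RangeTo};

// Illustrative mirror of `memory::range_unchecked` for u64 offsets.
fn range_unchecked(range: impl RangeBounds<u64>, bounds: RangeTo<u64>) -> Range<u64> {
    let start = match range.start_bound() {
        Bound::Included(&s) => s,
        Bound::Excluded(&s) => s + 1,
        Bound::Unbounded => 0,
    };
    let end = match range.end_bound() {
        Bound::Included(&e) => e + 1,
        Bound::Excluded(&e) => e,
        Bound::Unbounded => bounds.end,
    };
    debug_assert!(start <= end && end <= bounds.end);
    Range { start, end }
}

fn main() {
    assert_eq!(range_unchecked(2..5, ..8), 2..5);  // half-open range kept as-is
    assert_eq!(range_unchecked(..=5, ..8), 0..6);  // inclusive end becomes exclusive
    assert_eq!(range_unchecked(.., ..8), 0..8);    // unbounded resolves to the full length
}
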
