27 changes: 4 additions & 23 deletions Cargo.lock

Some generated files are not rendered by default.

10 changes: 7 additions & 3 deletions Cargo.toml
@@ -120,7 +120,7 @@ getrandom = "0.3"
glam = "0.30"
glob = "0.3"
half = { version = "2.5", default-features = false } # We require 2.5 to have `Arbitrary` support.
hashbrown = { version = "0.15", default-features = false, features = [
hashbrown = { version = "0.15.2", default-features = false, features = [
"default-hasher",
"inline-more",
] }
@@ -203,15 +203,19 @@ objc = "0.2.5"
# Vulkan dependencies
android_system_properties = "0.1.1"
ash = "0.38"
gpu-alloc = "0.6"
gpu-descriptor = "0.3.2"

# DX12 dependencies
gpu-allocator = { version = "0.27", default-features = false }
range-alloc = "0.1"
mach-dxcompiler-rs = { version = "0.1.4", default-features = false } # remember to increase max_shader_model if applicable
windows-core = { version = "0.58", default-features = false }

# DX12 and Vulkan dependencies
# # TODO: https://github.com/Traverse-Research/gpu-allocator/issues/281 put back a version
gpu-allocator = { git = "https://github.com/Traverse-Research/gpu-allocator.git", rev = "673e4ecb503af4188e0ca576acd0dad681f22413", default-features = false, features = [
"hashbrown",
] }

# Gles dependencies
khronos-egl = "6"
glow = "0.16"
6 changes: 3 additions & 3 deletions wgpu-hal/Cargo.toml
@@ -90,7 +90,6 @@ vulkan = [
"dep:arrayvec",
"dep:ash",
"dep:bytemuck",
"dep:gpu-alloc",
"dep:gpu-descriptor",
"dep:hashbrown",
"dep:libc",
@@ -101,6 +100,7 @@
"dep:profiling",
"dep:smallvec",
"dep:windows",
"gpu-allocator/vulkan",
"windows/Win32",
]
gles = [
@@ -228,9 +228,10 @@ glow = { workspace = true, optional = true }
########################

[target.'cfg(not(target_arch = "wasm32"))'.dependencies]
# Backend: Vulkan and Dx12
gpu-allocator = { workspace = true, optional = true }
# Backend: Vulkan
ash = { workspace = true, optional = true }
gpu-alloc = { workspace = true, optional = true }
gpu-descriptor = { workspace = true, optional = true }
smallvec = { workspace = true, optional = true, features = ["union"] }
# Backend: GLES
@@ -257,7 +258,6 @@ windows-core = { workspace = true, optional = true }
# Backend: Dx12
bit-set = { workspace = true, optional = true }
range-alloc = { workspace = true, optional = true }
gpu-allocator = { workspace = true, optional = true }
# backend: GLES
glutin_wgl_sys = { workspace = true, optional = true }

36 changes: 1 addition & 35 deletions wgpu-hal/src/dx12/suballocation.rs
@@ -143,7 +143,7 @@ impl Allocator {
allocations,
blocks,
total_allocated_bytes: upstream.total_allocated_bytes,
total_reserved_bytes: upstream.total_reserved_bytes,
total_reserved_bytes: upstream.total_capacity_bytes,
}
}
}
@@ -621,37 +621,3 @@ impl<'a> DeviceAllocationContext<'a> {
Ok(allocation_info)
}
}

impl From<gpu_allocator::AllocationError> for crate::DeviceError {
fn from(result: gpu_allocator::AllocationError) -> Self {
match result {
gpu_allocator::AllocationError::OutOfMemory => Self::OutOfMemory,
gpu_allocator::AllocationError::FailedToMap(e) => {
log::error!("DX12 gpu-allocator: Failed to map: {e}");
Self::Lost
}
gpu_allocator::AllocationError::NoCompatibleMemoryTypeFound => {
log::error!("DX12 gpu-allocator: No Compatible Memory Type Found");
Self::Lost
}
gpu_allocator::AllocationError::InvalidAllocationCreateDesc => {
log::error!("DX12 gpu-allocator: Invalid Allocation Creation Description");
Self::Lost
}
gpu_allocator::AllocationError::InvalidAllocatorCreateDesc(e) => {
log::error!("DX12 gpu-allocator: Invalid Allocator Creation Description: {e}");
Self::Lost
}

gpu_allocator::AllocationError::Internal(e) => {
log::error!("DX12 gpu-allocator: Internal Error: {e}");
Self::Lost
}
gpu_allocator::AllocationError::BarrierLayoutNeedsDevice10
| gpu_allocator::AllocationError::CastableFormatsRequiresEnhancedBarriers
| gpu_allocator::AllocationError::CastableFormatsRequiresAtLeastDevice12 => {
unreachable!()
}
}
}
}
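
As an aside on the `total_reserved_bytes`/`total_capacity_bytes` change above: the wgpu-side counter keeps its old name and is now fed from gpu-allocator's renamed report field. Below is a minimal sketch of where that report comes from, assuming gpu-allocator's `generate_report()` API; the helper function and its log message are illustrative and not part of this diff.

```rust
// Sketch only: query the upstream allocator report whose field rename the
// change above adapts to.
fn log_allocator_usage(allocator: &gpu_allocator::d3d12::Allocator) {
    let report = allocator.generate_report();
    log::info!(
        "suballocated {} B out of {} B reserved from the device",
        report.total_allocated_bytes,
        // Formerly `total_reserved_bytes` in older gpu-allocator releases.
        report.total_capacity_bytes,
    );
}
```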
35 changes: 35 additions & 0 deletions wgpu-hal/src/lib.rs
@@ -381,6 +381,41 @@ pub enum DeviceError {
Unexpected,
}

#[cfg(any(dx12, vulkan))]
impl From<gpu_allocator::AllocationError> for DeviceError {
fn from(result: gpu_allocator::AllocationError) -> Self {
match result {
gpu_allocator::AllocationError::OutOfMemory => Self::OutOfMemory,
gpu_allocator::AllocationError::FailedToMap(e) => {
log::error!("DX12 gpu-allocator: Failed to map: {e}");
Self::Lost
}
gpu_allocator::AllocationError::NoCompatibleMemoryTypeFound => {
log::error!("DX12 gpu-allocator: No Compatible Memory Type Found");
Self::Lost
}
gpu_allocator::AllocationError::InvalidAllocationCreateDesc => {
log::error!("DX12 gpu-allocator: Invalid Allocation Creation Description");
Self::Lost
}
gpu_allocator::AllocationError::InvalidAllocatorCreateDesc(e) => {
log::error!("DX12 gpu-allocator: Invalid Allocator Creation Description: {e}");
Self::Lost
}

gpu_allocator::AllocationError::Internal(e) => {
log::error!("DX12 gpu-allocator: Internal Error: {e}");
Self::Lost
}
gpu_allocator::AllocationError::BarrierLayoutNeedsDevice10
| gpu_allocator::AllocationError::CastableFormatsRequiresEnhancedBarriers
| gpu_allocator::AllocationError::CastableFormatsRequiresAtLeastDevice12 => {
unreachable!()
}
}
}
}

#[allow(dead_code)] // may be unused on some platforms
#[cold]
fn hal_usage_error<T: fmt::Display>(txt: T) -> ! {
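Because the conversion now lives in the shared `wgpu-hal` crate root, both the DX12 and Vulkan backends can bubble allocator failures up with `?`. A minimal sketch follows, assuming gpu-allocator's published Vulkan `allocate` signature; `try_allocate` itself is a hypothetical helper, not code from this diff.

```rust
use gpu_allocator::vulkan::{Allocation, AllocationCreateDesc, Allocator};

// Hypothetical helper inside wgpu-hal: `?` converts
// `gpu_allocator::AllocationError` into `DeviceError` via the `From` impl
// added above, so no explicit `map_err` is needed.
fn try_allocate(
    allocator: &mut Allocator,
    desc: &AllocationCreateDesc<'_>,
) -> Result<Allocation, DeviceError> {
    let allocation = allocator.allocate(desc)?;
    Ok(allocation)
}
```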
115 changes: 35 additions & 80 deletions wgpu-hal/src/vulkan/adapter.rs
@@ -2235,87 +2235,42 @@ impl super::Adapter {
signal_semaphores: Default::default(),
};

let mem_allocator = {
let limits = self.phd_capabilities.properties.limits;

// Note: the parameters here are not set in stone nor were they picked with
// strong confidence.
// `final_free_list_chunk` should be bigger than starting_free_list_chunk if
// we want the behavior of starting with smaller block sizes and using larger
// ones only after we observe that the small ones aren't enough, which I think
// is a good "I don't know what the workload is going to be like" approach.
//
// For reference, `VMA`, and `gpu_allocator` both start with 256 MB blocks
// (then VMA doubles the block size each time it needs a new block).
// At some point it would be good to experiment with real workloads
//
// TODO(#5925): The plan is to switch the Vulkan backend from `gpu_alloc` to
// `gpu_allocator` which has a different (simpler) set of configuration options.
//
// TODO: These parameters should take hardware capabilities into account.
let mb = 1024 * 1024;
let perf_cfg = gpu_alloc::Config {
starting_free_list_chunk: 128 * mb,
final_free_list_chunk: 512 * mb,
minimal_buddy_size: 1,
initial_buddy_dedicated_size: 8 * mb,
dedicated_threshold: 32 * mb,
preferred_dedicated_threshold: mb,
transient_dedicated_threshold: 128 * mb,
};
let mem_usage_cfg = gpu_alloc::Config {
starting_free_list_chunk: 8 * mb,
final_free_list_chunk: 64 * mb,
minimal_buddy_size: 1,
initial_buddy_dedicated_size: 8 * mb,
dedicated_threshold: 8 * mb,
preferred_dedicated_threshold: mb,
transient_dedicated_threshold: 16 * mb,
};
let config = match memory_hints {
wgt::MemoryHints::Performance => perf_cfg,
wgt::MemoryHints::MemoryUsage => mem_usage_cfg,
wgt::MemoryHints::Manual {
suballocated_device_memory_block_size,
} => gpu_alloc::Config {
starting_free_list_chunk: suballocated_device_memory_block_size.start,
final_free_list_chunk: suballocated_device_memory_block_size.end,
initial_buddy_dedicated_size: suballocated_device_memory_block_size.start,
..perf_cfg
},
};

let max_memory_allocation_size =
if let Some(maintenance_3) = self.phd_capabilities.maintenance_3 {
maintenance_3.max_memory_allocation_size
} else {
u64::MAX
};
let properties = gpu_alloc::DeviceProperties {
max_memory_allocation_count: limits.max_memory_allocation_count,
max_memory_allocation_size,
non_coherent_atom_size: limits.non_coherent_atom_size,
memory_types: memory_types
.iter()
.map(|memory_type| gpu_alloc::MemoryType {
props: gpu_alloc::MemoryPropertyFlags::from_bits_truncate(
memory_type.property_flags.as_raw() as u8,
),
heap: memory_type.heap_index,
})
.collect(),
memory_heaps: mem_properties
.memory_heaps_as_slice()
.iter()
.map(|&memory_heap| gpu_alloc::MemoryHeap {
size: memory_heap.size,
})
.collect(),
buffer_device_address: enabled_extensions
.contains(&khr::buffer_device_address::NAME),
};
gpu_alloc::GpuAllocator::new(config, properties)
// TODO: the allocator's configuration should take hardware capability into
// account.
const MB: u64 = 1024 * 1024;
let allocation_sizes = match memory_hints {
wgt::MemoryHints::Performance => gpu_allocator::AllocationSizes::new(128 * MB, 64 * MB)
.with_max_device_memblock_size(256 * MB)
.with_max_host_memblock_size(128 * MB),
wgt::MemoryHints::MemoryUsage => gpu_allocator::AllocationSizes::new(8 * MB, 4 * MB)
.with_max_device_memblock_size(64 * MB)
.with_max_host_memblock_size(32 * MB),
wgt::MemoryHints::Manual {
suballocated_device_memory_block_size,
} => {
// TODO: Would it be useful to expose the host size in memory hints
// instead of always using half of the device size?
let device_size = suballocated_device_memory_block_size;
let host_size = device_size.start / 2..device_size.end / 2;

gpu_allocator::AllocationSizes::new(device_size.start, host_size.start)
.with_max_device_memblock_size(device_size.end)
.with_max_host_memblock_size(host_size.end)
}
};

let buffer_device_address = enabled_extensions.contains(&khr::buffer_device_address::NAME);

let mem_allocator =
gpu_allocator::vulkan::Allocator::new(&gpu_allocator::vulkan::AllocatorCreateDesc {
instance: self.instance.raw.clone(),
device: shared.raw.clone(),
physical_device: self.raw,
debug_settings: Default::default(),
buffer_device_address,
allocation_sizes,
})?;

let desc_allocator = gpu_descriptor::DescriptorAllocator::new(
if let Some(di) = self.phd_capabilities.descriptor_indexing {
di.max_update_after_bind_descriptors_in_all_pools
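
For context on how the allocator constructed above is driven at resource-creation time, here is a hedged sketch built on gpu-allocator's documented Vulkan API (`AllocationCreateDesc`, `allocate`, `free`); the buffer handle, allocation name, and error handling are illustrative and not taken from this diff.

```rust
use gpu_allocator::vulkan::{Allocation, AllocationCreateDesc, AllocationScheme, Allocator};
use gpu_allocator::MemoryLocation;

// Sketch: suballocate device-local memory for a buffer from the allocator
// configured by the `AllocationSizes` above, then bind it.
fn bind_buffer_memory(
    device: &ash::Device,
    allocator: &mut Allocator,
    buffer: ash::vk::Buffer,
) -> Result<Allocation, gpu_allocator::AllocationError> {
    let requirements = unsafe { device.get_buffer_memory_requirements(buffer) };

    let allocation = allocator.allocate(&AllocationCreateDesc {
        name: "example buffer",
        requirements,
        location: MemoryLocation::GpuOnly,
        linear: true, // buffers are always linear
        allocation_scheme: AllocationScheme::GpuAllocatorManaged,
    })?;

    // Bind the suballocated range to the buffer; error handling for the raw
    // Vulkan call is elided in this sketch.
    unsafe {
        device
            .bind_buffer_memory(buffer, allocation.memory(), allocation.offset())
            .expect("vkBindBufferMemory failed");
    }

    Ok(allocation)
}
```

When the resource is destroyed, the allocation is handed back with `allocator.free(allocation)`, which returns the range to its memory block for reuse; block sizes grow between the minimum and maximum memblock sizes chosen from `MemoryHints` above.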