use super::{conv, RawTlasInstance};

use arrayvec::ArrayVec;
use ash::{khr, vk};
use parking_lot::Mutex;

use crate::TlasInstance;
use std::{
    borrow::Cow,
    collections::{hash_map::Entry, BTreeMap},
    ffi::{CStr, CString},
    mem::{self, size_of, MaybeUninit},
    num::NonZeroU32,
    ptr, slice,
    sync::Arc,
};

impl super::DeviceShared {
    pub(super) unsafe fn set_object_name(&self, object: impl vk::Handle, name: &str) {
        let Some(extension) = self.extension_fns.debug_utils.as_ref() else {
            return;
        };

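        // Keep both buffers outside the branch so the slice chosen below
        // outlives it; short names get a stack copy with a null terminator,
        // long names fall back to a heap allocation.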
        let mut buffer: [u8; 64] = [0u8; 64];
        let buffer_vec: Vec<u8>;

        let name_bytes = if name.len() < buffer.len() {
            buffer[..name.len()].copy_from_slice(name.as_bytes());
            buffer[name.len()] = 0;
            &buffer[..name.len() + 1]
        } else {
            buffer_vec = name
                .as_bytes()
                .iter()
                .cloned()
                .chain(std::iter::once(0))
                .collect();
            &buffer_vec
        };

        let name = CStr::from_bytes_until_nul(name_bytes).expect("We have added a null byte");

        let _result = unsafe {
            extension.set_debug_utils_object_name(
                &vk::DebugUtilsObjectNameInfoEXT::default()
                    .object_handle(object)
                    .object_name(name),
            )
        };
    }

    pub fn make_render_pass(
        &self,
        key: super::RenderPassKey,
    ) -> Result<vk::RenderPass, crate::DeviceError> {
        Ok(match self.render_passes.lock().entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let mut vk_attachments = Vec::new();
                let mut color_refs = Vec::with_capacity(e.key().colors.len());
                let mut resolve_refs = Vec::with_capacity(color_refs.capacity());
                let mut ds_ref = None;
                let samples = vk::SampleCountFlags::from_raw(e.key().sample_count);
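                // Placeholder reference for color slots with no attachment.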
                let unused = vk::AttachmentReference {
                    attachment: vk::ATTACHMENT_UNUSED,
                    layout: vk::ImageLayout::UNDEFINED,
                };
                for cat in e.key().colors.iter() {
                    let (color_ref, resolve_ref) = if let Some(cat) = cat.as_ref() {
                        let color_ref = vk::AttachmentReference {
                            attachment: vk_attachments.len() as u32,
                            layout: cat.base.layout,
                        };
                        vk_attachments.push({
                            let (load_op, store_op) = conv::map_attachment_ops(cat.base.ops);
                            vk::AttachmentDescription::default()
                                .format(cat.base.format)
                                .samples(samples)
                                .load_op(load_op)
                                .store_op(store_op)
                                .initial_layout(cat.base.layout)
                                .final_layout(cat.base.layout)
                        });
                        let resolve_ref = if let Some(ref rat) = cat.resolve {
                            let (load_op, store_op) = conv::map_attachment_ops(rat.ops);
                            let vk_attachment = vk::AttachmentDescription::default()
                                .format(rat.format)
                                .samples(vk::SampleCountFlags::TYPE_1)
                                .load_op(load_op)
                                .store_op(store_op)
                                .initial_layout(rat.layout)
                                .final_layout(rat.layout);
                            vk_attachments.push(vk_attachment);

                            vk::AttachmentReference {
                                attachment: vk_attachments.len() as u32 - 1,
                                layout: rat.layout,
                            }
                        } else {
                            unused
                        };

                        (color_ref, resolve_ref)
                    } else {
                        (unused, unused)
                    };

                    color_refs.push(color_ref);
                    resolve_refs.push(resolve_ref);
                }

                if let Some(ref ds) = e.key().depth_stencil {
                    ds_ref = Some(vk::AttachmentReference {
                        attachment: vk_attachments.len() as u32,
                        layout: ds.base.layout,
                    });
                    let (load_op, store_op) = conv::map_attachment_ops(ds.base.ops);
                    let (stencil_load_op, stencil_store_op) =
                        conv::map_attachment_ops(ds.stencil_ops);
                    let vk_attachment = vk::AttachmentDescription::default()
                        .format(ds.base.format)
                        .samples(samples)
                        .load_op(load_op)
                        .store_op(store_op)
                        .stencil_load_op(stencil_load_op)
                        .stencil_store_op(stencil_store_op)
                        .initial_layout(ds.base.layout)
                        .final_layout(ds.base.layout);
                    vk_attachments.push(vk_attachment);
                }

                let vk_subpasses = [{
                    let mut vk_subpass = vk::SubpassDescription::default()
                        .pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS)
                        .color_attachments(&color_refs)
                        .resolve_attachments(&resolve_refs);

                    if self
                        .workarounds
                        .contains(super::Workarounds::EMPTY_RESOLVE_ATTACHMENT_LISTS)
                        && resolve_refs.is_empty()
                    {
                        vk_subpass.p_resolve_attachments = ptr::null();
                    }

                    if let Some(ref reference) = ds_ref {
                        vk_subpass = vk_subpass.depth_stencil_attachment(reference)
                    }
                    vk_subpass
                }];

                let mut vk_info = vk::RenderPassCreateInfo::default()
                    .attachments(&vk_attachments)
                    .subpasses(&vk_subpasses);

                let mut multiview_info;
                let mask;
                if let Some(multiview) = e.key().multiview {
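                    // Sanity checks; better to panic here than hand the
                    // driver a bogus view mask.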
                    assert!(multiview.get() <= 8);
                    assert!(multiview.get() > 1);

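                    // Bit `i` of a view mask enables view `i`, so this enables
                    // views `0..multiview` for the subpass.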
                    mask = [(1 << multiview.get()) - 1];

                    multiview_info = vk::RenderPassMultiviewCreateInfoKHR::default()
                        .view_masks(&mask)
                        .correlation_masks(&mask);
                    vk_info = vk_info.push_next(&mut multiview_info);
                }

                let raw = unsafe {
                    self.raw
                        .create_render_pass(&vk_info, None)
                        .map_err(super::map_host_device_oom_err)?
                };

                *e.insert(raw)
            }
        })
    }

    pub fn make_framebuffer(
        &self,
        key: super::FramebufferKey,
        raw_pass: vk::RenderPass,
        pass_label: crate::Label,
    ) -> Result<vk::Framebuffer, crate::DeviceError> {
        Ok(match self.framebuffers.lock().entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let vk_views = e
                    .key()
                    .attachments
                    .iter()
                    .map(|at| at.raw)
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();
                let vk_view_formats = e
                    .key()
                    .attachments
                    .iter()
                    .map(|at| self.private_caps.map_texture_format(at.view_format))
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();
                let vk_view_formats_list = e
                    .key()
                    .attachments
                    .iter()
                    .map(|at| at.raw_view_formats.clone())
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();

                let vk_image_infos = e
                    .key()
                    .attachments
                    .iter()
                    .enumerate()
                    .map(|(i, at)| {
                        let mut info = vk::FramebufferAttachmentImageInfo::default()
                            .usage(conv::map_texture_usage(at.view_usage))
                            .flags(at.raw_image_flags)
                            .width(e.key().extent.width)
                            .height(e.key().extent.height)
                            .layer_count(e.key().extent.depth_or_array_layers);
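                        // An attachment with no extra view formats advertises
                        // just its own format; otherwise pass the full list.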
                        if vk_view_formats_list[i].is_empty() {
                            info = info.view_formats(&vk_view_formats[i..i + 1]);
                        } else {
                            info = info.view_formats(&vk_view_formats_list[i]);
                        };
                        info
                    })
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();

                let mut vk_attachment_info = vk::FramebufferAttachmentsCreateInfo::default()
                    .attachment_image_infos(&vk_image_infos);
                let mut vk_info = vk::FramebufferCreateInfo::default()
                    .render_pass(raw_pass)
                    .width(e.key().extent.width)
                    .height(e.key().extent.height)
                    .layers(e.key().extent.depth_or_array_layers);

                if self.private_caps.imageless_framebuffers {
                    vk_info = vk_info
                        .flags(vk::FramebufferCreateFlags::IMAGELESS_KHR)
                        .push_next(&mut vk_attachment_info);
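                    // Imageless framebuffers supply no attachment handles, so
                    // the count has to be set by hand instead of via
                    // `.attachments()`.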
                    vk_info.attachment_count = e.key().attachments.len() as u32;
                } else {
                    vk_info = vk_info.attachments(&vk_views);
                }

                *e.insert(unsafe {
                    let raw = self.raw.create_framebuffer(&vk_info, None).unwrap();
                    if let Some(label) = pass_label {
                        self.set_object_name(raw, label);
                    }
                    raw
                })
            }
        })
    }

    fn make_memory_ranges<'a, I: 'a + Iterator<Item = crate::MemoryRange>>(
        &self,
        buffer: &'a super::Buffer,
        ranges: I,
    ) -> Option<impl 'a + Iterator<Item = vk::MappedMemoryRange>> {
        let block = buffer.block.as_ref()?.lock();
        let mask = self.private_caps.non_coherent_map_mask;
        Some(ranges.map(move |range| {
            vk::MappedMemoryRange::default()
                .memory(*block.memory())
                .offset((block.offset() + range.start) & !mask)
                .size((range.end - range.start + mask) & !mask)
        }))
    }
}

impl gpu_alloc::MemoryDevice<vk::DeviceMemory> for super::DeviceShared {
    unsafe fn allocate_memory(
        &self,
        size: u64,
        memory_type: u32,
        flags: gpu_alloc::AllocationFlags,
    ) -> Result<vk::DeviceMemory, gpu_alloc::OutOfMemory> {
        let mut info = vk::MemoryAllocateInfo::default()
            .allocation_size(size)
            .memory_type_index(memory_type);

        let mut info_flags;

        if flags.contains(gpu_alloc::AllocationFlags::DEVICE_ADDRESS) {
            info_flags = vk::MemoryAllocateFlagsInfo::default()
                .flags(vk::MemoryAllocateFlags::DEVICE_ADDRESS);
            info = info.push_next(&mut info_flags);
        }

        match unsafe { self.raw.allocate_memory(&info, None) } {
            Ok(memory) => {
                self.memory_allocations_counter.add(1);
                Ok(memory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_alloc::OutOfMemory::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_alloc::OutOfMemory::OutOfHostMemory)
            }
            Err(err) => handle_unexpected(err),
        }
    }

    unsafe fn deallocate_memory(&self, memory: vk::DeviceMemory) {
        self.memory_allocations_counter.sub(1);

        unsafe { self.raw.free_memory(memory, None) };
    }

    unsafe fn map_memory(
        &self,
        memory: &mut vk::DeviceMemory,
        offset: u64,
        size: u64,
    ) -> Result<ptr::NonNull<u8>, gpu_alloc::DeviceMapError> {
        match unsafe {
            self.raw
                .map_memory(*memory, offset, size, vk::MemoryMapFlags::empty())
        } {
            Ok(ptr) => Ok(ptr::NonNull::new(ptr.cast::<u8>())
                .expect("Pointer to memory mapping must not be null")),
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_alloc::DeviceMapError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_alloc::DeviceMapError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_MEMORY_MAP_FAILED) => Err(gpu_alloc::DeviceMapError::MapFailed),
            Err(err) => handle_unexpected(err),
        }
    }

    unsafe fn unmap_memory(&self, memory: &mut vk::DeviceMemory) {
        unsafe { self.raw.unmap_memory(*memory) };
    }

    unsafe fn invalidate_memory_ranges(
        &self,
        _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
    ) -> Result<(), gpu_alloc::OutOfMemory> {
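        // Never called: wgpu-hal invalidates mapped ranges itself through
        // `Device::invalidate_mapped_ranges`.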
        unimplemented!()
    }

    unsafe fn flush_memory_ranges(
        &self,
        _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
    ) -> Result<(), gpu_alloc::OutOfMemory> {
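        // Never called: wgpu-hal flushes mapped ranges itself through
        // `Device::flush_mapped_ranges`.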
        unimplemented!()
    }
}

impl
    gpu_descriptor::DescriptorDevice<vk::DescriptorSetLayout, vk::DescriptorPool, vk::DescriptorSet>
    for super::DeviceShared
{
    unsafe fn create_descriptor_pool(
        &self,
        descriptor_count: &gpu_descriptor::DescriptorTotalCount,
        max_sets: u32,
        flags: gpu_descriptor::DescriptorPoolCreateFlags,
    ) -> Result<vk::DescriptorPool, gpu_descriptor::CreatePoolError> {
        let unfiltered_counts = [
            (vk::DescriptorType::SAMPLER, descriptor_count.sampler),
            (
                vk::DescriptorType::SAMPLED_IMAGE,
                descriptor_count.sampled_image,
            ),
            (
                vk::DescriptorType::STORAGE_IMAGE,
                descriptor_count.storage_image,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER,
                descriptor_count.uniform_buffer,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC,
                descriptor_count.uniform_buffer_dynamic,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER,
                descriptor_count.storage_buffer,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER_DYNAMIC,
                descriptor_count.storage_buffer_dynamic,
            ),
            (
                vk::DescriptorType::ACCELERATION_STRUCTURE_KHR,
                descriptor_count.acceleration_structure,
            ),
        ];

        let filtered_counts = unfiltered_counts
            .iter()
            .cloned()
            .filter(|&(_, count)| count != 0)
            .map(|(ty, count)| vk::DescriptorPoolSize {
                ty,
                descriptor_count: count,
            })
            .collect::<ArrayVec<_, 8>>();

        let mut vk_flags =
            if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND) {
                vk::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND
            } else {
                vk::DescriptorPoolCreateFlags::empty()
            };
        if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET) {
            vk_flags |= vk::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET;
        }
        let vk_info = vk::DescriptorPoolCreateInfo::default()
            .max_sets(max_sets)
            .flags(vk_flags)
            .pool_sizes(&filtered_counts);

        match unsafe { self.raw.create_descriptor_pool(&vk_info, None) } {
            Ok(pool) => Ok(pool),
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTATION) => {
                Err(gpu_descriptor::CreatePoolError::Fragmentation)
            }
            Err(err) => handle_unexpected(err),
        }
    }

    unsafe fn destroy_descriptor_pool(&self, pool: vk::DescriptorPool) {
        unsafe { self.raw.destroy_descriptor_pool(pool, None) }
    }

    unsafe fn alloc_descriptor_sets<'a>(
        &self,
        pool: &mut vk::DescriptorPool,
        layouts: impl ExactSizeIterator<Item = &'a vk::DescriptorSetLayout>,
        sets: &mut impl Extend<vk::DescriptorSet>,
    ) -> Result<(), gpu_descriptor::DeviceAllocationError> {
        let result = unsafe {
            self.raw.allocate_descriptor_sets(
                &vk::DescriptorSetAllocateInfo::default()
                    .descriptor_pool(*pool)
                    .set_layouts(
                        &smallvec::SmallVec::<[vk::DescriptorSetLayout; 32]>::from_iter(
                            layouts.cloned(),
                        ),
                    ),
            )
        };

        match result {
            Ok(vk_sets) => {
                sets.extend(vk_sets);
                Ok(())
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY)
            | Err(vk::Result::ERROR_OUT_OF_POOL_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTED_POOL) => {
                Err(gpu_descriptor::DeviceAllocationError::FragmentedPool)
            }
            Err(err) => handle_unexpected(err),
        }
    }

    unsafe fn dealloc_descriptor_sets<'a>(
        &self,
        pool: &mut vk::DescriptorPool,
        sets: impl Iterator<Item = vk::DescriptorSet>,
    ) {
        let result = unsafe {
            self.raw.free_descriptor_sets(
                *pool,
                &smallvec::SmallVec::<[vk::DescriptorSet; 32]>::from_iter(sets),
            )
        };
        match result {
            Ok(()) => {}
            Err(err) => handle_unexpected(err),
        }
    }
}

struct CompiledStage {
    create_info: vk::PipelineShaderStageCreateInfo<'static>,
    _entry_point: CString,
    temp_raw_module: Option<vk::ShaderModule>,
}

impl super::Device {
    pub(super) unsafe fn create_swapchain(
        &self,
        surface: &super::Surface,
        config: &crate::SurfaceConfiguration,
        provided_old_swapchain: Option<super::Swapchain>,
    ) -> Result<super::Swapchain, crate::SurfaceError> {
        profiling::scope!("Device::create_swapchain");
        let functor = khr::swapchain::Device::new(&surface.instance.raw, &self.shared.raw);

        let old_swapchain = match provided_old_swapchain {
            Some(osc) => osc.raw,
            None => vk::SwapchainKHR::null(),
        };

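        // Rgba16Float output implies wide-gamut/HDR rendering, which wants the
        // extended linear sRGB color space; everything else uses plain sRGB.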
        let color_space = if config.format == wgt::TextureFormat::Rgba16Float {
            vk::ColorSpaceKHR::EXTENDED_SRGB_LINEAR_EXT
        } else {
            vk::ColorSpaceKHR::SRGB_NONLINEAR
        };

        let original_format = self.shared.private_caps.map_texture_format(config.format);
        let mut raw_flags = vk::SwapchainCreateFlagsKHR::empty();
        let mut raw_view_formats: Vec<vk::Format> = vec![];
        let mut wgt_view_formats = vec![];
        if !config.view_formats.is_empty() {
            raw_flags |= vk::SwapchainCreateFlagsKHR::MUTABLE_FORMAT;
            raw_view_formats = config
                .view_formats
                .iter()
                .map(|f| self.shared.private_caps.map_texture_format(*f))
                .collect();
            raw_view_formats.push(original_format);

            wgt_view_formats.clone_from(&config.view_formats);
            wgt_view_formats.push(config.format);
        }

        let mut info = vk::SwapchainCreateInfoKHR::default()
            .flags(raw_flags)
            .surface(surface.raw)
            .min_image_count(config.maximum_frame_latency + 1)
            .image_format(original_format)
            .image_color_space(color_space)
            .image_extent(vk::Extent2D {
                width: config.extent.width,
                height: config.extent.height,
            })
            .image_array_layers(config.extent.depth_or_array_layers)
            .image_usage(conv::map_texture_usage(config.usage))
            .image_sharing_mode(vk::SharingMode::EXCLUSIVE)
            .pre_transform(vk::SurfaceTransformFlagsKHR::IDENTITY)
            .composite_alpha(conv::map_composite_alpha_mode(config.composite_alpha_mode))
            .present_mode(conv::map_present_mode(config.present_mode))
            .clipped(true)
            .old_swapchain(old_swapchain);

        let mut format_list_info = vk::ImageFormatListCreateInfo::default();
        if !raw_view_formats.is_empty() {
            format_list_info = format_list_info.view_formats(&raw_view_formats);
            info = info.push_next(&mut format_list_info);
        }

        let result = {
            profiling::scope!("vkCreateSwapchainKHR");
            unsafe { functor.create_swapchain(&info, None) }
        };
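        // Destroy the retired swapchain before inspecting `result`; it is
        // consumed by the create call whether or not creation succeeded.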
        if old_swapchain != vk::SwapchainKHR::null() {
            unsafe { functor.destroy_swapchain(old_swapchain, None) }
        }

        let raw = match result {
            Ok(swapchain) => swapchain,
            Err(error) => {
                return Err(match error {
                    vk::Result::ERROR_SURFACE_LOST_KHR
                    | vk::Result::ERROR_INITIALIZATION_FAILED => crate::SurfaceError::Lost,
                    vk::Result::ERROR_NATIVE_WINDOW_IN_USE_KHR => {
                        crate::SurfaceError::Other("Native window is in use")
                    }
                    other => super::map_host_device_oom_and_lost_err(other).into(),
                });
            }
        };

        let images =
            unsafe { functor.get_swapchain_images(raw) }.map_err(super::map_host_device_oom_err)?;

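        // One semaphore per swapchain image, plus a spare: acquiring the next
        // image needs an unsignaled semaphore before we know which image (and
        // therefore which semaphore slot) it will return.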
        let surface_semaphores = (0..=images.len())
            .map(|_| {
                super::SwapchainImageSemaphores::new(&self.shared)
                    .map(Mutex::new)
                    .map(Arc::new)
            })
            .collect::<Result<Vec<_>, _>>()?;

        Ok(super::Swapchain {
            raw,
            raw_flags,
            functor,
            device: Arc::clone(&self.shared),
            images,
            config: config.clone(),
            view_formats: wgt_view_formats,
            surface_semaphores,
            next_semaphore_index: 0,
            next_present_time: None,
        })
    }

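    /// # Safety
    ///
    /// - `vk_image` must be a raw image created against `desc`.
    /// - If `drop_callback` is `None`, wgpu-hal takes ownership of `vk_image`;
    ///   otherwise the image must stay valid until the callback runs.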
    pub unsafe fn texture_from_raw(
        vk_image: vk::Image,
        desc: &crate::TextureDescriptor,
        drop_callback: Option<crate::DropCallback>,
    ) -> super::Texture {
        let mut raw_flags = vk::ImageCreateFlags::empty();
        let mut view_formats = vec![];
        for tf in desc.view_formats.iter() {
            if *tf == desc.format {
                continue;
            }
            view_formats.push(*tf);
        }
        if !view_formats.is_empty() {
            raw_flags |=
                vk::ImageCreateFlags::MUTABLE_FORMAT | vk::ImageCreateFlags::EXTENDED_USAGE;
            view_formats.push(desc.format)
        }
        if desc.format.is_multi_planar_format() {
            raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
        }

        let drop_guard = crate::DropGuard::from_option(drop_callback);

        super::Texture {
            raw: vk_image,
            drop_guard,
            external_memory: None,
            block: None,
            usage: desc.usage,
            format: desc.format,
            raw_flags,
            copy_size: desc.copy_extent(),
            view_formats,
        }
    }

    #[cfg(windows)]
    fn find_memory_type_index(
        &self,
        type_bits_req: u32,
        flags_req: vk::MemoryPropertyFlags,
    ) -> Option<usize> {
        let mem_properties = unsafe {
            self.shared
                .instance
                .raw
                .get_physical_device_memory_properties(self.shared.physical_device)
        };

        for (i, mem_ty) in mem_properties.memory_types_as_slice().iter().enumerate() {
            let types_bits = 1 << i;
            let is_required_memory_type = type_bits_req & types_bits != 0;
            let has_required_properties = mem_ty.property_flags & flags_req == flags_req;
            if is_required_memory_type && has_required_properties {
                return Some(i);
            }
        }

        None
    }

    fn create_image_without_memory(
        &self,
        desc: &crate::TextureDescriptor,
        external_memory_image_create_info: Option<&mut vk::ExternalMemoryImageCreateInfo>,
    ) -> Result<ImageWithoutMemory, crate::DeviceError> {
        let copy_size = desc.copy_extent();

        let mut raw_flags = vk::ImageCreateFlags::empty();
        if desc.is_cube_compatible() {
            raw_flags |= vk::ImageCreateFlags::CUBE_COMPATIBLE;
        }

        let original_format = self.shared.private_caps.map_texture_format(desc.format);
        let mut vk_view_formats = vec![];
        let mut wgt_view_formats = vec![];
        if !desc.view_formats.is_empty() {
            raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
            wgt_view_formats.clone_from(&desc.view_formats);
            wgt_view_formats.push(desc.format);

            if self.shared.private_caps.image_format_list {
                vk_view_formats = desc
                    .view_formats
                    .iter()
                    .map(|f| self.shared.private_caps.map_texture_format(*f))
                    .collect();
                vk_view_formats.push(original_format)
            }
        }
        if desc.format.is_multi_planar_format() {
            raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
        }

        let mut vk_info = vk::ImageCreateInfo::default()
            .flags(raw_flags)
            .image_type(conv::map_texture_dimension(desc.dimension))
            .format(original_format)
            .extent(conv::map_copy_extent(&copy_size))
            .mip_levels(desc.mip_level_count)
            .array_layers(desc.array_layer_count())
            .samples(vk::SampleCountFlags::from_raw(desc.sample_count))
            .tiling(vk::ImageTiling::OPTIMAL)
            .usage(conv::map_texture_usage(desc.usage))
            .sharing_mode(vk::SharingMode::EXCLUSIVE)
            .initial_layout(vk::ImageLayout::UNDEFINED);

        let mut format_list_info = vk::ImageFormatListCreateInfo::default();
        if !vk_view_formats.is_empty() {
            format_list_info = format_list_info.view_formats(&vk_view_formats);
            vk_info = vk_info.push_next(&mut format_list_info);
        }

        if let Some(ext_info) = external_memory_image_create_info {
            vk_info = vk_info.push_next(ext_info);
        }

        let raw = unsafe { self.shared.raw.create_image(&vk_info, None) }.map_err(map_err)?;
        fn map_err(err: vk::Result) -> crate::DeviceError {
            super::map_host_device_oom_and_ioca_err(err)
        }
        let req = unsafe { self.shared.raw.get_image_memory_requirements(raw) };

        Ok(ImageWithoutMemory {
            raw,
            requirements: req,
            copy_size,
            view_formats: wgt_view_formats,
            raw_flags,
        })
    }

    #[cfg(windows)]
    pub unsafe fn texture_from_d3d11_shared_handle(
        &self,
        d3d11_shared_handle: windows::Win32::Foundation::HANDLE,
        desc: &crate::TextureDescriptor,
    ) -> Result<super::Texture, crate::DeviceError> {
        if !self
            .shared
            .features
            .contains(wgt::Features::VULKAN_EXTERNAL_MEMORY_WIN32)
        {
            log::error!("Vulkan driver does not support VK_KHR_external_memory_win32");
            return Err(crate::DeviceError::ResourceCreationFailed);
        }

        let mut external_memory_image_info = vk::ExternalMemoryImageCreateInfo::default()
            .handle_types(vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE);

        let image =
            self.create_image_without_memory(desc, Some(&mut external_memory_image_info))?;

        let mut import_memory_info = vk::ImportMemoryWin32HandleInfoKHR::default()
            .handle_type(vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE)
            .handle(d3d11_shared_handle.0 as _);

        let mem_type_index = self
            .find_memory_type_index(
                image.requirements.memory_type_bits,
                vk::MemoryPropertyFlags::DEVICE_LOCAL,
            )
            .ok_or(crate::DeviceError::ResourceCreationFailed)?;

        let memory_allocate_info = vk::MemoryAllocateInfo::default()
            .allocation_size(image.requirements.size)
            .memory_type_index(mem_type_index as _)
            .push_next(&mut import_memory_info);
        let memory = unsafe { self.shared.raw.allocate_memory(&memory_allocate_info, None) }
            .map_err(super::map_host_device_oom_err)?;

        unsafe { self.shared.raw.bind_image_memory(image.raw, memory, 0) }
            .map_err(super::map_host_device_oom_err)?;

        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(image.raw, label) };
        }

        self.counters.textures.add(1);

        Ok(super::Texture {
            raw: image.raw,
            drop_guard: None,
            external_memory: Some(memory),
            block: None,
            usage: desc.usage,
            format: desc.format,
            raw_flags: image.raw_flags,
            copy_size: image.copy_size,
            view_formats: image.view_formats,
        })
    }

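    /// # Safety
    ///
    /// - `vk_buffer` must be a valid raw buffer; its backing memory remains
    ///   the caller's responsibility, since the returned `Buffer` carries no
    ///   memory block.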
    pub unsafe fn buffer_from_raw(vk_buffer: vk::Buffer) -> super::Buffer {
        super::Buffer {
            raw: vk_buffer,
            block: None,
        }
    }

    fn create_shader_module_impl(
        &self,
        spv: &[u32],
    ) -> Result<vk::ShaderModule, crate::DeviceError> {
        let vk_info = vk::ShaderModuleCreateInfo::default()
            .flags(vk::ShaderModuleCreateFlags::empty())
            .code(spv);

        let raw = unsafe {
            profiling::scope!("vkCreateShaderModule");
            self.shared
                .raw
                .create_shader_module(&vk_info, None)
                .map_err(map_err)?
        };
        fn map_err(err: vk::Result) -> crate::DeviceError {
            super::map_host_device_oom_err(err)
        }
        Ok(raw)
    }

    fn compile_stage(
        &self,
        stage: &crate::ProgrammableStage<super::ShaderModule>,
        naga_stage: naga::ShaderStage,
        binding_map: &naga::back::spv::BindingMap,
    ) -> Result<CompiledStage, crate::PipelineError> {
        let stage_flags = crate::auxil::map_naga_stage(naga_stage);
        let vk_module = match *stage.module {
            super::ShaderModule::Raw(raw) => raw,
            super::ShaderModule::Intermediate {
                ref naga_shader,
                runtime_checks,
            } => {
                let pipeline_options = naga::back::spv::PipelineOptions {
                    entry_point: stage.entry_point.to_string(),
                    shader_stage: naga_stage,
                };
                let needs_temp_options = !runtime_checks.bounds_checks
                    || !binding_map.is_empty()
                    || naga_shader.debug_source.is_some()
                    || !stage.zero_initialize_workgroup_memory;
                let mut temp_options;
                let options = if needs_temp_options {
                    temp_options = self.naga_options.clone();
                    if !runtime_checks.bounds_checks {
                        temp_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
                            index: naga::proc::BoundsCheckPolicy::Unchecked,
                            buffer: naga::proc::BoundsCheckPolicy::Unchecked,
                            image_load: naga::proc::BoundsCheckPolicy::Unchecked,
                            binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
                        };
                    }
                    if !binding_map.is_empty() {
                        temp_options.binding_map = binding_map.clone();
                    }

                    if let Some(ref debug) = naga_shader.debug_source {
                        temp_options.debug_info = Some(naga::back::spv::DebugInfo {
                            source_code: &debug.source_code,
                            file_name: debug.file_name.as_ref().as_ref(),
                            language: naga::back::spv::SourceLanguage::WGSL,
                        })
                    }
                    if !stage.zero_initialize_workgroup_memory {
                        temp_options.zero_initialize_workgroup_memory =
                            naga::back::spv::ZeroInitializeWorkgroupMemoryMode::None;
                    }

                    &temp_options
                } else {
                    &self.naga_options
                };

                let (module, info) = naga::back::pipeline_constants::process_overrides(
                    &naga_shader.module,
                    &naga_shader.info,
                    stage.constants,
                )
                .map_err(|e| {
                    crate::PipelineError::PipelineConstants(stage_flags, format!("{e}"))
                })?;

                let spv = {
                    profiling::scope!("naga::spv::write_vec");
                    naga::back::spv::write_vec(&module, &info, options, Some(&pipeline_options))
                }
                .map_err(|e| crate::PipelineError::Linkage(stage_flags, format!("{e}")))?;
                self.create_shader_module_impl(&spv)?
            }
        };

        let mut flags = vk::PipelineShaderStageCreateFlags::empty();
        if self.shared.features.contains(wgt::Features::SUBGROUP) {
            flags |= vk::PipelineShaderStageCreateFlags::ALLOW_VARYING_SUBGROUP_SIZE
        }

        let entry_point = CString::new(stage.entry_point).unwrap();
        let mut create_info = vk::PipelineShaderStageCreateInfo::default()
            .flags(flags)
            .stage(conv::map_shader_stage(stage_flags))
            .module(vk_module);

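        // Set the entry-point name through the raw pointer field; the
        // `.name()` builder would borrow `entry_point` and prevent returning
        // the `'static` create info alongside it.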
        create_info.p_name = entry_point.as_ptr();

        Ok(CompiledStage {
            create_info,
            _entry_point: entry_point,
            temp_raw_module: match *stage.module {
                super::ShaderModule::Raw(_) => None,
                super::ShaderModule::Intermediate { .. } => Some(vk_module),
            },
        })
    }

    pub fn queue_family_index(&self) -> u32 {
        self.shared.family_index
    }

    pub fn queue_index(&self) -> u32 {
        self.shared.queue_index
    }

    pub fn raw_device(&self) -> &ash::Device {
        &self.shared.raw
    }

    pub fn raw_physical_device(&self) -> vk::PhysicalDevice {
        self.shared.physical_device
    }

    pub fn raw_queue(&self) -> vk::Queue {
        self.shared.raw_queue
    }

    pub fn enabled_device_extensions(&self) -> &[&'static CStr] {
        &self.shared.enabled_extensions
    }

    pub fn shared_instance(&self) -> &super::InstanceShared {
        &self.shared.instance
    }
}

impl crate::Device for super::Device {
    type A = super::Api;

    unsafe fn create_buffer(
        &self,
        desc: &crate::BufferDescriptor,
    ) -> Result<super::Buffer, crate::DeviceError> {
        let vk_info = vk::BufferCreateInfo::default()
            .size(desc.size)
            .usage(conv::map_buffer_usage(desc.usage))
            .sharing_mode(vk::SharingMode::EXCLUSIVE);

        let raw = unsafe {
            self.shared
                .raw
                .create_buffer(&vk_info, None)
                .map_err(super::map_host_device_oom_and_ioca_err)?
        };
        let req = unsafe { self.shared.raw.get_buffer_memory_requirements(raw) };

        let mut alloc_usage = if desc
            .usage
            .intersects(crate::BufferUses::MAP_READ | crate::BufferUses::MAP_WRITE)
        {
            let mut flags = gpu_alloc::UsageFlags::HOST_ACCESS;
            flags.set(
                gpu_alloc::UsageFlags::DOWNLOAD,
                desc.usage.contains(crate::BufferUses::MAP_READ),
            );
            flags.set(
                gpu_alloc::UsageFlags::UPLOAD,
                desc.usage.contains(crate::BufferUses::MAP_WRITE),
            );
            flags
        } else {
            gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS
        };
        alloc_usage.set(
            gpu_alloc::UsageFlags::TRANSIENT,
            desc.memory_flags.contains(crate::MemoryFlags::TRANSIENT),
        );

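        // `req.alignment` is a power of two per the Vulkan spec, so it can be
        // turned directly into the allocator's alignment mask.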
        let alignment_mask = req.alignment - 1;

        let block = unsafe {
            self.mem_allocator.lock().alloc(
                &*self.shared,
                gpu_alloc::Request {
                    size: req.size,
                    align_mask: alignment_mask,
                    usage: alloc_usage,
                    memory_types: req.memory_type_bits & self.valid_ash_memory_types,
                },
            )?
        };

        unsafe {
            self.shared
                .raw
                .bind_buffer_memory(raw, *block.memory(), block.offset())
                .map_err(super::map_host_device_oom_and_ioca_err)?
        };

        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(raw, label) };
        }

        self.counters.buffer_memory.add(block.size() as isize);
        self.counters.buffers.add(1);

        Ok(super::Buffer {
            raw,
            block: Some(Mutex::new(block)),
        })
    }
    unsafe fn destroy_buffer(&self, buffer: super::Buffer) {
        unsafe { self.shared.raw.destroy_buffer(buffer.raw, None) };
        if let Some(block) = buffer.block {
            let block = block.into_inner();
            self.counters.buffer_memory.sub(block.size() as isize);
            unsafe { self.mem_allocator.lock().dealloc(&*self.shared, block) };
        }

        self.counters.buffers.sub(1);
    }

    unsafe fn add_raw_buffer(&self, _buffer: &super::Buffer) {
        self.counters.buffers.add(1);
    }

    unsafe fn map_buffer(
        &self,
        buffer: &super::Buffer,
        range: crate::MemoryRange,
    ) -> Result<crate::BufferMapping, crate::DeviceError> {
        if let Some(ref block) = buffer.block {
            let size = range.end - range.start;
            let mut block = block.lock();
            let ptr = unsafe { block.map(&*self.shared, range.start, size as usize)? };
            let is_coherent = block
                .props()
                .contains(gpu_alloc::MemoryPropertyFlags::HOST_COHERENT);
            Ok(crate::BufferMapping { ptr, is_coherent })
        } else {
            crate::hal_usage_error("tried to map external buffer")
        }
    }
    unsafe fn unmap_buffer(&self, buffer: &super::Buffer) {
        if let Some(ref block) = buffer.block {
            unsafe { block.lock().unmap(&*self.shared) };
        } else {
            crate::hal_usage_error("tried to unmap external buffer")
        }
    }

    unsafe fn flush_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
    where
        I: Iterator<Item = crate::MemoryRange>,
    {
        if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
            unsafe {
                self.shared
                    .raw
                    .flush_mapped_memory_ranges(
                        &smallvec::SmallVec::<[vk::MappedMemoryRange; 32]>::from_iter(vk_ranges),
                    )
            }
            .unwrap();
        }
    }
    unsafe fn invalidate_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
    where
        I: Iterator<Item = crate::MemoryRange>,
    {
        if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
            unsafe {
                self.shared
                    .raw
                    .invalidate_mapped_memory_ranges(&smallvec::SmallVec::<
                        [vk::MappedMemoryRange; 32],
                    >::from_iter(vk_ranges))
            }
            .unwrap();
        }
    }

    unsafe fn create_texture(
        &self,
        desc: &crate::TextureDescriptor,
    ) -> Result<super::Texture, crate::DeviceError> {
        let image = self.create_image_without_memory(desc, None)?;

        let block = unsafe {
            self.mem_allocator.lock().alloc(
                &*self.shared,
                gpu_alloc::Request {
                    size: image.requirements.size,
                    align_mask: image.requirements.alignment - 1,
                    usage: gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS,
                    memory_types: image.requirements.memory_type_bits & self.valid_ash_memory_types,
                },
            )?
        };

        self.counters.texture_memory.add(block.size() as isize);

        unsafe {
            self.shared
                .raw
                .bind_image_memory(image.raw, *block.memory(), block.offset())
                .map_err(super::map_host_device_oom_err)?
        };

        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(image.raw, label) };
        }

        self.counters.textures.add(1);

        Ok(super::Texture {
            raw: image.raw,
            drop_guard: None,
            external_memory: None,
            block: Some(block),
            usage: desc.usage,
            format: desc.format,
            raw_flags: image.raw_flags,
            copy_size: image.copy_size,
            view_formats: image.view_formats,
        })
    }
    unsafe fn destroy_texture(&self, texture: super::Texture) {
        if texture.drop_guard.is_none() {
            unsafe { self.shared.raw.destroy_image(texture.raw, None) };
        }
        if let Some(memory) = texture.external_memory {
            unsafe { self.shared.raw.free_memory(memory, None) };
        }
        if let Some(block) = texture.block {
            self.counters.texture_memory.sub(block.size() as isize);

            unsafe { self.mem_allocator.lock().dealloc(&*self.shared, block) };
        }

        self.counters.textures.sub(1);
    }

    unsafe fn add_raw_texture(&self, _texture: &super::Texture) {
        self.counters.textures.add(1);
    }

    unsafe fn create_texture_view(
        &self,
        texture: &super::Texture,
        desc: &crate::TextureViewDescriptor,
    ) -> Result<super::TextureView, crate::DeviceError> {
        let subresource_range = conv::map_subresource_range(&desc.range, texture.format);
        let mut vk_info = vk::ImageViewCreateInfo::default()
            .flags(vk::ImageViewCreateFlags::empty())
            .image(texture.raw)
            .view_type(conv::map_view_dimension(desc.dimension))
            .format(self.shared.private_caps.map_texture_format(desc.format))
            .subresource_range(subresource_range);
        let layers =
            NonZeroU32::new(subresource_range.layer_count).expect("Unexpected zero layer count");

        let mut image_view_info;
        let view_usage = if self.shared.private_caps.image_view_usage && !desc.usage.is_empty() {
            image_view_info =
                vk::ImageViewUsageCreateInfo::default().usage(conv::map_texture_usage(desc.usage));
            vk_info = vk_info.push_next(&mut image_view_info);
            desc.usage
        } else {
            texture.usage
        };

        let raw = unsafe { self.shared.raw.create_image_view(&vk_info, None) }
            .map_err(super::map_host_device_oom_and_ioca_err)?;

        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(raw, label) };
        }

        let attachment = super::FramebufferAttachment {
            raw: if self.shared.private_caps.imageless_framebuffers {
                vk::ImageView::null()
            } else {
                raw
            },
            raw_image_flags: texture.raw_flags,
            view_usage,
            view_format: desc.format,
            raw_view_formats: texture
                .view_formats
                .iter()
                .map(|tf| self.shared.private_caps.map_texture_format(*tf))
                .collect(),
        };

        self.counters.texture_views.add(1);

        Ok(super::TextureView {
            raw,
            layers,
            attachment,
        })
    }
    unsafe fn destroy_texture_view(&self, view: super::TextureView) {
        if !self.shared.private_caps.imageless_framebuffers {
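            // Classic framebuffers reference raw image views, so every cached
            // framebuffer using this view must be destroyed and evicted.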
            let mut fbuf_lock = self.shared.framebuffers.lock();
            for (key, &raw_fbuf) in fbuf_lock.iter() {
                if key.attachments.iter().any(|at| at.raw == view.raw) {
                    unsafe { self.shared.raw.destroy_framebuffer(raw_fbuf, None) };
                }
            }
            fbuf_lock.retain(|key, _| !key.attachments.iter().any(|at| at.raw == view.raw));
        }
        unsafe { self.shared.raw.destroy_image_view(view.raw, None) };

        self.counters.texture_views.sub(1);
    }

    unsafe fn create_sampler(
        &self,
        desc: &crate::SamplerDescriptor,
    ) -> Result<super::Sampler, crate::DeviceError> {
        let mut create_info = vk::SamplerCreateInfo::default()
            .flags(vk::SamplerCreateFlags::empty())
            .mag_filter(conv::map_filter_mode(desc.mag_filter))
            .min_filter(conv::map_filter_mode(desc.min_filter))
            .mipmap_mode(conv::map_mip_filter_mode(desc.mipmap_filter))
            .address_mode_u(conv::map_address_mode(desc.address_modes[0]))
            .address_mode_v(conv::map_address_mode(desc.address_modes[1]))
            .address_mode_w(conv::map_address_mode(desc.address_modes[2]))
            .min_lod(desc.lod_clamp.start)
            .max_lod(desc.lod_clamp.end);

        if let Some(fun) = desc.compare {
            create_info = create_info
                .compare_enable(true)
                .compare_op(conv::map_comparison(fun));
        }

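        // An `anisotropy_clamp` of 1 means anisotropic filtering is disabled.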
        if desc.anisotropy_clamp != 1 {
            create_info = create_info
                .anisotropy_enable(true)
                .max_anisotropy(desc.anisotropy_clamp as f32);
        }

        if let Some(color) = desc.border_color {
            create_info = create_info.border_color(conv::map_border_color(color));
        }

        let raw = self
            .shared
            .sampler_cache
            .lock()
            .create_sampler(&self.shared.raw, create_info)?;

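        // Samplers are deduplicated through `sampler_cache`, so a cached
        // sampler simply takes on the label of the most recent request.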
        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(raw, label) };
        }

        self.counters.samplers.add(1);

        Ok(super::Sampler { raw, create_info })
    }
    unsafe fn destroy_sampler(&self, sampler: super::Sampler) {
        self.shared.sampler_cache.lock().destroy_sampler(
            &self.shared.raw,
            sampler.create_info,
            sampler.raw,
        );

        self.counters.samplers.sub(1);
    }

    unsafe fn create_command_encoder(
        &self,
        desc: &crate::CommandEncoderDescriptor<super::Queue>,
    ) -> Result<super::CommandEncoder, crate::DeviceError> {
        let vk_info = vk::CommandPoolCreateInfo::default()
            .queue_family_index(desc.queue.family_index)
            .flags(vk::CommandPoolCreateFlags::TRANSIENT);

        let raw = unsafe {
            self.shared
                .raw
                .create_command_pool(&vk_info, None)
                .map_err(super::map_host_device_oom_err)?
        };

        self.counters.command_encoders.add(1);

        Ok(super::CommandEncoder {
            raw,
            device: Arc::clone(&self.shared),
            active: vk::CommandBuffer::null(),
            bind_point: vk::PipelineBindPoint::default(),
            temp: super::Temp::default(),
            free: Vec::new(),
            discarded: Vec::new(),
            rpass_debug_marker_active: false,
            end_of_pass_timer_query: None,
            counters: Arc::clone(&self.counters),
        })
    }

    unsafe fn create_bind_group_layout(
        &self,
        desc: &crate::BindGroupLayoutDescriptor,
    ) -> Result<super::BindGroupLayout, crate::DeviceError> {
        let mut desc_count = gpu_descriptor::DescriptorTotalCount::default();
        let mut types = Vec::new();
        for entry in desc.entries {
            let count = entry.count.map_or(1, |c| c.get());
            if entry.binding as usize >= types.len() {
                types.resize(
                    entry.binding as usize + 1,
                    (vk::DescriptorType::INPUT_ATTACHMENT, 0),
                );
            }
            types[entry.binding as usize] = (
                conv::map_binding_type(entry.ty),
                entry.count.map_or(1, |c| c.get()),
            );

            match entry.ty {
                wgt::BindingType::Buffer {
                    ty,
                    has_dynamic_offset,
                    ..
                } => match ty {
                    wgt::BufferBindingType::Uniform => {
                        if has_dynamic_offset {
                            desc_count.uniform_buffer_dynamic += count;
                        } else {
                            desc_count.uniform_buffer += count;
                        }
                    }
                    wgt::BufferBindingType::Storage { .. } => {
                        if has_dynamic_offset {
                            desc_count.storage_buffer_dynamic += count;
                        } else {
                            desc_count.storage_buffer += count;
                        }
                    }
                },
                wgt::BindingType::Sampler { .. } => {
                    desc_count.sampler += count;
                }
                wgt::BindingType::Texture { .. } => {
                    desc_count.sampled_image += count;
                }
                wgt::BindingType::StorageTexture { .. } => {
                    desc_count.storage_image += count;
                }
                wgt::BindingType::AccelerationStructure => {
                    desc_count.acceleration_structure += count;
                }
            }
        }

        let vk_bindings = desc
            .entries
            .iter()
            .map(|entry| vk::DescriptorSetLayoutBinding {
                binding: entry.binding,
                descriptor_type: types[entry.binding as usize].0,
                descriptor_count: types[entry.binding as usize].1,
                stage_flags: conv::map_shader_stage(entry.visibility),
                p_immutable_samplers: ptr::null(),
                _marker: Default::default(),
            })
            .collect::<Vec<_>>();

        let vk_info = vk::DescriptorSetLayoutCreateInfo::default().bindings(&vk_bindings);

        let binding_arrays = desc
            .entries
            .iter()
            .enumerate()
            .filter_map(|(idx, entry)| entry.count.map(|count| (idx as u32, count)))
            .collect();

        let mut binding_flag_info;
        let binding_flag_vec;

        let partially_bound = desc
            .flags
            .contains(crate::BindGroupLayoutFlags::PARTIALLY_BOUND);

        let vk_info = if partially_bound {
            binding_flag_vec = desc
                .entries
                .iter()
                .map(|entry| {
                    let mut flags = vk::DescriptorBindingFlags::empty();

                    if partially_bound && entry.count.is_some() {
                        flags |= vk::DescriptorBindingFlags::PARTIALLY_BOUND;
                    }

                    flags
                })
                .collect::<Vec<_>>();

            binding_flag_info = vk::DescriptorSetLayoutBindingFlagsCreateInfo::default()
                .binding_flags(&binding_flag_vec);

            vk_info.push_next(&mut binding_flag_info)
        } else {
            vk_info
        };

        let raw = unsafe {
            self.shared
                .raw
                .create_descriptor_set_layout(&vk_info, None)
                .map_err(super::map_host_device_oom_err)?
        };

        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(raw, label) };
        }

        self.counters.bind_group_layouts.add(1);

        Ok(super::BindGroupLayout {
            raw,
            desc_count,
            types: types.into_boxed_slice(),
            binding_arrays,
        })
    }
    unsafe fn destroy_bind_group_layout(&self, bg_layout: super::BindGroupLayout) {
        unsafe {
            self.shared
                .raw
                .destroy_descriptor_set_layout(bg_layout.raw, None)
        };

        self.counters.bind_group_layouts.sub(1);
    }

    unsafe fn create_pipeline_layout(
        &self,
        desc: &crate::PipelineLayoutDescriptor<super::BindGroupLayout>,
    ) -> Result<super::PipelineLayout, crate::DeviceError> {
        let vk_set_layouts = desc
            .bind_group_layouts
            .iter()
            .map(|bgl| bgl.raw)
            .collect::<Vec<_>>();
        let vk_push_constant_ranges = desc
            .push_constant_ranges
            .iter()
            .map(|pcr| vk::PushConstantRange {
                stage_flags: conv::map_shader_stage(pcr.stages),
                offset: pcr.range.start,
                size: pcr.range.end - pcr.range.start,
            })
            .collect::<Vec<_>>();

        let vk_info = vk::PipelineLayoutCreateInfo::default()
            .flags(vk::PipelineLayoutCreateFlags::empty())
            .set_layouts(&vk_set_layouts)
            .push_constant_ranges(&vk_push_constant_ranges);

        let raw = {
            profiling::scope!("vkCreatePipelineLayout");
            unsafe {
                self.shared
                    .raw
                    .create_pipeline_layout(&vk_info, None)
                    .map_err(super::map_host_device_oom_err)?
            }
        };

        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(raw, label) };
        }

        let mut binding_arrays = BTreeMap::new();
        for (group, &layout) in desc.bind_group_layouts.iter().enumerate() {
            for &(binding, binding_array_size) in &layout.binding_arrays {
                binding_arrays.insert(
                    naga::ResourceBinding {
                        group: group as u32,
                        binding,
                    },
                    naga::back::spv::BindingInfo {
                        binding_array_size: Some(binding_array_size.get()),
                    },
                );
            }
        }

        self.counters.pipeline_layouts.add(1);

        Ok(super::PipelineLayout {
            raw,
            binding_arrays,
        })
    }
    unsafe fn destroy_pipeline_layout(&self, pipeline_layout: super::PipelineLayout) {
        unsafe {
            self.shared
                .raw
                .destroy_pipeline_layout(pipeline_layout.raw, None)
        };

        self.counters.pipeline_layouts.sub(1);
    }

    unsafe fn create_bind_group(
        &self,
        desc: &crate::BindGroupDescriptor<
            super::BindGroupLayout,
            super::Buffer,
            super::Sampler,
            super::TextureView,
            super::AccelerationStructure,
        >,
    ) -> Result<super::BindGroup, crate::DeviceError> {
        let mut vk_sets = unsafe {
            self.desc_allocator.lock().allocate(
                &*self.shared,
                &desc.layout.raw,
                gpu_descriptor::DescriptorSetLayoutCreateFlags::empty(),
                &desc.layout.desc_count,
                1,
            )?
        };

        let set = vk_sets.pop().unwrap();
        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(*set.raw(), label) };
        }

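        /// Helper for writing into the spare capacity of a `Vec` without
        /// reallocating it, so the slices handed out for earlier writes stay
        /// valid while the descriptor writes below are assembled. The `Vec`s
        /// are pre-sized, so the remainder never runs dry.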
        struct ExtendStack<'a, T> {
            remainder: &'a mut [MaybeUninit<T>],
        }

        impl<'a, T> ExtendStack<'a, T> {
            fn from_vec_capacity(vec: &'a mut Vec<T>) -> Self {
                Self {
                    remainder: vec.spare_capacity_mut(),
                }
            }

            fn extend_one(self, value: T) -> (Self, &'a mut T) {
                let (to_init, remainder) = self.remainder.split_first_mut().unwrap();
                let init = to_init.write(value);
                (Self { remainder }, init)
            }

            fn extend(
                self,
                iter: impl IntoIterator<Item = T> + ExactSizeIterator,
            ) -> (Self, &'a mut [T]) {
                let (to_init, remainder) = self.remainder.split_at_mut(iter.len());

                for (value, to_init) in iter.into_iter().zip(to_init.iter_mut()) {
                    to_init.write(value);
                }

                let init = {
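                    // SAFETY: the loop above initialized every element of
                    // `to_init`, and `MaybeUninit<T>` is layout-compatible
                    // with `T`.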
                    unsafe { mem::transmute::<&mut [MaybeUninit<T>], &mut [T]>(to_init) }
                };
                (Self { remainder }, init)
            }
        }

        let mut writes = Vec::with_capacity(desc.entries.len());
        let mut buffer_infos = Vec::with_capacity(desc.buffers.len());
        let mut buffer_infos = ExtendStack::from_vec_capacity(&mut buffer_infos);
        let mut image_infos = Vec::with_capacity(desc.samplers.len() + desc.textures.len());
        let mut image_infos = ExtendStack::from_vec_capacity(&mut image_infos);
        let mut acceleration_structure_infos =
            Vec::with_capacity(desc.acceleration_structures.len());
        let mut acceleration_structure_infos =
            ExtendStack::from_vec_capacity(&mut acceleration_structure_infos);
        let mut raw_acceleration_structures =
            Vec::with_capacity(desc.acceleration_structures.len());
        let mut raw_acceleration_structures =
            ExtendStack::from_vec_capacity(&mut raw_acceleration_structures);
        for entry in desc.entries {
            let (ty, size) = desc.layout.types[entry.binding as usize];
            if size == 0 {
                continue;
            }
1698 let mut write = vk::WriteDescriptorSet::default()
1699 .dst_set(*set.raw())
1700 .dst_binding(entry.binding)
1701 .descriptor_type(ty);
1702
1703 write = match ty {
1704 vk::DescriptorType::SAMPLER => {
1705 let start = entry.resource_index;
1706 let end = start + entry.count;
1707 let local_image_infos;
1708 (image_infos, local_image_infos) =
1709 image_infos.extend(desc.samplers[start as usize..end as usize].iter().map(
1710 |sampler| vk::DescriptorImageInfo::default().sampler(sampler.raw),
1711 ));
1712 write.image_info(local_image_infos)
1713 }
1714 vk::DescriptorType::SAMPLED_IMAGE | vk::DescriptorType::STORAGE_IMAGE => {
1715 let start = entry.resource_index;
1716 let end = start + entry.count;
1717 let local_image_infos;
1718 (image_infos, local_image_infos) =
1719 image_infos.extend(desc.textures[start as usize..end as usize].iter().map(
1720 |binding| {
1721 let layout = conv::derive_image_layout(
1722 binding.usage,
1723 binding.view.attachment.view_format,
1724 );
1725 vk::DescriptorImageInfo::default()
1726 .image_view(binding.view.raw)
1727 .image_layout(layout)
1728 },
1729 ));
1730 write.image_info(local_image_infos)
1731 }
1732 vk::DescriptorType::UNIFORM_BUFFER
1733 | vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC
1734 | vk::DescriptorType::STORAGE_BUFFER
1735 | vk::DescriptorType::STORAGE_BUFFER_DYNAMIC => {
1736 let start = entry.resource_index;
1737 let end = start + entry.count;
1738 let local_buffer_infos;
1739 (buffer_infos, local_buffer_infos) =
1740 buffer_infos.extend(desc.buffers[start as usize..end as usize].iter().map(
1741 |binding| {
1742 vk::DescriptorBufferInfo::default()
1743 .buffer(binding.buffer.raw)
1744 .offset(binding.offset)
1745 .range(
1746 binding.size.map_or(vk::WHOLE_SIZE, wgt::BufferSize::get),
1747 )
1748 },
1749 ));
1750 write.buffer_info(local_buffer_infos)
1751 }
1752 vk::DescriptorType::ACCELERATION_STRUCTURE_KHR => {
1753 let start = entry.resource_index;
1754 let end = start + entry.count;
1755
1756 let local_raw_acceleration_structures;
1757 (
1758 raw_acceleration_structures,
1759 local_raw_acceleration_structures,
1760 ) = raw_acceleration_structures.extend(
1761 desc.acceleration_structures[start as usize..end as usize]
1762 .iter()
1763 .map(|acceleration_structure| acceleration_structure.raw),
1764 );
1765
1766 let local_acceleration_structure_infos;
1767 (
1768 acceleration_structure_infos,
1769 local_acceleration_structure_infos,
1770 ) = acceleration_structure_infos.extend_one(
1771 vk::WriteDescriptorSetAccelerationStructureKHR::default()
1772 .acceleration_structures(local_raw_acceleration_structures),
1773 );
1774
1775 write
1776 .descriptor_count(entry.count)
1777 .push_next(local_acceleration_structure_infos)
1778 }
1779 _ => unreachable!(),
1780 };
1781
1782 writes.push(write);
1783 }
1784
1785 unsafe { self.shared.raw.update_descriptor_sets(&writes, &[]) };
1786
1787 self.counters.bind_groups.add(1);
1788
1789 Ok(super::BindGroup { set })
1790 }
1791
1792 unsafe fn destroy_bind_group(&self, group: super::BindGroup) {
1793 unsafe {
1794 self.desc_allocator
1795 .lock()
1796 .free(&*self.shared, Some(group.set))
1797 };
1798
1799 self.counters.bind_groups.sub(1);
1800 }
1801
1802 unsafe fn create_shader_module(
1803 &self,
1804 desc: &crate::ShaderModuleDescriptor,
1805 shader: crate::ShaderInput,
1806 ) -> Result<super::ShaderModule, crate::ShaderError> {
1807 let spv = match shader {
1808 crate::ShaderInput::Naga(naga_shader) => {
1809 if self
1810 .shared
1811 .workarounds
1812 .contains(super::Workarounds::SEPARATE_ENTRY_POINTS)
1813 || !naga_shader.module.overrides.is_empty()
1814 {
1815 return Ok(super::ShaderModule::Intermediate {
1816 naga_shader,
1817 runtime_checks: desc.runtime_checks,
1818 });
1819 }
1820 let mut naga_options = self.naga_options.clone();
1821 naga_options.debug_info =
1822 naga_shader
1823 .debug_source
1824 .as_ref()
1825 .map(|d| naga::back::spv::DebugInfo {
1826 source_code: d.source_code.as_ref(),
1827 file_name: d.file_name.as_ref().as_ref(),
1828 language: naga::back::spv::SourceLanguage::WGSL,
1829 });
1830 if !desc.runtime_checks.bounds_checks {
1831 naga_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
1832 index: naga::proc::BoundsCheckPolicy::Unchecked,
1833 buffer: naga::proc::BoundsCheckPolicy::Unchecked,
1834 image_load: naga::proc::BoundsCheckPolicy::Unchecked,
1835 binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
1836 };
1837 }
1838 Cow::Owned(
1839 naga::back::spv::write_vec(
1840 &naga_shader.module,
1841 &naga_shader.info,
1842 &naga_options,
1843 None,
1844 )
1845 .map_err(|e| crate::ShaderError::Compilation(format!("{e}")))?,
1846 )
1847 }
1848 crate::ShaderInput::SpirV(spv) => Cow::Borrowed(spv),
1849 };
1850
1851 let raw = self.create_shader_module_impl(&spv)?;
1852
1853 if let Some(label) = desc.label {
1854 unsafe { self.shared.set_object_name(raw, label) };
1855 }
1856
1857 self.counters.shader_modules.add(1);
1858
1859 Ok(super::ShaderModule::Raw(raw))
1860 }
1861
1862 unsafe fn destroy_shader_module(&self, module: super::ShaderModule) {
1863 match module {
1864 super::ShaderModule::Raw(raw) => {
1865 unsafe { self.shared.raw.destroy_shader_module(raw, None) };
1866 }
1867 super::ShaderModule::Intermediate { .. } => {}
1868 }
1869
1870 self.counters.shader_modules.sub(1);
1871 }
1872
1873 unsafe fn create_render_pipeline(
1874 &self,
1875 desc: &crate::RenderPipelineDescriptor<
1876 super::PipelineLayout,
1877 super::ShaderModule,
1878 super::PipelineCache,
1879 >,
1880 ) -> Result<super::RenderPipeline, crate::PipelineError> {
1881 let dynamic_states = [
1882 vk::DynamicState::VIEWPORT,
1883 vk::DynamicState::SCISSOR,
1884 vk::DynamicState::BLEND_CONSTANTS,
1885 vk::DynamicState::STENCIL_REFERENCE,
1886 ];
        let mut compatible_rp_key = super::RenderPassKey {
            sample_count: desc.multisample.count,
            multiview: desc.multiview,
            ..Default::default()
        };
        let mut stages = ArrayVec::<_, { crate::MAX_CONCURRENT_SHADER_STAGES }>::new();
        let mut vertex_buffers = Vec::with_capacity(desc.vertex_buffers.len());
        let mut vertex_attributes = Vec::new();

        for (i, vb) in desc.vertex_buffers.iter().enumerate() {
            vertex_buffers.push(vk::VertexInputBindingDescription {
                binding: i as u32,
                stride: vb.array_stride as u32,
                input_rate: match vb.step_mode {
                    wgt::VertexStepMode::Vertex => vk::VertexInputRate::VERTEX,
                    wgt::VertexStepMode::Instance => vk::VertexInputRate::INSTANCE,
                },
            });
            for at in vb.attributes {
                vertex_attributes.push(vk::VertexInputAttributeDescription {
                    location: at.shader_location,
                    binding: i as u32,
                    format: conv::map_vertex_format(at.format),
                    offset: at.offset as u32,
                });
            }
        }

        let vk_vertex_input = vk::PipelineVertexInputStateCreateInfo::default()
            .vertex_binding_descriptions(&vertex_buffers)
            .vertex_attribute_descriptions(&vertex_attributes);

        let vk_input_assembly = vk::PipelineInputAssemblyStateCreateInfo::default()
            .topology(conv::map_topology(desc.primitive.topology))
            .primitive_restart_enable(desc.primitive.strip_index_format.is_some());

        let compiled_vs = self.compile_stage(
            &desc.vertex_stage,
            naga::ShaderStage::Vertex,
            &desc.layout.binding_arrays,
        )?;
        stages.push(compiled_vs.create_info);
        let compiled_fs = match desc.fragment_stage {
            Some(ref stage) => {
                let compiled = self.compile_stage(
                    stage,
                    naga::ShaderStage::Fragment,
                    &desc.layout.binding_arrays,
                )?;
                stages.push(compiled.create_info);
                Some(compiled)
            }
            None => None,
        };

        let mut vk_rasterization = vk::PipelineRasterizationStateCreateInfo::default()
            .polygon_mode(conv::map_polygon_mode(desc.primitive.polygon_mode))
            .front_face(conv::map_front_face(desc.primitive.front_face))
            .line_width(1.0)
            .depth_clamp_enable(desc.primitive.unclipped_depth);
        if let Some(face) = desc.primitive.cull_mode {
            vk_rasterization = vk_rasterization.cull_mode(conv::map_cull_face(face))
        }
        let mut vk_rasterization_conservative_state =
            vk::PipelineRasterizationConservativeStateCreateInfoEXT::default()
                .conservative_rasterization_mode(
                    vk::ConservativeRasterizationModeEXT::OVERESTIMATE,
                );
        if desc.primitive.conservative {
            vk_rasterization = vk_rasterization.push_next(&mut vk_rasterization_conservative_state);
        }

        let mut vk_depth_stencil = vk::PipelineDepthStencilStateCreateInfo::default();
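        // A depth/stencil target that the pipeline never writes can stay in the
        // read-only layout, which also allows it to be sampled during the pass.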
        if let Some(ref ds) = desc.depth_stencil {
            let vk_format = self.shared.private_caps.map_texture_format(ds.format);
            let vk_layout = if ds.is_read_only(desc.primitive.cull_mode) {
                vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL
            } else {
                vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL
            };
            compatible_rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
                base: super::AttachmentKey::compatible(vk_format, vk_layout),
                stencil_ops: crate::AttachmentOps::all(),
            });

            if ds.is_depth_enabled() {
                vk_depth_stencil = vk_depth_stencil
                    .depth_test_enable(true)
                    .depth_write_enable(ds.depth_write_enabled)
                    .depth_compare_op(conv::map_comparison(ds.depth_compare));
            }
            if ds.stencil.is_enabled() {
                let s = &ds.stencil;
                let front = conv::map_stencil_face(&s.front, s.read_mask, s.write_mask);
                let back = conv::map_stencil_face(&s.back, s.read_mask, s.write_mask);
                vk_depth_stencil = vk_depth_stencil
                    .stencil_test_enable(true)
                    .front(front)
                    .back(back);
            }

            if ds.bias.is_enabled() {
                vk_rasterization = vk_rasterization
                    .depth_bias_enable(true)
                    .depth_bias_constant_factor(ds.bias.constant as f32)
                    .depth_bias_clamp(ds.bias.clamp)
                    .depth_bias_slope_factor(ds.bias.slope_scale);
            }
        }

        let vk_viewport = vk::PipelineViewportStateCreateInfo::default()
            .flags(vk::PipelineViewportStateCreateFlags::empty())
            .scissor_count(1)
            .viewport_count(1);

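        // Vulkan's pSampleMask is an array of 32-bit words; split wgpu's
        // 64-bit sample mask into its low and high halves.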
        let vk_sample_mask = [
            desc.multisample.mask as u32,
            (desc.multisample.mask >> 32) as u32,
        ];
        let vk_multisample = vk::PipelineMultisampleStateCreateInfo::default()
            .rasterization_samples(vk::SampleCountFlags::from_raw(desc.multisample.count))
            .alpha_to_coverage_enable(desc.multisample.alpha_to_coverage_enabled)
            .sample_mask(&vk_sample_mask);

        let mut vk_attachments = Vec::with_capacity(desc.color_targets.len());
        for cat in desc.color_targets {
            let (key, attachment) = if let Some(cat) = cat.as_ref() {
                let mut vk_attachment = vk::PipelineColorBlendAttachmentState::default()
                    .color_write_mask(vk::ColorComponentFlags::from_raw(cat.write_mask.bits()));
                if let Some(ref blend) = cat.blend {
                    let (color_op, color_src, color_dst) = conv::map_blend_component(&blend.color);
                    let (alpha_op, alpha_src, alpha_dst) = conv::map_blend_component(&blend.alpha);
                    vk_attachment = vk_attachment
                        .blend_enable(true)
                        .color_blend_op(color_op)
                        .src_color_blend_factor(color_src)
                        .dst_color_blend_factor(color_dst)
                        .alpha_blend_op(alpha_op)
                        .src_alpha_blend_factor(alpha_src)
                        .dst_alpha_blend_factor(alpha_dst);
                }

                let vk_format = self.shared.private_caps.map_texture_format(cat.format);
                (
                    Some(super::ColorAttachmentKey {
                        base: super::AttachmentKey::compatible(
                            vk_format,
                            vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
                        ),
                        resolve: None,
                    }),
                    vk_attachment,
                )
            } else {
                (None, vk::PipelineColorBlendAttachmentState::default())
            };

            compatible_rp_key.colors.push(key);
            vk_attachments.push(attachment);
        }

        let vk_color_blend =
            vk::PipelineColorBlendStateCreateInfo::default().attachments(&vk_attachments);

        let vk_dynamic_state =
            vk::PipelineDynamicStateCreateInfo::default().dynamic_states(&dynamic_states);

        let raw_pass = self
            .shared
            .make_render_pass(compatible_rp_key)
            .map_err(crate::DeviceError::from)?;

        let vk_infos = [{
            vk::GraphicsPipelineCreateInfo::default()
                .layout(desc.layout.raw)
                .stages(&stages)
                .vertex_input_state(&vk_vertex_input)
                .input_assembly_state(&vk_input_assembly)
                .rasterization_state(&vk_rasterization)
                .viewport_state(&vk_viewport)
                .multisample_state(&vk_multisample)
                .depth_stencil_state(&vk_depth_stencil)
                .color_blend_state(&vk_color_blend)
                .dynamic_state(&vk_dynamic_state)
                .render_pass(raw_pass)
        }];

        let pipeline_cache = desc
            .cache
            .map(|it| it.raw)
            .unwrap_or(vk::PipelineCache::null());

        let mut raw_vec = {
            profiling::scope!("vkCreateGraphicsPipelines");
            unsafe {
                self.shared
                    .raw
                    .create_graphics_pipelines(pipeline_cache, &vk_infos, None)
                    .map_err(|(_, e)| super::map_pipeline_err(e))
            }?
        };

        let raw = raw_vec.pop().unwrap();
        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(raw, label) };
        }

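        // Vulkan permits destroying a shader module as soon as pipeline
        // creation is done with it, so temporary modules are freed eagerly.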
        if let Some(raw_module) = compiled_vs.temp_raw_module {
            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
        }
        if let Some(CompiledStage {
            temp_raw_module: Some(raw_module),
            ..
        }) = compiled_fs
        {
            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
        }

        self.counters.render_pipelines.add(1);

        Ok(super::RenderPipeline { raw })
    }

    unsafe fn destroy_render_pipeline(&self, pipeline: super::RenderPipeline) {
        unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };

        self.counters.render_pipelines.sub(1);
    }

    unsafe fn create_compute_pipeline(
        &self,
        desc: &crate::ComputePipelineDescriptor<
            super::PipelineLayout,
            super::ShaderModule,
            super::PipelineCache,
        >,
    ) -> Result<super::ComputePipeline, crate::PipelineError> {
        let compiled = self.compile_stage(
            &desc.stage,
            naga::ShaderStage::Compute,
            &desc.layout.binding_arrays,
        )?;

        let vk_infos = [{
            vk::ComputePipelineCreateInfo::default()
                .layout(desc.layout.raw)
                .stage(compiled.create_info)
        }];

        let pipeline_cache = desc
            .cache
            .map(|it| it.raw)
            .unwrap_or(vk::PipelineCache::null());

        let mut raw_vec = {
            profiling::scope!("vkCreateComputePipelines");
            unsafe {
                self.shared
                    .raw
                    .create_compute_pipelines(pipeline_cache, &vk_infos, None)
                    .map_err(|(_, e)| super::map_pipeline_err(e))
            }?
        };

        let raw = raw_vec.pop().unwrap();
        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(raw, label) };
        }

        if let Some(raw_module) = compiled.temp_raw_module {
            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
        }

        self.counters.compute_pipelines.add(1);

        Ok(super::ComputePipeline { raw })
    }

    unsafe fn destroy_compute_pipeline(&self, pipeline: super::ComputePipeline) {
        unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };

        self.counters.compute_pipelines.sub(1);
    }

    unsafe fn create_pipeline_cache(
        &self,
        desc: &crate::PipelineCacheDescriptor<'_>,
    ) -> Result<super::PipelineCache, crate::PipelineCacheError> {
        let mut info = vk::PipelineCacheCreateInfo::default();
        if let Some(data) = desc.data {
            info = info.initial_data(data)
        }
        profiling::scope!("vkCreatePipelineCache");
        let raw = unsafe { self.shared.raw.create_pipeline_cache(&info, None) }
            .map_err(super::map_host_device_oom_err)?;

        Ok(super::PipelineCache { raw })
    }
    fn pipeline_cache_validation_key(&self) -> Option<[u8; 16]> {
        Some(self.shared.pipeline_cache_validation_key)
    }
    unsafe fn destroy_pipeline_cache(&self, cache: super::PipelineCache) {
        unsafe { self.shared.raw.destroy_pipeline_cache(cache.raw, None) }
    }
    unsafe fn create_query_set(
        &self,
        desc: &wgt::QuerySetDescriptor<crate::Label>,
    ) -> Result<super::QuerySet, crate::DeviceError> {
        let (vk_type, pipeline_statistics) = match desc.ty {
            wgt::QueryType::Occlusion => (
                vk::QueryType::OCCLUSION,
                vk::QueryPipelineStatisticFlags::empty(),
            ),
            wgt::QueryType::PipelineStatistics(statistics) => (
                vk::QueryType::PIPELINE_STATISTICS,
                conv::map_pipeline_statistics(statistics),
            ),
            wgt::QueryType::Timestamp => (
                vk::QueryType::TIMESTAMP,
                vk::QueryPipelineStatisticFlags::empty(),
            ),
        };

        let vk_info = vk::QueryPoolCreateInfo::default()
            .query_type(vk_type)
            .query_count(desc.count)
            .pipeline_statistics(pipeline_statistics);

        let raw = unsafe { self.shared.raw.create_query_pool(&vk_info, None) }
            .map_err(super::map_host_device_oom_err)?;
        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(raw, label) };
        }

        self.counters.query_sets.add(1);

        Ok(super::QuerySet { raw })
    }

    unsafe fn destroy_query_set(&self, set: super::QuerySet) {
        unsafe { self.shared.raw.destroy_query_pool(set.raw, None) };

        self.counters.query_sets.sub(1);
    }

    unsafe fn create_fence(&self) -> Result<super::Fence, crate::DeviceError> {
        self.counters.fences.add(1);

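        // Use a single timeline semaphore where the device supports it;
        // otherwise emulate one with a pool of binary fences tagged with the
        // fence values they signal.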
        Ok(if self.shared.private_caps.timeline_semaphores {
            let mut sem_type_info =
                vk::SemaphoreTypeCreateInfo::default().semaphore_type(vk::SemaphoreType::TIMELINE);
            let vk_info = vk::SemaphoreCreateInfo::default().push_next(&mut sem_type_info);
            let raw = unsafe { self.shared.raw.create_semaphore(&vk_info, None) }
                .map_err(super::map_host_device_oom_err)?;

            super::Fence::TimelineSemaphore(raw)
        } else {
            super::Fence::FencePool {
                last_completed: 0,
                active: Vec::new(),
                free: Vec::new(),
            }
        })
    }
    unsafe fn destroy_fence(&self, fence: super::Fence) {
        match fence {
            super::Fence::TimelineSemaphore(raw) => {
                unsafe { self.shared.raw.destroy_semaphore(raw, None) };
            }
            super::Fence::FencePool {
                active,
                free,
                last_completed: _,
            } => {
                for (_, raw) in active {
                    unsafe { self.shared.raw.destroy_fence(raw, None) };
                }
                for raw in free {
                    unsafe { self.shared.raw.destroy_fence(raw, None) };
                }
            }
        }

        self.counters.fences.sub(1);
    }
    unsafe fn get_fence_value(
        &self,
        fence: &super::Fence,
    ) -> Result<crate::FenceValue, crate::DeviceError> {
        fence.get_latest(
            &self.shared.raw,
            self.shared.extension_fns.timeline_semaphore.as_ref(),
        )
    }
    unsafe fn wait(
        &self,
        fence: &super::Fence,
        wait_value: crate::FenceValue,
        timeout_ms: u32,
    ) -> Result<bool, crate::DeviceError> {
        let timeout_ns = timeout_ms as u64 * super::MILLIS_TO_NANOS;
        self.shared.wait_for_fence(fence, wait_value, timeout_ns)
    }

    unsafe fn start_capture(&self) -> bool {
        #[cfg(feature = "renderdoc")]
        {
            let raw_vk_instance =
                vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
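            // RenderDoc identifies the capture target by the dispatch table
            // that a dispatchable VkInstance handle points to, so dereference
            // the handle once to obtain it.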
            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };
            unsafe {
                self.render_doc
                    .start_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
            }
        }
        #[cfg(not(feature = "renderdoc"))]
        false
    }
    unsafe fn stop_capture(&self) {
        #[cfg(feature = "renderdoc")]
        {
            let raw_vk_instance =
                vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
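            // Same dispatch-table indirection as in start_capture above.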
            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };

            unsafe {
                self.render_doc
                    .end_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
            }
        }
    }

    unsafe fn pipeline_cache_get_data(&self, cache: &super::PipelineCache) -> Option<Vec<u8>> {
        let data = unsafe { self.raw_device().get_pipeline_cache_data(cache.raw) };
        data.ok()
    }

    unsafe fn get_acceleration_structure_build_sizes<'a>(
        &self,
        desc: &crate::GetAccelerationStructureBuildSizesDescriptor<'a, super::Buffer>,
    ) -> crate::AccelerationStructureBuildSizes {
        const CAPACITY: usize = 8;

        let ray_tracing_functions = self
            .shared
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

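        // A size query only needs geometry kinds, formats and primitive
        // counts; the actual buffer device addresses may be left unset here.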
        let (geometries, primitive_counts) = match *desc.entries {
            crate::AccelerationStructureEntries::Instances(ref instances) => {
                let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::default();

                let geometry = vk::AccelerationStructureGeometryKHR::default()
                    .geometry_type(vk::GeometryTypeKHR::INSTANCES)
                    .geometry(vk::AccelerationStructureGeometryDataKHR {
                        instances: instance_data,
                    });

                (
                    smallvec::smallvec![geometry],
                    smallvec::smallvec![instances.count],
                )
            }
            crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
                let mut primitive_counts =
                    smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
                let mut geometries = smallvec::SmallVec::<
                    [vk::AccelerationStructureGeometryKHR; CAPACITY],
                >::with_capacity(in_geometries.len());

                for triangles in in_geometries {
                    let mut triangle_data =
                        vk::AccelerationStructureGeometryTrianglesDataKHR::default()
                            .index_type(vk::IndexType::NONE_KHR)
                            .vertex_format(conv::map_vertex_format(triangles.vertex_format))
                            .max_vertex(triangles.vertex_count)
                            .vertex_stride(triangles.vertex_stride);

                    let primitive_count = if let Some(ref indices) = triangles.indices {
                        triangle_data =
                            triangle_data.index_type(conv::map_index_format(indices.format));
                        indices.count / 3
                    } else {
                        triangles.vertex_count
                    };

                    let geometry = vk::AccelerationStructureGeometryKHR::default()
                        .geometry_type(vk::GeometryTypeKHR::TRIANGLES)
                        .geometry(vk::AccelerationStructureGeometryDataKHR {
                            triangles: triangle_data,
                        })
                        .flags(conv::map_acceleration_structure_geometry_flags(
                            triangles.flags,
                        ));

                    geometries.push(geometry);
                    primitive_counts.push(primitive_count);
                }
                (geometries, primitive_counts)
            }
            crate::AccelerationStructureEntries::AABBs(ref in_geometries) => {
                let mut primitive_counts =
                    smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
                let mut geometries = smallvec::SmallVec::<
                    [vk::AccelerationStructureGeometryKHR; CAPACITY],
                >::with_capacity(in_geometries.len());
                for aabb in in_geometries {
                    let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::default()
                        .stride(aabb.stride);

                    let geometry = vk::AccelerationStructureGeometryKHR::default()
                        .geometry_type(vk::GeometryTypeKHR::AABBS)
                        .geometry(vk::AccelerationStructureGeometryDataKHR { aabbs: aabbs_data })
                        .flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));

                    geometries.push(geometry);
                    primitive_counts.push(aabb.count);
                }
                (geometries, primitive_counts)
            }
        };

        let ty = match *desc.entries {
            crate::AccelerationStructureEntries::Instances(_) => {
                vk::AccelerationStructureTypeKHR::TOP_LEVEL
            }
            _ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
        };

        let geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
            .ty(ty)
            .flags(conv::map_acceleration_structure_flags(desc.flags))
            .geometries(&geometries);

        let mut raw = Default::default();
        unsafe {
            ray_tracing_functions
                .acceleration_structure
                .get_acceleration_structure_build_sizes(
                    vk::AccelerationStructureBuildTypeKHR::DEVICE,
                    &geometry_info,
                    &primitive_counts,
                    &mut raw,
                )
        }

        crate::AccelerationStructureBuildSizes {
            acceleration_structure_size: raw.acceleration_structure_size,
            update_scratch_size: raw.update_scratch_size,
            build_scratch_size: raw.build_scratch_size,
        }
    }

    unsafe fn get_acceleration_structure_device_address(
        &self,
        acceleration_structure: &super::AccelerationStructure,
    ) -> wgt::BufferAddress {
        let ray_tracing_functions = self
            .shared
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

        unsafe {
            ray_tracing_functions
                .acceleration_structure
                .get_acceleration_structure_device_address(
                    &vk::AccelerationStructureDeviceAddressInfoKHR::default()
                        .acceleration_structure(acceleration_structure.raw),
                )
        }
    }

    unsafe fn create_acceleration_structure(
        &self,
        desc: &crate::AccelerationStructureDescriptor,
    ) -> Result<super::AccelerationStructure, crate::DeviceError> {
        let ray_tracing_functions = self
            .shared
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

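        // An acceleration structure is backed by an ordinary VkBuffer: create
        // the backing buffer, allocate and bind its memory, then create the
        // acceleration structure within it.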
        let vk_buffer_info = vk::BufferCreateInfo::default()
            .size(desc.size)
            .usage(
                vk::BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR
                    | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,
            )
            .sharing_mode(vk::SharingMode::EXCLUSIVE);

        unsafe {
            let raw_buffer = self
                .shared
                .raw
                .create_buffer(&vk_buffer_info, None)
                .map_err(super::map_host_device_oom_and_ioca_err)?;
            let req = self.shared.raw.get_buffer_memory_requirements(raw_buffer);

            let block = self.mem_allocator.lock().alloc(
                &*self.shared,
                gpu_alloc::Request {
                    size: req.size,
                    align_mask: req.alignment - 1,
                    usage: gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS,
                    memory_types: req.memory_type_bits & self.valid_ash_memory_types,
                },
            )?;

            self.shared
                .raw
                .bind_buffer_memory(raw_buffer, *block.memory(), block.offset())
                .map_err(super::map_host_device_oom_and_ioca_err)?;

            if let Some(label) = desc.label {
                self.shared.set_object_name(raw_buffer, label);
            }

            let vk_info = vk::AccelerationStructureCreateInfoKHR::default()
                .buffer(raw_buffer)
                .offset(0)
                .size(desc.size)
                .ty(conv::map_acceleration_structure_format(desc.format));

            let raw_acceleration_structure = ray_tracing_functions
                .acceleration_structure
                .create_acceleration_structure(&vk_info, None)
                .map_err(super::map_host_oom_and_ioca_err)?;

            if let Some(label) = desc.label {
                self.shared
                    .set_object_name(raw_acceleration_structure, label);
            }

            Ok(super::AccelerationStructure {
                raw: raw_acceleration_structure,
                buffer: raw_buffer,
                block: Mutex::new(block),
            })
        }
    }

    unsafe fn destroy_acceleration_structure(
        &self,
        acceleration_structure: super::AccelerationStructure,
    ) {
        let ray_tracing_functions = self
            .shared
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

        unsafe {
            ray_tracing_functions
                .acceleration_structure
                .destroy_acceleration_structure(acceleration_structure.raw, None);
            self.shared
                .raw
                .destroy_buffer(acceleration_structure.buffer, None);
            self.mem_allocator
                .lock()
                .dealloc(&*self.shared, acceleration_structure.block.into_inner());
        }
    }

    fn get_internal_counters(&self) -> wgt::HalCounters {
        self.counters
            .memory_allocations
            .set(self.shared.memory_allocations_counter.read());

        self.counters.as_ref().clone()
    }

    fn tlas_instance_to_bytes(&self, instance: TlasInstance) -> Vec<u8> {
        const MAX_U24: u32 = (1u32 << 24u32) - 1u32;
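        // VkAccelerationStructureInstanceKHR packs the 24-bit custom index and
        // the 8-bit visibility mask into a single u32, low bits first.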
        let temp = RawTlasInstance {
            transform: instance.transform,
            custom_index_and_mask: (instance.custom_index & MAX_U24)
                | (u32::from(instance.mask) << 24),
            shader_binding_table_record_offset_and_flags: 0,
            acceleration_structure_reference: instance.blas_address,
        };
        let temp: *const _ = &temp;
        unsafe {
            slice::from_raw_parts::<u8>(temp.cast::<u8>(), size_of::<RawTlasInstance>()).to_vec()
        }
    }
}

impl super::DeviceShared {
    pub(super) fn new_binary_semaphore(&self) -> Result<vk::Semaphore, crate::DeviceError> {
        unsafe {
            self.raw
                .create_semaphore(&vk::SemaphoreCreateInfo::default(), None)
                .map_err(super::map_host_device_oom_err)
        }
    }

    pub(super) fn wait_for_fence(
        &self,
        fence: &super::Fence,
        wait_value: crate::FenceValue,
        timeout_ns: u64,
    ) -> Result<bool, crate::DeviceError> {
        profiling::scope!("Device::wait");
        match *fence {
            super::Fence::TimelineSemaphore(raw) => {
                let semaphores = [raw];
                let values = [wait_value];
                let vk_info = vk::SemaphoreWaitInfo::default()
                    .semaphores(&semaphores)
                    .values(&values);
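                // Timeline semaphores come either from the
                // VK_KHR_timeline_semaphore extension or from core Vulkan 1.2,
                // where the feature was promoted.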
                let result = match self.extension_fns.timeline_semaphore {
                    Some(super::ExtensionFn::Extension(ref ext)) => unsafe {
                        ext.wait_semaphores(&vk_info, timeout_ns)
                    },
                    Some(super::ExtensionFn::Promoted) => unsafe {
                        self.raw.wait_semaphores(&vk_info, timeout_ns)
                    },
                    None => unreachable!(),
                };
                match result {
                    Ok(()) => Ok(true),
                    Err(vk::Result::TIMEOUT) => Ok(false),
                    Err(other) => Err(super::map_host_device_oom_and_lost_err(other)),
                }
            }
            super::Fence::FencePool {
                last_completed,
                ref active,
                free: _,
            } => {
                if wait_value <= last_completed {
                    Ok(true)
                } else {
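                    // Waiting on any active fence signaled at a value >=
                    // wait_value is sufficient to prove the requested value
                    // has been reached.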
                    match active.iter().find(|&&(value, _)| value >= wait_value) {
                        Some(&(_, raw)) => {
                            match unsafe { self.raw.wait_for_fences(&[raw], true, timeout_ns) } {
                                Ok(()) => Ok(true),
                                Err(vk::Result::TIMEOUT) => Ok(false),
                                Err(other) => Err(super::map_host_device_oom_and_lost_err(other)),
                            }
                        }
                        None => {
                            crate::hal_usage_error(format!(
                                "no signals reached value {wait_value}"
                            ));
                        }
                    }
                }
            }
        }
    }
}

impl From<gpu_alloc::AllocationError> for crate::DeviceError {
    fn from(error: gpu_alloc::AllocationError) -> Self {
        use gpu_alloc::AllocationError as Ae;
        match error {
            Ae::OutOfDeviceMemory | Ae::OutOfHostMemory | Ae::TooManyObjects => Self::OutOfMemory,
            Ae::NoCompatibleMemoryTypes => crate::hal_usage_error(error),
        }
    }
}
impl From<gpu_alloc::MapError> for crate::DeviceError {
    fn from(error: gpu_alloc::MapError) -> Self {
        use gpu_alloc::MapError as Me;
        match error {
            Me::OutOfDeviceMemory | Me::OutOfHostMemory | Me::MapFailed => Self::OutOfMemory,
            Me::NonHostVisible | Me::AlreadyMapped => crate::hal_usage_error(error),
        }
    }
}
impl From<gpu_descriptor::AllocationError> for crate::DeviceError {
    fn from(error: gpu_descriptor::AllocationError) -> Self {
        use gpu_descriptor::AllocationError as Ae;
        match error {
            Ae::OutOfDeviceMemory | Ae::OutOfHostMemory | Ae::Fragmentation => Self::OutOfMemory,
        }
    }
}

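/// Panics on a Vulkan result that the calling code never expects to observe.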
fn handle_unexpected(err: vk::Result) -> ! {
    panic!("Unexpected Vulkan error: `{err}`")
}

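/// A raw `vk::Image` together with the bookkeeping needed to later bind its
/// backing memory and finish constructing a texture.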
struct ImageWithoutMemory {
    raw: vk::Image,
    requirements: vk::MemoryRequirements,
    copy_size: crate::CopyExtent,
    view_formats: Vec<wgt::TextureFormat>,
    raw_flags: vk::ImageCreateFlags,
}