use super::conv;

use arrayvec::ArrayVec;
use ash::vk;

use std::{mem, ops::Range};

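/// How many command buffers to allocate from the pool at a time when the
/// free list runs dry in `begin_encoding`.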
const ALLOCATION_GRANULARITY: u32 = 16;
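/// The image layout copy destinations are expected to be in; the
/// `check_dst_image_layout` test at the bottom of this file verifies that it
/// matches what `derive_image_layout` produces for `COPY_DST`.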
const DST_IMAGE_LAYOUT: vk::ImageLayout = vk::ImageLayout::TRANSFER_DST_OPTIMAL;

impl super::Texture {
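    /// Converts `BufferTextureCopy` regions into `vk::BufferImageCopy` structs.
    /// Vulkan expects `buffer_row_length` and `buffer_image_height` in texels
    /// rather than bytes, so `bytes_per_row` is converted to a texel width via
    /// the format's block size, and each copy extent is clamped to the
    /// texture's maximum copy size.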
    fn map_buffer_copies<T>(&self, regions: T) -> impl Iterator<Item = vk::BufferImageCopy>
    where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (block_width, block_height) = self.format.block_dimensions();
        let format = self.format;
        let copy_size = self.copy_size;
        regions.map(move |r| {
            let extent = r.texture_base.max_copy_size(&copy_size).min(&r.size);
            let (image_subresource, image_offset) = conv::map_subresource_layers(&r.texture_base);
            vk::BufferImageCopy {
                buffer_offset: r.buffer_layout.offset,
                buffer_row_length: r.buffer_layout.bytes_per_row.map_or(0, |bpr| {
                    let block_size = format
                        .block_copy_size(Some(r.texture_base.aspect.map()))
                        .unwrap();
                    block_width * (bpr / block_size)
                }),
                buffer_image_height: r
                    .buffer_layout
                    .rows_per_image
                    .map_or(0, |rpi| rpi * block_height),
                image_subresource,
                image_offset,
                image_extent: conv::map_copy_extent(&extent),
            }
        })
    }
}

impl super::CommandEncoder {
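    /// Writes the end-of-pass timestamp that `begin_render_pass` or
    /// `begin_compute_pass` deferred, if one was requested. The `take` ensures
    /// the timestamp is written at most once per pass.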
    fn write_pass_end_timestamp_if_requested(&mut self) {
        if let Some((query_set, index)) = self.end_of_pass_timer_query.take() {
            unsafe {
                self.device.raw.cmd_write_timestamp(
                    self.active,
                    vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                    query_set,
                    index,
                );
            }
        }
    }
}

impl crate::CommandEncoder for super::CommandEncoder {
    type A = super::Api;

    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
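        // Reuse a recycled command buffer if one is available; otherwise
        // replenish the free list with a fresh batch from the pool.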
        if self.free.is_empty() {
            let vk_info = vk::CommandBufferAllocateInfo::default()
                .command_pool(self.raw)
                .command_buffer_count(ALLOCATION_GRANULARITY);
            let cmd_buf_vec = unsafe {
                self.device
                    .raw
                    .allocate_command_buffers(&vk_info)
                    .map_err(super::map_host_device_oom_err)?
            };
            self.free.extend(cmd_buf_vec);
        }
        let raw = self.free.pop().unwrap();

        unsafe { self.device.set_object_name(raw, label.unwrap_or_default()) };

        self.rpass_debug_marker_active = false;

        let vk_info = vk::CommandBufferBeginInfo::default()
            .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT);
        unsafe { self.device.raw.begin_command_buffer(raw, &vk_info) }
            .map_err(super::map_host_device_oom_err)?;
        self.active = raw;

        Ok(())
    }

    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        let raw = self.active;
        self.active = vk::CommandBuffer::null();
        unsafe { self.device.raw.end_command_buffer(raw) }.map_err(map_err)?;
        fn map_err(err: vk::Result) -> crate::DeviceError {
            super::map_host_device_oom_err(err)
        }
        Ok(super::CommandBuffer { raw })
    }

    unsafe fn discard_encoding(&mut self) {
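        // Discarding is only meaningful while an encoding is active. The
        // buffer is parked on `discarded` and recycled by `reset_all`.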
        assert_ne!(self.active, vk::CommandBuffer::null());

        self.discarded.push(self.active);
        self.active = vk::CommandBuffer::null();
    }

    unsafe fn reset_all<I>(&mut self, cmd_bufs: I)
    where
        I: Iterator<Item = super::CommandBuffer>,
    {
        self.temp.clear();
        self.free
            .extend(cmd_bufs.into_iter().map(|cmd_buf| cmd_buf.raw));
        self.free.append(&mut self.discarded);
        let _ = unsafe {
            self.device
                .raw
                .reset_command_pool(self.raw, vk::CommandPoolResetFlags::default())
        };
    }

    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::BufferBarrier<'a, super::Buffer>>,
    {
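        // The stage masks are seeded with TOP/BOTTOM_OF_PIPE, presumably so
        // they stay non-empty no matter what the usage mapping contributes
        // (`vkCmdPipelineBarrier` rejects zero stage masks without
        // synchronization2).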
        let mut src_stages = vk::PipelineStageFlags::TOP_OF_PIPE;
        let mut dst_stages = vk::PipelineStageFlags::BOTTOM_OF_PIPE;
        let vk_barriers = &mut self.temp.buffer_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let (src_stage, src_access) = conv::map_buffer_usage_to_barrier(bar.usage.from);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_buffer_usage_to_barrier(bar.usage.to);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::BufferMemoryBarrier::default()
                    .buffer(bar.buffer.raw)
                    .size(vk::WHOLE_SIZE)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access),
            )
        }

        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    vk_barriers,
                    &[],
                )
            };
        }
    }

    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::TextureBarrier<'a, super::Texture>>,
    {
        let mut src_stages = vk::PipelineStageFlags::empty();
        let mut dst_stages = vk::PipelineStageFlags::empty();
        let vk_barriers = &mut self.temp.image_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let range = conv::map_subresource_range_combined_aspect(
                &bar.range,
                bar.texture.format,
                &self.device.private_caps,
            );
            let (src_stage, src_access) = conv::map_texture_usage_to_barrier(bar.usage.from);
            let src_layout = conv::derive_image_layout(bar.usage.from, bar.texture.format);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_texture_usage_to_barrier(bar.usage.to);
            let dst_layout = conv::derive_image_layout(bar.usage.to, bar.texture.format);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::ImageMemoryBarrier::default()
                    .image(bar.texture.raw)
                    .subresource_range(range)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)
                    .old_layout(src_layout)
                    .new_layout(dst_layout),
            );
        }

        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    &[],
                    vk_barriers,
                )
            };
        }
    }

    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        let range_size = range.end - range.start;
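        // Workaround: some drivers mishandle `vkCmdFillBuffer` when the fill
        // is at least 4096 bytes and the offset is not 16-byte aligned. Split
        // such fills into an unaligned prefix and an aligned remainder.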
        if self.device.workarounds.contains(
            super::Workarounds::FORCE_FILL_BUFFER_WITH_SIZE_GREATER_4096_ALIGNED_OFFSET_16,
        ) && range_size >= 4096
            && range.start % 16 != 0
        {
            let rounded_start = wgt::math::align_to(range.start, 16);
            let prefix_size = rounded_start - range.start;

            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    range.start,
                    prefix_size,
                    0,
                )
            };

            let suffix_size = range.end - rounded_start;

            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    rounded_start,
                    suffix_size,
                    0,
                )
            };
        } else {
            unsafe {
                self.device
                    .raw
                    .cmd_fill_buffer(self.active, buffer.raw, range.start, range_size, 0)
            };
        }
    }

    unsafe fn copy_buffer_to_buffer<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferCopy>,
    {
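        // Collecting into a stack-allocated SmallVec avoids a heap allocation
        // for the common case of only a few copy regions.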
        let vk_regions_iter = regions.map(|r| vk::BufferCopy {
            src_offset: r.src_offset,
            dst_offset: r.dst_offset,
            size: r.size.get(),
        });

        unsafe {
            self.device.raw.cmd_copy_buffer(
                self.active,
                src.raw,
                dst.raw,
                &smallvec::SmallVec::<[vk::BufferCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &super::Texture,
        src_usage: wgt::TextureUses,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.format);

        let vk_regions_iter = regions.map(|r| {
            let (src_subresource, src_offset) = conv::map_subresource_layers(&r.src_base);
            let (dst_subresource, dst_offset) = conv::map_subresource_layers(&r.dst_base);
            let extent = r
                .size
                .min(&r.src_base.max_copy_size(&src.copy_size))
                .min(&r.dst_base.max_copy_size(&dst.copy_size));
            vk::ImageCopy {
                src_subresource,
                src_offset,
                dst_subresource,
                dst_offset,
                extent: conv::map_copy_extent(&extent),
            }
        });

        unsafe {
            self.device.raw.cmd_copy_image(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                DST_IMAGE_LAYOUT,
                &smallvec::SmallVec::<[vk::ImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_buffer_to_texture<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let vk_regions_iter = dst.map_buffer_copies(regions);

        unsafe {
            self.device.raw.cmd_copy_buffer_to_image(
                self.active,
                src.raw,
                dst.raw,
                DST_IMAGE_LAYOUT,
                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &super::Texture,
        src_usage: wgt::TextureUses,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.format);
        let vk_regions_iter = src.map_buffer_copies(regions);

        unsafe {
            self.device.raw.cmd_copy_image_to_buffer(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        unsafe {
            self.device.raw.cmd_begin_query(
                self.active,
                set.raw,
                index,
                vk::QueryControlFlags::empty(),
            )
        };
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, index: u32) {
        unsafe { self.device.raw.cmd_end_query(self.active, set.raw, index) };
    }
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        unsafe {
            self.device.raw.cmd_write_timestamp(
                self.active,
                vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                set.raw,
                index,
            )
        };
    }
    unsafe fn read_acceleration_structure_compact_size(
        &mut self,
        acceleration_structure: &super::AccelerationStructure,
        buffer: &super::Buffer,
    ) {
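        // Reset the dedicated query, have the device write the compacted-size
        // property into it, then copy the 64-bit result into `buffer`, waiting
        // on availability so the value is valid when the copy completes.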
        let ray_tracing_functions = self
            .device
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");
        let query_pool = acceleration_structure
            .compacted_size_query
            .as_ref()
            .unwrap();
        unsafe {
            self.device
                .raw
                .cmd_reset_query_pool(self.active, *query_pool, 0, 1);
            ray_tracing_functions
                .acceleration_structure
                .cmd_write_acceleration_structures_properties(
                    self.active,
                    &[acceleration_structure.raw],
                    vk::QueryType::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
                    *query_pool,
                    0,
                );
            self.device.raw.cmd_copy_query_pool_results(
                self.active,
                *query_pool,
                0,
                1,
                buffer.raw,
                0,
                wgt::QUERY_SIZE as vk::DeviceSize,
                vk::QueryResultFlags::TYPE_64 | vk::QueryResultFlags::WAIT,
            )
        };
    }
    unsafe fn reset_queries(&mut self, set: &super::QuerySet, range: Range<u32>) {
        unsafe {
            self.device.raw.cmd_reset_query_pool(
                self.active,
                set.raw,
                range.start,
                range.end - range.start,
            )
        };
    }
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        stride: wgt::BufferSize,
    ) {
        unsafe {
            self.device.raw.cmd_copy_query_pool_results(
                self.active,
                set.raw,
                range.start,
                range.end - range.start,
                buffer.raw,
                offset,
                stride.get(),
                vk::QueryResultFlags::TYPE_64 | vk::QueryResultFlags::WAIT,
            )
        };
    }

    unsafe fn build_acceleration_structures<'a, T>(&mut self, descriptor_count: u32, descriptors: T)
    where
        super::Api: 'a,
        T: IntoIterator<
            Item = crate::BuildAccelerationStructureDescriptor<
                'a,
                super::Buffer,
                super::AccelerationStructure,
            >,
        >,
    {
        const CAPACITY_OUTER: usize = 8;
        const CAPACITY_INNER: usize = 1;
        let descriptor_count = descriptor_count as usize;

        let ray_tracing_functions = self
            .device
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

        let get_device_address = |buffer: Option<&super::Buffer>| unsafe {
            match buffer {
                Some(buffer) => ray_tracing_functions
                    .buffer_device_address
                    .get_buffer_device_address(
                        &vk::BufferDeviceAddressInfo::default().buffer(buffer.raw),
                    ),
                None => panic!("Buffers are required to build acceleration structures"),
            }
        };

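        // Storage for the per-descriptor geometry and range arrays; these must
        // stay alive (and stop moving) before raw pointers into them are
        // written into `geometry_infos` below.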
        let mut ranges_storage = smallvec::SmallVec::<
            [smallvec::SmallVec<[vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER]>;
                CAPACITY_OUTER],
        >::with_capacity(descriptor_count);
        let mut geometries_storage = smallvec::SmallVec::<
            [smallvec::SmallVec<[vk::AccelerationStructureGeometryKHR; CAPACITY_INNER]>;
                CAPACITY_OUTER],
        >::with_capacity(descriptor_count);

        let mut geometry_infos = smallvec::SmallVec::<
            [vk::AccelerationStructureBuildGeometryInfoKHR; CAPACITY_OUTER],
        >::with_capacity(descriptor_count);
        let mut ranges_ptrs = smallvec::SmallVec::<
            [&[vk::AccelerationStructureBuildRangeInfoKHR]; CAPACITY_OUTER],
        >::with_capacity(descriptor_count);

        for desc in descriptors {
            let (geometries, ranges) = match *desc.entries {
                crate::AccelerationStructureEntries::Instances(ref instances) => {
                    let instance_data =
                        vk::AccelerationStructureGeometryInstancesDataKHR::default().data(
                            vk::DeviceOrHostAddressConstKHR {
                                device_address: get_device_address(instances.buffer),
                            },
                        );

                    let geometry = vk::AccelerationStructureGeometryKHR::default()
                        .geometry_type(vk::GeometryTypeKHR::INSTANCES)
                        .geometry(vk::AccelerationStructureGeometryDataKHR {
                            instances: instance_data,
                        });

                    let range = vk::AccelerationStructureBuildRangeInfoKHR::default()
                        .primitive_count(instances.count)
                        .primitive_offset(instances.offset);

                    (smallvec::smallvec![geometry], smallvec::smallvec![range])
                }
                crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
                    let mut ranges = smallvec::SmallVec::<
                        [vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    let mut geometries = smallvec::SmallVec::<
                        [vk::AccelerationStructureGeometryKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    for triangles in in_geometries {
                        let mut triangle_data =
                            vk::AccelerationStructureGeometryTrianglesDataKHR::default()
                                .index_type(vk::IndexType::NONE_KHR)
                                .vertex_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: get_device_address(triangles.vertex_buffer),
                                })
                                .vertex_format(conv::map_vertex_format(triangles.vertex_format))
                                .max_vertex(triangles.vertex_count)
                                .vertex_stride(triangles.vertex_stride);

                        let mut range = vk::AccelerationStructureBuildRangeInfoKHR::default();

                        if let Some(ref indices) = triangles.indices {
                            triangle_data = triangle_data
                                .index_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: get_device_address(indices.buffer),
                                })
                                .index_type(conv::map_index_format(indices.format));

                            range = range
                                .primitive_count(indices.count / 3)
                                .primitive_offset(indices.offset)
                                .first_vertex(triangles.first_vertex);
                        } else {
                            range = range
                                .primitive_count(triangles.vertex_count)
                                .first_vertex(triangles.first_vertex);
                        }

                        if let Some(ref transform) = triangles.transform {
                            let transform_device_address = unsafe {
                                ray_tracing_functions
                                    .buffer_device_address
                                    .get_buffer_device_address(
                                        &vk::BufferDeviceAddressInfo::default()
                                            .buffer(transform.buffer.raw),
                                    )
                            };
                            triangle_data =
                                triangle_data.transform_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: transform_device_address,
                                });

                            range = range.transform_offset(transform.offset);
                        }

                        let geometry = vk::AccelerationStructureGeometryKHR::default()
                            .geometry_type(vk::GeometryTypeKHR::TRIANGLES)
                            .geometry(vk::AccelerationStructureGeometryDataKHR {
                                triangles: triangle_data,
                            })
                            .flags(conv::map_acceleration_structure_geometry_flags(
                                triangles.flags,
                            ));

                        geometries.push(geometry);
                        ranges.push(range);
                    }
                    (geometries, ranges)
                }
                crate::AccelerationStructureEntries::AABBs(ref in_geometries) => {
                    let mut ranges = smallvec::SmallVec::<
                        [vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    let mut geometries = smallvec::SmallVec::<
                        [vk::AccelerationStructureGeometryKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    for aabb in in_geometries {
                        let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::default()
                            .data(vk::DeviceOrHostAddressConstKHR {
                                device_address: get_device_address(aabb.buffer),
                            })
                            .stride(aabb.stride);

                        let range = vk::AccelerationStructureBuildRangeInfoKHR::default()
                            .primitive_count(aabb.count)
                            .primitive_offset(aabb.offset);

                        let geometry = vk::AccelerationStructureGeometryKHR::default()
                            .geometry_type(vk::GeometryTypeKHR::AABBS)
                            .geometry(vk::AccelerationStructureGeometryDataKHR {
                                aabbs: aabbs_data,
                            })
                            .flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));

                        geometries.push(geometry);
                        ranges.push(range);
                    }
                    (geometries, ranges)
                }
            };

            ranges_storage.push(ranges);
            geometries_storage.push(geometries);

            let scratch_device_address = unsafe {
                ray_tracing_functions
                    .buffer_device_address
                    .get_buffer_device_address(
                        &vk::BufferDeviceAddressInfo::default().buffer(desc.scratch_buffer.raw),
                    )
            };
            let ty = match *desc.entries {
                crate::AccelerationStructureEntries::Instances(_) => {
                    vk::AccelerationStructureTypeKHR::TOP_LEVEL
                }
                _ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
            };
            let mut geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
                .ty(ty)
                .mode(conv::map_acceleration_structure_build_mode(desc.mode))
                .flags(conv::map_acceleration_structure_flags(desc.flags))
                .dst_acceleration_structure(desc.destination_acceleration_structure.raw)
                .scratch_data(vk::DeviceOrHostAddressKHR {
                    device_address: scratch_device_address + desc.scratch_buffer_offset,
                });

            if desc.mode == crate::AccelerationStructureBuildMode::Update {
                geometry_info.src_acceleration_structure = desc
                    .source_acceleration_structure
                    .unwrap_or(desc.destination_acceleration_structure)
                    .raw;
            }

            geometry_infos.push(geometry_info);
        }

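        // Only now that the storage vectors have stopped growing are their
        // addresses stable, so the raw geometry pointers can be patched in.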
        for (i, geometry_info) in geometry_infos.iter_mut().enumerate() {
            geometry_info.geometry_count = geometries_storage[i].len() as u32;
            geometry_info.p_geometries = geometries_storage[i].as_ptr();
            ranges_ptrs.push(&ranges_storage[i]);
        }

        unsafe {
            ray_tracing_functions
                .acceleration_structure
                .cmd_build_acceleration_structures(self.active, &geometry_infos, &ranges_ptrs);
        }
    }

    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        barrier: crate::AccelerationStructureBarrier,
    ) {
        let (src_stage, src_access) = conv::map_acceleration_structure_usage_to_barrier(
            barrier.usage.from,
            self.device.features,
        );
        let (dst_stage, dst_access) = conv::map_acceleration_structure_usage_to_barrier(
            barrier.usage.to,
            self.device.features,
        );

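        // TOP/BOTTOM_OF_PIPE are ORed in, presumably to keep the stage masks
        // non-empty whatever the usage mapping returns (a zero stage mask is
        // invalid without synchronization2).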
        unsafe {
            self.device.raw.cmd_pipeline_barrier(
                self.active,
                src_stage | vk::PipelineStageFlags::TOP_OF_PIPE,
                dst_stage | vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                vk::DependencyFlags::empty(),
                &[vk::MemoryBarrier::default()
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)],
                &[],
                &[],
            )
        };
    }
    unsafe fn begin_render_pass(
        &mut self,
        desc: &crate::RenderPassDescriptor<super::QuerySet, super::TextureView>,
    ) {
        let mut vk_clear_values =
            ArrayVec::<vk::ClearValue, { super::MAX_TOTAL_ATTACHMENTS }>::new();
        let mut vk_image_views = ArrayVec::<vk::ImageView, { super::MAX_TOTAL_ATTACHMENTS }>::new();
        let mut rp_key = super::RenderPassKey::default();
        let mut fb_key = super::FramebufferKey {
            attachments: ArrayVec::default(),
            extent: desc.extent,
            sample_count: desc.sample_count,
        };
        let caps = &self.device.private_caps;

        for cat in desc.color_attachments {
            if let Some(cat) = cat.as_ref() {
                vk_clear_values.push(vk::ClearValue {
                    color: unsafe { cat.make_vk_clear_color() },
                });
                vk_image_views.push(cat.target.view.raw);
                let color = super::ColorAttachmentKey {
                    base: cat.target.make_attachment_key(cat.ops, caps),
                    resolve: cat.resolve_target.as_ref().map(|target| {
                        target.make_attachment_key(crate::AttachmentOps::STORE, caps)
                    }),
                };

                rp_key.colors.push(Some(color));
                fb_key.attachments.push(cat.target.view.attachment.clone());
                if let Some(ref at) = cat.resolve_target {
                    vk_clear_values.push(unsafe { mem::zeroed() });
                    vk_image_views.push(at.view.raw);
                    fb_key.attachments.push(at.view.attachment.clone());
                }

                if let Some(multiview) = desc.multiview {
                    assert_eq!(cat.target.view.layers, multiview);
                    if let Some(ref resolve_target) = cat.resolve_target {
                        assert_eq!(resolve_target.view.layers, multiview);
                    }
                }
            } else {
                rp_key.colors.push(None);
            }
        }
        if let Some(ref ds) = desc.depth_stencil_attachment {
            vk_clear_values.push(vk::ClearValue {
                depth_stencil: vk::ClearDepthStencilValue {
                    depth: ds.clear_value.0,
                    stencil: ds.clear_value.1,
                },
            });
            vk_image_views.push(ds.target.view.raw);
            rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
                base: ds.target.make_attachment_key(ds.depth_ops, caps),
                stencil_ops: ds.stencil_ops,
            });
            fb_key.attachments.push(ds.target.view.attachment.clone());

            if let Some(multiview) = desc.multiview {
                assert_eq!(ds.target.view.layers, multiview);
            }
        }
        rp_key.sample_count = fb_key.sample_count;
        rp_key.multiview = desc.multiview;

        let render_area = vk::Rect2D {
            offset: vk::Offset2D { x: 0, y: 0 },
            extent: vk::Extent2D {
                width: desc.extent.width,
                height: desc.extent.height,
            },
        };
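        // Flip Y by giving the viewport a negative height, matching wgpu's
        // coordinate convention; drivers flagged with `flip_y_requires_shift`
        // also need the origin moved down by the full height.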
        let vk_viewports = [vk::Viewport {
            x: 0.0,
            y: if self.device.private_caps.flip_y_requires_shift {
                desc.extent.height as f32
            } else {
                0.0
            },
            width: desc.extent.width as f32,
            height: -(desc.extent.height as f32),
            min_depth: 0.0,
            max_depth: 1.0,
        }];

        let raw_pass = self.device.make_render_pass(rp_key).unwrap();
        let raw_framebuffer = self
            .device
            .make_framebuffer(fb_key, raw_pass, desc.label)
            .unwrap();

        let mut vk_info = vk::RenderPassBeginInfo::default()
            .render_pass(raw_pass)
            .render_area(render_area)
            .clear_values(&vk_clear_values)
            .framebuffer(raw_framebuffer);
        let mut vk_attachment_info = if caps.imageless_framebuffers {
            Some(vk::RenderPassAttachmentBeginInfo::default().attachments(&vk_image_views))
        } else {
            None
        };
        if let Some(attachment_info) = vk_attachment_info.as_mut() {
            vk_info = vk_info.push_next(attachment_info);
        }

        if let Some(label) = desc.label {
            unsafe { self.begin_debug_marker(label) };
            self.rpass_debug_marker_active = true;
        }

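        // Beginning-of-pass timestamps are written immediately; the
        // end-of-pass timestamp is stashed for `end_render_pass` to write via
        // `write_pass_end_timestamp_if_requested`.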
        if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
            if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
                unsafe {
                    self.write_timestamp(timestamp_writes.query_set, index);
                }
            }
            self.end_of_pass_timer_query = timestamp_writes
                .end_of_pass_write_index
                .map(|index| (timestamp_writes.query_set.raw, index));
        }

        unsafe {
            self.device
                .raw
                .cmd_set_viewport(self.active, 0, &vk_viewports);
            self.device
                .raw
                .cmd_set_scissor(self.active, 0, &[render_area]);
            self.device.raw.cmd_begin_render_pass(
                self.active,
                &vk_info,
                vk::SubpassContents::INLINE,
            );
        };

        self.bind_point = vk::PipelineBindPoint::GRAPHICS;
    }
    unsafe fn end_render_pass(&mut self) {
        unsafe {
            self.device.raw.cmd_end_render_pass(self.active);
        }

        self.write_pass_end_timestamp_if_requested();

        if self.rpass_debug_marker_active {
            unsafe {
                self.end_debug_marker();
            }
            self.rpass_debug_marker_active = false;
        }
    }

    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        let sets = [*group.set.raw()];
        unsafe {
            self.device.raw.cmd_bind_descriptor_sets(
                self.active,
                self.bind_point,
                layout.raw,
                index,
                &sets,
                dynamic_offsets,
            )
        };
    }
    unsafe fn set_push_constants(
        &mut self,
        layout: &super::PipelineLayout,
        stages: wgt::ShaderStages,
        offset_bytes: u32,
        data: &[u32],
    ) {
        unsafe {
            self.device.raw.cmd_push_constants(
                self.active,
                layout.raw,
                conv::map_shader_stage(stages),
                offset_bytes,
                bytemuck::cast_slice(data),
            )
        };
    }

    unsafe fn insert_debug_marker(&mut self, label: &str) {
        if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
            let cstr = self.temp.make_c_str(label);
            let vk_label = vk::DebugUtilsLabelEXT::default().label_name(cstr);
            unsafe { ext.cmd_insert_debug_utils_label(self.active, &vk_label) };
        }
    }
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
            let cstr = self.temp.make_c_str(group_label);
            let vk_label = vk::DebugUtilsLabelEXT::default().label_name(cstr);
            unsafe { ext.cmd_begin_debug_utils_label(self.active, &vk_label) };
        }
    }
    unsafe fn end_debug_marker(&mut self) {
        if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
            unsafe { ext.cmd_end_debug_utils_label(self.active) };
        }
    }

    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        unsafe {
            self.device.raw.cmd_bind_pipeline(
                self.active,
                vk::PipelineBindPoint::GRAPHICS,
                pipeline.raw,
            )
        };
    }

    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: crate::BufferBinding<'a, super::Buffer>,
        format: wgt::IndexFormat,
    ) {
        unsafe {
            self.device.raw.cmd_bind_index_buffer(
                self.active,
                binding.buffer.raw,
                binding.offset,
                conv::map_index_format(format),
            )
        };
    }
    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: crate::BufferBinding<'a, super::Buffer>,
    ) {
        let vk_buffers = [binding.buffer.raw];
        let vk_offsets = [binding.offset];
        unsafe {
            self.device
                .raw
                .cmd_bind_vertex_buffers(self.active, index, &vk_buffers, &vk_offsets)
        };
    }
    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth_range: Range<f32>) {
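        // Same Y-flip as the default viewport in `begin_render_pass`: negative
        // height, with the origin shifted when the driver requires it.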
        let vk_viewports = [vk::Viewport {
            x: rect.x,
            y: if self.device.private_caps.flip_y_requires_shift {
                rect.y + rect.h
            } else {
                rect.y
            },
            width: rect.w,
            height: -rect.h,
            min_depth: depth_range.start,
            max_depth: depth_range.end,
        }];
        unsafe {
            self.device
                .raw
                .cmd_set_viewport(self.active, 0, &vk_viewports)
        };
    }
    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
        let vk_scissors = [vk::Rect2D {
            offset: vk::Offset2D {
                x: rect.x as i32,
                y: rect.y as i32,
            },
            extent: vk::Extent2D {
                width: rect.w,
                height: rect.h,
            },
        }];
        unsafe {
            self.device
                .raw
                .cmd_set_scissor(self.active, 0, &vk_scissors)
        };
    }
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        unsafe {
            self.device.raw.cmd_set_stencil_reference(
                self.active,
                vk::StencilFaceFlags::FRONT_AND_BACK,
                value,
            )
        };
    }
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        unsafe { self.device.raw.cmd_set_blend_constants(self.active, color) };
    }

    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw(
                self.active,
                vertex_count,
                instance_count,
                first_vertex,
                first_instance,
            )
        };
    }
    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indexed(
                self.active,
                index_count,
                instance_count,
                first_index,
                base_vertex,
                first_instance,
            )
        };
    }
    unsafe fn draw_mesh_tasks(
        &mut self,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        if let Some(ref t) = self.device.extension_fns.mesh_shading {
            unsafe {
                t.cmd_draw_mesh_tasks(self.active, group_count_x, group_count_y, group_count_z);
            };
        } else {
            panic!("Feature `MESH_SHADING` not enabled");
        }
    }
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indirect(
                self.active,
                buffer.raw,
                offset,
                draw_count,
                size_of::<wgt::DrawIndirectArgs>() as u32,
            )
        };
    }
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indexed_indirect(
                self.active,
                buffer.raw,
                offset,
                draw_count,
                size_of::<wgt::DrawIndexedIndirectArgs>() as u32,
            )
        };
    }
    unsafe fn draw_mesh_tasks_indirect(
        &mut self,
        buffer: &<Self::A as crate::Api>::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        if let Some(ref t) = self.device.extension_fns.mesh_shading {
            unsafe {
                t.cmd_draw_mesh_tasks_indirect(
                    self.active,
                    buffer.raw,
                    offset,
                    draw_count,
                    size_of::<wgt::DispatchIndirectArgs>() as u32,
                );
            };
        } else {
            panic!("Feature `MESH_SHADING` not enabled");
        }
    }
    unsafe fn draw_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let stride = size_of::<wgt::DrawIndirectArgs>() as u32;
        match self.device.extension_fns.draw_indirect_count {
            Some(ref t) => {
                unsafe {
                    t.cmd_draw_indirect_count(
                        self.active,
                        buffer.raw,
                        offset,
                        count_buffer.raw,
                        count_offset,
                        max_count,
                        stride,
                    )
                };
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let stride = size_of::<wgt::DrawIndexedIndirectArgs>() as u32;
        match self.device.extension_fns.draw_indirect_count {
            Some(ref t) => {
                unsafe {
                    t.cmd_draw_indexed_indirect_count(
                        self.active,
                        buffer.raw,
                        offset,
                        count_buffer.raw,
                        count_offset,
                        max_count,
                        stride,
                    )
                };
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }
    unsafe fn draw_mesh_tasks_indirect_count(
        &mut self,
        buffer: &<Self::A as crate::Api>::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
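        // Both `DRAW_INDIRECT_COUNT` and mesh shading are required here; the
        // former is checked explicitly since the call itself goes through the
        // mesh-shading function table.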
        if self.device.extension_fns.draw_indirect_count.is_none() {
            panic!("Feature `DRAW_INDIRECT_COUNT` not enabled");
        }
        if let Some(ref t) = self.device.extension_fns.mesh_shading {
            unsafe {
                t.cmd_draw_mesh_tasks_indirect_count(
                    self.active,
                    buffer.raw,
                    offset,
                    count_buffer.raw,
                    count_offset,
                    max_count,
                    size_of::<wgt::DispatchIndirectArgs>() as u32,
                );
            };
        } else {
            panic!("Feature `MESH_SHADING` not enabled");
        }
    }

    unsafe fn begin_compute_pass(
        &mut self,
        desc: &crate::ComputePassDescriptor<'_, super::QuerySet>,
    ) {
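        // Record the bind point so subsequent `set_bind_group` calls target
        // the compute pipeline for the duration of this pass.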
        self.bind_point = vk::PipelineBindPoint::COMPUTE;
        if let Some(label) = desc.label {
            unsafe { self.begin_debug_marker(label) };
            self.rpass_debug_marker_active = true;
        }

        if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
            if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
                unsafe {
                    self.write_timestamp(timestamp_writes.query_set, index);
                }
            }
            self.end_of_pass_timer_query = timestamp_writes
                .end_of_pass_write_index
                .map(|index| (timestamp_writes.query_set.raw, index));
        }
    }
    unsafe fn end_compute_pass(&mut self) {
        self.write_pass_end_timestamp_if_requested();

        if self.rpass_debug_marker_active {
            unsafe { self.end_debug_marker() };
            self.rpass_debug_marker_active = false;
        }
    }

    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        unsafe {
            self.device.raw.cmd_bind_pipeline(
                self.active,
                vk::PipelineBindPoint::COMPUTE,
                pipeline.raw,
            )
        };
    }

    unsafe fn dispatch(&mut self, count: [u32; 3]) {
        unsafe {
            self.device
                .raw
                .cmd_dispatch(self.active, count[0], count[1], count[2])
        };
    }
    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
        unsafe {
            self.device
                .raw
                .cmd_dispatch_indirect(self.active, buffer.raw, offset)
        }
    }

    unsafe fn copy_acceleration_structure_to_acceleration_structure(
        &mut self,
        src: &super::AccelerationStructure,
        dst: &super::AccelerationStructure,
        copy: wgt::AccelerationStructureCopy,
    ) {
        let ray_tracing_functions = self
            .device
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

        let mode = match copy {
            wgt::AccelerationStructureCopy::Clone => vk::CopyAccelerationStructureModeKHR::CLONE,
            wgt::AccelerationStructureCopy::Compact => {
                vk::CopyAccelerationStructureModeKHR::COMPACT
            }
        };

        unsafe {
            ray_tracing_functions
                .acceleration_structure
                .cmd_copy_acceleration_structure(
                    self.active,
                    &vk::CopyAccelerationStructureInfoKHR {
                        s_type: vk::StructureType::COPY_ACCELERATION_STRUCTURE_INFO_KHR,
                        p_next: std::ptr::null(),
                        src: src.raw,
                        dst: dst.raw,
                        mode,
                        _marker: Default::default(),
                    },
                );
        }
    }
}

#[test]
fn check_dst_image_layout() {
    assert_eq!(
        conv::derive_image_layout(wgt::TextureUses::COPY_DST, wgt::TextureFormat::Rgba8Unorm),
        DST_IMAGE_LAYOUT
    );
}