wgpu_hal/vulkan/command.rs

use super::conv;

use arrayvec::ArrayVec;
use ash::vk;

use std::{
    mem::{self, size_of},
    ops::Range,
    slice,
};

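// Command buffers are allocated from the pool in batches of this size, so
// `begin_encoding` only rarely has to go to the driver for more.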
const ALLOCATION_GRANULARITY: u32 = 16;
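// Fixed layout used for the destination image of every copy below;
// the `check_dst_image_layout` test at the bottom verifies it matches
// what `derive_image_layout` produces for COPY_DST.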
const DST_IMAGE_LAYOUT: vk::ImageLayout = vk::ImageLayout::TRANSFER_DST_OPTIMAL;

impl super::Texture {
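    /// Converts wgpu-hal buffer<->texture copy regions into
    /// `vk::BufferImageCopy` structs, clamping each region's extent to the
    /// texture's copyable size and converting the byte-based buffer layout
    /// into the texel-based values Vulkan expects.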
    fn map_buffer_copies<T>(&self, regions: T) -> impl Iterator<Item = vk::BufferImageCopy>
    where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (block_width, block_height) = self.format.block_dimensions();
        let format = self.format;
        let copy_size = self.copy_size;
        regions.map(move |r| {
            let extent = r.texture_base.max_copy_size(&copy_size).min(&r.size);
            let (image_subresource, image_offset) = conv::map_subresource_layers(&r.texture_base);
            vk::BufferImageCopy {
                buffer_offset: r.buffer_layout.offset,
                buffer_row_length: r.buffer_layout.bytes_per_row.map_or(0, |bpr| {
                    let block_size = format
                        .block_copy_size(Some(r.texture_base.aspect.map()))
                        .unwrap();
                    block_width * (bpr / block_size)
                }),
                buffer_image_height: r
                    .buffer_layout
                    .rows_per_image
                    .map_or(0, |rpi| rpi * block_height),
                image_subresource,
                image_offset,
                image_extent: conv::map_copy_extent(&extent),
            }
        })
    }
}

impl super::CommandEncoder {
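    /// Writes the end-of-pass timestamp that `begin_render_pass` /
    /// `begin_compute_pass` recorded in `end_of_pass_timer_query`, if any.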
    fn write_pass_end_timestamp_if_requested(&mut self) {
        if let Some((query_set, index)) = self.end_of_pass_timer_query.take() {
            unsafe {
                self.device.raw.cmd_write_timestamp(
                    self.active,
                    vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                    query_set,
                    index,
                );
            }
        }
    }
}

impl crate::CommandEncoder for super::CommandEncoder {
    type A = super::Api;

    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
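        // Take a command buffer from the free list, topping the list up from
        // the pool in batches of ALLOCATION_GRANULARITY when it runs dry.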
        if self.free.is_empty() {
            let vk_info = vk::CommandBufferAllocateInfo::default()
                .command_pool(self.raw)
                .command_buffer_count(ALLOCATION_GRANULARITY);
            let cmd_buf_vec = unsafe {
                self.device
                    .raw
                    .allocate_command_buffers(&vk_info)
                    .map_err(super::map_host_device_oom_err)?
            };
            self.free.extend(cmd_buf_vec);
        }
        let raw = self.free.pop().unwrap();

        // Set the name unconditionally, since there might be a
        // previous name assigned to this.
        unsafe { self.device.set_object_name(raw, label.unwrap_or_default()) };

        // Reset this in case the last renderpass was never ended.
        self.rpass_debug_marker_active = false;

        let vk_info = vk::CommandBufferBeginInfo::default()
            .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT);
        unsafe { self.device.raw.begin_command_buffer(raw, &vk_info) }
            .map_err(super::map_host_device_oom_err)?;
        self.active = raw;

        Ok(())
    }

    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        let raw = self.active;
        self.active = vk::CommandBuffer::null();
        unsafe { self.device.raw.end_command_buffer(raw) }.map_err(map_err)?;
        fn map_err(err: vk::Result) -> crate::DeviceError {
            // We don't use VK_KHR_video_encode_queue, so
            // VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR cannot be returned here.
            super::map_host_device_oom_err(err)
        }
        Ok(super::CommandBuffer { raw })
    }

    unsafe fn discard_encoding(&mut self) {
        // Safe use requires this is not called in the "closed" state, so the buffer
        // shouldn't be null. Assert this to make sure we're not pushing null
        // buffers to the discard pile.
        assert_ne!(self.active, vk::CommandBuffer::null());

        self.discarded.push(self.active);
        self.active = vk::CommandBuffer::null();
    }

    unsafe fn reset_all<I>(&mut self, cmd_bufs: I)
    where
        I: Iterator<Item = super::CommandBuffer>,
    {
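        // Resetting the pool resets every command buffer allocated from it in
        // one call, so both the returned and the discarded buffers can go
        // straight back onto the free list.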
        self.temp.clear();
        self.free
            .extend(cmd_bufs.into_iter().map(|cmd_buf| cmd_buf.raw));
        self.free.append(&mut self.discarded);
        let _ = unsafe {
            self.device
                .raw
                .reset_command_pool(self.raw, vk::CommandPoolResetFlags::default())
        };
    }

    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::BufferBarrier<'a, super::Buffer>>,
    {
        // Note: this is done so that we never end up with empty stage flags
        let mut src_stages = vk::PipelineStageFlags::TOP_OF_PIPE;
        let mut dst_stages = vk::PipelineStageFlags::BOTTOM_OF_PIPE;
        let vk_barriers = &mut self.temp.buffer_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let (src_stage, src_access) = conv::map_buffer_usage_to_barrier(bar.usage.from);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_buffer_usage_to_barrier(bar.usage.to);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::BufferMemoryBarrier::default()
                    .buffer(bar.buffer.raw)
                    .size(vk::WHOLE_SIZE)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access),
            )
        }

        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    vk_barriers,
                    &[],
                )
            };
        }
    }

    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::TextureBarrier<'a, super::Texture>>,
    {
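        // Unlike buffer barriers, image barriers also perform the layout
        // transition, with old/new layouts derived from the usage change.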
        let mut src_stages = vk::PipelineStageFlags::empty();
        let mut dst_stages = vk::PipelineStageFlags::empty();
        let vk_barriers = &mut self.temp.image_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let range = conv::map_subresource_range_combined_aspect(
                &bar.range,
                bar.texture.format,
                &self.device.private_caps,
            );
            let (src_stage, src_access) = conv::map_texture_usage_to_barrier(bar.usage.from);
            let src_layout = conv::derive_image_layout(bar.usage.from, bar.texture.format);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_texture_usage_to_barrier(bar.usage.to);
            let dst_layout = conv::derive_image_layout(bar.usage.to, bar.texture.format);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::ImageMemoryBarrier::default()
                    .image(bar.texture.raw)
                    .subresource_range(range)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)
                    .old_layout(src_layout)
                    .new_layout(dst_layout),
            );
        }

        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    &[],
                    vk_barriers,
                )
            };
        }
    }

    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        let range_size = range.end - range.start;
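        // Workaround: some drivers mishandle vkCmdFillBuffer when the fill is
        // at least 4096 bytes and the offset is not 16-byte aligned, so split
        // the fill into an unaligned prefix and a 16-byte-aligned remainder.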
        if self.device.workarounds.contains(
            super::Workarounds::FORCE_FILL_BUFFER_WITH_SIZE_GREATER_4096_ALIGNED_OFFSET_16,
        ) && range_size >= 4096
            && range.start % 16 != 0
        {
            let rounded_start = wgt::math::align_to(range.start, 16);
            let prefix_size = rounded_start - range.start;

            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    range.start,
                    prefix_size,
                    0,
                )
            };

            // This will never be zero, as rounding can only add up to 12 bytes,
            // and the total size is at least 4096.
            let suffix_size = range.end - rounded_start;

            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    rounded_start,
                    suffix_size,
                    0,
                )
            };
        } else {
            unsafe {
                self.device
                    .raw
                    .cmd_fill_buffer(self.active, buffer.raw, range.start, range_size, 0)
            };
        }
    }

    unsafe fn copy_buffer_to_buffer<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferCopy>,
    {
        let vk_regions_iter = regions.map(|r| vk::BufferCopy {
            src_offset: r.src_offset,
            dst_offset: r.dst_offset,
            size: r.size.get(),
        });

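        // Collect the regions into a stack-allocated SmallVec (here and in the
        // copies below) to avoid a heap allocation for small region counts.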
        unsafe {
            self.device.raw.cmd_copy_buffer(
                self.active,
                src.raw,
                dst.raw,
                &smallvec::SmallVec::<[vk::BufferCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &super::Texture,
        src_usage: crate::TextureUses,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.format);

        let vk_regions_iter = regions.map(|r| {
            let (src_subresource, src_offset) = conv::map_subresource_layers(&r.src_base);
            let (dst_subresource, dst_offset) = conv::map_subresource_layers(&r.dst_base);
            let extent = r
                .size
                .min(&r.src_base.max_copy_size(&src.copy_size))
                .min(&r.dst_base.max_copy_size(&dst.copy_size));
            vk::ImageCopy {
                src_subresource,
                src_offset,
                dst_subresource,
                dst_offset,
                extent: conv::map_copy_extent(&extent),
            }
        });

        unsafe {
            self.device.raw.cmd_copy_image(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                DST_IMAGE_LAYOUT,
                &smallvec::SmallVec::<[vk::ImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_buffer_to_texture<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let vk_regions_iter = dst.map_buffer_copies(regions);

        unsafe {
            self.device.raw.cmd_copy_buffer_to_image(
                self.active,
                src.raw,
                dst.raw,
                DST_IMAGE_LAYOUT,
                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &super::Texture,
        src_usage: crate::TextureUses,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.format);
        let vk_regions_iter = src.map_buffer_copies(regions);

        unsafe {
            self.device.raw.cmd_copy_image_to_buffer(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        unsafe {
            self.device.raw.cmd_begin_query(
                self.active,
                set.raw,
                index,
                vk::QueryControlFlags::empty(),
            )
        };
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, index: u32) {
        unsafe { self.device.raw.cmd_end_query(self.active, set.raw, index) };
    }
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        unsafe {
            self.device.raw.cmd_write_timestamp(
                self.active,
                vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                set.raw,
                index,
            )
        };
    }
    unsafe fn reset_queries(&mut self, set: &super::QuerySet, range: Range<u32>) {
        unsafe {
            self.device.raw.cmd_reset_query_pool(
                self.active,
                set.raw,
                range.start,
                range.end - range.start,
            )
        };
    }
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        stride: wgt::BufferSize,
    ) {
        unsafe {
            self.device.raw.cmd_copy_query_pool_results(
                self.active,
                set.raw,
                range.start,
                range.end - range.start,
                buffer.raw,
                offset,
                stride.get(),
                vk::QueryResultFlags::TYPE_64 | vk::QueryResultFlags::WAIT,
            )
        };
    }

    unsafe fn build_acceleration_structures<'a, T>(&mut self, descriptor_count: u32, descriptors: T)
    where
        super::Api: 'a,
        T: IntoIterator<
            Item = crate::BuildAccelerationStructureDescriptor<
                'a,
                super::Buffer,
                super::AccelerationStructure,
            >,
        >,
    {
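        // This proceeds in two passes: first, build all geometry and range
        // structs into stable storage; then, once the storage vectors will no
        // longer reallocate, wire up the raw pointers that the Vulkan build
        // info structs hold into that storage.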
        const CAPACITY_OUTER: usize = 8;
        const CAPACITY_INNER: usize = 1;
        let descriptor_count = descriptor_count as usize;

        let ray_tracing_functions = self
            .device
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

        let get_device_address = |buffer: Option<&super::Buffer>| unsafe {
            match buffer {
                Some(buffer) => ray_tracing_functions
                    .buffer_device_address
                    .get_buffer_device_address(
                        &vk::BufferDeviceAddressInfo::default().buffer(buffer.raw),
                    ),
                None => panic!("Buffers are required to build acceleration structures"),
            }
        };

        // storage for all the data required for cmd_build_acceleration_structures
        let mut ranges_storage = smallvec::SmallVec::<
            [smallvec::SmallVec<[vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER]>;
                CAPACITY_OUTER],
        >::with_capacity(descriptor_count);
        let mut geometries_storage = smallvec::SmallVec::<
            [smallvec::SmallVec<[vk::AccelerationStructureGeometryKHR; CAPACITY_INNER]>;
                CAPACITY_OUTER],
        >::with_capacity(descriptor_count);

        // pointers to all the data required for cmd_build_acceleration_structures
        let mut geometry_infos = smallvec::SmallVec::<
            [vk::AccelerationStructureBuildGeometryInfoKHR; CAPACITY_OUTER],
        >::with_capacity(descriptor_count);
        let mut ranges_ptrs = smallvec::SmallVec::<
            [&[vk::AccelerationStructureBuildRangeInfoKHR]; CAPACITY_OUTER],
        >::with_capacity(descriptor_count);

        for desc in descriptors {
            let (geometries, ranges) = match *desc.entries {
                crate::AccelerationStructureEntries::Instances(ref instances) => {
                    let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::default(
                    // TODO: Code is so large that rustfmt refuses to treat this... :(
                    )
                    .data(vk::DeviceOrHostAddressConstKHR {
                        device_address: get_device_address(instances.buffer),
                    });

                    let geometry = vk::AccelerationStructureGeometryKHR::default()
                        .geometry_type(vk::GeometryTypeKHR::INSTANCES)
                        .geometry(vk::AccelerationStructureGeometryDataKHR {
                            instances: instance_data,
                        });

                    let range = vk::AccelerationStructureBuildRangeInfoKHR::default()
                        .primitive_count(instances.count)
                        .primitive_offset(instances.offset);

                    (smallvec::smallvec![geometry], smallvec::smallvec![range])
                }
                crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
                    let mut ranges = smallvec::SmallVec::<
                        [vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    let mut geometries = smallvec::SmallVec::<
                        [vk::AccelerationStructureGeometryKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    for triangles in in_geometries {
                        let mut triangle_data =
                            vk::AccelerationStructureGeometryTrianglesDataKHR::default()
                                // IndexType::NONE_KHR is not set by default (due to being provided by VK_KHR_acceleration_structure) but unless there is an
                                // index buffer we need to have IndexType::NONE_KHR as our index type.
                                .index_type(vk::IndexType::NONE_KHR)
                                .vertex_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: get_device_address(triangles.vertex_buffer),
                                })
                                .vertex_format(conv::map_vertex_format(triangles.vertex_format))
                                .max_vertex(triangles.vertex_count)
                                .vertex_stride(triangles.vertex_stride);

                        let mut range = vk::AccelerationStructureBuildRangeInfoKHR::default();

                        if let Some(ref indices) = triangles.indices {
                            triangle_data = triangle_data
                                .index_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: get_device_address(indices.buffer),
                                })
                                .index_type(conv::map_index_format(indices.format));

                            range = range
                                .primitive_count(indices.count / 3)
                                .primitive_offset(indices.offset)
                                .first_vertex(triangles.first_vertex);
                        } else {
                            range = range
                                .primitive_count(triangles.vertex_count)
                                .first_vertex(triangles.first_vertex);
                        }

                        if let Some(ref transform) = triangles.transform {
                            let transform_device_address = unsafe {
                                ray_tracing_functions
                                    .buffer_device_address
                                    .get_buffer_device_address(
                                        &vk::BufferDeviceAddressInfo::default()
                                            .buffer(transform.buffer.raw),
                                    )
                            };
                            triangle_data =
                                triangle_data.transform_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: transform_device_address,
                                });

                            range = range.transform_offset(transform.offset);
                        }

                        let geometry = vk::AccelerationStructureGeometryKHR::default()
                            .geometry_type(vk::GeometryTypeKHR::TRIANGLES)
                            .geometry(vk::AccelerationStructureGeometryDataKHR {
                                triangles: triangle_data,
                            })
                            .flags(conv::map_acceleration_structure_geometry_flags(
                                triangles.flags,
                            ));

                        geometries.push(geometry);
                        ranges.push(range);
                    }
                    (geometries, ranges)
                }
                crate::AccelerationStructureEntries::AABBs(ref in_geometries) => {
                    let mut ranges = smallvec::SmallVec::<
                        [vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    let mut geometries = smallvec::SmallVec::<
                        [vk::AccelerationStructureGeometryKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    for aabb in in_geometries {
                        let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::default()
                            .data(vk::DeviceOrHostAddressConstKHR {
                                device_address: get_device_address(aabb.buffer),
                            })
                            .stride(aabb.stride);

                        let range = vk::AccelerationStructureBuildRangeInfoKHR::default()
                            .primitive_count(aabb.count)
                            .primitive_offset(aabb.offset);

                        let geometry = vk::AccelerationStructureGeometryKHR::default()
                            .geometry_type(vk::GeometryTypeKHR::AABBS)
                            .geometry(vk::AccelerationStructureGeometryDataKHR {
                                aabbs: aabbs_data,
                            })
                            .flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));

                        geometries.push(geometry);
                        ranges.push(range);
                    }
                    (geometries, ranges)
                }
            };

            ranges_storage.push(ranges);
            geometries_storage.push(geometries);

            let scratch_device_address = unsafe {
                ray_tracing_functions
                    .buffer_device_address
                    .get_buffer_device_address(
                        &vk::BufferDeviceAddressInfo::default().buffer(desc.scratch_buffer.raw),
                    )
            };
            let ty = match *desc.entries {
                crate::AccelerationStructureEntries::Instances(_) => {
                    vk::AccelerationStructureTypeKHR::TOP_LEVEL
                }
                _ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
            };
            let mut geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
                .ty(ty)
                .mode(conv::map_acceleration_structure_build_mode(desc.mode))
                .flags(conv::map_acceleration_structure_flags(desc.flags))
                .dst_acceleration_structure(desc.destination_acceleration_structure.raw)
                .scratch_data(vk::DeviceOrHostAddressKHR {
                    device_address: scratch_device_address + desc.scratch_buffer_offset,
                });

            if desc.mode == crate::AccelerationStructureBuildMode::Update {
                geometry_info.src_acceleration_structure = desc
                    .source_acceleration_structure
                    .unwrap_or(desc.destination_acceleration_structure)
                    .raw;
            }

            geometry_infos.push(geometry_info);
        }

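        // Second pass: the storage vectors are complete and will no longer
        // move, so pointers into them stay valid for the build call below.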
        for (i, geometry_info) in geometry_infos.iter_mut().enumerate() {
            geometry_info.geometry_count = geometries_storage[i].len() as u32;
            geometry_info.p_geometries = geometries_storage[i].as_ptr();
            ranges_ptrs.push(&ranges_storage[i]);
        }

        unsafe {
            ray_tracing_functions
                .acceleration_structure
                .cmd_build_acceleration_structures(self.active, &geometry_infos, &ranges_ptrs);
        }
    }

    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        barrier: crate::AccelerationStructureBarrier,
    ) {
        let (src_stage, src_access) = conv::map_acceleration_structure_usage_to_barrier(
            barrier.usage.from,
            self.device.features,
        );
        let (dst_stage, dst_access) = conv::map_acceleration_structure_usage_to_barrier(
            barrier.usage.to,
            self.device.features,
        );

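        // OR in TOP_OF_PIPE / BOTTOM_OF_PIPE so the stage masks are never
        // empty (mirroring the note in `transition_buffers`).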
        unsafe {
            self.device.raw.cmd_pipeline_barrier(
                self.active,
                src_stage | vk::PipelineStageFlags::TOP_OF_PIPE,
                dst_stage | vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                vk::DependencyFlags::empty(),
                &[vk::MemoryBarrier::default()
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)],
                &[],
                &[],
            )
        };
    }
    // render

    unsafe fn begin_render_pass(
        &mut self,
        desc: &crate::RenderPassDescriptor<super::QuerySet, super::TextureView>,
    ) {
        let mut vk_clear_values =
            ArrayVec::<vk::ClearValue, { super::MAX_TOTAL_ATTACHMENTS }>::new();
        let mut vk_image_views = ArrayVec::<vk::ImageView, { super::MAX_TOTAL_ATTACHMENTS }>::new();
        let mut rp_key = super::RenderPassKey::default();
        let mut fb_key = super::FramebufferKey {
            attachments: ArrayVec::default(),
            extent: desc.extent,
            sample_count: desc.sample_count,
        };
        let caps = &self.device.private_caps;

        for cat in desc.color_attachments {
            if let Some(cat) = cat.as_ref() {
                vk_clear_values.push(vk::ClearValue {
                    color: unsafe { cat.make_vk_clear_color() },
                });
                vk_image_views.push(cat.target.view.raw);
                let color = super::ColorAttachmentKey {
                    base: cat.target.make_attachment_key(cat.ops, caps),
                    resolve: cat.resolve_target.as_ref().map(|target| {
                        target.make_attachment_key(crate::AttachmentOps::STORE, caps)
                    }),
                };

                rp_key.colors.push(Some(color));
                fb_key.attachments.push(cat.target.view.attachment.clone());
                if let Some(ref at) = cat.resolve_target {
                    vk_clear_values.push(unsafe { mem::zeroed() });
                    vk_image_views.push(at.view.raw);
                    fb_key.attachments.push(at.view.attachment.clone());
                }

                // Assert this attachment is valid for the detected multiview, as a sanity check
                // The driver crash for this is really bad on AMD, so the check is worth it
                if let Some(multiview) = desc.multiview {
                    assert_eq!(cat.target.view.layers, multiview);
                    if let Some(ref resolve_target) = cat.resolve_target {
                        assert_eq!(resolve_target.view.layers, multiview);
                    }
                }
            } else {
                rp_key.colors.push(None);
            }
        }
        if let Some(ref ds) = desc.depth_stencil_attachment {
            vk_clear_values.push(vk::ClearValue {
                depth_stencil: vk::ClearDepthStencilValue {
                    depth: ds.clear_value.0,
                    stencil: ds.clear_value.1,
                },
            });
            vk_image_views.push(ds.target.view.raw);
            rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
                base: ds.target.make_attachment_key(ds.depth_ops, caps),
                stencil_ops: ds.stencil_ops,
            });
            fb_key.attachments.push(ds.target.view.attachment.clone());

            // Assert this attachment is valid for the detected multiview, as a sanity check
            // The driver crash for this is really bad on AMD, so the check is worth it
            if let Some(multiview) = desc.multiview {
                assert_eq!(ds.target.view.layers, multiview);
            }
        }
        rp_key.sample_count = fb_key.sample_count;
        rp_key.multiview = desc.multiview;

        let render_area = vk::Rect2D {
            offset: vk::Offset2D { x: 0, y: 0 },
            extent: vk::Extent2D {
                width: desc.extent.width,
                height: desc.extent.height,
            },
        };
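        // A negative viewport height (with the origin shifted when the driver
        // requires it) flips the Y axis to match wgpu's coordinate system.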
        let vk_viewports = [vk::Viewport {
            x: 0.0,
            y: if self.device.private_caps.flip_y_requires_shift {
                desc.extent.height as f32
            } else {
                0.0
            },
            width: desc.extent.width as f32,
            height: -(desc.extent.height as f32),
            min_depth: 0.0,
            max_depth: 1.0,
        }];

        let raw_pass = self.device.make_render_pass(rp_key).unwrap();
        let raw_framebuffer = self
            .device
            .make_framebuffer(fb_key, raw_pass, desc.label)
            .unwrap();

        let mut vk_info = vk::RenderPassBeginInfo::default()
            .render_pass(raw_pass)
            .render_area(render_area)
            .clear_values(&vk_clear_values)
            .framebuffer(raw_framebuffer);
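        // With imageless framebuffers, the attachment views are supplied here
        // at begin time instead of being baked into the framebuffer object.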
        let mut vk_attachment_info = if caps.imageless_framebuffers {
            Some(vk::RenderPassAttachmentBeginInfo::default().attachments(&vk_image_views))
        } else {
            None
        };
        if let Some(attachment_info) = vk_attachment_info.as_mut() {
            vk_info = vk_info.push_next(attachment_info);
        }

        if let Some(label) = desc.label {
            unsafe { self.begin_debug_marker(label) };
            self.rpass_debug_marker_active = true;
        }

        // Start timestamp if any (before all other commands but after debug marker)
        if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
            if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
                unsafe {
                    self.write_timestamp(timestamp_writes.query_set, index);
                }
            }
            self.end_of_pass_timer_query = timestamp_writes
                .end_of_pass_write_index
                .map(|index| (timestamp_writes.query_set.raw, index));
        }

        unsafe {
            self.device
                .raw
                .cmd_set_viewport(self.active, 0, &vk_viewports);
            self.device
                .raw
                .cmd_set_scissor(self.active, 0, &[render_area]);
            self.device.raw.cmd_begin_render_pass(
                self.active,
                &vk_info,
                vk::SubpassContents::INLINE,
            );
        };

        self.bind_point = vk::PipelineBindPoint::GRAPHICS;
    }
    unsafe fn end_render_pass(&mut self) {
        unsafe {
            self.device.raw.cmd_end_render_pass(self.active);
        }

        // After all other commands but before debug marker, so this is still seen as part of this pass.
        self.write_pass_end_timestamp_if_requested();

        if self.rpass_debug_marker_active {
            unsafe {
                self.end_debug_marker();
            }
            self.rpass_debug_marker_active = false;
        }
    }

    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        let sets = [*group.set.raw()];
        unsafe {
            self.device.raw.cmd_bind_descriptor_sets(
                self.active,
                self.bind_point,
                layout.raw,
                index,
                &sets,
                dynamic_offsets,
            )
        };
    }
    unsafe fn set_push_constants(
        &mut self,
        layout: &super::PipelineLayout,
        stages: wgt::ShaderStages,
        offset_bytes: u32,
        data: &[u32],
    ) {
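        // vkCmdPushConstants takes a byte slice, so reinterpret the `u32` data
        // as bytes (hence the `* 4` on the length).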
        unsafe {
            self.device.raw.cmd_push_constants(
                self.active,
                layout.raw,
                conv::map_shader_stage(stages),
                offset_bytes,
                slice::from_raw_parts(data.as_ptr().cast(), data.len() * 4),
            )
        };
    }

    unsafe fn insert_debug_marker(&mut self, label: &str) {
        if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
            let cstr = self.temp.make_c_str(label);
            let vk_label = vk::DebugUtilsLabelEXT::default().label_name(cstr);
            unsafe { ext.cmd_insert_debug_utils_label(self.active, &vk_label) };
        }
    }
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
            let cstr = self.temp.make_c_str(group_label);
            let vk_label = vk::DebugUtilsLabelEXT::default().label_name(cstr);
            unsafe { ext.cmd_begin_debug_utils_label(self.active, &vk_label) };
        }
    }
    unsafe fn end_debug_marker(&mut self) {
        if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
            unsafe { ext.cmd_end_debug_utils_label(self.active) };
        }
    }

    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        unsafe {
            self.device.raw.cmd_bind_pipeline(
                self.active,
                vk::PipelineBindPoint::GRAPHICS,
                pipeline.raw,
            )
        };
    }

    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: crate::BufferBinding<'a, super::Buffer>,
        format: wgt::IndexFormat,
    ) {
        unsafe {
            self.device.raw.cmd_bind_index_buffer(
                self.active,
                binding.buffer.raw,
                binding.offset,
                conv::map_index_format(format),
            )
        };
    }
    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: crate::BufferBinding<'a, super::Buffer>,
    ) {
        let vk_buffers = [binding.buffer.raw];
        let vk_offsets = [binding.offset];
        unsafe {
            self.device
                .raw
                .cmd_bind_vertex_buffers(self.active, index, &vk_buffers, &vk_offsets)
        };
    }
    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth_range: Range<f32>) {
        let vk_viewports = [vk::Viewport {
            x: rect.x,
            y: if self.device.private_caps.flip_y_requires_shift {
                rect.y + rect.h
            } else {
                rect.y
            },
            width: rect.w,
            height: -rect.h, // flip Y
            min_depth: depth_range.start,
            max_depth: depth_range.end,
        }];
        unsafe {
            self.device
                .raw
                .cmd_set_viewport(self.active, 0, &vk_viewports)
        };
    }
    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
        let vk_scissors = [vk::Rect2D {
            offset: vk::Offset2D {
                x: rect.x as i32,
                y: rect.y as i32,
            },
            extent: vk::Extent2D {
                width: rect.w,
                height: rect.h,
            },
        }];
        unsafe {
            self.device
                .raw
                .cmd_set_scissor(self.active, 0, &vk_scissors)
        };
    }
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        unsafe {
            self.device.raw.cmd_set_stencil_reference(
                self.active,
                vk::StencilFaceFlags::FRONT_AND_BACK,
                value,
            )
        };
    }
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        unsafe { self.device.raw.cmd_set_blend_constants(self.active, color) };
    }

    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw(
                self.active,
                vertex_count,
                instance_count,
                first_vertex,
                first_instance,
            )
        };
    }
    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indexed(
                self.active,
                index_count,
                instance_count,
                first_index,
                base_vertex,
                first_instance,
            )
        };
    }
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indirect(
                self.active,
                buffer.raw,
                offset,
                draw_count,
                size_of::<wgt::DrawIndirectArgs>() as u32,
            )
        };
    }
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indexed_indirect(
                self.active,
                buffer.raw,
                offset,
                draw_count,
                size_of::<wgt::DrawIndexedIndirectArgs>() as u32,
            )
        };
    }
    unsafe fn draw_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let stride = size_of::<wgt::DrawIndirectArgs>() as u32;
        match self.device.extension_fns.draw_indirect_count {
            Some(ref t) => {
                unsafe {
                    t.cmd_draw_indirect_count(
                        self.active,
                        buffer.raw,
                        offset,
                        count_buffer.raw,
                        count_offset,
                        max_count,
                        stride,
                    )
                };
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let stride = size_of::<wgt::DrawIndexedIndirectArgs>() as u32;
        match self.device.extension_fns.draw_indirect_count {
            Some(ref t) => {
                unsafe {
                    t.cmd_draw_indexed_indirect_count(
                        self.active,
                        buffer.raw,
                        offset,
                        count_buffer.raw,
                        count_offset,
                        max_count,
                        stride,
                    )
                };
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }

    // compute

    unsafe fn begin_compute_pass(
        &mut self,
        desc: &crate::ComputePassDescriptor<'_, super::QuerySet>,
    ) {
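        // Descriptor sets bound from here on target the compute bind point.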
        self.bind_point = vk::PipelineBindPoint::COMPUTE;
        if let Some(label) = desc.label {
            unsafe { self.begin_debug_marker(label) };
            self.rpass_debug_marker_active = true;
        }

        if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
            if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
                unsafe {
                    self.write_timestamp(timestamp_writes.query_set, index);
                }
            }
            self.end_of_pass_timer_query = timestamp_writes
                .end_of_pass_write_index
                .map(|index| (timestamp_writes.query_set.raw, index));
        }
    }
    unsafe fn end_compute_pass(&mut self) {
        self.write_pass_end_timestamp_if_requested();

        if self.rpass_debug_marker_active {
            unsafe { self.end_debug_marker() };
            self.rpass_debug_marker_active = false;
        }
    }

    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        unsafe {
            self.device.raw.cmd_bind_pipeline(
                self.active,
                vk::PipelineBindPoint::COMPUTE,
                pipeline.raw,
            )
        };
    }

    unsafe fn dispatch(&mut self, count: [u32; 3]) {
        unsafe {
            self.device
                .raw
                .cmd_dispatch(self.active, count[0], count[1], count[2])
        };
    }
    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
        unsafe {
            self.device
                .raw
                .cmd_dispatch_indirect(self.active, buffer.raw, offset)
        }
    }
}

#[test]
fn check_dst_image_layout() {
    assert_eq!(
        conv::derive_image_layout(crate::TextureUses::COPY_DST, wgt::TextureFormat::Rgba8Unorm),
        DST_IMAGE_LAYOUT
    );
}