1use super::{conv, Command as C};
2use arrayvec::ArrayVec;
3use std::{
4 mem::{self, size_of, size_of_val},
5 ops::Range,
6};
7
/// Per-texture-unit binding bookkeeping used by the encoder's `State`.
#[derive(Clone, Copy, Debug, Default)]
struct TextureSlotDesc {
    /// GL bind target the texture in this slot uses (e.g. `TEXTURE_2D`).
    tex_target: super::BindTarget,
    /// Index into `State::samplers` of the sampler paired with this texture
    /// slot, or `None` when no sampler is associated with the slot.
    sampler_index: Option<u8>,
}
13
/// CPU-side shadow of the encoder's current pipeline/pass state.
///
/// Commands are recorded into a `CommandBuffer` for deferred playback, so the
/// encoder tracks everything needed to translate draw calls (index format,
/// vertex bindings, stencil, push constants, …) here instead of touching GL.
pub(super) struct State {
    /// Current GL primitive topology value used by draw commands.
    topology: u32,
    primitive: super::PrimitiveState,
    index_format: wgt::IndexFormat,
    /// Base byte offset into the currently bound index buffer.
    index_offset: wgt::BufferAddress,
    /// Per-slot vertex buffer layout plus the currently bound buffer (if any).
    vertex_buffers:
        [(super::VertexBufferDesc, Option<super::BufferBinding>); crate::MAX_VERTEX_BUFFERS],
    vertex_attributes: ArrayVec<super::AttributeDesc, { super::MAX_VERTEX_ATTRIBUTES }>,
    color_targets: ArrayVec<super::ColorTargetDesc, { crate::MAX_COLOR_ATTACHMENTS }>,
    stencil: super::StencilState,
    depth_bias: wgt::DepthBiasState,
    alpha_to_coverage_enabled: bool,
    samplers: [Option<glow::Sampler>; super::MAX_SAMPLERS],
    texture_slots: [TextureSlotDesc; super::MAX_TEXTURE_SLOTS],
    /// Extent of the current render pass; used when resolving attachments.
    render_size: wgt::Extent3d,
    /// MSAA resolves deferred to `end_render_pass`.
    resolve_attachments: ArrayVec<(u32, super::TextureView), { crate::MAX_COLOR_ATTACHMENTS }>,
    /// Attachments to invalidate at pass end (ops without STORE); +2 leaves
    /// room for the depth and stencil attachments beyond the color ones.
    invalidate_attachments: ArrayVec<u32, { crate::MAX_COLOR_ATTACHMENTS + 2 }>,
    /// Whether a debug group was pushed for the current pass label.
    has_pass_label: bool,
    /// Bitmask of vertex buffer slots with per-instance step mode.
    instance_vbuf_mask: usize,
    /// Bitmask of vertex buffer slots whose bindings must be re-emitted.
    dirty_vbuf_mask: usize,
    /// `first_instance` value currently baked into the vertex bindings, for
    /// backends that emulate instancing offsets.
    active_first_instance: u32,
    first_instance_location: Option<glow::UniformLocation>,
    push_constant_descs: ArrayVec<super::PushConstantDesc, { super::MAX_PUSH_CONSTANT_COMMANDS }>,
    // The current state of the push constant data block.
    current_push_constant_data: [u32; super::MAX_PUSH_CONSTANTS],
    /// Timestamp query to emit when the current pass ends, if requested.
    end_of_pass_timestamp: Option<glow::Query>,
}
41
// Hand-written `Default`: presumably required because at least one field type
// (e.g. the fixed-size `current_push_constant_data` array) does not get a
// derivable `Default` — TODO confirm. All fields start at their zero/empty
// values.
impl Default for State {
    fn default() -> Self {
        Self {
            topology: Default::default(),
            primitive: Default::default(),
            index_format: Default::default(),
            index_offset: Default::default(),
            vertex_buffers: Default::default(),
            vertex_attributes: Default::default(),
            color_targets: Default::default(),
            stencil: Default::default(),
            depth_bias: Default::default(),
            alpha_to_coverage_enabled: Default::default(),
            samplers: Default::default(),
            texture_slots: Default::default(),
            render_size: Default::default(),
            resolve_attachments: Default::default(),
            invalidate_attachments: Default::default(),
            has_pass_label: Default::default(),
            instance_vbuf_mask: Default::default(),
            dirty_vbuf_mask: Default::default(),
            active_first_instance: Default::default(),
            first_instance_location: Default::default(),
            push_constant_descs: Default::default(),
            // Explicit zeroed array (see note above about derivability).
            current_push_constant_data: [0; super::MAX_PUSH_CONSTANTS],
            end_of_pass_timestamp: Default::default(),
        }
    }
}
71
72impl super::CommandBuffer {
73 fn clear(&mut self) {
74 self.label = None;
75 self.commands.clear();
76 self.data_bytes.clear();
77 self.queries.clear();
78 }
79
80 fn add_marker(&mut self, marker: &str) -> Range<u32> {
81 let start = self.data_bytes.len() as u32;
82 self.data_bytes.extend(marker.as_bytes());
83 start..self.data_bytes.len() as u32
84 }
85
86 fn add_push_constant_data(&mut self, data: &[u32]) -> Range<u32> {
87 let data_raw =
88 unsafe { std::slice::from_raw_parts(data.as_ptr().cast(), size_of_val(data)) };
89 let start = self.data_bytes.len();
90 assert!(start < u32::MAX as usize);
91 self.data_bytes.extend_from_slice(data_raw);
92 let end = self.data_bytes.len();
93 assert!(end < u32::MAX as usize);
94 (start as u32)..(end as u32)
95 }
96}
97
impl Drop for super::CommandEncoder {
    fn drop(&mut self) {
        // Bring the trait into scope so the `discard_encoding` call below
        // resolves to the `crate::CommandEncoder` trait method.
        use crate::CommandEncoder;
        unsafe { self.discard_encoding() }
        self.counters.command_encoders.sub(1);
    }
}
105
impl super::CommandEncoder {
    /// Re-emit stencil-function commands from the tracked `State::stencil`.
    ///
    /// Uses a single FRONT_AND_BACK command when both faces agree on function,
    /// read mask and reference, otherwise one command per face.
    fn rebind_stencil_func(&mut self) {
        // Builds a SetStencilFunc command for one face from a StencilSide.
        fn make(s: &super::StencilSide, face: u32) -> C {
            C::SetStencilFunc {
                face,
                function: s.function,
                reference: s.reference,
                read_mask: s.mask_read,
            }
        }

        let s = &self.state.stencil;
        if s.front.function == s.back.function
            && s.front.mask_read == s.back.mask_read
            && s.front.reference == s.back.reference
        {
            self.cmd_buffer
                .commands
                .push(make(&s.front, glow::FRONT_AND_BACK));
        } else {
            self.cmd_buffer.commands.push(make(&s.front, glow::FRONT));
            self.cmd_buffer.commands.push(make(&s.back, glow::BACK));
        }
    }

    /// Re-emit vertex bindings for every slot marked in `dirty_vbuf_mask`.
    ///
    /// `first_instance` is folded into the buffer/attribute offsets for
    /// instance-stepped buffers (emulated instancing — see `prepare_draw`).
    fn rebind_vertex_data(&mut self, first_instance: u32) {
        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            // Per-buffer path: the layout lives with the buffer binding.
            for (index, pair) in self.state.vertex_buffers.iter().enumerate() {
                if self.state.dirty_vbuf_mask & (1 << index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) = match *pair {
                    // Slot is dirty but nothing is bound yet; leave it dirty.
                    (_, None) => continue,
                    (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                };
                let instance_offset = match buffer_desc.step {
                    wgt::VertexStepMode::Vertex => 0,
                    wgt::VertexStepMode::Instance => first_instance * buffer_desc.stride,
                };

                self.cmd_buffer.commands.push(C::SetVertexBuffer {
                    index: index as u32,
                    buffer: super::BufferBinding {
                        raw: vb.raw,
                        offset: vb.offset + instance_offset as wgt::BufferAddress,
                    },
                    buffer_desc,
                });
                // Slot handled: flip its dirty bit off.
                self.state.dirty_vbuf_mask ^= 1 << index;
            }
        } else {
            // Per-attribute path: each attribute carries its own pointer setup.
            let mut vbuf_mask = 0;
            for attribute in self.state.vertex_attributes.iter() {
                if self.state.dirty_vbuf_mask & (1 << attribute.buffer_index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) =
                    match self.state.vertex_buffers[attribute.buffer_index as usize] {
                        (_, None) => continue,
                        (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                    };

                // Bake the buffer offset (and emulated first_instance for
                // instance-stepped buffers) into the attribute offset.
                let mut attribute_desc = attribute.clone();
                attribute_desc.offset += vb.offset as u32;
                if buffer_desc.step == wgt::VertexStepMode::Instance {
                    attribute_desc.offset += buffer_desc.stride * first_instance;
                }

                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: Some(vb.raw),
                    buffer_desc,
                    attribute_desc,
                });
                vbuf_mask |= 1 << attribute.buffer_index;
            }
            // Clear only the slots that were actually re-emitted.
            self.state.dirty_vbuf_mask ^= vbuf_mask;
        }
    }

    /// Emit BindSampler commands for texture slots affected by the given
    /// dirty-texture / dirty-sampler bitmasks.
    fn rebind_sampler_states(&mut self, dirty_textures: u32, dirty_samplers: u32) {
        for (texture_index, slot) in self.state.texture_slots.iter().enumerate() {
            if dirty_textures & (1 << texture_index) != 0
                || slot
                    .sampler_index
                    .is_some_and(|si| dirty_samplers & (1 << si) != 0)
            {
                let sampler = slot
                    .sampler_index
                    .and_then(|si| self.state.samplers[si as usize]);
                self.cmd_buffer
                    .commands
                    .push(C::BindSampler(texture_index as u32, sampler));
            }
        }
    }

    /// Called before every draw: re-emits instance-stepped vertex bindings
    /// when `first_instance` must be emulated (no fully featured instancing).
    fn prepare_draw(&mut self, first_instance: u32) {
        // If the GL supports first_instance natively, no offset is baked in.
        let emulated_first_instance_value = if self
            .private_caps
            .contains(super::PrivateCapabilities::FULLY_FEATURED_INSTANCING)
        {
            0
        } else {
            first_instance
        };

        if emulated_first_instance_value != self.state.active_first_instance {
            // The emulated value changed: all instance-stepped buffers need
            // their offsets re-baked.
            self.state.dirty_vbuf_mask |= self.state.instance_vbuf_mask;
            self.state.active_first_instance = emulated_first_instance_value;
        }
        if self.state.dirty_vbuf_mask != 0 {
            self.rebind_vertex_data(emulated_first_instance_value);
        }
    }

    /// Shared program setup for render and compute pipelines: binds the
    /// program, copies uniform metadata, and refreshes sampler pairings.
    #[allow(clippy::clone_on_copy)] fn set_pipeline_inner(&mut self, inner: &super::PipelineInner) {
        self.cmd_buffer.commands.push(C::SetProgram(inner.program));

        // clone_from reuses existing allocations where possible.
        self.state
            .first_instance_location
            .clone_from(&inner.first_instance_location);
        self.state
            .push_constant_descs
            .clone_from(&inner.push_constant_descs);

        // Rebind textures whose sampler pairing changed with the new program.
        let mut dirty_textures = 0u32;
        for (texture_index, (slot, &sampler_index)) in self
            .state
            .texture_slots
            .iter_mut()
            .zip(inner.sampler_map.iter())
            .enumerate()
        {
            if slot.sampler_index != sampler_index {
                slot.sampler_index = sampler_index;
                dirty_textures |= 1 << texture_index;
            }
        }
        if dirty_textures != 0 {
            self.rebind_sampler_states(dirty_textures, 0);
        }
    }
}
259
260impl crate::CommandEncoder for super::CommandEncoder {
261 type A = super::Api;
262
263 unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
264 self.state = State::default();
265 self.cmd_buffer.label = label.map(str::to_string);
266 Ok(())
267 }
    unsafe fn discard_encoding(&mut self) {
        // Drop all recorded commands and side-band data; the encoder stays
        // usable for a fresh `begin_encoding`.
        self.cmd_buffer.clear();
    }
271 unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
272 Ok(mem::take(&mut self.cmd_buffer))
273 }
    unsafe fn reset_all<I>(&mut self, _command_buffers: I) {
        // Intentionally a no-op on this backend.
    }
277
278 unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
279 where
280 T: Iterator<Item = crate::BufferBarrier<'a, super::Buffer>>,
281 {
282 if !self
283 .private_caps
284 .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
285 {
286 return;
287 }
288 for bar in barriers {
289 if !bar
291 .usage
292 .from
293 .contains(crate::BufferUses::STORAGE_READ_WRITE)
294 {
295 continue;
296 }
297 self.cmd_buffer
298 .commands
299 .push(C::BufferBarrier(bar.buffer.raw.unwrap(), bar.usage.to));
300 }
301 }
302
303 unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
304 where
305 T: Iterator<Item = crate::TextureBarrier<'a, super::Texture>>,
306 {
307 if !self
308 .private_caps
309 .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
310 {
311 return;
312 }
313
314 let mut combined_usage = crate::TextureUses::empty();
315 for bar in barriers {
316 if !bar
318 .usage
319 .from
320 .contains(crate::TextureUses::STORAGE_READ_WRITE)
321 {
322 continue;
323 }
324 combined_usage |= bar.usage.to;
327 }
328
329 if !combined_usage.is_empty() {
330 self.cmd_buffer
331 .commands
332 .push(C::TextureBarrier(combined_usage));
333 }
334 }
335
336 unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
337 self.cmd_buffer.commands.push(C::ClearBuffer {
338 dst: buffer.clone(),
339 dst_target: buffer.target,
340 range,
341 });
342 }
343
344 unsafe fn copy_buffer_to_buffer<T>(
345 &mut self,
346 src: &super::Buffer,
347 dst: &super::Buffer,
348 regions: T,
349 ) where
350 T: Iterator<Item = crate::BufferCopy>,
351 {
352 let (src_target, dst_target) = if src.target == dst.target {
353 (glow::COPY_READ_BUFFER, glow::COPY_WRITE_BUFFER)
354 } else {
355 (src.target, dst.target)
356 };
357 for copy in regions {
358 self.cmd_buffer.commands.push(C::CopyBufferToBuffer {
359 src: src.clone(),
360 src_target,
361 dst: dst.clone(),
362 dst_target,
363 copy,
364 })
365 }
366 }
367
    #[cfg(webgl)]
    // WebGL-only: copies from an external DOM image source into a texture,
    // optionally premultiplying alpha during the copy.
    unsafe fn copy_external_image_to_texture<T>(
        &mut self,
        src: &wgt::CopyExternalImageSourceInfo,
        dst: &super::Texture,
        dst_premultiplication: bool,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let (dst_raw, dst_target) = dst.inner.as_native();
        for copy in regions {
            self.cmd_buffer
                .commands
                .push(C::CopyExternalImageToTexture {
                    src: src.clone(),
                    dst: dst_raw,
                    dst_target,
                    dst_format: dst.format,
                    dst_premultiplication,
                    copy,
                })
        }
    }
392
393 unsafe fn copy_texture_to_texture<T>(
394 &mut self,
395 src: &super::Texture,
396 _src_usage: crate::TextureUses,
397 dst: &super::Texture,
398 regions: T,
399 ) where
400 T: Iterator<Item = crate::TextureCopy>,
401 {
402 let (src_raw, src_target) = src.inner.as_native();
403 let (dst_raw, dst_target) = dst.inner.as_native();
404 for mut copy in regions {
405 copy.clamp_size_to_virtual(&src.copy_size, &dst.copy_size);
406 self.cmd_buffer.commands.push(C::CopyTextureToTexture {
407 src: src_raw,
408 src_target,
409 dst: dst_raw,
410 dst_target,
411 copy,
412 })
413 }
414 }
415
416 unsafe fn copy_buffer_to_texture<T>(
417 &mut self,
418 src: &super::Buffer,
419 dst: &super::Texture,
420 regions: T,
421 ) where
422 T: Iterator<Item = crate::BufferTextureCopy>,
423 {
424 let (dst_raw, dst_target) = dst.inner.as_native();
425
426 for mut copy in regions {
427 copy.clamp_size_to_virtual(&dst.copy_size);
428 self.cmd_buffer.commands.push(C::CopyBufferToTexture {
429 src: src.clone(),
430 src_target: src.target,
431 dst: dst_raw,
432 dst_target,
433 dst_format: dst.format,
434 copy,
435 })
436 }
437 }
438
439 unsafe fn copy_texture_to_buffer<T>(
440 &mut self,
441 src: &super::Texture,
442 _src_usage: crate::TextureUses,
443 dst: &super::Buffer,
444 regions: T,
445 ) where
446 T: Iterator<Item = crate::BufferTextureCopy>,
447 {
448 let (src_raw, src_target) = src.inner.as_native();
449 for mut copy in regions {
450 copy.clamp_size_to_virtual(&src.copy_size);
451 self.cmd_buffer.commands.push(C::CopyTextureToBuffer {
452 src: src_raw,
453 src_target,
454 src_format: src.format,
455 dst: dst.clone(),
456 dst_target: dst.target,
457 copy,
458 })
459 }
460 }
461
462 unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
463 let query = set.queries[index as usize];
464 self.cmd_buffer
465 .commands
466 .push(C::BeginQuery(query, set.target));
467 }
    unsafe fn end_query(&mut self, set: &super::QuerySet, _index: u32) {
        // Only the target is recorded; the index is unused here since the
        // command ends whatever query is active on that target.
        self.cmd_buffer.commands.push(C::EndQuery(set.target));
    }
471 unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
472 let query = set.queries[index as usize];
473 self.cmd_buffer.commands.push(C::TimestampQuery(query));
474 }
    unsafe fn reset_queries(&mut self, _set: &super::QuerySet, _range: Range<u32>) {
        // Intentionally a no-op on this backend.
    }
478 unsafe fn copy_query_results(
479 &mut self,
480 set: &super::QuerySet,
481 range: Range<u32>,
482 buffer: &super::Buffer,
483 offset: wgt::BufferAddress,
484 _stride: wgt::BufferSize,
485 ) {
486 let start = self.cmd_buffer.queries.len();
487 self.cmd_buffer
488 .queries
489 .extend_from_slice(&set.queries[range.start as usize..range.end as usize]);
490 let query_range = start as u32..self.cmd_buffer.queries.len() as u32;
491 self.cmd_buffer.commands.push(C::CopyQueryResults {
492 query_range,
493 dst: buffer.clone(),
494 dst_target: buffer.target,
495 dst_offset: offset,
496 });
497 }
498
    // Records all setup for a render pass: timestamps, debug label,
    // framebuffer attachments, viewport/scissor, and load-op clears.
    unsafe fn begin_render_pass(
        &mut self,
        desc: &crate::RenderPassDescriptor<super::QuerySet, super::TextureView>,
    ) {
        // The previous pass must have consumed its end-of-pass timestamp.
        debug_assert!(self.state.end_of_pass_timestamp.is_none());
        if let Some(ref t) = desc.timestamp_writes {
            if let Some(index) = t.beginning_of_pass_write_index {
                unsafe { self.write_timestamp(t.query_set, index) }
            }
            // The end-of-pass timestamp is deferred until `end_render_pass`.
            self.state.end_of_pass_timestamp = t
                .end_of_pass_write_index
                .map(|index| t.query_set.queries[index as usize]);
        }

        self.state.render_size = desc.extent;
        self.state.resolve_attachments.clear();
        self.state.invalidate_attachments.clear();
        if let Some(label) = desc.label {
            let range = self.cmd_buffer.add_marker(label);
            self.cmd_buffer.commands.push(C::PushDebugGroup(range));
            self.state.has_pass_label = true;
        }

        // WebGL can render to an externally-provided framebuffer, which
        // constrains what we may do below.
        let rendering_to_external_framebuffer = desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .any(|at| match at.target.view.inner {
                #[cfg(webgl)]
                super::TextureInner::ExternalFramebuffer { .. } => true,
                _ => false,
            });

        if rendering_to_external_framebuffer && desc.color_attachments.len() != 1 {
            panic!("Multiple render attachments with external framebuffers are not supported.");
        }

        // The attachment index is used as a bit position below (u32 masks).
        assert!(desc.color_attachments.len() <= 32);

        match desc
            .color_attachments
            .first()
            .filter(|at| at.is_some())
            .and_then(|at| at.as_ref().map(|at| &at.target.view.inner))
        {
            // Rendering straight to the default framebuffer: nothing to bind.
            Some(&super::TextureInner::DefaultRenderbuffer) => {
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: true });
            }
            _ => {
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: false });

                // Bind each color attachment, queueing resolves and
                // invalidations for `end_render_pass`.
                for (i, cat) in desc.color_attachments.iter().enumerate() {
                    if let Some(cat) = cat.as_ref() {
                        let attachment = glow::COLOR_ATTACHMENT0 + i as u32;
                        self.cmd_buffer.commands.push(C::BindAttachment {
                            attachment,
                            view: cat.target.view.clone(),
                        });
                        if let Some(ref rat) = cat.resolve_target {
                            self.state
                                .resolve_attachments
                                .push((attachment, rat.view.clone()));
                        }
                        // No STORE: contents may be discarded at pass end.
                        if !cat.ops.contains(crate::AttachmentOps::STORE) {
                            self.state.invalidate_attachments.push(attachment);
                        }
                    }
                }
                if let Some(ref dsat) = desc.depth_stencil_attachment {
                    let aspects = dsat.target.view.aspects;
                    // Pick the GL attachment point matching the view's aspects.
                    let attachment = match aspects {
                        crate::FormatAspects::DEPTH => glow::DEPTH_ATTACHMENT,
                        crate::FormatAspects::STENCIL => glow::STENCIL_ATTACHMENT,
                        _ => glow::DEPTH_STENCIL_ATTACHMENT,
                    };
                    self.cmd_buffer.commands.push(C::BindAttachment {
                        attachment,
                        view: dsat.target.view.clone(),
                    });
                    if aspects.contains(crate::FormatAspects::DEPTH)
                        && !dsat.depth_ops.contains(crate::AttachmentOps::STORE)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::DEPTH_ATTACHMENT);
                    }
                    if aspects.contains(crate::FormatAspects::STENCIL)
                        && !dsat.stencil_ops.contains(crate::AttachmentOps::STORE)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::STENCIL_ATTACHMENT);
                    }
                }
            }
        }

        // Default viewport/scissor cover the whole pass extent.
        let rect = crate::Rect {
            x: 0,
            y: 0,
            w: desc.extent.width as i32,
            h: desc.extent.height as i32,
        };
        self.cmd_buffer.commands.push(C::SetScissor(rect.clone()));
        self.cmd_buffer.commands.push(C::SetViewport {
            rect,
            depth: 0.0..1.0,
        });

        if !rendering_to_external_framebuffer {
            self.cmd_buffer
                .commands
                .push(C::SetDrawColorBuffers(desc.color_attachments.len() as u8));
        }

        // Issue load-op clears, choosing the clear command by sample type.
        for (i, cat) in desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .enumerate()
        {
            if !cat.ops.contains(crate::AttachmentOps::LOAD) {
                let c = &cat.clear_value;
                self.cmd_buffer.commands.push(
                    match cat.target.view.format.sample_type(None, None).unwrap() {
                        wgt::TextureSampleType::Float { .. } => C::ClearColorF {
                            draw_buffer: i as u32,
                            color: [c.r as f32, c.g as f32, c.b as f32, c.a as f32],
                            is_srgb: cat.target.view.format.is_srgb(),
                        },
                        wgt::TextureSampleType::Uint => C::ClearColorU(
                            i as u32,
                            [c.r as u32, c.g as u32, c.b as u32, c.a as u32],
                        ),
                        wgt::TextureSampleType::Sint => C::ClearColorI(
                            i as u32,
                            [c.r as i32, c.g as i32, c.b as i32, c.a as i32],
                        ),
                        // Color attachments can never have a Depth sample type.
                        wgt::TextureSampleType::Depth => unreachable!(),
                    },
                );
            }
        }

        if let Some(ref dsat) = desc.depth_stencil_attachment {
            let clear_depth = !dsat.depth_ops.contains(crate::AttachmentOps::LOAD);
            let clear_stencil = !dsat.stencil_ops.contains(crate::AttachmentOps::LOAD);

            // Prefer the combined clear when both aspects need clearing.
            if clear_depth && clear_stencil {
                self.cmd_buffer.commands.push(C::ClearDepthAndStencil(
                    dsat.clear_value.0,
                    dsat.clear_value.1,
                ));
            } else if clear_depth {
                self.cmd_buffer
                    .commands
                    .push(C::ClearDepth(dsat.clear_value.0));
            } else if clear_stencil {
                self.cmd_buffer
                    .commands
                    .push(C::ClearStencil(dsat.clear_value.1));
            }
        }
    }
    unsafe fn end_render_pass(&mut self) {
        // Flush the MSAA resolves queued by `begin_render_pass`.
        for (attachment, dst) in self.state.resolve_attachments.drain(..) {
            self.cmd_buffer.commands.push(C::ResolveAttachment {
                attachment,
                dst,
                size: self.state.render_size,
            });
        }
        // Discard attachments whose ops lacked STORE.
        if !self.state.invalidate_attachments.is_empty() {
            self.cmd_buffer.commands.push(C::InvalidateAttachments(
                self.state.invalidate_attachments.clone(),
            ));
            self.state.invalidate_attachments.clear();
        }
        if self.state.has_pass_label {
            self.cmd_buffer.commands.push(C::PopDebugGroup);
            self.state.has_pass_label = false;
        }
        // Reset per-pass vertex-binding tracking and unbind the attributes
        // this pass configured.
        self.state.instance_vbuf_mask = 0;
        self.state.dirty_vbuf_mask = 0;
        self.state.active_first_instance = 0;
        self.state.color_targets.clear();
        for vat in &self.state.vertex_attributes {
            self.cmd_buffer
                .commands
                .push(C::UnsetVertexAttribute(vat.location));
        }
        self.state.vertex_attributes.clear();
        self.state.primitive = super::PrimitiveState::default();

        // Emit the deferred end-of-pass timestamp, if one was requested.
        if let Some(query) = self.state.end_of_pass_timestamp.take() {
            self.cmd_buffer.commands.push(C::TimestampQuery(query));
        }
    }
708
    // Translates one bind group into BindBuffer/BindTexture/BindImage
    // commands, applying dynamic offsets in declaration order and refreshing
    // sampler pairings for any touched texture/sampler slots.
    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        // Dynamic offsets are consumed in the order dynamic bindings appear.
        let mut do_index = 0;
        let mut dirty_textures = 0u32;
        let mut dirty_samplers = 0u32;
        let group_info = &layout.group_infos[index as usize];

        for (binding_layout, raw_binding) in group_info.entries.iter().zip(group.contents.iter()) {
            // Map the wgpu binding number to the flat GL slot number.
            let slot = group_info.binding_to_slot[binding_layout.binding as usize] as u32;
            match *raw_binding {
                super::RawBinding::Buffer {
                    raw,
                    offset: base_offset,
                    size,
                } => {
                    let mut offset = base_offset;
                    let target = match binding_layout.ty {
                        wgt::BindingType::Buffer {
                            ty,
                            has_dynamic_offset,
                            min_binding_size: _,
                        } => {
                            if has_dynamic_offset {
                                offset += dynamic_offsets[do_index] as i32;
                                do_index += 1;
                            }
                            match ty {
                                wgt::BufferBindingType::Uniform => glow::UNIFORM_BUFFER,
                                wgt::BufferBindingType::Storage { .. } => {
                                    glow::SHADER_STORAGE_BUFFER
                                }
                            }
                        }
                        // A Buffer raw binding always has a Buffer layout type.
                        _ => unreachable!(),
                    };
                    self.cmd_buffer.commands.push(C::BindBuffer {
                        target,
                        slot,
                        buffer: raw,
                        offset,
                        size,
                    });
                }
                super::RawBinding::Sampler(sampler) => {
                    // Actual BindSampler commands are emitted in the
                    // `rebind_sampler_states` call below.
                    dirty_samplers |= 1 << slot;
                    self.state.samplers[slot as usize] = Some(sampler);
                }
                super::RawBinding::Texture {
                    raw,
                    target,
                    aspects,
                    ref mip_levels,
                } => {
                    dirty_textures |= 1 << slot;
                    self.state.texture_slots[slot as usize].tex_target = target;
                    self.cmd_buffer.commands.push(C::BindTexture {
                        slot,
                        texture: raw,
                        target,
                        aspects,
                        mip_levels: mip_levels.clone(),
                    });
                }
                super::RawBinding::Image(ref binding) => {
                    self.cmd_buffer.commands.push(C::BindImage {
                        slot,
                        binding: binding.clone(),
                    });
                }
            }
        }

        self.rebind_sampler_states(dirty_textures, dirty_samplers);
    }
788
789 unsafe fn set_push_constants(
790 &mut self,
791 _layout: &super::PipelineLayout,
792 _stages: wgt::ShaderStages,
793 offset_bytes: u32,
794 data: &[u32],
795 ) {
796 let start_words = offset_bytes / 4;
804 let end_words = start_words + data.len() as u32;
805 self.state.current_push_constant_data[start_words as usize..end_words as usize]
806 .copy_from_slice(data);
807
808 for uniform in self.state.push_constant_descs.iter().cloned() {
814 let uniform_size_words = uniform.size_bytes / 4;
815 let uniform_start_words = uniform.offset / 4;
816 let uniform_end_words = uniform_start_words + uniform_size_words;
817
818 let needs_updating =
820 start_words < uniform_end_words || uniform_start_words <= end_words;
821
822 if needs_updating {
823 let uniform_data = &self.state.current_push_constant_data
824 [uniform_start_words as usize..uniform_end_words as usize];
825
826 let range = self.cmd_buffer.add_push_constant_data(uniform_data);
827
828 self.cmd_buffer.commands.push(C::SetPushConstants {
829 uniform,
830 offset: range.start,
831 });
832 }
833 }
834 }
835
    unsafe fn insert_debug_marker(&mut self, label: &str) {
        // Marker text is stored out-of-line and referenced by byte range.
        let range = self.cmd_buffer.add_marker(label);
        self.cmd_buffer.commands.push(C::InsertDebugMarker(range));
    }
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        // Group label text is stored out-of-line and referenced by byte range.
        let range = self.cmd_buffer.add_marker(group_label);
        self.cmd_buffer.commands.push(C::PushDebugGroup(range));
    }
    unsafe fn end_debug_marker(&mut self) {
        // Closes the group opened by the matching `begin_debug_marker`.
        self.cmd_buffer.commands.push(C::PopDebugGroup);
    }
847
    // Diffs the new pipeline against the tracked state and emits only the
    // commands needed to bring GL in line: vertex layout, program, primitive,
    // depth/stencil, blending, and color targets.
    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        self.state.topology = conv::map_primitive_topology(pipeline.primitive.topology);

        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            // Layout path: attributes can be described without a bound buffer.
            for vat in pipeline.vertex_attributes.iter() {
                let vb = &pipeline.vertex_buffers[vat.buffer_index as usize];
                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: None,
                    buffer_desc: vb.clone(),
                    attribute_desc: vat.clone(),
                });
            }
        } else {
            // Attribute-pointer path: unbind the old attributes and mark every
            // referenced buffer slot dirty so `prepare_draw` re-emits them.
            for vat in &self.state.vertex_attributes {
                self.cmd_buffer
                    .commands
                    .push(C::UnsetVertexAttribute(vat.location));
            }
            self.state.vertex_attributes.clear();

            self.state.dirty_vbuf_mask = 0;
            for vat in pipeline.vertex_attributes.iter() {
                self.state.dirty_vbuf_mask |= 1 << vat.buffer_index;
                self.state.vertex_attributes.push(vat.clone());
            }
        }

        // Track which slots step per instance and which layouts changed.
        self.state.instance_vbuf_mask = 0;
        for (index, (&mut (ref mut state_desc, _), pipe_desc)) in self
            .state
            .vertex_buffers
            .iter_mut()
            .zip(pipeline.vertex_buffers.iter())
            .enumerate()
        {
            if pipe_desc.step == wgt::VertexStepMode::Instance {
                self.state.instance_vbuf_mask |= 1 << index;
            }
            if state_desc != pipe_desc {
                self.state.dirty_vbuf_mask |= 1 << index;
                *state_desc = pipe_desc.clone();
            }
        }

        // Program, push constants and sampler pairings (shared with compute).
        self.set_pipeline_inner(&pipeline.inner);

        // Set the state.
        let prim_state = conv::map_primitive_state(&pipeline.primitive);
        if prim_state != self.state.primitive {
            self.cmd_buffer
                .commands
                .push(C::SetPrimitive(prim_state.clone()));
            self.state.primitive = prim_state;
        }

        let mut aspects = crate::FormatAspects::empty();
        if pipeline.depth_bias != self.state.depth_bias {
            self.state.depth_bias = pipeline.depth_bias;
            self.cmd_buffer
                .commands
                .push(C::SetDepthBias(pipeline.depth_bias));
        }
        if let Some(ref depth) = pipeline.depth {
            aspects |= crate::FormatAspects::DEPTH;
            self.cmd_buffer.commands.push(C::SetDepth(depth.clone()));
        }
        if let Some(ref stencil) = pipeline.stencil {
            aspects |= crate::FormatAspects::STENCIL;
            self.state.stencil = stencil.clone();
            self.rebind_stencil_func();
            // One FRONT_AND_BACK command when both faces match, else per-face.
            if stencil.front.ops == stencil.back.ops
                && stencil.front.mask_write == stencil.back.mask_write
            {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT_AND_BACK,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
            } else {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::BACK,
                    write_mask: stencil.back.mask_write,
                    ops: stencil.back.ops.clone(),
                });
            }
        }
        self.cmd_buffer
            .commands
            .push(C::ConfigureDepthStencil(aspects));

        if pipeline.alpha_to_coverage_enabled != self.state.alpha_to_coverage_enabled {
            self.state.alpha_to_coverage_enabled = pipeline.alpha_to_coverage_enabled;
            self.cmd_buffer
                .commands
                .push(C::SetAlphaToCoverage(pipeline.alpha_to_coverage_enabled));
        }

        if self.state.color_targets[..] != pipeline.color_targets[..] {
            // If all targets share one descriptor, configure them with a
            // single broadcast command; otherwise one command per draw buffer.
            if pipeline
                .color_targets
                .iter()
                .skip(1)
                .any(|ct| *ct != pipeline.color_targets[0])
            {
                for (index, ct) in pipeline.color_targets.iter().enumerate() {
                    self.cmd_buffer.commands.push(C::SetColorTarget {
                        draw_buffer_index: Some(index as u32),
                        desc: ct.clone(),
                    });
                }
            } else {
                self.cmd_buffer.commands.push(C::SetColorTarget {
                    draw_buffer_index: None,
                    desc: pipeline.color_targets.first().cloned().unwrap_or_default(),
                });
            }
        }
        self.state.color_targets.clear();
        for ct in pipeline.color_targets.iter() {
            self.state.color_targets.push(ct.clone());
        }
    }
985
986 unsafe fn set_index_buffer<'a>(
987 &mut self,
988 binding: crate::BufferBinding<'a, super::Buffer>,
989 format: wgt::IndexFormat,
990 ) {
991 self.state.index_offset = binding.offset;
992 self.state.index_format = format;
993 self.cmd_buffer
994 .commands
995 .push(C::SetIndexBuffer(binding.buffer.raw.unwrap()));
996 }
997 unsafe fn set_vertex_buffer<'a>(
998 &mut self,
999 index: u32,
1000 binding: crate::BufferBinding<'a, super::Buffer>,
1001 ) {
1002 self.state.dirty_vbuf_mask |= 1 << index;
1003 let (_, ref mut vb) = self.state.vertex_buffers[index as usize];
1004 *vb = Some(super::BufferBinding {
1005 raw: binding.buffer.raw.unwrap(),
1006 offset: binding.offset,
1007 });
1008 }
1009 unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth: Range<f32>) {
1010 self.cmd_buffer.commands.push(C::SetViewport {
1011 rect: crate::Rect {
1012 x: rect.x as i32,
1013 y: rect.y as i32,
1014 w: rect.w as i32,
1015 h: rect.h as i32,
1016 },
1017 depth,
1018 });
1019 }
1020 unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
1021 self.cmd_buffer.commands.push(C::SetScissor(crate::Rect {
1022 x: rect.x as i32,
1023 y: rect.y as i32,
1024 w: rect.w as i32,
1025 h: rect.h as i32,
1026 }));
1027 }
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        // wgpu exposes a single reference value for both faces, so update
        // front and back together and re-emit the stencil functions.
        self.state.stencil.front.reference = value;
        self.state.stencil.back.reference = value;
        self.rebind_stencil_func();
    }
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        // Records the RGBA blend-constant color for playback.
        self.cmd_buffer.commands.push(C::SetBlendConstant(*color));
    }
1036
    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    ) {
        // Re-bakes instance-stepped vertex offsets if first_instance is
        // emulated on this GL.
        self.prepare_draw(first_instance);
        #[allow(clippy::clone_on_copy)] self.cmd_buffer.commands.push(C::Draw {
            topology: self.state.topology,
            first_vertex,
            vertex_count,
            first_instance,
            instance_count,
            first_instance_location: self.state.first_instance_location.clone(),
        });
    }
    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    ) {
        self.prepare_draw(first_instance);
        let (index_size, index_type) = match self.state.index_format {
            wgt::IndexFormat::Uint16 => (2, glow::UNSIGNED_SHORT),
            wgt::IndexFormat::Uint32 => (4, glow::UNSIGNED_INT),
        };
        // Byte offset of the first index within the bound index buffer.
        let index_offset = self.state.index_offset + index_size * first_index as wgt::BufferAddress;
        #[allow(clippy::clone_on_copy)] self.cmd_buffer.commands.push(C::DrawIndexed {
            topology: self.state.topology,
            index_type,
            index_offset,
            index_count,
            base_vertex,
            first_instance,
            instance_count,
            first_instance_location: self.state.first_instance_location.clone(),
        });
    }
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        self.prepare_draw(0);
        // Expand into one indirect-draw command per draw, stepping through the
        // packed argument structs in the indirect buffer.
        for draw in 0..draw_count as wgt::BufferAddress {
            let indirect_offset =
                offset + draw * size_of::<wgt::DrawIndirectArgs>() as wgt::BufferAddress;
            #[allow(clippy::clone_on_copy)] self.cmd_buffer.commands.push(C::DrawIndirect {
                topology: self.state.topology,
                indirect_buf: buffer.raw.unwrap(),
                indirect_offset,
                first_instance_location: self.state.first_instance_location.clone(),
            });
        }
    }
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        self.prepare_draw(0);
        let index_type = match self.state.index_format {
            wgt::IndexFormat::Uint16 => glow::UNSIGNED_SHORT,
            wgt::IndexFormat::Uint32 => glow::UNSIGNED_INT,
        };
        // One command per draw, stepping through the packed argument structs.
        for draw in 0..draw_count as wgt::BufferAddress {
            let indirect_offset =
                offset + draw * size_of::<wgt::DrawIndexedIndirectArgs>() as wgt::BufferAddress;
            #[allow(clippy::clone_on_copy)] self.cmd_buffer.commands.push(C::DrawIndexedIndirect {
                topology: self.state.topology,
                index_type,
                indirect_buf: buffer.raw.unwrap(),
                indirect_offset,
                first_instance_location: self.state.first_instance_location.clone(),
            });
        }
    }
    unsafe fn draw_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        // NOTE(review): presumably the feature gating this entry point is
        // never exposed by this backend, so the call cannot occur — confirm.
        unreachable!()
    }
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        // NOTE(review): presumably the feature gating this entry point is
        // never exposed by this backend, so the call cannot occur — confirm.
        unreachable!()
    }
1144
    unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor<super::QuerySet>) {
        // The previous pass must have consumed its end-of-pass timestamp.
        debug_assert!(self.state.end_of_pass_timestamp.is_none());
        if let Some(ref t) = desc.timestamp_writes {
            if let Some(index) = t.beginning_of_pass_write_index {
                unsafe { self.write_timestamp(t.query_set, index) }
            }
            // Deferred until `end_compute_pass`.
            self.state.end_of_pass_timestamp = t
                .end_of_pass_write_index
                .map(|index| t.query_set.queries[index as usize]);
        }

        if let Some(label) = desc.label {
            let range = self.cmd_buffer.add_marker(label);
            self.cmd_buffer.commands.push(C::PushDebugGroup(range));
            self.state.has_pass_label = true;
        }
    }
    unsafe fn end_compute_pass(&mut self) {
        // Close the debug group opened by a labeled `begin_compute_pass`.
        if self.state.has_pass_label {
            self.cmd_buffer.commands.push(C::PopDebugGroup);
            self.state.has_pass_label = false;
        }

        // Emit the deferred end-of-pass timestamp, if one was requested.
        if let Some(query) = self.state.end_of_pass_timestamp.take() {
            self.cmd_buffer.commands.push(C::TimestampQuery(query));
        }
    }
1174
    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        // Compute pipelines share the program/push-constant/sampler setup
        // path with render pipelines.
        self.set_pipeline_inner(&pipeline.inner);
    }
1178
1179 unsafe fn dispatch(&mut self, count: [u32; 3]) {
1180 if count.iter().any(|&c| c == 0) {
1182 return;
1183 }
1184 self.cmd_buffer.commands.push(C::Dispatch(count));
1185 }
1186 unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
1187 self.cmd_buffer.commands.push(C::DispatchIndirect {
1188 indirect_buf: buffer.raw.unwrap(),
1189 indirect_offset: offset,
1190 });
1191 }
1192
    unsafe fn build_acceleration_structures<'a, T>(
        &mut self,
        _descriptor_count: u32,
        _descriptors: T,
    ) where
        super::Api: 'a,
        T: IntoIterator<
            Item = crate::BuildAccelerationStructureDescriptor<
                'a,
                super::Buffer,
                super::AccelerationStructure,
            >,
        >,
    {
        // Ray-tracing acceleration structures are not implemented for this
        // backend.
        unimplemented!()
    }
1209
1210 unsafe fn place_acceleration_structure_barrier(
1211 &mut self,
1212 _barriers: crate::AccelerationStructureBarrier,
1213 ) {
1214 unimplemented!()
1215 }
1216}