wgpu_hal/gles/command.rs

1use alloc::string::String;
2use core::{mem, ops::Range};
3
4use arrayvec::ArrayVec;
5
6use super::{conv, Command as C};
7
// Per-texture-unit bookkeeping: the unit's bind target plus the sampler slot
// (if any) currently paired with it, used by `rebind_sampler_states`.
#[derive(Clone, Copy, Debug, Default)]
struct TextureSlotDesc {
    tex_target: super::BindTarget,
    // Index into `State::samplers` of the sampler associated with this unit.
    sampler_index: Option<u8>,
}
13
// Encoder-side tracked state. Used to lazily re-emit GL state commands and to
// emulate `first_instance` on backends without full instancing support
// (see `prepare_draw` / `rebind_vertex_data`).
pub(super) struct State {
    topology: u32,
    primitive: super::PrimitiveState,
    index_format: wgt::IndexFormat,
    index_offset: wgt::BufferAddress,
    // One (layout, binding) pair per vertex buffer slot; the binding stays
    // `None` until a buffer is bound to the slot.
    vertex_buffers:
        [(super::VertexBufferDesc, Option<super::BufferBinding>); crate::MAX_VERTEX_BUFFERS],
    vertex_attributes: ArrayVec<super::AttributeDesc, { super::MAX_VERTEX_ATTRIBUTES }>,
    color_targets: ArrayVec<super::ColorTargetDesc, { crate::MAX_COLOR_ATTACHMENTS }>,
    stencil: super::StencilState,
    depth_bias: wgt::DepthBiasState,
    alpha_to_coverage_enabled: bool,
    samplers: [Option<glow::Sampler>; super::MAX_SAMPLERS],
    texture_slots: [TextureSlotDesc; super::MAX_TEXTURE_SLOTS],
    render_size: wgt::Extent3d,
    // (attachment, resolve target) pairs recorded at `begin_render_pass`,
    // turned into resolve commands at `end_render_pass`.
    resolve_attachments: ArrayVec<(u32, super::TextureView), { crate::MAX_COLOR_ATTACHMENTS }>,
    // Attachments whose store op does not require preservation; invalidated
    // at pass end. `+ 2` leaves room for depth and stencil.
    invalidate_attachments: ArrayVec<u32, { crate::MAX_COLOR_ATTACHMENTS + 2 }>,
    has_pass_label: bool,
    // Bit per vertex buffer slot that advances per instance.
    instance_vbuf_mask: usize,
    // Bit per vertex buffer slot whose binding must be re-emitted.
    dirty_vbuf_mask: usize,
    // The first-instance value currently baked into vertex bindings.
    active_first_instance: u32,
    first_instance_location: Option<glow::UniformLocation>,
    push_constant_descs: ArrayVec<super::PushConstantDesc, { super::MAX_PUSH_CONSTANT_COMMANDS }>,
    // The current state of the push constant data block.
    current_push_constant_data: [u32; super::MAX_PUSH_CONSTANTS],
    // Timestamp query to emit when the current pass ends, if requested.
    end_of_pass_timestamp: Option<glow::Query>,
}
41
// Hand-written rather than derived — presumably because one of the fixed-size
// arrays exceeds what `#[derive(Default)]` supports (TODO confirm); the
// `current_push_constant_data` array is zero-initialized explicitly.
impl Default for State {
    fn default() -> Self {
        Self {
            topology: Default::default(),
            primitive: Default::default(),
            index_format: Default::default(),
            index_offset: Default::default(),
            vertex_buffers: Default::default(),
            vertex_attributes: Default::default(),
            color_targets: Default::default(),
            stencil: Default::default(),
            depth_bias: Default::default(),
            alpha_to_coverage_enabled: Default::default(),
            samplers: Default::default(),
            texture_slots: Default::default(),
            render_size: Default::default(),
            resolve_attachments: Default::default(),
            invalidate_attachments: Default::default(),
            has_pass_label: Default::default(),
            instance_vbuf_mask: Default::default(),
            dirty_vbuf_mask: Default::default(),
            active_first_instance: Default::default(),
            first_instance_location: Default::default(),
            push_constant_descs: Default::default(),
            current_push_constant_data: [0; super::MAX_PUSH_CONSTANTS],
            end_of_pass_timestamp: Default::default(),
        }
    }
}
71
72impl super::CommandBuffer {
73    fn clear(&mut self) {
74        self.label = None;
75        self.commands.clear();
76        self.data_bytes.clear();
77        self.queries.clear();
78    }
79
80    fn add_marker(&mut self, marker: &str) -> Range<u32> {
81        let start = self.data_bytes.len() as u32;
82        self.data_bytes.extend(marker.as_bytes());
83        start..self.data_bytes.len() as u32
84    }
85
86    fn add_push_constant_data(&mut self, data: &[u32]) -> Range<u32> {
87        let data_raw = bytemuck::cast_slice(data);
88        let start = self.data_bytes.len();
89        assert!(start < u32::MAX as usize);
90        self.data_bytes.extend_from_slice(data_raw);
91        let end = self.data_bytes.len();
92        assert!(end < u32::MAX as usize);
93        (start as u32)..(end as u32)
94    }
95}
96
impl Drop for super::CommandEncoder {
    fn drop(&mut self) {
        use crate::CommandEncoder;
        // Dropping an encoder abandons any partially recorded commands.
        unsafe { self.discard_encoding() }
        self.counters.command_encoders.sub(1);
    }
}
104
impl super::CommandEncoder {
    /// Re-emits stencil-function commands from the tracked stencil state.
    ///
    /// Collapses both faces into a single `FRONT_AND_BACK` command when
    /// function, read mask and reference agree; otherwise emits one command
    /// per face.
    fn rebind_stencil_func(&mut self) {
        fn make(s: &super::StencilSide, face: u32) -> C {
            C::SetStencilFunc {
                face,
                function: s.function,
                reference: s.reference,
                read_mask: s.mask_read,
            }
        }

        let s = &self.state.stencil;
        if s.front.function == s.back.function
            && s.front.mask_read == s.back.mask_read
            && s.front.reference == s.back.reference
        {
            self.cmd_buffer
                .commands
                .push(make(&s.front, glow::FRONT_AND_BACK));
        } else {
            self.cmd_buffer.commands.push(make(&s.front, glow::FRONT));
            self.cmd_buffer.commands.push(make(&s.back, glow::BACK));
        }
    }

    /// Re-emits bindings for all vertex buffers flagged in `dirty_vbuf_mask`,
    /// baking `first_instance` into offsets when instancing is emulated
    /// (see `prepare_draw`).
    fn rebind_vertex_data(&mut self, first_instance: u32) {
        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            // Whole-buffer path: one `SetVertexBuffer` per dirty slot. The
            // emulated first instance becomes an extra byte offset into
            // per-instance buffers.
            for (index, pair) in self.state.vertex_buffers.iter().enumerate() {
                if self.state.dirty_vbuf_mask & (1 << index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) = match *pair {
                    // Not all dirty bindings are necessarily filled. Some may be unused.
                    (_, None) => continue,
                    (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                };
                let instance_offset = match buffer_desc.step {
                    wgt::VertexStepMode::Vertex => 0,
                    wgt::VertexStepMode::Instance => first_instance * buffer_desc.stride,
                };

                self.cmd_buffer.commands.push(C::SetVertexBuffer {
                    index: index as u32,
                    buffer: super::BufferBinding {
                        raw: vb.raw,
                        offset: vb.offset + instance_offset as wgt::BufferAddress,
                    },
                    buffer_desc,
                });
                self.state.dirty_vbuf_mask ^= 1 << index;
            }
        } else {
            // Per-attribute path: each attribute that references a dirty
            // buffer is re-specified individually, with the buffer offset
            // (and emulated first instance) folded into the attribute offset.
            let mut vbuf_mask = 0;
            for attribute in self.state.vertex_attributes.iter() {
                if self.state.dirty_vbuf_mask & (1 << attribute.buffer_index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) =
                    match self.state.vertex_buffers[attribute.buffer_index as usize] {
                        // Not all dirty bindings are necessarily filled. Some may be unused.
                        (_, None) => continue,
                        (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                    };

                let mut attribute_desc = attribute.clone();
                attribute_desc.offset += vb.offset as u32;
                if buffer_desc.step == wgt::VertexStepMode::Instance {
                    attribute_desc.offset += buffer_desc.stride * first_instance;
                }

                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: Some(vb.raw),
                    buffer_desc,
                    attribute_desc,
                });
                vbuf_mask |= 1 << attribute.buffer_index;
            }
            // Clear only the bits that were actually re-emitted above.
            self.state.dirty_vbuf_mask ^= vbuf_mask;
        }
    }

    /// Pushes `BindSampler` commands for every texture unit whose texture
    /// (bit in `dirty_textures`) or associated sampler (bit in
    /// `dirty_samplers`) changed.
    fn rebind_sampler_states(&mut self, dirty_textures: u32, dirty_samplers: u32) {
        for (texture_index, slot) in self.state.texture_slots.iter().enumerate() {
            if dirty_textures & (1 << texture_index) != 0
                || slot
                    .sampler_index
                    .is_some_and(|si| dirty_samplers & (1 << si) != 0)
            {
                let sampler = slot
                    .sampler_index
                    .and_then(|si| self.state.samplers[si as usize]);
                self.cmd_buffer
                    .commands
                    .push(C::BindSampler(texture_index as u32, sampler));
            }
        }
    }

    /// Flushes vertex-buffer state before a draw; when `first_instance` has
    /// to be emulated, a change in its value dirties all per-instance buffers
    /// so their offsets get rebased.
    fn prepare_draw(&mut self, first_instance: u32) {
        // If we support fully featured instancing, we want to bind everything as normal
        // and let the draw call sort it out.
        let emulated_first_instance_value = if self
            .private_caps
            .contains(super::PrivateCapabilities::FULLY_FEATURED_INSTANCING)
        {
            0
        } else {
            first_instance
        };

        if emulated_first_instance_value != self.state.active_first_instance {
            // rebind all per-instance buffers on first-instance change
            self.state.dirty_vbuf_mask |= self.state.instance_vbuf_mask;
            self.state.active_first_instance = emulated_first_instance_value;
        }
        if self.state.dirty_vbuf_mask != 0 {
            self.rebind_vertex_data(emulated_first_instance_value);
        }
    }

    /// Switches to the pipeline's program and syncs the program-dependent
    /// tracked state: first-instance uniform location, push-constant descs
    /// and the texture-unit → sampler mapping.
    #[allow(clippy::clone_on_copy)] // False positive when cloning glow::UniformLocation
    fn set_pipeline_inner(&mut self, inner: &super::PipelineInner) {
        self.cmd_buffer.commands.push(C::SetProgram(inner.program));

        self.state
            .first_instance_location
            .clone_from(&inner.first_instance_location);
        self.state
            .push_constant_descs
            .clone_from(&inner.push_constant_descs);

        // rebind textures, if needed
        let mut dirty_textures = 0u32;
        for (texture_index, (slot, &sampler_index)) in self
            .state
            .texture_slots
            .iter_mut()
            .zip(inner.sampler_map.iter())
            .enumerate()
        {
            if slot.sampler_index != sampler_index {
                slot.sampler_index = sampler_index;
                dirty_textures |= 1 << texture_index;
            }
        }
        if dirty_textures != 0 {
            self.rebind_sampler_states(dirty_textures, 0);
        }
    }
}
258
259impl crate::CommandEncoder for super::CommandEncoder {
260    type A = super::Api;
261
    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
        // Reset all tracked state so nothing leaks between command buffers.
        self.state = State::default();
        self.cmd_buffer.label = label.map(String::from);
        Ok(())
    }
    unsafe fn discard_encoding(&mut self) {
        // Drop everything recorded so far; the buffer can be re-recorded.
        self.cmd_buffer.clear();
    }
    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        // Hand the recorded buffer to the caller, leaving a fresh default
        // buffer in place for the next recording.
        Ok(mem::take(&mut self.cmd_buffer))
    }
    unsafe fn reset_all<I>(&mut self, _command_buffers: I) {
        // Intentionally a no-op for now; buffers are simply dropped.
        //TODO: could re-use the allocations in all these command buffers
    }
276
277    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
278    where
279        T: Iterator<Item = crate::BufferBarrier<'a, super::Buffer>>,
280    {
281        if !self
282            .private_caps
283            .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
284        {
285            return;
286        }
287        for bar in barriers {
288            // GLES only synchronizes storage -> anything explicitly
289            if !bar.usage.from.contains(wgt::BufferUses::STORAGE_READ_WRITE) {
290                continue;
291            }
292            self.cmd_buffer
293                .commands
294                .push(C::BufferBarrier(bar.buffer.raw.unwrap(), bar.usage.to));
295        }
296    }
297
298    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
299    where
300        T: Iterator<Item = crate::TextureBarrier<'a, super::Texture>>,
301    {
302        if !self
303            .private_caps
304            .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
305        {
306            return;
307        }
308
309        let mut combined_usage = wgt::TextureUses::empty();
310        for bar in barriers {
311            // GLES only synchronizes storage -> anything explicitly
312            if !bar
313                .usage
314                .from
315                .contains(wgt::TextureUses::STORAGE_READ_WRITE)
316            {
317                continue;
318            }
319            // unlike buffers, there is no need for a concrete texture
320            // object to be bound anywhere for a barrier
321            combined_usage |= bar.usage.to;
322        }
323
324        if !combined_usage.is_empty() {
325            self.cmd_buffer
326                .commands
327                .push(C::TextureBarrier(combined_usage));
328        }
329    }
330
    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        // Records a deferred clear of the given byte range of `buffer`.
        self.cmd_buffer.commands.push(C::ClearBuffer {
            dst: buffer.clone(),
            dst_target: buffer.target,
            range,
        });
    }
338
339    unsafe fn copy_buffer_to_buffer<T>(
340        &mut self,
341        src: &super::Buffer,
342        dst: &super::Buffer,
343        regions: T,
344    ) where
345        T: Iterator<Item = crate::BufferCopy>,
346    {
347        let (src_target, dst_target) = if src.target == dst.target {
348            (glow::COPY_READ_BUFFER, glow::COPY_WRITE_BUFFER)
349        } else {
350            (src.target, dst.target)
351        };
352        for copy in regions {
353            self.cmd_buffer.commands.push(C::CopyBufferToBuffer {
354                src: src.clone(),
355                src_target,
356                dst: dst.clone(),
357                dst_target,
358                copy,
359            })
360        }
361    }
362
    /// Records copies from an external (browser-provided) image source into
    /// `dst`, optionally premultiplying alpha during the transfer.
    #[cfg(webgl)]
    unsafe fn copy_external_image_to_texture<T>(
        &mut self,
        src: &wgt::CopyExternalImageSourceInfo,
        dst: &super::Texture,
        dst_premultiplication: bool,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let (dst_raw, dst_target) = dst.inner.as_native();
        for copy in regions {
            self.cmd_buffer
                .commands
                .push(C::CopyExternalImageToTexture {
                    src: src.clone(),
                    dst: dst_raw,
                    dst_target,
                    dst_format: dst.format,
                    dst_premultiplication,
                    copy,
                })
        }
    }
387
388    unsafe fn copy_texture_to_texture<T>(
389        &mut self,
390        src: &super::Texture,
391        _src_usage: wgt::TextureUses,
392        dst: &super::Texture,
393        regions: T,
394    ) where
395        T: Iterator<Item = crate::TextureCopy>,
396    {
397        let (src_raw, src_target) = src.inner.as_native();
398        let (dst_raw, dst_target) = dst.inner.as_native();
399        for mut copy in regions {
400            copy.clamp_size_to_virtual(&src.copy_size, &dst.copy_size);
401            self.cmd_buffer.commands.push(C::CopyTextureToTexture {
402                src: src_raw,
403                src_target,
404                dst: dst_raw,
405                dst_target,
406                copy,
407            })
408        }
409    }
410
411    unsafe fn copy_buffer_to_texture<T>(
412        &mut self,
413        src: &super::Buffer,
414        dst: &super::Texture,
415        regions: T,
416    ) where
417        T: Iterator<Item = crate::BufferTextureCopy>,
418    {
419        let (dst_raw, dst_target) = dst.inner.as_native();
420
421        for mut copy in regions {
422            copy.clamp_size_to_virtual(&dst.copy_size);
423            self.cmd_buffer.commands.push(C::CopyBufferToTexture {
424                src: src.clone(),
425                src_target: src.target,
426                dst: dst_raw,
427                dst_target,
428                dst_format: dst.format,
429                copy,
430            })
431        }
432    }
433
434    unsafe fn copy_texture_to_buffer<T>(
435        &mut self,
436        src: &super::Texture,
437        _src_usage: wgt::TextureUses,
438        dst: &super::Buffer,
439        regions: T,
440    ) where
441        T: Iterator<Item = crate::BufferTextureCopy>,
442    {
443        let (src_raw, src_target) = src.inner.as_native();
444        for mut copy in regions {
445            copy.clamp_size_to_virtual(&src.copy_size);
446            self.cmd_buffer.commands.push(C::CopyTextureToBuffer {
447                src: src_raw,
448                src_target,
449                src_format: src.format,
450                dst: dst.clone(),
451                dst_target: dst.target,
452                copy,
453            })
454        }
455    }
456
    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        // Resolve the GL query object up front; playback only needs the handle.
        let query = set.queries[index as usize];
        self.cmd_buffer
            .commands
            .push(C::BeginQuery(query, set.target));
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, _index: u32) {
        // Only the target is needed: the command ends whichever query is
        // active on it, as established by the matching `begin_query`.
        self.cmd_buffer.commands.push(C::EndQuery(set.target));
    }
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        // Records a timestamp into the indexed query of `set`.
        let query = set.queries[index as usize];
        self.cmd_buffer.commands.push(C::TimestampQuery(query));
    }
    unsafe fn reset_queries(&mut self, _set: &super::QuerySet, _range: Range<u32>) {
        // NOTE(review): currently a no-op — presumably GL query objects need
        // no explicit reset before reuse, but confirm.
        //TODO: what do we do here?
    }
473    unsafe fn copy_query_results(
474        &mut self,
475        set: &super::QuerySet,
476        range: Range<u32>,
477        buffer: &super::Buffer,
478        offset: wgt::BufferAddress,
479        _stride: wgt::BufferSize,
480    ) {
481        let start = self.cmd_buffer.queries.len();
482        self.cmd_buffer
483            .queries
484            .extend_from_slice(&set.queries[range.start as usize..range.end as usize]);
485        let query_range = start as u32..self.cmd_buffer.queries.len() as u32;
486        self.cmd_buffer.commands.push(C::CopyQueryResults {
487            query_range,
488            dst: buffer.clone(),
489            dst_target: buffer.target,
490            dst_offset: offset,
491        });
492    }
493
494    // render
495
    /// Records framebuffer setup, attachment binds, viewport/scissor reset
    /// and load-op clears for a new render pass. Store-op handling (resolves,
    /// invalidations) is recorded into `self.state` and emitted later by
    /// `end_render_pass`.
    unsafe fn begin_render_pass(
        &mut self,
        desc: &crate::RenderPassDescriptor<super::QuerySet, super::TextureView>,
    ) {
        debug_assert!(self.state.end_of_pass_timestamp.is_none());
        if let Some(ref t) = desc.timestamp_writes {
            if let Some(index) = t.beginning_of_pass_write_index {
                unsafe { self.write_timestamp(t.query_set, index) }
            }
            // The end-of-pass timestamp is stashed and emitted at pass end.
            self.state.end_of_pass_timestamp = t
                .end_of_pass_write_index
                .map(|index| t.query_set.queries[index as usize]);
        }

        self.state.render_size = desc.extent;
        self.state.resolve_attachments.clear();
        self.state.invalidate_attachments.clear();
        if let Some(label) = desc.label {
            let range = self.cmd_buffer.add_marker(label);
            self.cmd_buffer.commands.push(C::PushDebugGroup(range));
            self.state.has_pass_label = true;
        }

        let rendering_to_external_framebuffer = desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .any(|at| match at.target.view.inner {
                #[cfg(webgl)]
                super::TextureInner::ExternalFramebuffer { .. } => true,
                _ => false,
            });

        if rendering_to_external_framebuffer && desc.color_attachments.len() != 1 {
            panic!("Multiple render attachments with external framebuffers are not supported.");
        }

        // `COLOR_ATTACHMENT0` to `COLOR_ATTACHMENT31` gives 32 possible color attachments.
        assert!(desc.color_attachments.len() <= 32);

        match desc
            .color_attachments
            .first()
            .filter(|at| at.is_some())
            .and_then(|at| at.as_ref().map(|at| &at.target.view.inner))
        {
            // default framebuffer (provided externally)
            Some(&super::TextureInner::DefaultRenderbuffer) => {
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: true });
            }
            _ => {
                // set the framebuffer
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: false });

                for (i, cat) in desc.color_attachments.iter().enumerate() {
                    if let Some(cat) = cat.as_ref() {
                        let attachment = glow::COLOR_ATTACHMENT0 + i as u32;
                        self.cmd_buffer.commands.push(C::BindAttachment {
                            attachment,
                            view: cat.target.view.clone(),
                        });
                        if let Some(ref rat) = cat.resolve_target {
                            self.state
                                .resolve_attachments
                                .push((attachment, rat.view.clone()));
                        }
                        // Attachments that are not stored may be invalidated
                        // when the pass ends.
                        if !cat.ops.contains(crate::AttachmentOps::STORE) {
                            self.state.invalidate_attachments.push(attachment);
                        }
                    }
                }
                if let Some(ref dsat) = desc.depth_stencil_attachment {
                    let aspects = dsat.target.view.aspects;
                    let attachment = match aspects {
                        crate::FormatAspects::DEPTH => glow::DEPTH_ATTACHMENT,
                        crate::FormatAspects::STENCIL => glow::STENCIL_ATTACHMENT,
                        _ => glow::DEPTH_STENCIL_ATTACHMENT,
                    };
                    self.cmd_buffer.commands.push(C::BindAttachment {
                        attachment,
                        view: dsat.target.view.clone(),
                    });
                    if aspects.contains(crate::FormatAspects::DEPTH)
                        && !dsat.depth_ops.contains(crate::AttachmentOps::STORE)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::DEPTH_ATTACHMENT);
                    }
                    if aspects.contains(crate::FormatAspects::STENCIL)
                        && !dsat.stencil_ops.contains(crate::AttachmentOps::STORE)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::STENCIL_ATTACHMENT);
                    }
                }
            }
        }

        // Reset viewport and scissor to cover the whole render area.
        let rect = crate::Rect {
            x: 0,
            y: 0,
            w: desc.extent.width as i32,
            h: desc.extent.height as i32,
        };
        self.cmd_buffer.commands.push(C::SetScissor(rect.clone()));
        self.cmd_buffer.commands.push(C::SetViewport {
            rect,
            depth: 0.0..1.0,
        });

        if !rendering_to_external_framebuffer {
            // set the draw buffers and states
            self.cmd_buffer
                .commands
                .push(C::SetDrawColorBuffers(desc.color_attachments.len() as u8));
        }

        // issue the clears
        for (i, cat) in desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .enumerate()
        {
            if !cat.ops.contains(crate::AttachmentOps::LOAD) {
                let c = &cat.clear_value;
                self.cmd_buffer.commands.push(
                    // Pick the clear command matching the attachment's sample type.
                    match cat.target.view.format.sample_type(None, None).unwrap() {
                        wgt::TextureSampleType::Float { .. } => C::ClearColorF {
                            draw_buffer: i as u32,
                            color: [c.r as f32, c.g as f32, c.b as f32, c.a as f32],
                            is_srgb: cat.target.view.format.is_srgb(),
                        },
                        wgt::TextureSampleType::Uint => C::ClearColorU(
                            i as u32,
                            [c.r as u32, c.g as u32, c.b as u32, c.a as u32],
                        ),
                        wgt::TextureSampleType::Sint => C::ClearColorI(
                            i as u32,
                            [c.r as i32, c.g as i32, c.b as i32, c.a as i32],
                        ),
                        wgt::TextureSampleType::Depth => unreachable!(),
                    },
                );
            }
        }

        if let Some(ref dsat) = desc.depth_stencil_attachment {
            let clear_depth = !dsat.depth_ops.contains(crate::AttachmentOps::LOAD);
            let clear_stencil = !dsat.stencil_ops.contains(crate::AttachmentOps::LOAD);

            if clear_depth && clear_stencil {
                self.cmd_buffer.commands.push(C::ClearDepthAndStencil(
                    dsat.clear_value.0,
                    dsat.clear_value.1,
                ));
            } else if clear_depth {
                self.cmd_buffer
                    .commands
                    .push(C::ClearDepth(dsat.clear_value.0));
            } else if clear_stencil {
                self.cmd_buffer
                    .commands
                    .push(C::ClearStencil(dsat.clear_value.1));
            }
        }
    }
    /// Finishes the current pass: emits the resolves and invalidations
    /// recorded at pass start, closes the optional debug group, and resets
    /// per-pass tracked state.
    unsafe fn end_render_pass(&mut self) {
        // MSAA resolves deferred from `begin_render_pass`.
        for (attachment, dst) in self.state.resolve_attachments.drain(..) {
            self.cmd_buffer.commands.push(C::ResolveAttachment {
                attachment,
                dst,
                size: self.state.render_size,
            });
        }
        // Discard attachments whose store op did not request preservation.
        if !self.state.invalidate_attachments.is_empty() {
            self.cmd_buffer.commands.push(C::InvalidateAttachments(
                self.state.invalidate_attachments.clone(),
            ));
            self.state.invalidate_attachments.clear();
        }
        if self.state.has_pass_label {
            self.cmd_buffer.commands.push(C::PopDebugGroup);
            self.state.has_pass_label = false;
        }
        self.state.instance_vbuf_mask = 0;
        self.state.dirty_vbuf_mask = 0;
        self.state.active_first_instance = 0;
        self.state.color_targets.clear();
        // Unbind every attribute that was set during this pass.
        for vat in &self.state.vertex_attributes {
            self.cmd_buffer
                .commands
                .push(C::UnsetVertexAttribute(vat.location));
        }
        self.state.vertex_attributes.clear();
        self.state.primitive = super::PrimitiveState::default();

        // End-of-pass timestamp requested via `timestamp_writes`, if any.
        if let Some(query) = self.state.end_of_pass_timestamp.take() {
            self.cmd_buffer.commands.push(C::TimestampQuery(query));
        }
    }
703
    /// Applies a bind group: emits buffer/texture/image bind commands and
    /// records sampler assignments into the tracked state.
    ///
    /// `dynamic_offsets` is consumed in entry order, one element per buffer
    /// binding declared with `has_dynamic_offset`.
    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        let mut do_index = 0;
        let mut dirty_textures = 0u32;
        let mut dirty_samplers = 0u32;
        let group_info = &layout.group_infos[index as usize];

        for (binding_layout, raw_binding) in group_info.entries.iter().zip(group.contents.iter()) {
            // Map the WebGPU binding number to the flat GL slot.
            let slot = group_info.binding_to_slot[binding_layout.binding as usize] as u32;
            match *raw_binding {
                super::RawBinding::Buffer {
                    raw,
                    offset: base_offset,
                    size,
                } => {
                    let mut offset = base_offset;
                    let target = match binding_layout.ty {
                        wgt::BindingType::Buffer {
                            ty,
                            has_dynamic_offset,
                            min_binding_size: _,
                        } => {
                            if has_dynamic_offset {
                                offset += dynamic_offsets[do_index] as i32;
                                do_index += 1;
                            }
                            match ty {
                                wgt::BufferBindingType::Uniform => glow::UNIFORM_BUFFER,
                                wgt::BufferBindingType::Storage { .. } => {
                                    glow::SHADER_STORAGE_BUFFER
                                }
                            }
                        }
                        _ => unreachable!(),
                    };
                    self.cmd_buffer.commands.push(C::BindBuffer {
                        target,
                        slot,
                        buffer: raw,
                        offset,
                        size,
                    });
                }
                super::RawBinding::Sampler(sampler) => {
                    // Samplers are not bound directly; they are recorded and
                    // applied to texture units via `rebind_sampler_states`.
                    dirty_samplers |= 1 << slot;
                    self.state.samplers[slot as usize] = Some(sampler);
                }
                super::RawBinding::Texture {
                    raw,
                    target,
                    aspects,
                    ref mip_levels,
                } => {
                    dirty_textures |= 1 << slot;
                    self.state.texture_slots[slot as usize].tex_target = target;
                    self.cmd_buffer.commands.push(C::BindTexture {
                        slot,
                        texture: raw,
                        target,
                        aspects,
                        mip_levels: mip_levels.clone(),
                    });
                }
                super::RawBinding::Image(ref binding) => {
                    self.cmd_buffer.commands.push(C::BindImage {
                        slot,
                        binding: binding.clone(),
                    });
                }
            }
        }

        self.rebind_sampler_states(dirty_textures, dirty_samplers);
    }
783
784    unsafe fn set_push_constants(
785        &mut self,
786        _layout: &super::PipelineLayout,
787        _stages: wgt::ShaderStages,
788        offset_bytes: u32,
789        data: &[u32],
790    ) {
791        // There is nothing preventing the user from trying to update a single value within
792        // a vector or matrix in the set_push_constant call, as to the user, all of this is
793        // just memory. However OpenGL does not allow partial uniform updates.
794        //
795        // As such, we locally keep a copy of the current state of the push constant memory
796        // block. If the user tries to update a single value, we have the data to update the entirety
797        // of the uniform.
798        let start_words = offset_bytes / 4;
799        let end_words = start_words + data.len() as u32;
800        self.state.current_push_constant_data[start_words as usize..end_words as usize]
801            .copy_from_slice(data);
802
803        // We iterate over the uniform list as there may be multiple uniforms that need
804        // updating from the same push constant memory (one for each shader stage).
805        //
806        // Additionally, any statically unused uniform descs will have been removed from this list
807        // by OpenGL, so the uniform list is not contiguous.
808        for uniform in self.state.push_constant_descs.iter().cloned() {
809            let uniform_size_words = uniform.size_bytes / 4;
810            let uniform_start_words = uniform.offset / 4;
811            let uniform_end_words = uniform_start_words + uniform_size_words;
812
813            // Is true if any word within the uniform binding was updated
814            let needs_updating =
815                start_words < uniform_end_words || uniform_start_words <= end_words;
816
817            if needs_updating {
818                let uniform_data = &self.state.current_push_constant_data
819                    [uniform_start_words as usize..uniform_end_words as usize];
820
821                let range = self.cmd_buffer.add_push_constant_data(uniform_data);
822
823                self.cmd_buffer.commands.push(C::SetPushConstants {
824                    uniform,
825                    offset: range.start,
826                });
827            }
828        }
829    }
830
831    unsafe fn insert_debug_marker(&mut self, label: &str) {
832        let range = self.cmd_buffer.add_marker(label);
833        self.cmd_buffer.commands.push(C::InsertDebugMarker(range));
834    }
835    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
836        let range = self.cmd_buffer.add_marker(group_label);
837        self.cmd_buffer.commands.push(C::PushDebugGroup(range));
838    }
    unsafe fn end_debug_marker(&mut self) {
        // Closes the most recently opened debug group (see `begin_debug_marker`).
        self.cmd_buffer.commands.push(C::PopDebugGroup);
    }
842
    // Binds a render pipeline by diffing it against the cached `self.state`
    // and recording only the state-change commands that are actually needed.
    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        self.state.topology = conv::map_primitive_topology(pipeline.primitive.topology);

        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            // Separate vertex-layout path: record each attribute's layout now;
            // the actual buffer is supplied later (note `buffer: None`).
            for vat in pipeline.vertex_attributes.iter() {
                let vb = &pipeline.vertex_buffers[vat.buffer_index as usize];
                // set the layout
                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: None,
                    buffer_desc: vb.clone(),
                    attribute_desc: vat.clone(),
                });
            }
        } else {
            // Fallback path: attributes are bound together with their buffers,
            // so unset everything previously recorded and mark the new
            // pipeline's buffers dirty for re-binding at draw time.
            for vat in &self.state.vertex_attributes {
                self.cmd_buffer
                    .commands
                    .push(C::UnsetVertexAttribute(vat.location));
            }
            self.state.vertex_attributes.clear();

            self.state.dirty_vbuf_mask = 0;
            // copy vertex attributes
            for vat in pipeline.vertex_attributes.iter() {
                //Note: we can invalidate more carefully here.
                self.state.dirty_vbuf_mask |= 1 << vat.buffer_index;
                self.state.vertex_attributes.push(vat.clone());
            }
        }

        self.state.instance_vbuf_mask = 0;
        // copy vertex state
        for (index, (&mut (ref mut state_desc, _), pipe_desc)) in self
            .state
            .vertex_buffers
            .iter_mut()
            .zip(pipeline.vertex_buffers.iter())
            .enumerate()
        {
            if pipe_desc.step == wgt::VertexStepMode::Instance {
                self.state.instance_vbuf_mask |= 1 << index;
            }
            // Only dirty the slot when its buffer layout actually changed.
            if state_desc != pipe_desc {
                self.state.dirty_vbuf_mask |= 1 << index;
                *state_desc = pipe_desc.clone();
            }
        }

        self.set_pipeline_inner(&pipeline.inner);

        // set primitive state
        let prim_state = conv::map_primitive_state(&pipeline.primitive);
        if prim_state != self.state.primitive {
            self.cmd_buffer
                .commands
                .push(C::SetPrimitive(prim_state.clone()));
            self.state.primitive = prim_state;
        }

        // set depth/stencil states
        let mut aspects = crate::FormatAspects::empty();
        if pipeline.depth_bias != self.state.depth_bias {
            self.state.depth_bias = pipeline.depth_bias;
            self.cmd_buffer
                .commands
                .push(C::SetDepthBias(pipeline.depth_bias));
        }
        if let Some(ref depth) = pipeline.depth {
            aspects |= crate::FormatAspects::DEPTH;
            self.cmd_buffer.commands.push(C::SetDepth(depth.clone()));
        }
        if let Some(ref stencil) = pipeline.stencil {
            aspects |= crate::FormatAspects::STENCIL;
            self.state.stencil = stencil.clone();
            self.rebind_stencil_func();
            // When both faces share ops and write mask, one FRONT_AND_BACK
            // command suffices; otherwise record each face separately.
            if stencil.front.ops == stencil.back.ops
                && stencil.front.mask_write == stencil.back.mask_write
            {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT_AND_BACK,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
            } else {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::BACK,
                    write_mask: stencil.back.mask_write,
                    ops: stencil.back.ops.clone(),
                });
            }
        }
        self.cmd_buffer
            .commands
            .push(C::ConfigureDepthStencil(aspects));

        // set multisampling state
        if pipeline.alpha_to_coverage_enabled != self.state.alpha_to_coverage_enabled {
            self.state.alpha_to_coverage_enabled = pipeline.alpha_to_coverage_enabled;
            self.cmd_buffer
                .commands
                .push(C::SetAlphaToCoverage(pipeline.alpha_to_coverage_enabled));
        }

        // set blend states
        if self.state.color_targets[..] != pipeline.color_targets[..] {
            // If any target differs from the first, set each draw buffer
            // individually; otherwise a single command with
            // `draw_buffer_index: None` covers all draw buffers at once.
            if pipeline
                .color_targets
                .iter()
                .skip(1)
                .any(|ct| *ct != pipeline.color_targets[0])
            {
                for (index, ct) in pipeline.color_targets.iter().enumerate() {
                    self.cmd_buffer.commands.push(C::SetColorTarget {
                        draw_buffer_index: Some(index as u32),
                        desc: ct.clone(),
                    });
                }
            } else {
                self.cmd_buffer.commands.push(C::SetColorTarget {
                    draw_buffer_index: None,
                    desc: pipeline.color_targets.first().cloned().unwrap_or_default(),
                });
            }
        }
        self.state.color_targets.clear();
        for ct in pipeline.color_targets.iter() {
            self.state.color_targets.push(ct.clone());
        }
    }
980
981    unsafe fn set_index_buffer<'a>(
982        &mut self,
983        binding: crate::BufferBinding<'a, super::Buffer>,
984        format: wgt::IndexFormat,
985    ) {
986        self.state.index_offset = binding.offset;
987        self.state.index_format = format;
988        self.cmd_buffer
989            .commands
990            .push(C::SetIndexBuffer(binding.buffer.raw.unwrap()));
991    }
992    unsafe fn set_vertex_buffer<'a>(
993        &mut self,
994        index: u32,
995        binding: crate::BufferBinding<'a, super::Buffer>,
996    ) {
997        self.state.dirty_vbuf_mask |= 1 << index;
998        let (_, ref mut vb) = self.state.vertex_buffers[index as usize];
999        *vb = Some(super::BufferBinding {
1000            raw: binding.buffer.raw.unwrap(),
1001            offset: binding.offset,
1002        });
1003    }
1004    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth: Range<f32>) {
1005        self.cmd_buffer.commands.push(C::SetViewport {
1006            rect: crate::Rect {
1007                x: rect.x as i32,
1008                y: rect.y as i32,
1009                w: rect.w as i32,
1010                h: rect.h as i32,
1011            },
1012            depth,
1013        });
1014    }
1015    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
1016        self.cmd_buffer.commands.push(C::SetScissor(crate::Rect {
1017            x: rect.x as i32,
1018            y: rect.y as i32,
1019            w: rect.w as i32,
1020            h: rect.h as i32,
1021        }));
1022    }
1023    unsafe fn set_stencil_reference(&mut self, value: u32) {
1024        self.state.stencil.front.reference = value;
1025        self.state.stencil.back.reference = value;
1026        self.rebind_stencil_func();
1027    }
1028    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
1029        self.cmd_buffer.commands.push(C::SetBlendConstant(*color));
1030    }
1031
1032    unsafe fn draw(
1033        &mut self,
1034        first_vertex: u32,
1035        vertex_count: u32,
1036        first_instance: u32,
1037        instance_count: u32,
1038    ) {
1039        self.prepare_draw(first_instance);
1040        #[allow(clippy::clone_on_copy)] // False positive when cloning glow::UniformLocation
1041        self.cmd_buffer.commands.push(C::Draw {
1042            topology: self.state.topology,
1043            first_vertex,
1044            vertex_count,
1045            first_instance,
1046            instance_count,
1047            first_instance_location: self.state.first_instance_location.clone(),
1048        });
1049    }
1050    unsafe fn draw_indexed(
1051        &mut self,
1052        first_index: u32,
1053        index_count: u32,
1054        base_vertex: i32,
1055        first_instance: u32,
1056        instance_count: u32,
1057    ) {
1058        self.prepare_draw(first_instance);
1059        let (index_size, index_type) = match self.state.index_format {
1060            wgt::IndexFormat::Uint16 => (2, glow::UNSIGNED_SHORT),
1061            wgt::IndexFormat::Uint32 => (4, glow::UNSIGNED_INT),
1062        };
1063        let index_offset = self.state.index_offset + index_size * first_index as wgt::BufferAddress;
1064        #[allow(clippy::clone_on_copy)] // False positive when cloning glow::UniformLocation
1065        self.cmd_buffer.commands.push(C::DrawIndexed {
1066            topology: self.state.topology,
1067            index_type,
1068            index_offset,
1069            index_count,
1070            base_vertex,
1071            first_instance,
1072            instance_count,
1073            first_instance_location: self.state.first_instance_location.clone(),
1074        });
1075    }
    unsafe fn draw_mesh_tasks(
        &mut self,
        _group_count_x: u32,
        _group_count_y: u32,
        _group_count_z: u32,
    ) {
        // Mesh shading is not supported by this backend; upstream feature
        // validation should keep this path from ever being reached.
        unreachable!()
    }
1084    unsafe fn draw_indirect(
1085        &mut self,
1086        buffer: &super::Buffer,
1087        offset: wgt::BufferAddress,
1088        draw_count: u32,
1089    ) {
1090        self.prepare_draw(0);
1091        for draw in 0..draw_count as wgt::BufferAddress {
1092            let indirect_offset =
1093                offset + draw * size_of::<wgt::DrawIndirectArgs>() as wgt::BufferAddress;
1094            #[allow(clippy::clone_on_copy)] // False positive when cloning glow::UniformLocation
1095            self.cmd_buffer.commands.push(C::DrawIndirect {
1096                topology: self.state.topology,
1097                indirect_buf: buffer.raw.unwrap(),
1098                indirect_offset,
1099                first_instance_location: self.state.first_instance_location.clone(),
1100            });
1101        }
1102    }
1103    unsafe fn draw_indexed_indirect(
1104        &mut self,
1105        buffer: &super::Buffer,
1106        offset: wgt::BufferAddress,
1107        draw_count: u32,
1108    ) {
1109        self.prepare_draw(0);
1110        let index_type = match self.state.index_format {
1111            wgt::IndexFormat::Uint16 => glow::UNSIGNED_SHORT,
1112            wgt::IndexFormat::Uint32 => glow::UNSIGNED_INT,
1113        };
1114        for draw in 0..draw_count as wgt::BufferAddress {
1115            let indirect_offset =
1116                offset + draw * size_of::<wgt::DrawIndexedIndirectArgs>() as wgt::BufferAddress;
1117            #[allow(clippy::clone_on_copy)] // False positive when cloning glow::UniformLocation
1118            self.cmd_buffer.commands.push(C::DrawIndexedIndirect {
1119                topology: self.state.topology,
1120                index_type,
1121                indirect_buf: buffer.raw.unwrap(),
1122                indirect_offset,
1123                first_instance_location: self.state.first_instance_location.clone(),
1124            });
1125        }
1126    }
    unsafe fn draw_mesh_tasks_indirect(
        &mut self,
        _buffer: &<Self::A as crate::Api>::Buffer,
        _offset: wgt::BufferAddress,
        _draw_count: u32,
    ) {
        // Mesh shading is not supported by this backend; upstream feature
        // validation should keep this path from ever being reached.
        unreachable!()
    }
    unsafe fn draw_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        // Count-buffer indirect draws are not supported by this backend;
        // feature gating upstream should make this unreachable.
        unreachable!()
    }
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        // Count-buffer indirect draws are not supported by this backend;
        // feature gating upstream should make this unreachable.
        unreachable!()
    }
    unsafe fn draw_mesh_tasks_indirect_count(
        &mut self,
        _buffer: &<Self::A as crate::Api>::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &<Self::A as crate::Api>::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        // Mesh shading (and its count-buffer variant) is not supported by this
        // backend; feature gating upstream should make this unreachable.
        unreachable!()
    }
1165
1166    // compute
1167
1168    unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor<super::QuerySet>) {
1169        debug_assert!(self.state.end_of_pass_timestamp.is_none());
1170        if let Some(ref t) = desc.timestamp_writes {
1171            if let Some(index) = t.beginning_of_pass_write_index {
1172                unsafe { self.write_timestamp(t.query_set, index) }
1173            }
1174            self.state.end_of_pass_timestamp = t
1175                .end_of_pass_write_index
1176                .map(|index| t.query_set.queries[index as usize]);
1177        }
1178
1179        if let Some(label) = desc.label {
1180            let range = self.cmd_buffer.add_marker(label);
1181            self.cmd_buffer.commands.push(C::PushDebugGroup(range));
1182            self.state.has_pass_label = true;
1183        }
1184    }
1185    unsafe fn end_compute_pass(&mut self) {
1186        if self.state.has_pass_label {
1187            self.cmd_buffer.commands.push(C::PopDebugGroup);
1188            self.state.has_pass_label = false;
1189        }
1190
1191        if let Some(query) = self.state.end_of_pass_timestamp.take() {
1192            self.cmd_buffer.commands.push(C::TimestampQuery(query));
1193        }
1194    }
1195
    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        // Only the shared program/uniform state needs binding; nothing else is
        // recorded for compute pipelines here.
        self.set_pipeline_inner(&pipeline.inner);
    }
1199
1200    unsafe fn dispatch(&mut self, count: [u32; 3]) {
1201        // Empty dispatches are invalid in OpenGL, but valid in WebGPU.
1202        if count.contains(&0) {
1203            return;
1204        }
1205        self.cmd_buffer.commands.push(C::Dispatch(count));
1206    }
1207    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
1208        self.cmd_buffer.commands.push(C::DispatchIndirect {
1209            indirect_buf: buffer.raw.unwrap(),
1210            indirect_offset: offset,
1211        });
1212    }
1213
    unsafe fn build_acceleration_structures<'a, T>(
        &mut self,
        _descriptor_count: u32,
        _descriptors: T,
    ) where
        super::Api: 'a,
        T: IntoIterator<
            Item = crate::BuildAccelerationStructureDescriptor<
                'a,
                super::Buffer,
                super::AccelerationStructure,
            >,
        >,
    {
        // Ray-tracing acceleration structures are not implemented for this
        // backend; feature gating upstream should prevent callers from
        // reaching this.
        unimplemented!()
    }
1230
    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        _barriers: crate::AccelerationStructureBarrier,
    ) {
        // Ray-tracing acceleration structures are not implemented for this
        // backend; see `build_acceleration_structures`.
        unimplemented!()
    }
1237
    unsafe fn copy_acceleration_structure_to_acceleration_structure(
        &mut self,
        _src: &super::AccelerationStructure,
        _dst: &super::AccelerationStructure,
        _copy: wgt::AccelerationStructureCopy,
    ) {
        // Ray-tracing acceleration structures are not implemented for this
        // backend; feature gating upstream should prevent callers from
        // reaching this.
        unimplemented!()
    }
1246
    unsafe fn read_acceleration_structure_compact_size(
        &mut self,
        _acceleration_structure: &super::AccelerationStructure,
        _buf: &super::Buffer,
    ) {
        // Ray-tracing acceleration structures are not implemented for this
        // backend; feature gating upstream should prevent callers from
        // reaching this.
        unimplemented!()
    }
1254}