1use arrayvec::ArrayVec;
2use ash::{extensions::khr, version::DeviceV1_0, vk};
3use inplace_it::inplace_or_alloc_from_iter;
4use smallvec::SmallVec;
5
6use hal::{
7 memory,
8 memory::{Requirements, Segment},
9 pool::CommandPoolCreateFlags,
10 pso::VertexInputRate,
11 window::SwapchainConfig,
12 {buffer, device as d, format, image, pass, pso, query, queue}, {Features, MemoryTypeId},
13};
14
15use std::{ffi::CString, marker::PhantomData, mem, ops::Range, ptr, sync::Arc};
16
17use crate::{command as cmd, conv, native as n, pool::RawCommandPool, window as w, Backend as B};
18use ash::vk::Handle;
19
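/// Owns the CPU-side arrays (entry-point strings, specialization entries, vertex and
/// blend descriptions) that the `vk::*CreateInfo` structs below point into, so every raw
/// pointer stays valid until `vkCreateGraphicsPipelines` is actually called. The
/// `PhantomData` carries the lifetime of the borrowed pipeline description.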
20#[derive(Debug, Default)]
21struct GraphicsPipelineInfoBuf<'a> {
22 dynamic_states: ArrayVec<[vk::DynamicState; 10]>,
24
25 c_strings: ArrayVec<[CString; 5]>,
27 stages: ArrayVec<[vk::PipelineShaderStageCreateInfo; 5]>,
28 specializations: ArrayVec<[vk::SpecializationInfo; 5]>,
29 specialization_entries: ArrayVec<[SmallVec<[vk::SpecializationMapEntry; 4]>; 5]>,
30
31 vertex_bindings: Vec<vk::VertexInputBindingDescription>,
32 vertex_attributes: Vec<vk::VertexInputAttributeDescription>,
33 blend_states: Vec<vk::PipelineColorBlendAttachmentState>,
34
35 sample_mask: [u32; 2],
36 vertex_input_state: vk::PipelineVertexInputStateCreateInfo,
37 input_assembly_state: vk::PipelineInputAssemblyStateCreateInfo,
38 tessellation_state: Option<vk::PipelineTessellationStateCreateInfo>,
39 viewport_state: vk::PipelineViewportStateCreateInfo,
40 rasterization_state: vk::PipelineRasterizationStateCreateInfo,
41 rasterization_conservative_state: vk::PipelineRasterizationConservativeStateCreateInfoEXT,
42 multisample_state: vk::PipelineMultisampleStateCreateInfo,
43 depth_stencil_state: vk::PipelineDepthStencilStateCreateInfo,
44 color_blend_state: vk::PipelineColorBlendStateCreateInfo,
45 pipeline_dynamic_state: vk::PipelineDynamicStateCreateInfo,
46 viewports: [vk::Viewport; 1],
47 scissors: [vk::Rect2D; 1],
48
49 lifetime: PhantomData<&'a vk::Pipeline>,
50}
51impl<'a> GraphicsPipelineInfoBuf<'a> {
52 unsafe fn add_stage(&mut self, stage: vk::ShaderStageFlags, source: &pso::EntryPoint<'a, B>) {
53 let string = CString::new(source.entry).unwrap();
54 self.c_strings.push(string);
55 let name = self.c_strings.last().unwrap().as_c_str();
56
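// The `vk::SpecializationInfo` built below stores raw pointers into this entry list,
// so the entries are pushed into a field of `self` (not a local) to keep them alive.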
57 self.specialization_entries.push(
58 source
59 .specialization
60 .constants
61 .iter()
62 .map(|c| vk::SpecializationMapEntry {
63 constant_id: c.id,
64 offset: c.range.start as _,
65 size: (c.range.end - c.range.start) as _,
66 })
67 .collect(),
68 );
69 let map_entries = self.specialization_entries.last().unwrap();
70
71 self.specializations.push(vk::SpecializationInfo {
72 map_entry_count: map_entries.len() as _,
73 p_map_entries: map_entries.as_ptr(),
74 data_size: source.specialization.data.len() as _,
75 p_data: source.specialization.data.as_ptr() as _,
76 });
77
78 self.stages.push(
79 vk::PipelineShaderStageCreateInfo::builder()
80 .flags(vk::PipelineShaderStageCreateFlags::empty())
81 .stage(stage)
82 .module(source.module.raw)
83 .name(name)
84 .specialization_info(self.specializations.last().unwrap())
85 .build(),
86 )
87 }
88
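// Translates a gfx-hal `GraphicsPipelineDesc` into the Vulkan create-info structs,
// recording a `vk::DynamicState` for every state the descriptor leaves dynamic.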
89 unsafe fn new(desc: &pso::GraphicsPipelineDesc<'a, B>, device: &super::RawDevice) -> Self {
90 let mut this = Self::default();
91
92 match desc.primitive_assembler {
93 pso::PrimitiveAssemblerDesc::Vertex {
94 ref buffers,
95 ref attributes,
96 ref input_assembler,
97 ref vertex,
98 ref tessellation,
99 ref geometry,
100 } => {
101 this.add_stage(vk::ShaderStageFlags::VERTEX, vertex);
104
105 if let Some(ref entry) = geometry {
107 this.add_stage(vk::ShaderStageFlags::GEOMETRY, entry);
108 }
109 if let Some(ts) = tessellation {
111 this.add_stage(vk::ShaderStageFlags::TESSELLATION_CONTROL, &ts.0);
112 this.add_stage(vk::ShaderStageFlags::TESSELLATION_EVALUATION, &ts.1);
113 }
114 this.vertex_bindings = buffers.iter().map(|vbuf| {
115 vk::VertexInputBindingDescription {
116 binding: vbuf.binding,
117 stride: vbuf.stride as u32,
118 input_rate: match vbuf.rate {
119 VertexInputRate::Vertex => vk::VertexInputRate::VERTEX,
120 VertexInputRate::Instance(divisor) => {
121 debug_assert_eq!(divisor, 1, "Custom vertex rate divisors not supported in Vulkan backend without extension");
122 vk::VertexInputRate::INSTANCE
123 },
124 },
125 }
126 }).collect();
127
128 this.vertex_attributes = attributes
129 .iter()
130 .map(|attr| vk::VertexInputAttributeDescription {
131 location: attr.location as u32,
132 binding: attr.binding as u32,
133 format: conv::map_format(attr.element.format),
134 offset: attr.element.offset as u32,
135 })
136 .collect();
137
138 this.vertex_input_state = vk::PipelineVertexInputStateCreateInfo::builder()
139 .flags(vk::PipelineVertexInputStateCreateFlags::empty())
140 .vertex_binding_descriptions(&this.vertex_bindings)
141 .vertex_attribute_descriptions(&this.vertex_attributes)
142 .build();
143
144 this.input_assembly_state = vk::PipelineInputAssemblyStateCreateInfo::builder()
145 .flags(vk::PipelineInputAssemblyStateCreateFlags::empty())
146 .topology(conv::map_topology(&input_assembler))
147 .primitive_restart_enable(input_assembler.restart_index.is_some())
148 .build();
149 }
150 pso::PrimitiveAssemblerDesc::Mesh { ref task, ref mesh } => {
151 this.vertex_bindings = Vec::new();
152 this.vertex_attributes = Vec::new();
153 this.vertex_input_state = vk::PipelineVertexInputStateCreateInfo::default();
154 this.input_assembly_state = vk::PipelineInputAssemblyStateCreateInfo::default();
155
156 if let Some(ref entry) = task {
158 this.add_stage(vk::ShaderStageFlags::TASK_NV, entry);
159 }
160
161 this.add_stage(vk::ShaderStageFlags::MESH_NV, mesh);
163 }
164 };
165
166 if let Some(ref entry) = desc.fragment {
168 this.add_stage(vk::ShaderStageFlags::FRAGMENT, entry);
169 }
170
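// For each state declared `pso::State::Dynamic`, record the matching `vk::DynamicState`
// and bake a placeholder value into the otherwise-static create info.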
171 let depth_bias = match desc.rasterizer.depth_bias {
172 Some(pso::State::Static(db)) => db,
173 Some(pso::State::Dynamic) => {
174 this.dynamic_states.push(vk::DynamicState::DEPTH_BIAS);
175 pso::DepthBias::default()
176 }
177 None => pso::DepthBias::default(),
178 };
179
180 let polygon_mode = match desc.rasterizer.polygon_mode {
181 pso::PolygonMode::Point => vk::PolygonMode::POINT,
182 pso::PolygonMode::Line => vk::PolygonMode::LINE,
183 pso::PolygonMode::Fill => vk::PolygonMode::FILL,
184 };
185
186 let line_width = match desc.rasterizer.line_width {
187 pso::State::Static(w) => w,
188 pso::State::Dynamic => {
189 this.dynamic_states.push(vk::DynamicState::LINE_WIDTH);
190 1.0
191 }
192 };
193
194 this.rasterization_conservative_state =
195 vk::PipelineRasterizationConservativeStateCreateInfoEXT::builder()
196 .conservative_rasterization_mode(match desc.rasterizer.conservative {
197 false => vk::ConservativeRasterizationModeEXT::DISABLED,
198 true => vk::ConservativeRasterizationModeEXT::OVERESTIMATE,
199 })
200 .build();
201
202 this.rasterization_state = {
203 let mut rasterization_state_builder =
204 vk::PipelineRasterizationStateCreateInfo::builder()
205 .flags(vk::PipelineRasterizationStateCreateFlags::empty())
206 .depth_clamp_enable(if desc.rasterizer.depth_clamping {
207 if device.features.contains(Features::DEPTH_CLAMP) {
208 true
209 } else {
210 warn!("Depth clamping was requested on a device with disabled feature");
211 false
212 }
213 } else {
214 false
215 })
216 .rasterizer_discard_enable(
217 desc.fragment.is_none()
218 && desc.depth_stencil.depth.is_none()
219 && desc.depth_stencil.stencil.is_none(),
220 )
221 .polygon_mode(polygon_mode)
222 .cull_mode(conv::map_cull_face(desc.rasterizer.cull_face))
223 .front_face(conv::map_front_face(desc.rasterizer.front_face))
224 .depth_bias_enable(desc.rasterizer.depth_bias.is_some())
225 .depth_bias_constant_factor(depth_bias.const_factor)
226 .depth_bias_clamp(depth_bias.clamp)
227 .depth_bias_slope_factor(depth_bias.slope_factor)
228 .line_width(line_width);
229 if desc.rasterizer.conservative {
230 rasterization_state_builder = rasterization_state_builder
231 .push_next(&mut this.rasterization_conservative_state);
232 }
233
234 rasterization_state_builder.build()
235 };
236
237 this.tessellation_state = {
238 if let pso::PrimitiveAssemblerDesc::Vertex {
239 input_assembler, ..
240 } = &desc.primitive_assembler
241 {
242 if let pso::Primitive::PatchList(patch_control_points) = input_assembler.primitive {
243 Some(
244 vk::PipelineTessellationStateCreateInfo::builder()
245 .flags(vk::PipelineTessellationStateCreateFlags::empty())
246 .patch_control_points(patch_control_points as _)
247 .build(),
248 )
249 } else {
250 None
251 }
252 } else {
253 None
254 }
255 };
256
257 this.viewport_state = {
258 match desc.baked_states.scissor {
262 Some(ref rect) => {
263 this.scissors = [conv::map_rect(rect)];
264 }
265 None => {
266 this.dynamic_states.push(vk::DynamicState::SCISSOR);
267 }
268 }
269 match desc.baked_states.viewport {
270 Some(ref vp) => {
271 this.viewports = [device.map_viewport(vp)];
272 }
273 None => {
274 this.dynamic_states.push(vk::DynamicState::VIEWPORT);
275 }
276 }
277 vk::PipelineViewportStateCreateInfo::builder()
278 .flags(vk::PipelineViewportStateCreateFlags::empty())
279 .scissors(&this.scissors)
280 .viewports(&this.viewports)
281 .build()
282 };
283
284 this.multisample_state = match desc.multisampling {
285 Some(ref ms) => {
286 this.sample_mask = [
287 (ms.sample_mask & 0xFFFFFFFF) as u32,
288 ((ms.sample_mask >> 32) & 0xFFFFFFFF) as u32,
289 ];
290 vk::PipelineMultisampleStateCreateInfo::builder()
291 .flags(vk::PipelineMultisampleStateCreateFlags::empty())
292 .rasterization_samples(conv::map_sample_count_flags(ms.rasterization_samples))
293 .sample_shading_enable(ms.sample_shading.is_some())
294 .min_sample_shading(ms.sample_shading.unwrap_or(0.0))
295 .sample_mask(&this.sample_mask)
296 .alpha_to_coverage_enable(ms.alpha_coverage)
297 .alpha_to_one_enable(ms.alpha_to_one)
298 .build()
299 }
300 None => vk::PipelineMultisampleStateCreateInfo::builder()
301 .flags(vk::PipelineMultisampleStateCreateFlags::empty())
302 .rasterization_samples(vk::SampleCountFlags::TYPE_1)
303 .build(),
304 };
305
306 let depth_stencil = desc.depth_stencil;
307 let (depth_test_enable, depth_write_enable, depth_compare_op) = match depth_stencil.depth {
308 Some(ref depth) => (true, depth.write as _, conv::map_comparison(depth.fun)),
309 None => (false, false, vk::CompareOp::NEVER),
310 };
311 let (stencil_test_enable, front, back) = match depth_stencil.stencil {
312 Some(ref stencil) => {
313 let mut front = conv::map_stencil_side(&stencil.faces.front);
314 let mut back = conv::map_stencil_side(&stencil.faces.back);
315 match stencil.read_masks {
316 pso::State::Static(ref sides) => {
317 front.compare_mask = sides.front;
318 back.compare_mask = sides.back;
319 }
320 pso::State::Dynamic => {
321 this.dynamic_states
322 .push(vk::DynamicState::STENCIL_COMPARE_MASK);
323 }
324 }
325 match stencil.write_masks {
326 pso::State::Static(ref sides) => {
327 front.write_mask = sides.front;
328 back.write_mask = sides.back;
329 }
330 pso::State::Dynamic => {
331 this.dynamic_states
332 .push(vk::DynamicState::STENCIL_WRITE_MASK);
333 }
334 }
335 match stencil.reference_values {
336 pso::State::Static(ref sides) => {
337 front.reference = sides.front;
338 back.reference = sides.back;
339 }
340 pso::State::Dynamic => {
341 this.dynamic_states
342 .push(vk::DynamicState::STENCIL_REFERENCE);
343 }
344 }
345 (true, front, back)
346 }
347 None => mem::zeroed(),
348 };
349 let (min_depth_bounds, max_depth_bounds) = match desc.baked_states.depth_bounds {
350 Some(ref range) => (range.start, range.end),
351 None => {
352 this.dynamic_states.push(vk::DynamicState::DEPTH_BOUNDS);
353 (0.0, 1.0)
354 }
355 };
356
357 this.depth_stencil_state = vk::PipelineDepthStencilStateCreateInfo::builder()
358 .flags(vk::PipelineDepthStencilStateCreateFlags::empty())
359 .depth_test_enable(depth_test_enable)
360 .depth_write_enable(depth_write_enable)
361 .depth_compare_op(depth_compare_op)
362 .depth_bounds_test_enable(depth_stencil.depth_bounds)
363 .stencil_test_enable(stencil_test_enable)
364 .front(front)
365 .back(back)
366 .min_depth_bounds(min_depth_bounds)
367 .max_depth_bounds(max_depth_bounds)
368 .build();
369
370 this.blend_states = desc
371 .blender
372 .targets
373 .iter()
374 .map(|color_desc| {
375 let color_write_mask =
376 vk::ColorComponentFlags::from_raw(color_desc.mask.bits() as _);
377 match color_desc.blend {
378 Some(ref bs) => {
379 let (color_blend_op, src_color_blend_factor, dst_color_blend_factor) =
380 conv::map_blend_op(bs.color);
381 let (alpha_blend_op, src_alpha_blend_factor, dst_alpha_blend_factor) =
382 conv::map_blend_op(bs.alpha);
383 vk::PipelineColorBlendAttachmentState {
384 color_write_mask,
385 blend_enable: vk::TRUE,
386 src_color_blend_factor,
387 dst_color_blend_factor,
388 color_blend_op,
389 src_alpha_blend_factor,
390 dst_alpha_blend_factor,
391 alpha_blend_op,
392 }
393 }
394 None => vk::PipelineColorBlendAttachmentState {
395 color_write_mask,
396 ..mem::zeroed()
397 },
398 }
399 })
400 .collect();
401
402 this.color_blend_state = vk::PipelineColorBlendStateCreateInfo::builder()
403 .flags(vk::PipelineColorBlendStateCreateFlags::empty())
404 .logic_op_enable(false)
405 .logic_op(vk::LogicOp::CLEAR)
406 .attachments(&this.blend_states)
407 .blend_constants(match desc.baked_states.blend_constants {
408 Some(value) => value,
409 None => {
410 this.dynamic_states.push(vk::DynamicState::BLEND_CONSTANTS);
411 [0.0; 4]
412 }
413 })
414 .build();
415
416 this.pipeline_dynamic_state = vk::PipelineDynamicStateCreateInfo::builder()
417 .flags(vk::PipelineDynamicStateCreateFlags::empty())
418 .dynamic_states(&this.dynamic_states)
419 .build();
420
421 this
422 }
423}
424
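/// Keeps the entry-point name and specialization data alive while the compute pipeline
/// create info (which stores raw pointers to them) is being consumed.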
425#[derive(Debug, Default)]
426struct ComputePipelineInfoBuf<'a> {
427 c_string: CString,
428 specialization: vk::SpecializationInfo,
429 entries: SmallVec<[vk::SpecializationMapEntry; 4]>,
430 lifetime: PhantomData<&'a vk::Pipeline>,
431}
432impl<'a> ComputePipelineInfoBuf<'a> {
433 unsafe fn new(desc: &pso::ComputePipelineDesc<'a, B>) -> Self {
434 let mut this = Self::default();
435 this.c_string = CString::new(desc.shader.entry).unwrap();
436 this.entries = desc
437 .shader
438 .specialization
439 .constants
440 .iter()
441 .map(|c| vk::SpecializationMapEntry {
442 constant_id: c.id,
443 offset: c.range.start as _,
444 size: (c.range.end - c.range.start) as _,
445 })
446 .collect();
447 this.specialization = vk::SpecializationInfo {
448 map_entry_count: this.entries.len() as _,
449 p_map_entries: this.entries.as_ptr(),
450 data_size: desc.shader.specialization.data.len() as _,
451 p_data: desc.shader.specialization.data.as_ptr() as _,
452 };
453 this
454 }
455}
456
457impl d::Device<B> for super::Device {
458 unsafe fn allocate_memory(
459 &self,
460 mem_type: MemoryTypeId,
461 size: u64,
462 ) -> Result<n::Memory, d::AllocationError> {
463 let info = vk::MemoryAllocateInfo::builder()
464 .allocation_size(size)
465 .memory_type_index(self.get_ash_memory_type_index(mem_type));
466
467 let result = self.shared.raw.allocate_memory(&info, None);
468
469 match result {
470 Ok(memory) => Ok(n::Memory { raw: memory }),
471 Err(vk::Result::ERROR_TOO_MANY_OBJECTS) => Err(d::AllocationError::TooManyObjects),
472 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
473 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
474 _ => unreachable!(),
475 }
476 }
477
478 unsafe fn create_command_pool(
479 &self,
480 family: queue::QueueFamilyId,
481 create_flags: CommandPoolCreateFlags,
482 ) -> Result<RawCommandPool, d::OutOfMemory> {
483 let mut flags = vk::CommandPoolCreateFlags::empty();
484 if create_flags.contains(CommandPoolCreateFlags::TRANSIENT) {
485 flags |= vk::CommandPoolCreateFlags::TRANSIENT;
486 }
487 if create_flags.contains(CommandPoolCreateFlags::RESET_INDIVIDUAL) {
488 flags |= vk::CommandPoolCreateFlags::RESET_COMMAND_BUFFER;
489 }
490
491 let info = vk::CommandPoolCreateInfo::builder()
492 .flags(flags)
493 .queue_family_index(family.0 as _);
494
495 let result = self.shared.raw.create_command_pool(&info, None);
496
497 match result {
498 Ok(pool) => Ok(RawCommandPool {
499 raw: pool,
500 device: self.shared.clone(),
501 }),
502 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host),
503 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device),
504 _ => unreachable!(),
505 }
506 }
507
508 unsafe fn destroy_command_pool(&self, pool: RawCommandPool) {
509 self.shared.raw.destroy_command_pool(pool.raw, None);
510 }
511
512 unsafe fn create_render_pass<'a, Ia, Is, Id>(
513 &self,
514 attachments: Ia,
515 subpasses: Is,
516 dependencies: Id,
517 ) -> Result<n::RenderPass, d::OutOfMemory>
518 where
519 Ia: Iterator<Item = pass::Attachment>,
520 Is: Iterator<Item = pass::SubpassDesc<'a>>,
521 Id: Iterator<Item = pass::SubpassDependency>,
522 {
523 let attachments_iter = attachments.map(|attachment| vk::AttachmentDescription {
524 flags: vk::AttachmentDescriptionFlags::empty(),
525 format: attachment
526 .format
527 .map_or(vk::Format::UNDEFINED, conv::map_format),
528 samples: conv::map_sample_count_flags(attachment.samples),
529 load_op: conv::map_attachment_load_op(attachment.ops.load),
530 store_op: conv::map_attachment_store_op(attachment.ops.store),
531 stencil_load_op: conv::map_attachment_load_op(attachment.stencil_ops.load),
532 stencil_store_op: conv::map_attachment_store_op(attachment.stencil_ops.store),
533 initial_layout: conv::map_image_layout(attachment.layouts.start),
534 final_layout: conv::map_image_layout(attachment.layouts.end),
535 });
536
537 let dependencies_iter = dependencies.map(|sdep|
538 vk::SubpassDependency {
540 src_subpass: sdep
541 .passes
542 .start
543 .map_or(vk::SUBPASS_EXTERNAL, |id| id as u32),
544 dst_subpass: sdep.passes.end.map_or(vk::SUBPASS_EXTERNAL, |id| id as u32),
545 src_stage_mask: conv::map_pipeline_stage(sdep.stages.start),
546 dst_stage_mask: conv::map_pipeline_stage(sdep.stages.end),
547 src_access_mask: conv::map_image_access(sdep.accesses.start),
548 dst_access_mask: conv::map_image_access(sdep.accesses.end),
549 dependency_flags: mem::transmute(sdep.flags),
550 });
551
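// `inplace_or_alloc_from_iter` materializes the iterator into a contiguous slice
// (stack-allocated when small enough, heap-allocated otherwise) so the builders below
// can borrow it for the duration of the Vulkan call.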
552 let result = inplace_or_alloc_from_iter(attachments_iter, |attachments| {
553 let attachment_refs = subpasses
554 .map(|subpass| {
555 fn make_ref(&(id, layout): &pass::AttachmentRef) -> vk::AttachmentReference {
556 vk::AttachmentReference {
557 attachment: id as _,
558 layout: conv::map_image_layout(layout),
559 }
560 }
561 let colors = subpass.colors.iter().map(make_ref).collect::<Box<[_]>>();
562 let depth_stencil = subpass.depth_stencil.map(make_ref);
563 let inputs = subpass.inputs.iter().map(make_ref).collect::<Box<[_]>>();
564 let preserves = subpass
565 .preserves
566 .iter()
567 .map(|&id| id as u32)
568 .collect::<Box<[_]>>();
569 let resolves = subpass.resolves.iter().map(make_ref).collect::<Box<[_]>>();
570
571 (colors, depth_stencil, inputs, preserves, resolves)
572 })
573 .collect::<Box<[_]>>();
574
575 let subpasses = attachment_refs
576 .iter()
577 .map(|(colors, depth_stencil, inputs, preserves, resolves)| {
578 vk::SubpassDescription {
579 flags: vk::SubpassDescriptionFlags::empty(),
580 pipeline_bind_point: vk::PipelineBindPoint::GRAPHICS,
581 input_attachment_count: inputs.len() as u32,
582 p_input_attachments: inputs.as_ptr(),
583 color_attachment_count: colors.len() as u32,
584 p_color_attachments: colors.as_ptr(),
585 p_resolve_attachments: if resolves.is_empty() {
586 ptr::null()
587 } else {
588 resolves.as_ptr()
589 },
590 p_depth_stencil_attachment: match depth_stencil {
591 Some(ref aref) => aref as *const _,
592 None => ptr::null(),
593 },
594 preserve_attachment_count: preserves.len() as u32,
595 p_preserve_attachments: preserves.as_ptr(),
596 }
597 })
598 .collect::<Box<[_]>>();
599
600 inplace_or_alloc_from_iter(dependencies_iter, |dependencies| {
601 let info = vk::RenderPassCreateInfo::builder()
602 .flags(vk::RenderPassCreateFlags::empty())
603 .attachments(&attachments)
604 .subpasses(&subpasses)
605 .dependencies(&dependencies);
606
607 self.shared
608 .raw
609 .create_render_pass(&info, None)
610 .map(|raw| n::RenderPass {
611 raw,
612 attachment_count: attachments.len(),
613 })
614 })
615 });
616
617 match result {
618 Ok(renderpass) => Ok(renderpass),
619 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host),
620 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device),
621 _ => unreachable!(),
622 }
623 }
624
625 unsafe fn create_pipeline_layout<'a, Is, Ic>(
626 &self,
627 set_layouts: Is,
628 push_constant_ranges: Ic,
629 ) -> Result<n::PipelineLayout, d::OutOfMemory>
630 where
631 Is: Iterator<Item = &'a n::DescriptorSetLayout>,
632 Ic: Iterator<Item = (pso::ShaderStageFlags, Range<u32>)>,
633 {
634 let vk_set_layouts_iter = set_layouts.map(|set| set.raw);
635
636 let push_constant_ranges_iter =
637 push_constant_ranges.map(|(s, ref r)| vk::PushConstantRange {
638 stage_flags: conv::map_stage_flags(s),
639 offset: r.start,
640 size: r.end - r.start,
641 });
642
643 let result = inplace_or_alloc_from_iter(vk_set_layouts_iter, |set_layouts| {
644 inplace_or_alloc_from_iter(push_constant_ranges_iter, |push_constant_ranges| {
648 let info = vk::PipelineLayoutCreateInfo::builder()
649 .flags(vk::PipelineLayoutCreateFlags::empty())
650 .set_layouts(&set_layouts)
651 .push_constant_ranges(&push_constant_ranges);
652
653 self.shared.raw.create_pipeline_layout(&info, None)
654 })
655 });
656
657 match result {
658 Ok(raw) => Ok(n::PipelineLayout { raw }),
659 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host),
660 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device),
661 _ => unreachable!(),
662 }
663 }
664
665 unsafe fn create_pipeline_cache(
666 &self,
667 data: Option<&[u8]>,
668 ) -> Result<n::PipelineCache, d::OutOfMemory> {
669 let info =
670 vk::PipelineCacheCreateInfo::builder().flags(vk::PipelineCacheCreateFlags::empty());
671 let info = if let Some(d) = data {
672 info.initial_data(d)
673 } else {
674 info
675 };
676
677 let result = self.shared.raw.create_pipeline_cache(&info, None);
678
679 match result {
680 Ok(raw) => Ok(n::PipelineCache { raw }),
681 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host),
682 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device),
683 _ => unreachable!(),
684 }
685 }
686
687 unsafe fn get_pipeline_cache_data(
688 &self,
689 cache: &n::PipelineCache,
690 ) -> Result<Vec<u8>, d::OutOfMemory> {
691 let result = self.shared.raw.get_pipeline_cache_data(cache.raw);
692
693 match result {
694 Ok(data) => Ok(data),
695 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host),
696 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device),
697 _ => unreachable!(),
698 }
699 }
700
701 unsafe fn destroy_pipeline_cache(&self, cache: n::PipelineCache) {
702 self.shared.raw.destroy_pipeline_cache(cache.raw, None);
703 }
704
705 unsafe fn merge_pipeline_caches<'a, I>(
706 &self,
707 target: &mut n::PipelineCache,
708 sources: I,
709 ) -> Result<(), d::OutOfMemory>
710 where
711 I: Iterator<Item = &'a n::PipelineCache>,
712 {
713 let caches_iter = sources.map(|s| s.raw);
714
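// Collect the source cache handles into a contiguous slice and call the raw Vulkan
// entry point directly, translating the returned `vk::Result` afterwards.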
715 let result = inplace_or_alloc_from_iter(caches_iter, |caches| {
716 self.shared.raw.fp_v1_0().merge_pipeline_caches(
718 self.shared.raw.handle(),
719 target.raw,
720 caches.len() as u32,
721 caches.as_ptr(),
722 )
723 });
724
725 match result {
726 vk::Result::SUCCESS => Ok(()),
727 vk::Result::ERROR_OUT_OF_HOST_MEMORY => Err(d::OutOfMemory::Host),
728 vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => Err(d::OutOfMemory::Device),
729 _ => unreachable!(),
730 }
731 }
732
733 unsafe fn create_graphics_pipeline<'a>(
734 &self,
735 desc: &pso::GraphicsPipelineDesc<'a, B>,
736 cache: Option<&n::PipelineCache>,
737 ) -> Result<n::GraphicsPipeline, pso::CreationError> {
738 debug!("create_graphics_pipeline {:?}", desc);
739 let buf = GraphicsPipelineInfoBuf::new(desc, &self.shared);
740
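// `buf` owns everything the create info borrows; it must stay alive until the
// `create_graphics_pipelines` call below has returned.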
741 let info = {
742 let (base_handle, base_index) = match desc.parent {
743 pso::BasePipeline::Pipeline(pipeline) => (pipeline.0, -1),
744 pso::BasePipeline::Index(index) => (vk::Pipeline::null(), index as _),
745 pso::BasePipeline::None => (vk::Pipeline::null(), -1),
746 };
747
748 let mut flags = vk::PipelineCreateFlags::empty();
749 match desc.parent {
750 pso::BasePipeline::None => (),
751 _ => {
752 flags |= vk::PipelineCreateFlags::DERIVATIVE;
753 }
754 }
755 if desc
756 .flags
757 .contains(pso::PipelineCreationFlags::DISABLE_OPTIMIZATION)
758 {
759 flags |= vk::PipelineCreateFlags::DISABLE_OPTIMIZATION;
760 }
761 if desc
762 .flags
763 .contains(pso::PipelineCreationFlags::ALLOW_DERIVATIVES)
764 {
765 flags |= vk::PipelineCreateFlags::ALLOW_DERIVATIVES;
766 }
767
768 let builder = vk::GraphicsPipelineCreateInfo::builder()
769 .flags(flags)
770 .stages(&buf.stages)
771 .vertex_input_state(&buf.vertex_input_state)
772 .input_assembly_state(&buf.input_assembly_state)
773 .rasterization_state(&buf.rasterization_state);
774 let builder = match buf.tessellation_state.as_ref() {
775 Some(t) => builder.tessellation_state(t),
776 None => builder,
777 };
778 builder
779 .viewport_state(&buf.viewport_state)
780 .multisample_state(&buf.multisample_state)
781 .depth_stencil_state(&buf.depth_stencil_state)
782 .color_blend_state(&buf.color_blend_state)
783 .dynamic_state(&buf.pipeline_dynamic_state)
784 .layout(desc.layout.raw)
785 .render_pass(desc.subpass.main_pass.raw)
786 .subpass(desc.subpass.index as _)
787 .base_pipeline_handle(base_handle)
788 .base_pipeline_index(base_index)
789 };
790
791 let mut pipeline = vk::Pipeline::null();
792
793 match self.shared.raw.fp_v1_0().create_graphics_pipelines(
794 self.shared.raw.handle(),
795 cache.map_or(vk::PipelineCache::null(), |cache| cache.raw),
796 1,
797 &*info,
798 ptr::null(),
799 &mut pipeline,
800 ) {
801 vk::Result::SUCCESS => Ok(n::GraphicsPipeline(pipeline)),
802 vk::Result::ERROR_OUT_OF_HOST_MEMORY => Err(d::OutOfMemory::Host.into()),
803 vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => Err(d::OutOfMemory::Device.into()),
804 _ => Err(pso::CreationError::Other),
805 }
806 }
807
808 unsafe fn create_compute_pipeline<'a>(
809 &self,
810 desc: &pso::ComputePipelineDesc<'a, B>,
811 cache: Option<&n::PipelineCache>,
812 ) -> Result<n::ComputePipeline, pso::CreationError> {
813 debug!("create_graphics_pipeline {:?}", desc);
814 let buf = ComputePipelineInfoBuf::new(desc);
815
816 let info = {
817 let stage = vk::PipelineShaderStageCreateInfo::builder()
818 .flags(vk::PipelineShaderStageCreateFlags::empty())
819 .stage(vk::ShaderStageFlags::COMPUTE)
820 .module(desc.shader.module.raw)
821 .name(buf.c_string.as_c_str())
822 .specialization_info(&buf.specialization);
823
824 let (base_handle, base_index) = match desc.parent {
825 pso::BasePipeline::Pipeline(pipeline) => (pipeline.0, -1),
826 pso::BasePipeline::Index(index) => (vk::Pipeline::null(), index as _),
827 pso::BasePipeline::None => (vk::Pipeline::null(), -1),
828 };
829
830 let mut flags = vk::PipelineCreateFlags::empty();
831 match desc.parent {
832 pso::BasePipeline::None => (),
833 _ => {
834 flags |= vk::PipelineCreateFlags::DERIVATIVE;
835 }
836 }
837 if desc
838 .flags
839 .contains(pso::PipelineCreationFlags::DISABLE_OPTIMIZATION)
840 {
841 flags |= vk::PipelineCreateFlags::DISABLE_OPTIMIZATION;
842 }
843 if desc
844 .flags
845 .contains(pso::PipelineCreationFlags::ALLOW_DERIVATIVES)
846 {
847 flags |= vk::PipelineCreateFlags::ALLOW_DERIVATIVES;
848 }
849
850 vk::ComputePipelineCreateInfo::builder()
851 .flags(flags)
852 .stage(*stage)
853 .layout(desc.layout.raw)
854 .base_pipeline_handle(base_handle)
855 .base_pipeline_index(base_index)
856 .build()
857 };
858
859 let mut pipeline = vk::Pipeline::null();
860
861 match self.shared.raw.fp_v1_0().create_compute_pipelines(
862 self.shared.raw.handle(),
863 cache.map_or(vk::PipelineCache::null(), |cache| cache.raw),
864 1,
865 &info,
866 ptr::null(),
867 &mut pipeline,
868 ) {
869 vk::Result::SUCCESS => {
870 if let Some(name) = desc.label {
871 self.shared
872 .set_object_name(vk::ObjectType::PIPELINE, pipeline, name);
873 }
874 Ok(n::ComputePipeline(pipeline))
875 }
876 vk::Result::ERROR_OUT_OF_HOST_MEMORY => Err(d::OutOfMemory::Host.into()),
877 vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => Err(d::OutOfMemory::Device.into()),
878 _ => Err(pso::CreationError::Other),
879 }
880 }
881
882 unsafe fn create_framebuffer<T>(
883 &self,
884 renderpass: &n::RenderPass,
885 attachments: T,
886 extent: image::Extent,
887 ) -> Result<n::Framebuffer, d::OutOfMemory>
888 where
889 T: Iterator<Item = image::FramebufferAttachment>,
890 {
891 if !self.shared.imageless_framebuffers {
892 return Ok(n::Framebuffer::Legacy {
893 name: String::new(),
894 map: Default::default(),
895 extent,
896 });
897 }
898
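// `view_formats` is pre-allocated with the full attachment count so the pushes below
// never reallocate; each attachment info points at the slot its format is about to occupy.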
899 let mut view_formats =
901 SmallVec::<[vk::Format; 5]>::with_capacity(renderpass.attachment_count);
902 let attachment_infos = attachments
903 .map(|fat| {
904 let mut info = vk::FramebufferAttachmentImageInfo::builder()
905 .usage(conv::map_image_usage(fat.usage))
906 .flags(conv::map_view_capabilities(fat.view_caps))
907 .width(extent.width)
908 .height(extent.height)
909 .layer_count(extent.depth)
910 .build();
911 info.view_format_count = 1;
912 info.p_view_formats = view_formats.as_ptr().add(view_formats.len());
913 view_formats.push(conv::map_format(fat.format));
914 info
915 })
916 .collect::<SmallVec<[_; 5]>>();
917
918 let mut attachments_info = vk::FramebufferAttachmentsCreateInfo::builder()
919 .attachment_image_infos(&attachment_infos)
920 .build();
921
922 let mut info = vk::FramebufferCreateInfo::builder()
923 .flags(vk::FramebufferCreateFlags::IMAGELESS_KHR)
924 .render_pass(renderpass.raw)
925 .width(extent.width)
926 .height(extent.height)
927 .layers(extent.depth)
928 .push_next(&mut attachments_info);
929 info.attachment_count = renderpass.attachment_count as u32;
930 info.p_attachments = ptr::null();
931
932 let result = self.shared.raw.create_framebuffer(&info, None);
933
934 match result {
935 Ok(raw) => Ok(n::Framebuffer::ImageLess(raw)),
936 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host),
937 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device),
938 _ => unreachable!(),
939 }
940 }
941
942 unsafe fn create_shader_module(
943 &self,
944 spirv_data: &[u32],
945 ) -> Result<n::ShaderModule, d::ShaderError> {
946 let info = vk::ShaderModuleCreateInfo::builder()
947 .flags(vk::ShaderModuleCreateFlags::empty())
948 .code(spirv_data);
949
950 let module = self.shared.raw.create_shader_module(&info, None);
951
952 match module {
953 Ok(raw) => Ok(n::ShaderModule { raw }),
954 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
955 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
956 Err(_) => {
957 Err(d::ShaderError::CompilationFailed(String::new()))
958 }
959 }
960 }
961
962 #[cfg(feature = "naga")]
963 unsafe fn create_shader_module_from_naga(
964 &self,
965 shader: d::NagaShader,
966 ) -> Result<n::ShaderModule, (d::ShaderError, d::NagaShader)> {
967 match naga::back::spv::write_vec(&shader.module, &shader.info, &self.naga_options) {
968 Ok(spv) => self.create_shader_module(&spv).map_err(|e| (e, shader)),
969 Err(e) => return Err((d::ShaderError::CompilationFailed(format!("{}", e)), shader)),
970 }
971 }
972
973 unsafe fn create_sampler(
974 &self,
975 desc: &image::SamplerDesc,
976 ) -> Result<n::Sampler, d::AllocationError> {
977 use hal::pso::Comparison;
978
979 let (anisotropy_enable, max_anisotropy) =
980 desc.anisotropy_clamp.map_or((false, 1.0), |aniso| {
981 if self.shared.features.contains(Features::SAMPLER_ANISOTROPY) {
982 (true, aniso as f32)
983 } else {
984 warn!(
985 "Anisotropy({}) was requested on a device with disabled feature",
986 aniso
987 );
988 (false, 1.0)
989 }
990 });
991
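// Declared outside the `if` below so that the struct chained in via `push_next`
// remains valid until `create_sampler` is called.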
992 let mut reduction_info;
993 let mut info = vk::SamplerCreateInfo::builder()
994 .flags(vk::SamplerCreateFlags::empty())
995 .mag_filter(conv::map_filter(desc.mag_filter))
996 .min_filter(conv::map_filter(desc.min_filter))
997 .mipmap_mode(conv::map_mip_filter(desc.mip_filter))
998 .address_mode_u(conv::map_wrap(desc.wrap_mode.0))
999 .address_mode_v(conv::map_wrap(desc.wrap_mode.1))
1000 .address_mode_w(conv::map_wrap(desc.wrap_mode.2))
1001 .mip_lod_bias(desc.lod_bias.0)
1002 .anisotropy_enable(anisotropy_enable)
1003 .max_anisotropy(max_anisotropy)
1004 .compare_enable(desc.comparison.is_some())
1005 .compare_op(conv::map_comparison(
1006 desc.comparison.unwrap_or(Comparison::Never),
1007 ))
1008 .min_lod(desc.lod_range.start.0)
1009 .max_lod(desc.lod_range.end.0)
1010 .border_color(conv::map_border_color(desc.border))
1011 .unnormalized_coordinates(!desc.normalized);
1012
1013 if self.shared.features.contains(Features::SAMPLER_REDUCTION) {
1014 reduction_info = vk::SamplerReductionModeCreateInfo::builder()
1015 .reduction_mode(conv::map_reduction(desc.reduction_mode))
1016 .build();
1017 info = info.push_next(&mut reduction_info);
1018 }
1019
1020 let result = self.shared.raw.create_sampler(&info, None);
1021
1022 match result {
1023 Ok(sampler) => Ok(n::Sampler(sampler)),
1024 Err(vk::Result::ERROR_TOO_MANY_OBJECTS) => Err(d::AllocationError::TooManyObjects),
1025 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1026 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1027 _ => unreachable!(),
1028 }
1029 }
1030
1031 unsafe fn create_buffer(
1033 &self,
1034 size: u64,
1035 usage: buffer::Usage,
1036 sparse: memory::SparseFlags,
1037 ) -> Result<n::Buffer, buffer::CreationError> {
1038 let info = vk::BufferCreateInfo::builder()
1039 .flags(conv::map_buffer_create_flags(sparse))
1040 .size(size)
1041 .usage(conv::map_buffer_usage(usage))
1042 .sharing_mode(vk::SharingMode::EXCLUSIVE);
1044 let result = self.shared.raw.create_buffer(&info, None);
1045
1046 match result {
1047 Ok(raw) => Ok(n::Buffer { raw }),
1048 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1049 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1050 _ => unreachable!(),
1051 }
1052 }
1053
1054 unsafe fn get_buffer_requirements(&self, buffer: &n::Buffer) -> Requirements {
1055 let req = self.shared.raw.get_buffer_memory_requirements(buffer.raw);
1056
1057 Requirements {
1058 size: req.size,
1059 alignment: req.alignment,
1060 type_mask: self.filter_memory_requirements(req.memory_type_bits),
1061 }
1062 }
1063
1064 unsafe fn bind_buffer_memory(
1065 &self,
1066 memory: &n::Memory,
1067 offset: u64,
1068 buffer: &mut n::Buffer,
1069 ) -> Result<(), d::BindError> {
1070 let result = self
1071 .shared
1072 .raw
1073 .bind_buffer_memory(buffer.raw, memory.raw, offset);
1074
1075 match result {
1076 Ok(()) => Ok(()),
1077 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1078 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1079 _ => unreachable!(),
1080 }
1081 }
1082
1083 unsafe fn create_buffer_view(
1084 &self,
1085 buffer: &n::Buffer,
1086 format: Option<format::Format>,
1087 range: buffer::SubRange,
1088 ) -> Result<n::BufferView, buffer::ViewCreationError> {
1089 let info = vk::BufferViewCreateInfo::builder()
1090 .flags(vk::BufferViewCreateFlags::empty())
1091 .buffer(buffer.raw)
1092 .format(format.map_or(vk::Format::UNDEFINED, conv::map_format))
1093 .offset(range.offset)
1094 .range(range.size.unwrap_or(vk::WHOLE_SIZE));
1095
1096 let result = self.shared.raw.create_buffer_view(&info, None);
1097
1098 match result {
1099 Ok(raw) => Ok(n::BufferView { raw }),
1100 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1101 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1102 _ => unreachable!(),
1103 }
1104 }
1105
1106 unsafe fn create_image(
1107 &self,
1108 kind: image::Kind,
1109 mip_levels: image::Level,
1110 format: format::Format,
1111 tiling: image::Tiling,
1112 usage: image::Usage,
1113 sparse: memory::SparseFlags,
1114 view_caps: image::ViewCapabilities,
1115 ) -> Result<n::Image, image::CreationError> {
1116 let flags = conv::map_view_capabilities_sparse(sparse, view_caps);
1117 let extent = conv::map_extent(kind.extent());
1118 let array_layers = kind.num_layers();
1119 let samples = kind.num_samples();
1120 let image_type = match kind {
1121 image::Kind::D1(..) => vk::ImageType::TYPE_1D,
1122 image::Kind::D2(..) => vk::ImageType::TYPE_2D,
1123 image::Kind::D3(..) => vk::ImageType::TYPE_3D,
1124 };
1125
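// Linear images start PREINITIALIZED so host-written data survives the first layout
// transition; optimal-tiled images start UNDEFINED.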
1126 let layout = match tiling {
1128 image::Tiling::Linear => vk::ImageLayout::PREINITIALIZED,
1129 image::Tiling::Optimal => vk::ImageLayout::UNDEFINED,
1130 };
1131
1132 let info = vk::ImageCreateInfo::builder()
1133 .flags(flags)
1134 .image_type(image_type)
1135 .format(conv::map_format(format))
1136 .extent(extent.clone())
1137 .mip_levels(mip_levels as u32)
1138 .array_layers(array_layers as u32)
1139 .samples(conv::map_sample_count_flags(samples))
1140 .tiling(conv::map_tiling(tiling))
1141 .usage(conv::map_image_usage(usage))
1142 .sharing_mode(vk::SharingMode::EXCLUSIVE)
1143 .initial_layout(layout);
1144
1145 let result = self.shared.raw.create_image(&info, None);
1146
1147 match result {
1148 Ok(raw) => Ok(n::Image {
1149 raw,
1150 ty: image_type,
1151 flags,
1152 extent,
1153 }),
1154 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1155 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1156 _ => unreachable!(),
1157 }
1158 }
1159
1160 unsafe fn get_image_requirements(&self, image: &n::Image) -> Requirements {
1161 let req = self.shared.raw.get_image_memory_requirements(image.raw);
1162
1163 Requirements {
1164 size: req.size,
1165 alignment: req.alignment,
1166 type_mask: self.filter_memory_requirements(req.memory_type_bits),
1167 }
1168 }
1169
1170 unsafe fn get_image_subresource_footprint(
1171 &self,
1172 image: &n::Image,
1173 subresource: image::Subresource,
1174 ) -> image::SubresourceFootprint {
1175 let sub = conv::map_subresource(&subresource);
1176 let layout = self.shared.raw.get_image_subresource_layout(image.raw, sub);
1177
1178 image::SubresourceFootprint {
1179 slice: layout.offset..layout.offset + layout.size,
1180 row_pitch: layout.row_pitch,
1181 array_pitch: layout.array_pitch,
1182 depth_pitch: layout.depth_pitch,
1183 }
1184 }
1185
1186 unsafe fn bind_image_memory(
1187 &self,
1188 memory: &n::Memory,
1189 offset: u64,
1190 image: &mut n::Image,
1191 ) -> Result<(), d::BindError> {
1192 let result = self
1195 .shared
1196 .raw
1197 .bind_image_memory(image.raw, memory.raw, offset);
1198
1199 match result {
1200 Ok(()) => Ok(()),
1201 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1202 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1203 _ => unreachable!(),
1204 }
1205 }
1206
1207 unsafe fn create_image_view(
1208 &self,
1209 image: &n::Image,
1210 kind: image::ViewKind,
1211 format: format::Format,
1212 swizzle: format::Swizzle,
1213 usage: image::Usage,
1214 range: image::SubresourceRange,
1215 ) -> Result<n::ImageView, image::ViewCreationError> {
1216 let is_cube = image
1217 .flags
1218 .intersects(vk::ImageCreateFlags::CUBE_COMPATIBLE);
1219 let view_type = match conv::map_view_kind(kind, image.ty, is_cube) {
1220 Some(ty) => ty,
1221 None => return Err(image::ViewCreationError::BadKind(kind)),
1222 };
1223
1224 self.image_view_from_raw(image.raw, view_type, format, swizzle, usage, range)
1225 }
1226
1227 unsafe fn create_descriptor_pool<T>(
1228 &self,
1229 max_sets: usize,
1230 descriptor_ranges: T,
1231 flags: pso::DescriptorPoolCreateFlags,
1232 ) -> Result<n::DescriptorPool, d::OutOfMemory>
1233 where
1234 T: Iterator<Item = pso::DescriptorRangeDesc>,
1235 {
1236 let pools_iter = descriptor_ranges.map(|pool| vk::DescriptorPoolSize {
1237 ty: conv::map_descriptor_type(pool.ty),
1238 descriptor_count: pool.count as u32,
1239 });
1240
1241 let result = inplace_or_alloc_from_iter(pools_iter, |pools| {
1242 let info = vk::DescriptorPoolCreateInfo::builder()
1243 .flags(conv::map_descriptor_pool_create_flags(flags))
1244 .max_sets(max_sets as u32)
1245 .pool_sizes(&pools);
1246
1247 self.shared.raw.create_descriptor_pool(&info, None)
1248 });
1249
1250 match result {
1251 Ok(pool) => Ok(n::DescriptorPool::new(pool, &self.shared)),
1252 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1253 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1254 _ => unreachable!(),
1255 }
1256 }
1257
1258 unsafe fn create_descriptor_set_layout<'a, I, J>(
1259 &self,
1260 binding_iter: I,
1261 immutable_samplers: J,
1262 ) -> Result<n::DescriptorSetLayout, d::OutOfMemory>
1263 where
1264 I: Iterator<Item = pso::DescriptorSetLayoutBinding>,
1265 J: Iterator<Item = &'a n::Sampler>,
1266 {
1267 let vk_immutable_samplers_iter = immutable_samplers.map(|is| is.0);
1268 let mut sampler_offset = 0;
1269
1270 let mut bindings = binding_iter.collect::<Vec<_>>();
1271 bindings.sort_by_key(|b| b.binding);
1273
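// Bindings are kept sorted by binding index so `write_descriptor_set` can binary-search
// them later; immutable samplers are consumed from one flat slice, `count` at a time.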
1274 let result = inplace_or_alloc_from_iter(vk_immutable_samplers_iter, |immutable_samplers| {
1275 let raw_bindings_iter = bindings.iter().map(|b| vk::DescriptorSetLayoutBinding {
1276 binding: b.binding,
1277 descriptor_type: conv::map_descriptor_type(b.ty),
1278 descriptor_count: b.count as _,
1279 stage_flags: conv::map_stage_flags(b.stage_flags),
1280 p_immutable_samplers: if b.immutable_samplers {
1281 let slice = &immutable_samplers[sampler_offset..];
1282 sampler_offset += b.count;
1283 slice.as_ptr()
1284 } else {
1285 ptr::null()
1286 },
1287 });
1288
1289 inplace_or_alloc_from_iter(raw_bindings_iter, |raw_bindings| {
1290 let info = vk::DescriptorSetLayoutCreateInfo::builder()
1294 .flags(vk::DescriptorSetLayoutCreateFlags::empty())
1295 .bindings(&raw_bindings);
1296
1297 self.shared.raw.create_descriptor_set_layout(&info, None)
1298 })
1299 });
1300
1301 match result {
1302 Ok(layout) => Ok(n::DescriptorSetLayout {
1303 raw: layout,
1304 bindings: Arc::new(bindings),
1305 }),
1306 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1307 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1308 _ => unreachable!(),
1309 }
1310 }
1311
1312 unsafe fn write_descriptor_set<'a, I>(&self, op: pso::DescriptorSetWrite<'a, B, I>)
1313 where
1314 I: Iterator<Item = pso::Descriptor<'a, B>>,
1315 {
1316 let descriptors = op.descriptors;
1317 let mut raw_writes =
1318 Vec::<vk::WriteDescriptorSet>::with_capacity(descriptors.size_hint().0);
1319 let mut image_infos = Vec::new();
1320 let mut buffer_infos = Vec::new();
1321 let mut texel_buffer_views = Vec::new();
1322
1323 let mut last_type = vk::DescriptorType::SAMPLER;
1326 let mut last_stages = pso::ShaderStageFlags::empty();
1327
1328 let mut binding_pos = op
1329 .set
1330 .bindings
1331 .binary_search_by_key(&op.binding, |b| b.binding)
1332 .expect("Descriptor set writes don't match the set layout!");
1333 let mut array_offset = op.array_offset;
1334
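// First pass: build the write structs, temporarily storing *indices* into the
// image/buffer/texel-view vectors in the pointer fields (those vectors may still grow).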
1335 for descriptor in descriptors {
1336 let layout_binding = &op.set.bindings[binding_pos];
1337 array_offset += 1;
1338 if array_offset == layout_binding.count {
1339 array_offset = 0;
1340 binding_pos += 1;
1341 }
1342
1343 let descriptor_type = conv::map_descriptor_type(layout_binding.ty);
1344 if descriptor_type == last_type && layout_binding.stage_flags == last_stages {
1345 raw_writes.last_mut().unwrap().descriptor_count += 1;
1346 } else {
1347 last_type = descriptor_type;
1348 last_stages = layout_binding.stage_flags;
1349 raw_writes.push(vk::WriteDescriptorSet {
1350 s_type: vk::StructureType::WRITE_DESCRIPTOR_SET,
1351 p_next: ptr::null(),
1352 dst_set: op.set.raw,
1353 dst_binding: layout_binding.binding,
1354 dst_array_element: if layout_binding.binding == op.binding {
1355 op.array_offset as _
1356 } else {
1357 0
1358 },
1359 descriptor_count: 1,
1360 descriptor_type,
1361 p_image_info: image_infos.len() as _,
1362 p_buffer_info: buffer_infos.len() as _,
1363 p_texel_buffer_view: texel_buffer_views.len() as _,
1364 });
1365 }
1366
1367 match descriptor {
1368 pso::Descriptor::Sampler(sampler) => {
1369 image_infos.push(
1370 vk::DescriptorImageInfo::builder()
1371 .sampler(sampler.0)
1372 .image_view(vk::ImageView::null())
1373 .image_layout(vk::ImageLayout::GENERAL)
1374 .build(),
1375 );
1376 }
1377 pso::Descriptor::Image(view, layout) => {
1378 image_infos.push(
1379 vk::DescriptorImageInfo::builder()
1380 .sampler(vk::Sampler::null())
1381 .image_view(view.raw)
1382 .image_layout(conv::map_image_layout(layout))
1383 .build(),
1384 );
1385 }
1386 pso::Descriptor::CombinedImageSampler(view, layout, sampler) => {
1387 image_infos.push(
1388 vk::DescriptorImageInfo::builder()
1389 .sampler(sampler.0)
1390 .image_view(view.raw)
1391 .image_layout(conv::map_image_layout(layout))
1392 .build(),
1393 );
1394 }
1395 pso::Descriptor::Buffer(buffer, ref sub) => {
1396 buffer_infos.push(
1397 vk::DescriptorBufferInfo::builder()
1398 .buffer(buffer.raw)
1399 .offset(sub.offset)
1400 .range(sub.size.unwrap_or(vk::WHOLE_SIZE))
1401 .build(),
1402 );
1403 }
1404 pso::Descriptor::TexelBuffer(view) => {
1405 texel_buffer_views.push(view.raw);
1406 }
1407 }
1408 }
1409
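// Second pass: the info vectors are complete now, so convert the stored indices into
// stable pointers based on each write's descriptor type.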
1410 for raw in raw_writes.iter_mut() {
1412 use crate::vk::DescriptorType as Dt;
1413 match raw.descriptor_type {
1414 Dt::SAMPLER
1415 | Dt::SAMPLED_IMAGE
1416 | Dt::STORAGE_IMAGE
1417 | Dt::COMBINED_IMAGE_SAMPLER
1418 | Dt::INPUT_ATTACHMENT => {
1419 raw.p_buffer_info = ptr::null();
1420 raw.p_texel_buffer_view = ptr::null();
1421 raw.p_image_info = image_infos[raw.p_image_info as usize..].as_ptr();
1422 }
1423 Dt::UNIFORM_TEXEL_BUFFER | Dt::STORAGE_TEXEL_BUFFER => {
1424 raw.p_buffer_info = ptr::null();
1425 raw.p_image_info = ptr::null();
1426 raw.p_texel_buffer_view =
1427 texel_buffer_views[raw.p_texel_buffer_view as usize..].as_ptr();
1428 }
1429 Dt::UNIFORM_BUFFER
1430 | Dt::STORAGE_BUFFER
1431 | Dt::STORAGE_BUFFER_DYNAMIC
1432 | Dt::UNIFORM_BUFFER_DYNAMIC => {
1433 raw.p_image_info = ptr::null();
1434 raw.p_texel_buffer_view = ptr::null();
1435 raw.p_buffer_info = buffer_infos[raw.p_buffer_info as usize..].as_ptr();
1436 }
1437 _ => panic!("unknown descriptor type"),
1438 }
1439 }
1440
1441 self.shared.raw.update_descriptor_sets(&raw_writes, &[]);
1442 }
1443
1444 unsafe fn copy_descriptor_set<'a>(&self, op: pso::DescriptorSetCopy<'a, B>) {
1445 let copy = vk::CopyDescriptorSet::builder()
1446 .src_set(op.src_set.raw)
1447 .src_binding(op.src_binding as u32)
1448 .src_array_element(op.src_array_offset as u32)
1449 .dst_set(op.dst_set.raw)
1450 .dst_binding(op.dst_binding as u32)
1451 .dst_array_element(op.dst_array_offset as u32)
1452 .descriptor_count(op.count as u32)
1453 .build();
1454
1455 self.shared.raw.update_descriptor_sets(&[], &[copy]);
1456 }
1457
1458 unsafe fn map_memory(
1459 &self,
1460 memory: &mut n::Memory,
1461 segment: Segment,
1462 ) -> Result<*mut u8, d::MapError> {
1463 let result = self.shared.raw.map_memory(
1464 memory.raw,
1465 segment.offset,
1466 segment.size.unwrap_or(vk::WHOLE_SIZE),
1467 vk::MemoryMapFlags::empty(),
1468 );
1469
1470 match result {
1471 Ok(ptr) => Ok(ptr as *mut _),
1472 Err(vk::Result::ERROR_MEMORY_MAP_FAILED) => Err(d::MapError::MappingFailed),
1473 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1474 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1475 _ => unreachable!(),
1476 }
1477 }
1478
1479 unsafe fn unmap_memory(&self, memory: &mut n::Memory) {
1480 self.shared.raw.unmap_memory(memory.raw)
1481 }
1482
1483 unsafe fn flush_mapped_memory_ranges<'a, I>(&self, ranges: I) -> Result<(), d::OutOfMemory>
1484 where
1485 I: Iterator<Item = (&'a n::Memory, Segment)>,
1486 {
1487 let vk_ranges_iter = ranges.map(conv::map_memory_range);
1488 let result = inplace_or_alloc_from_iter(vk_ranges_iter, |ranges| {
1489 self.shared.raw.flush_mapped_memory_ranges(&ranges)
1490 });
1491
1492 match result {
1493 Ok(()) => Ok(()),
1494 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host),
1495 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device),
1496 _ => unreachable!(),
1497 }
1498 }
1499
1500 unsafe fn invalidate_mapped_memory_ranges<'a, I>(&self, ranges: I) -> Result<(), d::OutOfMemory>
1501 where
1502 I: Iterator<Item = (&'a n::Memory, Segment)>,
1503 {
1504 let vk_ranges_iter = ranges.map(conv::map_memory_range);
1505 let result = inplace_or_alloc_from_iter(vk_ranges_iter, |ranges| {
1506 self.shared.raw.invalidate_mapped_memory_ranges(&ranges)
1507 });
1508
1509 match result {
1510 Ok(()) => Ok(()),
1511 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host),
1512 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device),
1513 _ => unreachable!(),
1514 }
1515 }
1516
1517 fn create_semaphore(&self) -> Result<n::Semaphore, d::OutOfMemory> {
1518 let info = vk::SemaphoreCreateInfo::builder().flags(vk::SemaphoreCreateFlags::empty());
1519
1520 let result = unsafe { self.shared.raw.create_semaphore(&info, None) };
1521
1522 match result {
1523 Ok(semaphore) => Ok(n::Semaphore(semaphore)),
1524 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1525 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1526 _ => unreachable!(),
1527 }
1528 }
1529
1530 fn create_fence(&self, signaled: bool) -> Result<n::Fence, d::OutOfMemory> {
1531 let info = vk::FenceCreateInfo::builder().flags(if signaled {
1532 vk::FenceCreateFlags::SIGNALED
1533 } else {
1534 vk::FenceCreateFlags::empty()
1535 });
1536
1537 let result = unsafe { self.shared.raw.create_fence(&info, None) };
1538
1539 match result {
1540 Ok(fence) => Ok(n::Fence(fence)),
1541 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1542 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1543 _ => unreachable!(),
1544 }
1545 }
1546
1547 unsafe fn reset_fence(&self, fence: &mut n::Fence) -> Result<(), d::OutOfMemory> {
1548 match self.shared.raw.reset_fences(&[fence.0]) {
1549 Ok(()) => Ok(()),
1550 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1551 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1552 _ => unreachable!(),
1553 }
1554 }
1555
1556 unsafe fn wait_for_fences<'a, I>(
1557 &self,
1558 fences_iter: I,
1559 wait: d::WaitFor,
1560 timeout_ns: u64,
1561 ) -> Result<bool, d::WaitError>
1562 where
1563 I: Iterator<Item = &'a n::Fence>,
1564 {
1565 let vk_fences_iter = fences_iter.map(|fence| fence.0);
1566
1567 let all = match wait {
1568 d::WaitFor::Any => false,
1569 d::WaitFor::All => true,
1570 };
1571
1572 let result = inplace_or_alloc_from_iter(vk_fences_iter, |fences| {
1573 self.shared.raw.wait_for_fences(fences, all, timeout_ns)
1574 });
1575
1576 match result {
1577 Ok(()) => Ok(true),
1578 Err(vk::Result::TIMEOUT) => Ok(false),
1579 Err(vk::Result::ERROR_DEVICE_LOST) => Err(d::DeviceLost.into()),
1580 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1581 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1582 _ => unreachable!(),
1583 }
1584 }
1585
1586 unsafe fn get_fence_status(&self, fence: &n::Fence) -> Result<bool, d::DeviceLost> {
1587 let result = self.shared.raw.get_fence_status(fence.0);
1588 match result {
1589 Ok(ok) => Ok(ok),
1590 Err(vk::Result::NOT_READY) => Ok(false),
1591 Err(vk::Result::ERROR_DEVICE_LOST) => Err(d::DeviceLost),
1592 _ => unreachable!(),
1593 }
1594 }
1595
1596 fn create_event(&self) -> Result<n::Event, d::OutOfMemory> {
1597 let info = vk::EventCreateInfo::builder().flags(vk::EventCreateFlags::empty());
1598
1599 let result = unsafe { self.shared.raw.create_event(&info, None) };
1600 match result {
1601 Ok(e) => Ok(n::Event(e)),
1602 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1603 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1604 _ => unreachable!(),
1605 }
1606 }
1607
1608 unsafe fn get_event_status(&self, event: &n::Event) -> Result<bool, d::WaitError> {
1609 let result = self.shared.raw.get_event_status(event.0);
1610 match result {
1611 Ok(b) => Ok(b),
1612 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1613 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1614 Err(vk::Result::ERROR_DEVICE_LOST) => Err(d::DeviceLost.into()),
1615 _ => unreachable!(),
1616 }
1617 }
1618
1619 unsafe fn set_event(&self, event: &mut n::Event) -> Result<(), d::OutOfMemory> {
1620 let result = self.shared.raw.set_event(event.0);
1621 match result {
1622 Ok(()) => Ok(()),
1623 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1624 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1625 _ => unreachable!(),
1626 }
1627 }
1628
1629 unsafe fn reset_event(&self, event: &mut n::Event) -> Result<(), d::OutOfMemory> {
1630 let result = self.shared.raw.reset_event(event.0);
1631 match result {
1632 Ok(()) => Ok(()),
1633 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1634 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1635 _ => unreachable!(),
1636 }
1637 }
1638
1639 unsafe fn free_memory(&self, memory: n::Memory) {
1640 self.shared.raw.free_memory(memory.raw, None);
1641 }
1642
1643 unsafe fn create_query_pool(
1644 &self,
1645 ty: query::Type,
1646 query_count: query::Id,
1647 ) -> Result<n::QueryPool, query::CreationError> {
1648 let (query_type, pipeline_statistics) = match ty {
1649 query::Type::Occlusion => (
1650 vk::QueryType::OCCLUSION,
1651 vk::QueryPipelineStatisticFlags::empty(),
1652 ),
1653 query::Type::PipelineStatistics(statistics) => (
1654 vk::QueryType::PIPELINE_STATISTICS,
1655 conv::map_pipeline_statistics(statistics),
1656 ),
1657 query::Type::Timestamp => (
1658 vk::QueryType::TIMESTAMP,
1659 vk::QueryPipelineStatisticFlags::empty(),
1660 ),
1661 };
1662
1663 let info = vk::QueryPoolCreateInfo::builder()
1664 .flags(vk::QueryPoolCreateFlags::empty())
1665 .query_type(query_type)
1666 .query_count(query_count)
1667 .pipeline_statistics(pipeline_statistics);
1668
1669 let result = self.shared.raw.create_query_pool(&info, None);
1670
1671 match result {
1672 Ok(pool) => Ok(n::QueryPool(pool)),
1673 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
1674 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
1675 _ => unreachable!(),
1676 }
1677 }
1678
1679 unsafe fn get_query_pool_results(
1680 &self,
1681 pool: &n::QueryPool,
1682 queries: Range<query::Id>,
1683 data: &mut [u8],
1684 stride: buffer::Stride,
1685 flags: query::ResultFlags,
1686 ) -> Result<bool, d::WaitError> {
1687 let result = self.shared.raw.fp_v1_0().get_query_pool_results(
1688 self.shared.raw.handle(),
1689 pool.0,
1690 queries.start,
1691 queries.end - queries.start,
1692 data.len(),
1693 data.as_mut_ptr() as *mut _,
1694 stride as vk::DeviceSize,
1695 conv::map_query_result_flags(flags),
1696 );
1697
1698 match result {
1699 vk::Result::SUCCESS => Ok(true),
1700 vk::Result::NOT_READY => Ok(false),
1701 vk::Result::ERROR_DEVICE_LOST => Err(d::DeviceLost.into()),
1702 vk::Result::ERROR_OUT_OF_HOST_MEMORY => Err(d::OutOfMemory::Host.into()),
1703 vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => Err(d::OutOfMemory::Device.into()),
1704 _ => unreachable!(),
1705 }
1706 }
1707
1708 unsafe fn destroy_query_pool(&self, pool: n::QueryPool) {
1709 self.shared.raw.destroy_query_pool(pool.0, None);
1710 }
1711
1712 unsafe fn destroy_shader_module(&self, module: n::ShaderModule) {
1713 self.shared.raw.destroy_shader_module(module.raw, None);
1714 }
1715
1716 unsafe fn destroy_render_pass(&self, rp: n::RenderPass) {
1717 self.shared.raw.destroy_render_pass(rp.raw, None);
1718 }
1719
1720 unsafe fn destroy_pipeline_layout(&self, pl: n::PipelineLayout) {
1721 self.shared.raw.destroy_pipeline_layout(pl.raw, None);
1722 }
1723
1724 unsafe fn destroy_graphics_pipeline(&self, pipeline: n::GraphicsPipeline) {
1725 self.shared.raw.destroy_pipeline(pipeline.0, None);
1726 }
1727
1728 unsafe fn destroy_compute_pipeline(&self, pipeline: n::ComputePipeline) {
1729 self.shared.raw.destroy_pipeline(pipeline.0, None);
1730 }
1731
1732 unsafe fn destroy_framebuffer(&self, fb: n::Framebuffer) {
1733 match fb {
1734 n::Framebuffer::ImageLess(raw) => {
1735 self.shared.raw.destroy_framebuffer(raw, None);
1736 }
1737 n::Framebuffer::Legacy { map, .. } => {
1738 for (_, raw) in map.into_inner() {
1739 self.shared.raw.destroy_framebuffer(raw, None);
1740 }
1741 }
1742 }
1743 }
1744
1745 unsafe fn destroy_buffer(&self, buffer: n::Buffer) {
1746 self.shared.raw.destroy_buffer(buffer.raw, None);
1747 }
1748
1749 unsafe fn destroy_buffer_view(&self, view: n::BufferView) {
1750 self.shared.raw.destroy_buffer_view(view.raw, None);
1751 }
1752
1753 unsafe fn destroy_image(&self, image: n::Image) {
1754 self.shared.raw.destroy_image(image.raw, None);
1755 }
1756
1757 unsafe fn destroy_image_view(&self, view: n::ImageView) {
1758 self.shared.raw.destroy_image_view(view.raw, None);
1759 }
1760
1761 unsafe fn destroy_sampler(&self, sampler: n::Sampler) {
1762 self.shared.raw.destroy_sampler(sampler.0, None);
1763 }
1764
1765 unsafe fn destroy_descriptor_pool(&self, pool: n::DescriptorPool) {
1766 self.shared.raw.destroy_descriptor_pool(pool.finish(), None);
1767 }
1768
1769 unsafe fn destroy_descriptor_set_layout(&self, layout: n::DescriptorSetLayout) {
1770 self.shared
1771 .raw
1772 .destroy_descriptor_set_layout(layout.raw, None);
1773 }
1774
1775 unsafe fn destroy_fence(&self, fence: n::Fence) {
1776 self.shared.raw.destroy_fence(fence.0, None);
1777 }
1778
1779 unsafe fn destroy_semaphore(&self, semaphore: n::Semaphore) {
1780 self.shared.raw.destroy_semaphore(semaphore.0, None);
1781 }
1782
1783 unsafe fn destroy_event(&self, event: n::Event) {
1784 self.shared.raw.destroy_event(event.0, None);
1785 }
1786
1787 fn wait_idle(&self) -> Result<(), d::OutOfMemory> {
1788 match unsafe { self.shared.raw.device_wait_idle() } {
1789 Ok(()) => Ok(()),
1790 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host),
1791 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device),
1792 _ => unreachable!(),
1793 }
1794 }
1795
1796 unsafe fn set_image_name(&self, image: &mut n::Image, name: &str) {
1797 self.shared
1798 .set_object_name(vk::ObjectType::IMAGE, image.raw, name)
1799 }
1800
1801 unsafe fn set_buffer_name(&self, buffer: &mut n::Buffer, name: &str) {
1802 self.shared
1803 .set_object_name(vk::ObjectType::BUFFER, buffer.raw, name)
1804 }
1805
1806 unsafe fn set_command_buffer_name(&self, command_buffer: &mut cmd::CommandBuffer, name: &str) {
1807 self.shared
1808 .set_object_name(vk::ObjectType::COMMAND_BUFFER, command_buffer.raw, name)
1809 }
1810
1811 unsafe fn set_semaphore_name(&self, semaphore: &mut n::Semaphore, name: &str) {
1812 self.shared
1813 .set_object_name(vk::ObjectType::SEMAPHORE, semaphore.0, name)
1814 }
1815
1816 unsafe fn set_fence_name(&self, fence: &mut n::Fence, name: &str) {
1817 self.shared
1818 .set_object_name(vk::ObjectType::FENCE, fence.0, name)
1819 }
1820
1821 unsafe fn set_framebuffer_name(&self, framebuffer: &mut n::Framebuffer, name: &str) {
1822 match *framebuffer {
1823 n::Framebuffer::ImageLess(raw) => {
1824 self.shared
1825 .set_object_name(vk::ObjectType::FRAMEBUFFER, raw, name);
1826 }
1827 n::Framebuffer::Legacy {
1828 name: ref mut old_name,
1829 ref mut map,
1830 extent: _,
1831 } => {
1832 old_name.clear();
1833 old_name.push_str(name);
1834 for &raw in map.get_mut().values() {
1835 self.shared
1836 .set_object_name(vk::ObjectType::FRAMEBUFFER, raw, name);
1837 }
1838 }
1839 }
1840 }
1841
1842 unsafe fn set_render_pass_name(&self, render_pass: &mut n::RenderPass, name: &str) {
1843 self.shared
1844 .set_object_name(vk::ObjectType::RENDER_PASS, render_pass.raw, name)
1845 }
1846
1847 unsafe fn set_descriptor_set_name(&self, descriptor_set: &mut n::DescriptorSet, name: &str) {
1848 self.shared
1849 .set_object_name(vk::ObjectType::DESCRIPTOR_SET, descriptor_set.raw, name)
1850 }
1851
1852 unsafe fn set_descriptor_set_layout_name(
1853 &self,
1854 descriptor_set_layout: &mut n::DescriptorSetLayout,
1855 name: &str,
1856 ) {
1857 self.shared.set_object_name(
1858 vk::ObjectType::DESCRIPTOR_SET_LAYOUT,
1859 descriptor_set_layout.raw,
1860 name,
1861 )
1862 }
1863
1864 unsafe fn set_pipeline_layout_name(&self, pipeline_layout: &mut n::PipelineLayout, name: &str) {
1865 self.shared
1866 .set_object_name(vk::ObjectType::PIPELINE_LAYOUT, pipeline_layout.raw, name)
1867 }
1868
1869 unsafe fn set_display_power_state(
1870 &self,
1871 display: &hal::display::Display<B>,
1872 power_state: &hal::display::control::PowerState,
1873 ) -> Result<(), hal::display::control::DisplayControlError> {
1874 let display_control_extension = match self.shared.extension_fns.display_control {
1875 Some(ref display_control_extension) => display_control_extension,
1876 _ => return Err(hal::display::control::DisplayControlError::UnsupportedFeature),
1877 };
1878
1879 let vk_power_state = match power_state {
1880 hal::display::control::PowerState::Off => vk::DisplayPowerStateEXT::OFF,
1881 hal::display::control::PowerState::Suspend => vk::DisplayPowerStateEXT::SUSPEND,
1882 hal::display::control::PowerState::On => vk::DisplayPowerStateEXT::ON,
1883 };
1884
1885 let vk_power_info = vk::DisplayPowerInfoEXT::builder()
1886 .power_state(vk_power_state)
1887 .build();
1888
1889 match display_control_extension.display_power_control_ext(
1890 self.shared.raw.handle(),
1891 display.handle.0,
1892 &vk_power_info,
1893 ) {
1894 vk::Result::SUCCESS => Ok(()),
1895 vk::Result::ERROR_OUT_OF_HOST_MEMORY => {
1896 Err(hal::display::control::DisplayControlError::OutOfHostMemory)
1897 }
1898 _ => unreachable!(),
1899 }
1900 }
1901
1902 unsafe fn register_device_event(
1903 &self,
1904 device_event: &hal::display::control::DeviceEvent,
1905 fence: &mut <B as hal::Backend>::Fence,
1906 ) -> Result<(), hal::display::control::DisplayControlError> {
1907 let display_control_extension = match self.shared.extension_fns.display_control {
1908 Some(ref display_control_extension) => display_control_extension,
1909 _ => return Err(hal::display::control::DisplayControlError::UnsupportedFeature),
1910 };
1911
1912 let vk_device_event = match device_event {
1913 hal::display::control::DeviceEvent::DisplayHotplug => {
1914 vk::DeviceEventTypeEXT::DISPLAY_HOTPLUG
1915 }
1916 };
1917
1918 let vk_device_event_info = vk::DeviceEventInfoEXT::builder()
1919 .device_event(vk_device_event)
1920 .build();
1921
1922 match display_control_extension.register_device_event_ext(
1923 self.shared.raw.handle(),
1924 &vk_device_event_info,
1925 std::ptr::null(),
1926 &mut fence.0,
1927 ) {
1928 vk::Result::SUCCESS => Ok(()),
1929 vk::Result::ERROR_OUT_OF_HOST_MEMORY => {
1930 Err(hal::display::control::DisplayControlError::OutOfHostMemory)
1931 }
1932 vk::Result::ERROR_FEATURE_NOT_PRESENT => {
1933 Err(hal::display::control::DisplayControlError::UnsupportedFeature)
1934 } err => {
1936 error!("Unexpected error: {:#?}", err);
1937 Err(hal::display::control::DisplayControlError::UnsupportedFeature)
1938 }
1939 }
1940 }
1941
1942 unsafe fn register_display_event(
1943 &self,
1944 display: &hal::display::Display<B>,
1945 display_event: &hal::display::control::DisplayEvent,
1946 fence: &mut <B as hal::Backend>::Fence,
1947 ) -> Result<(), hal::display::control::DisplayControlError> {
1948 let display_control_extension = match self.shared.extension_fns.display_control {
1949 Some(ref display_control_extension) => display_control_extension,
1950 _ => return Err(hal::display::control::DisplayControlError::UnsupportedFeature),
1951 };
1952
1953 let vk_display_event = match display_event {
1954 hal::display::control::DisplayEvent::FirstPixelOut => {
1955 vk::DisplayEventTypeEXT::FIRST_PIXEL_OUT
1956 }
1957 };
1958
1959 let vk_display_event_info = vk::DisplayEventInfoEXT::builder()
1960 .display_event(vk_display_event)
1961 .build();
1962
1963 match display_control_extension.register_display_event_ext(
1964 self.shared.raw.handle(),
1965 display.handle.0,
1966 &vk_display_event_info,
1967 std::ptr::null(),
1968 &mut fence.0,
1969 ) {
1970 vk::Result::SUCCESS => Ok(()),
1971 vk::Result::ERROR_OUT_OF_HOST_MEMORY => {
1972 Err(hal::display::control::DisplayControlError::OutOfHostMemory)
1973 }
1974 _ => unreachable!(),
1975 }
1976 }
1977
1978 unsafe fn create_allocate_external_buffer(
1979 &self,
1980 external_memory_type: hal::external_memory::ExternalBufferMemoryType,
1981 usage: hal::buffer::Usage,
1982 sparse: hal::memory::SparseFlags,
1983 type_mask: u32,
1984 size: u64,
1985 ) -> Result<(n::Buffer, n::Memory), hal::external_memory::ExternalResourceError>
1986 {
1987 if self.shared.extension_fns.external_memory.is_none() {
1988 panic!(
1989 "This function rely on `Feature::EXTERNAL_MEMORY`, but the feature is not enabled"
1990 );
1991 }
1992
1993 let external_memory_type_flags: hal::external_memory::ExternalMemoryTypeFlags =
1994 external_memory_type.into();
1995 let vk_external_memory_type =
1996 vk::ExternalMemoryHandleTypeFlags::from_raw(external_memory_type_flags.bits());
1997
1998 let mut external_buffer_ci = vk::ExternalMemoryBufferCreateInfo::builder()
1999 .handle_types(vk_external_memory_type)
2000 .build();
2001
2002 let info = vk::BufferCreateInfo::builder()
2003 .push_next(&mut external_buffer_ci)
2004 .flags(conv::map_buffer_create_flags(sparse))
2005 .size(size)
2006 .usage(conv::map_buffer_usage(usage))
2007             .sharing_mode(vk::SharingMode::EXCLUSIVE);
2008
2009         let mut buffer = match self.shared.raw.create_buffer(&info, None) {
2010 Ok(raw) => n::Buffer { raw },
2011 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => return Err(d::OutOfMemory::Host.into()),
2012 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
2013 return Err(d::OutOfMemory::Device.into())
2014 }
2015 Err(vk::Result::ERROR_INVALID_EXTERNAL_HANDLE_KHR) => {
2016 return Err(
2017 hal::external_memory::ExternalResourceError::InvalidExternalHandle,
2018 )
2019 }
2020 _ => unreachable!(),
2021 };
2022
2023 let buffer_req = self.get_buffer_requirements(&buffer);
2024
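        // Pick the first memory type accepted by both the buffer's requirements and the
        // caller-provided `type_mask`.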
2025 let mem_type = match (0..32)
2026 .into_iter()
2027 .find(|id| buffer_req.type_mask & type_mask & (1 << id) != 0)
2028 {
2029 Some(id) => id.into(),
2030 None => return Err(hal::external_memory::ExternalResourceError::NoValidMemoryTypeId),
2031 };
2032
2033         let mut export_memory_ai = vk::ExportMemoryAllocateInfo::builder()
2034 .handle_types(vk_external_memory_type)
2035 .build();
2036
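        // When the dedicated-allocation extension is available, tie the allocation to this
        // buffer so the driver can pick an optimal placement.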
2037 let mut dedicated_allocation_info =
2038 if self.shared.extension_fns.dedicated_allocation.is_some() {
2039 let dedicated_allocation_info = vk::MemoryDedicatedAllocateInfo::builder()
2040 .buffer(buffer.raw)
2041 .build();
2042 Some(dedicated_allocation_info)
2043 } else {
2044 None
2045 };
2046
2047 let allocate_info = if let Some(dedicated_allocation_info) = &mut dedicated_allocation_info
2048 {
2049 vk::MemoryAllocateInfo::builder().push_next(dedicated_allocation_info)
2050 } else {
2051 vk::MemoryAllocateInfo::builder()
2052 }
2053         .push_next(&mut export_memory_ai)
2054 .allocation_size(buffer_req.size)
2055 .memory_type_index(self.get_ash_memory_type_index(mem_type));
2056
2057         let memory = match self.shared.raw.allocate_memory(&allocate_info, None) {
2058             Ok(memory) => n::Memory { raw: memory },
2059             Err(err) => {
2060                 // Allocation failed: destroy the buffer before returning the error.
2061                 self.destroy_buffer(buffer);
2062                 match err {
2063                     vk::Result::ERROR_TOO_MANY_OBJECTS => {
2064                         return Err(hal::external_memory::ExternalResourceError::TooManyObjects)
2065                     }
2066                     vk::Result::ERROR_OUT_OF_HOST_MEMORY => return Err(d::OutOfMemory::Host.into()),
2067                     vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => {
2068                         return Err(d::OutOfMemory::Device.into())
2069                     }
2070                     _ => unreachable!(),
2071                 }
2072             }
2073         };
2074
2075 if let Err(err) = self.bind_buffer_memory(&memory, 0, &mut buffer) {
2076 error!("Failed to `bind_buffer_memory`: {:#?}", err);
2077 return Err(match err {
2078 d::BindError::OutOfMemory(out_of_memory) => out_of_memory.into(),
2079 d::BindError::WrongMemory => {
2080 panic!("This error should never happen and it is likely a vulkan backend bug.")
2081 }
2082 d::BindError::OutOfBounds => {
2083 panic!("Since external memory use a dedicated allocation, this should never happen and it is likely a vulkan backend bug.")
2084 }
2085 });
2086 }
2087
2088 Ok((buffer, memory))
2089 }
2090
2091 unsafe fn import_external_buffer(
2092 &self,
2093 external_memory: hal::external_memory::ExternalBufferMemory,
2094 usage: hal::buffer::Usage,
2095 sparse: hal::memory::SparseFlags,
2096 type_mask: u32,
2097 size: u64,
2098 ) -> Result<(n::Buffer, n::Memory), hal::external_memory::ExternalResourceError> {
2099 if self.shared.extension_fns.external_memory.is_none() {
2100 panic!(
2101 "This function rely on `Feature::EXTERNAL_MEMORY`, but the feature is not enabled"
2102 );
2103 }
2104
2105 let external_memory_type = external_memory.external_memory_type();
2106 let external_memory_type_flags: hal::external_memory::ExternalMemoryTypeFlags =
2107 external_memory_type.into();
2108 let vk_external_memory_type =
2109 vk::ExternalMemoryHandleTypeFlags::from_raw(external_memory_type_flags.bits());
2110
2111 let mut external_buffer_ci = vk::ExternalMemoryBufferCreateInfo::builder()
2112 .handle_types(vk_external_memory_type)
2113 .build();
2114
2115 let info = vk::BufferCreateInfo::builder()
2116 .push_next(&mut external_buffer_ci)
2117 .flags(conv::map_buffer_create_flags(sparse))
2118 .size(size)
2119 .usage(conv::map_buffer_usage(usage))
2120             .sharing_mode(vk::SharingMode::EXCLUSIVE);
2121
2122         let mut buffer = match self.shared.raw.create_buffer(&info, None) {
2123 Ok(raw) => n::Buffer { raw },
2124 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => return Err(d::OutOfMemory::Host.into()),
2125 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
2126 return Err(d::OutOfMemory::Device.into())
2127 }
2128 Err(vk::Result::ERROR_INVALID_EXTERNAL_HANDLE_KHR) => {
2129 return Err(hal::external_memory::ExternalResourceError::InvalidExternalHandle)
2130 }
2131 _ => unreachable!(),
2132 };
2133
2134 let buffer_req = self.get_buffer_requirements(&buffer);
2135
2136 let mut dedicated_allocation_info =
2137 if self.shared.extension_fns.dedicated_allocation.is_some() {
2138 let dedicated_allocation_info = vk::MemoryDedicatedAllocateInfo::builder()
2139 .buffer(buffer.raw)
2140 .build();
2141 Some(dedicated_allocation_info)
2142 } else {
2143 None
2144 };
2145
2146 let result = match external_memory.platform_memory_type() {
2147 #[cfg(unix)]
2148 hal::external_memory::PlatformMemoryType::Fd => {
2149 let fd = external_memory.fd().unwrap();
2150                 let external_memory_extension = self.shared.extension_fns.external_memory_fd.as_ref().expect("This function relies on `Feature::EXTERNAL_MEMORY`, but the feature is not enabled");
2151
2152 #[cfg(any(target_os = "linux", target_os = "android", doc))]
2153 if self.shared.extension_fns.external_memory_dma_buf.is_none()
2154 && external_memory_type_flags
2155 .contains(hal::external_memory::ExternalMemoryTypeFlags::DMA_BUF)
2156 {
2157 panic!("Requested to import a dma buf that is not supported by the system. Use `PhysicalDevice::external_image_properties` to check what is supported on the system.");
2158 }
2159
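                // An opaque FD can be bound to any memory type allowed by the buffer requirements;
                // other FD types (e.g. dma-buf) must query which memory types can import this fd.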
2160 let vk_memory_bits = if external_memory_type
2161 == hal::external_memory::ExternalMemoryType::OpaqueFd
2162 {
2163 u32::MAX
2164 } else {
2165 use std::os::unix::io::AsRawFd;
2166 match external_memory_extension
2167 .get_memory_fd_properties_khr(vk_external_memory_type, fd.as_raw_fd())
2168 {
2169 Ok(memory_handle_properties) => memory_handle_properties.memory_type_bits,
2170 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
2171 return Err(d::OutOfMemory::Host.into())
2172 }
2173 Err(vk::Result::ERROR_INVALID_EXTERNAL_HANDLE_KHR) => {
2174 error!("Failed to get memory fd properties");
2175 return Err(
2176 hal::external_memory::ExternalResourceError::InvalidExternalHandle,
2177 );
2178 }
2179 err => {
2180 panic!("Unexpected error: {:#?}", err);
2181 }
2182 }
2183 };
2184
2185 let mem_type = match (0..32)
2186 .into_iter()
2187 .find(|id| buffer_req.type_mask & type_mask & (1 << id) & vk_memory_bits != 0)
2188 {
2189 Some(id) => id.into(),
2190 None => return Err(hal::external_memory::ExternalResourceError::NoValidMemoryTypeId),
2191 };
2192
2193 let mut import_memory_info = vk::ImportMemoryFdInfoKHR::builder()
2194 .handle_type(vk_external_memory_type)
2195 .fd(**fd)
2196 .build();
2197
2198 let allocate_info =
2199 if let Some(dedicated_allocation_info) = &mut dedicated_allocation_info {
2200 vk::MemoryAllocateInfo::builder().push_next(dedicated_allocation_info)
2201 } else {
2202 vk::MemoryAllocateInfo::builder()
2203 }
2204 .push_next(&mut import_memory_info)
2205 .allocation_size(buffer_req.size)
2206 .memory_type_index(self.get_ash_memory_type_index(mem_type));
2207
2208 self.shared.raw.allocate_memory(&allocate_info, None)
2209 }
2210 #[cfg(windows)]
2211 hal::external_memory::PlatformMemoryType::Handle => {
2212 let handle = external_memory.handle().unwrap();
2213                 let external_memory_extension = self.shared.extension_fns.external_memory_win32.as_ref().expect("This function relies on `Feature::EXTERNAL_MEMORY`, but the feature is not enabled");
2214
2215 let vk_memory_bits = {
2216 use std::os::windows::io::AsRawHandle;
2217 let mut memory_handle_properties =
2218 vk::MemoryWin32HandlePropertiesKHR::builder().build();
2219 match external_memory_extension.get_memory_win32_handle_properties_khr(
2220 self.shared.raw.handle(),
2221 vk_external_memory_type,
2222 handle.as_raw_handle(),
2223 &mut memory_handle_properties,
2224 ) {
2225 vk::Result::SUCCESS => (),
2226 vk::Result::ERROR_OUT_OF_HOST_MEMORY => {
2227 return Err(d::OutOfMemory::Host.into())
2228 }
2229 vk::Result::ERROR_INVALID_EXTERNAL_HANDLE_KHR => return Err(
2230 hal::external_memory::ExternalResourceError::InvalidExternalHandle,
2231 ),
2232 _ => unreachable!(),
2233 };
2234 memory_handle_properties.memory_type_bits
2235 };
2236
2237 let mem_type = match (0..32)
2238 .into_iter()
2239 .find(|id| buffer_req.type_mask & type_mask & (1 << id) & vk_memory_bits != 0)
2240 {
2241 Some(id) => id.into(),
2242 None => return Err(hal::external_memory::ExternalResourceError::NoValidMemoryTypeId),
2243 };
2244
2245 let mut import_memory_info = vk::ImportMemoryWin32HandleInfoKHR::builder()
2246 .handle_type(vk_external_memory_type)
2247 .handle(**handle)
2248 .build();
2249
2250 let allocate_info =
2251 if let Some(dedicated_allocation_info) = &mut dedicated_allocation_info {
2252 vk::MemoryAllocateInfo::builder().push_next(dedicated_allocation_info)
2253 } else {
2254 vk::MemoryAllocateInfo::builder()
2255 }
2256 .push_next(&mut import_memory_info)
2257 .allocation_size(buffer_req.size)
2258 .memory_type_index(self.get_ash_memory_type_index(mem_type));
2259
2260 self.shared.raw.allocate_memory(&allocate_info, None)
2261 }
2262 hal::external_memory::PlatformMemoryType::Ptr => {
2263 let ptr = external_memory.ptr().unwrap();
2264                 let external_memory_extension = self.shared.extension_fns.external_memory_host.as_ref().expect("This function relies on `Feature::EXTERNAL_MEMORY`, but the feature is not enabled");
2265
2266 let vk_memory_bits = {
2267 let mut memory_ptr_properties =
2268 vk::MemoryHostPointerPropertiesEXT::builder().build();
2269 match external_memory_extension.get_memory_host_pointer_properties_ext(
2270 self.shared.raw.handle(),
2271 vk_external_memory_type,
2272 ptr.as_raw_ptr(),
2273 &mut memory_ptr_properties,
2274 ) {
2275 vk::Result::SUCCESS => (),
2276 vk::Result::ERROR_OUT_OF_HOST_MEMORY => {
2277 return Err(d::OutOfMemory::Host.into())
2278 }
2279 vk::Result::ERROR_INVALID_EXTERNAL_HANDLE_KHR => return Err(
2280 hal::external_memory::ExternalResourceError::InvalidExternalHandle,
2281 ),
2282 err => {
2283 panic!("Unexpected error: {:#?}", err);
2284 }
2285 };
2286 memory_ptr_properties.memory_type_bits
2287 };
2288
2289 let mem_type = match (0..32)
2290 .into_iter()
2291 .find(|id| buffer_req.type_mask & type_mask & (1 << id) & vk_memory_bits != 0)
2292 {
2293 Some(id) => id.into(),
2294 None => return Err(hal::external_memory::ExternalResourceError::NoValidMemoryTypeId),
2295 };
2296
2297 let mut import_memory_info = vk::ImportMemoryHostPointerInfoEXT::builder()
2298 .handle_type(vk_external_memory_type)
2299 .host_pointer(**ptr)
2300 .build();
2301
2302 let allocate_info =
2303 if let Some(dedicated_allocation_info) = &mut dedicated_allocation_info {
2304 vk::MemoryAllocateInfo::builder().push_next(dedicated_allocation_info)
2305 } else {
2306 vk::MemoryAllocateInfo::builder()
2307 }
2308 .push_next(&mut import_memory_info)
2309 .allocation_size(buffer_req.size)
2310 .memory_type_index(self.get_ash_memory_type_index(mem_type));
2311
2312 self.shared.raw.allocate_memory(&allocate_info, None)
2313 }
2314 };
2315
2316 let memory = match result {
2317 Ok(memory) => n::Memory { raw: memory },
2318 Err(err) => {
2319 self.destroy_buffer(buffer);
2320 match err {
2321 vk::Result::ERROR_TOO_MANY_OBJECTS => {
2322 return Err(hal::external_memory::ExternalResourceError::TooManyObjects)
2323 }
2324 vk::Result::ERROR_OUT_OF_HOST_MEMORY => return Err(d::OutOfMemory::Host.into()),
2325 vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => {
2326 return Err(d::OutOfMemory::Device.into())
2327 }
2328 vk::Result::ERROR_INVALID_EXTERNAL_HANDLE_KHR => {
2329 return Err(
2330 hal::external_memory::ExternalResourceError::InvalidExternalHandle,
2331 )
2332 }
2333 unexpected_error => {
2334 panic!(
2335 "Unexpected error on `allocate_memory`: {:#?}",
2336 unexpected_error
2337 );
2338 }
2339 }
2340 }
2341 };
2342
2343 if let Err(err) = self.bind_buffer_memory(&memory, 0, &mut buffer) {
2344 error!("Failed to `bind_buffer_memory`: {:#?}", err);
2345 return Err(match err {
2346 d::BindError::OutOfMemory(out_of_memory) => out_of_memory.into(),
2347 d::BindError::WrongMemory => {
2348 panic!("This error should never happen and it is likely a vulkan backend bug.")
2349 }
2350 d::BindError::OutOfBounds => {
2351 panic!("Since external memory use a dedicated allocation, this should never happen and it is likely a vulkan backend bug.")
2352 }
2353 });
2354 }
2355
2356 Ok((buffer, memory))
2357 }
2358 unsafe fn create_allocate_external_image(
2359 &self,
2360 external_memory_type: hal::external_memory::ExternalImageMemoryType,
2361 kind: image::Kind,
2362 mip_levels: image::Level,
2363 format: format::Format,
2364 tiling: image::Tiling,
2365 usage: image::Usage,
2366 sparse: memory::SparseFlags,
2367 view_caps: image::ViewCapabilities,
2368 type_mask: u32,
2369 ) -> Result<(n::Image, n::Memory), hal::external_memory::ExternalResourceError> {
2370 if self.shared.extension_fns.external_memory.is_none() {
2371 panic!(
2372 "This function rely on `Feature::EXTERNAL_MEMORY`, but the feature is not enabled"
2373 );
2374 }
2375
2376 let flags = conv::map_view_capabilities_sparse(sparse, view_caps);
2377 let extent = conv::map_extent(kind.extent());
2378 let array_layers = kind.num_layers();
2379 let samples = kind.num_samples();
2380 let image_type = match kind {
2381 image::Kind::D1(..) => vk::ImageType::TYPE_1D,
2382 image::Kind::D2(..) => vk::ImageType::TYPE_2D,
2383 image::Kind::D3(..) => vk::ImageType::TYPE_3D,
2384 };
2385
2386 let layout = match tiling {
2388 image::Tiling::Linear => vk::ImageLayout::PREINITIALIZED,
2389 image::Tiling::Optimal => vk::ImageLayout::UNDEFINED,
2390 };
2391
2392 let external_memory_type_flags: hal::external_memory::ExternalMemoryTypeFlags =
2393 external_memory_type.external_memory_type().into();
2394 let vk_external_memory_type =
2395 vk::ExternalMemoryHandleTypeFlags::from_raw(external_memory_type_flags.bits());
2396
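        // On Linux/Android, a dma-buf image may be restricted to a set of DRM format modifiers;
        // when any were supplied they are forwarded through the DRM format modifier list below.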
2397 #[cfg(any(target_os = "linux", target_os = "android"))]
2398 let (mut drm_modifiers_info, _drm_modifier_list) =
2400 if let hal::external_memory::ExternalImageMemoryType::DmaBuf(dma_modifiers) =
2401 external_memory_type
2402 {
2403 if dma_modifiers.is_empty() {(None, None)}
2404 else {
2405 let drm_modifier_list: Vec<u64> = dma_modifiers
2406 .iter()
2407 .filter_map(|drm_modifier| {
2408 use std::convert::TryInto;
2409 match drm_modifier.clone().try_into() {
2410 Ok(drm_modifier) => Some(drm_modifier),
2411 Err(err) => {
2412 error!("Invalid drm format modifier: {:#?}", err);
2413 None
2414 }
2415 }
2416 })
2417 .collect();
2418 let image_format_modifier_list = vk::ImageDrmFormatModifierListCreateInfoEXT::builder()
2419 .drm_format_modifiers(drm_modifier_list.as_slice())
2420 .build();
2421                 (Some(image_format_modifier_list), Some(drm_modifier_list))
2422 }
2423 } else {
2424 (None, None)
2425 };
2426
2427 let mut external_memory_ci = vk::ExternalMemoryImageCreateInfo::builder()
2428 .handle_types(vk_external_memory_type)
2429 .build();
2430
2431 let info = vk::ImageCreateInfo::builder()
2432 .push_next(&mut external_memory_ci)
2433 .flags(flags)
2434 .image_type(image_type)
2435 .format(conv::map_format(format))
2436 .extent(extent.clone())
2437 .mip_levels(mip_levels as u32)
2438 .array_layers(array_layers as u32)
2439 .samples(conv::map_sample_count_flags(samples))
2440 .tiling(conv::map_tiling(tiling))
2441 .usage(conv::map_image_usage(usage))
2442             .sharing_mode(vk::SharingMode::EXCLUSIVE)
2443             .initial_layout(layout);
2444
2445 #[cfg(any(target_os = "linux", target_os = "android"))]
2446 let info = if let Some(ref mut drm_modifiers_info) = drm_modifiers_info {
2447 info.push_next(drm_modifiers_info)
2448 } else {
2449 info
2450 };
2451
2452 let mut image = match self.shared.raw.create_image(&info, None) {
2453 Ok(raw) => n::Image {
2454 raw,
2455 ty: image_type,
2456 flags,
2457 extent,
2458 },
2459 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => return Err(d::OutOfMemory::Host.into()),
2460 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
2461 return Err(d::OutOfMemory::Device.into())
2462 }
2463 Err(unexpected_error) => {
2464 panic!(
2465 "Unexpected error on `create_image`: {:#?}",
2466 unexpected_error
2467 );
2468 }
2469 };
2470
2471 let image_req = self.get_image_requirements(&image);
2472
2473 let mem_type = match (0..32)
2474 .into_iter()
2475 .find(|id| image_req.type_mask & type_mask & (1 << id) != 0)
2476 {
2477 Some(id) => id.into(),
2478 None => unreachable!(),
2479 };
2480
2481         let mut export_memory_ai = vk::ExportMemoryAllocateInfo::builder()
2482 .handle_types(vk_external_memory_type)
2483 .build();
2484
2485 let mut dedicated_allocation_info =
2486 if self.shared.extension_fns.dedicated_allocation.is_some() {
2487 let dedicated_allocation_info = vk::MemoryDedicatedAllocateInfo::builder()
2488 .image(image.raw)
2489 .build();
2490 Some(dedicated_allocation_info)
2491 } else {
2492 None
2493 };
2494
2495 let allocate_info = if let Some(dedicated_allocation_info) = &mut dedicated_allocation_info
2496 {
2497 vk::MemoryAllocateInfo::builder().push_next(dedicated_allocation_info)
2498 } else {
2499 vk::MemoryAllocateInfo::builder()
2500 }
2501         .push_next(&mut export_memory_ai)
2502 .allocation_size(image_req.size)
2503 .memory_type_index(self.get_ash_memory_type_index(mem_type));
2504
2505 let memory = match self.shared.raw.allocate_memory(&allocate_info, None) {
2506 Ok(memory) => n::Memory { raw: memory },
2507 Err(err) => {
2508 self.destroy_image(image);
2509 match err {
2510 vk::Result::ERROR_TOO_MANY_OBJECTS => {
2511 return Err(
2512 hal::external_memory::ExternalResourceError::TooManyObjects,
2513 );
2514 }
2515 vk::Result::ERROR_OUT_OF_HOST_MEMORY => {
2516 return Err(d::OutOfMemory::Host.into());
2517 }
2518 vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => {
2519 return Err(d::OutOfMemory::Device.into());
2520 }
2521 unexpected_error => {
2522 panic!("Unexpected error: {:#?}", unexpected_error);
2523 }
2524 }
2525 }
2526 };
2527
2528 if let Err(err) = self.bind_image_memory(&memory, 0, &mut image) {
2529 error!("Failed to `bind_image_memory`: {:#?}", err);
2530 return Err(match err {
2531 d::BindError::OutOfMemory(out_of_memory) => out_of_memory.into(),
2532 d::BindError::WrongMemory => {
2533 panic!("This error should never happen and it is likely a vulkan backend bug.")
2534 }
2535 d::BindError::OutOfBounds => {
2536 panic!("Since external memory use a dedicated allocation, this should never happen and it is likely a vulkan backend bug.")
2537 }
2538 });
2539 }
2540
2541 Ok((image, memory))
2542 }
2543
2544 unsafe fn import_external_image(
2545 &self,
2546 external_memory: hal::external_memory::ExternalImageMemory,
2547 kind: image::Kind,
2548 mip_levels: image::Level,
2549 format: format::Format,
2550 tiling: image::Tiling,
2551 usage: image::Usage,
2552 sparse: memory::SparseFlags,
2553 view_caps: image::ViewCapabilities,
2554 type_mask: u32,
2555 ) -> Result<(n::Image, n::Memory), hal::external_memory::ExternalResourceError> {
2556 if self.shared.extension_fns.external_memory.is_none() {
2557 panic!(
2558 "This function rely on `Feature::EXTERNAL_MEMORY`, but the feature is not enabled"
2559 );
2560 }
2561
2562 let flags = conv::map_view_capabilities_sparse(sparse, view_caps);
2563 let extent = conv::map_extent(kind.extent());
2564 let array_layers = kind.num_layers();
2565 let samples = kind.num_samples();
2566 let image_type = match kind {
2567 image::Kind::D1(..) => vk::ImageType::TYPE_1D,
2568 image::Kind::D2(..) => vk::ImageType::TYPE_2D,
2569 image::Kind::D3(..) => vk::ImageType::TYPE_3D,
2570 };
2571
2572 let layout = match tiling {
2574 image::Tiling::Linear => vk::ImageLayout::PREINITIALIZED,
2575 image::Tiling::Optimal => vk::ImageLayout::UNDEFINED,
2576 };
2577
2578 let external_memory_type = external_memory.external_memory_type();
2579 let external_memory_type_flags: hal::external_memory::ExternalMemoryTypeFlags =
2580 external_memory_type.into();
2581 let vk_external_memory_type =
2582 vk::ExternalMemoryHandleTypeFlags::from_raw(external_memory_type_flags.bits());
2583
2584 let mut external_memory_ci = vk::ExternalMemoryImageCreateInfo::builder()
2585 .handle_types(vk_external_memory_type)
2586 .build();
2587
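        // Explicit DRM format modifier properties describe the exact plane layout of an imported
        // dma-buf; translate them so the image is created with that layout.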
2588 #[cfg(any(target_os = "linux", target_os = "android"))]
2589 let (mut drm_format_properties, _subresource_layouts) =
2591 if let hal::external_memory::ExternalImageMemory::DmaBuf(
2592 _fd,
2593 Some(drm_format_properties),
2594 ) = &external_memory
2595 {
2596 use std::convert::TryInto;
2597 match drm_format_properties.drm_modifier.try_into() {
2598 Ok(drm_format_modifier) => {
2599 let subresource_layouts: Vec<vk::SubresourceLayout> = drm_format_properties
2600 .plane_layouts
2601 .iter()
2602 .map(|subresource_footprint| vk::SubresourceLayout {
2603 offset: subresource_footprint.slice.start,
2604                             size: subresource_footprint.slice.end - subresource_footprint.slice.start,
2605 row_pitch: subresource_footprint.row_pitch,
2606 array_pitch: subresource_footprint.array_pitch,
2607 depth_pitch: subresource_footprint.depth_pitch,
2608 })
2609 .collect();
2610
2611 let vk_drm_format_properties =
2612 vk::ImageDrmFormatModifierExplicitCreateInfoEXT::builder()
2613 .drm_format_modifier(drm_format_modifier)
2614 .plane_layouts(subresource_layouts.as_slice())
2615 .build();
2616
2617 (Some(vk_drm_format_properties), subresource_layouts)
2618 }
2619 Err(err) => {
2620 error!("Unknow drm format modifier: {}", err);
2621 (None, Vec::new())
2622 }
2623 }
2624 } else {
2625 (None, Vec::new())
2626 };
2627
2628 let info = vk::ImageCreateInfo::builder()
2629 .push_next(&mut external_memory_ci)
2630 .flags(flags)
2631 .image_type(image_type)
2632 .format(conv::map_format(format))
2633 .extent(extent.clone())
2634 .mip_levels(mip_levels as u32)
2635 .array_layers(array_layers as u32)
2636 .samples(conv::map_sample_count_flags(samples))
2637 .tiling(conv::map_tiling(tiling))
2638 .usage(conv::map_image_usage(usage))
2639             .sharing_mode(vk::SharingMode::EXCLUSIVE)
2640             .initial_layout(layout);
2641
2642 #[cfg(any(target_os = "linux", target_os = "android"))]
2643 let info = if let Some(drm_format_properties) = &mut drm_format_properties {
2644 info.push_next(drm_format_properties)
2645 } else {
2646 info
2647 };
2648
2649 let mut image = match self.shared.raw.create_image(&info, None) {
2650 Ok(raw) => n::Image {
2651 raw,
2652 ty: image_type,
2653 flags,
2654 extent,
2655 },
2656 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => return Err(d::OutOfMemory::Host.into()),
2657 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
2658 return Err(d::OutOfMemory::Device.into())
2659 }
2660 Err(unexpected_error) => {
2661 panic!(
2662 "Unexpected error on `create_image`: {:#?}",
2663 unexpected_error
2664 );
2665 }
2666 };
2667
2668 let image_req = self.get_image_requirements(&image);
2669
2670 let mut dedicated_allocation_info =
2671 if self.shared.extension_fns.dedicated_allocation.is_some() {
2672 let dedicated_allocation_info = vk::MemoryDedicatedAllocateInfo::builder()
2673 .image(image.raw)
2674 .build();
2675 Some(dedicated_allocation_info)
2676 } else {
2677 None
2678 };
2679
2680 let result = match external_memory.platform_memory_type() {
2681 #[cfg(unix)]
2682 hal::external_memory::PlatformMemoryType::Fd => {
2683 let fd = external_memory.fd().unwrap();
2684                 let external_memory_extension = self.shared.extension_fns.external_memory_fd.as_ref().expect("This function relies on `Feature::EXTERNAL_MEMORY`, but the feature is not enabled");
2685
2686 #[cfg(any(target_os = "linux", target_os = "android", doc))]
2687 if self.shared.extension_fns.external_memory_dma_buf.is_none()
2688 && external_memory_type == hal::external_memory::ExternalMemoryType::DmaBuf
2689 {
2690 panic!("Requested to import a dma buf that is not supported by the system. Use `PhysicalDevice::external_image_properties` to check what is supported on the system.");
2691 }
2692
2693 let vk_memory_bits = if external_memory_type
2694 == hal::external_memory::ExternalMemoryType::OpaqueFd
2695 {
2696 u32::MAX
2697 } else {
2698 use std::os::unix::io::AsRawFd;
2699 match external_memory_extension
2700 .get_memory_fd_properties_khr(vk_external_memory_type, fd.as_raw_fd())
2701 {
2702 Ok(memory_handle_properties) => memory_handle_properties.memory_type_bits,
2703 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
2704 return Err(d::OutOfMemory::Host.into())
2705 }
2706 Err(vk::Result::ERROR_INVALID_EXTERNAL_HANDLE_KHR) => {
2707 error!("Failed to get memory fd properties");
2708 return Err(
2709 hal::external_memory::ExternalResourceError::InvalidExternalHandle,
2710 );
2711 }
2712 unexpected_error => {
2713 panic!(
2714 "Unexpected error on `get_memory_fd_properties_khr`: {:#?}",
2715 unexpected_error
2716 );
2717 }
2718 }
2719 };
2720
2721 let mem_type = match (0..32)
2722 .into_iter()
2723 .find(|id| image_req.type_mask & type_mask & (1 << id) & vk_memory_bits != 0)
2724 {
2725 Some(id) => id.into(),
2726 None => return Err(hal::external_memory::ExternalResourceError::NoValidMemoryTypeId),
2727 };
2728
2729 let mut import_memory_info = vk::ImportMemoryFdInfoKHR::builder()
2730 .handle_type(vk_external_memory_type)
2731 .fd(**fd)
2732 .build();
2733
2734 let allocate_info = vk::MemoryAllocateInfo::builder()
2735 .push_next(&mut import_memory_info)
2736 .allocation_size(image_req.size)
2737 .memory_type_index(self.get_ash_memory_type_index(mem_type));
2738
2739 let allocate_info =
2740 if let Some(dedicated_allocation_info) = &mut dedicated_allocation_info {
2741 allocate_info.push_next(dedicated_allocation_info)
2742 } else {
2743 allocate_info
2744 };
2745
2746 self.shared.raw.allocate_memory(&allocate_info, None)
2747 }
2748 #[cfg(windows)]
2749 hal::external_memory::PlatformMemoryType::Handle => {
2750 let handle = external_memory.handle().unwrap();
2751 let external_memory_extension = match self
2752 .shared
2753 .extension_fns
2754 .external_memory_win32
2755 {
2756 Some(ref functor) => functor,
2757 _ => {
2758 panic!("External memory windows handle extension not supported");
2759 }
2760 };
2761
2762 let vk_memory_bits = {
2763 use std::os::windows::io::AsRawHandle;
2764 let mut memory_handle_properties =
2765 vk::MemoryWin32HandlePropertiesKHR::builder().build();
2766 match external_memory_extension.get_memory_win32_handle_properties_khr(
2767 self.shared.raw.handle(),
2768 vk_external_memory_type,
2769 handle.as_raw_handle(),
2770 &mut memory_handle_properties,
2771 ) {
2772 vk::Result::SUCCESS => (),
2773 vk::Result::ERROR_OUT_OF_HOST_MEMORY => {
2774 return Err(d::OutOfMemory::Host.into())
2775 }
2776 vk::Result::ERROR_INVALID_EXTERNAL_HANDLE_KHR => return Err(
2777 hal::external_memory::ExternalResourceError::InvalidExternalHandle,
2778 ),
2779 _ => unreachable!(),
2780 };
2781 memory_handle_properties.memory_type_bits
2782 };
2783
2784 let mem_type = match (0..32)
2785 .into_iter()
2786 .find(|id| image_req.type_mask & type_mask & (1 << id) & vk_memory_bits != 0)
2787 {
2788 Some(id) => id.into(),
2789 None => return Err(hal::external_memory::ExternalResourceError::NoValidMemoryTypeId),
2790 };
2791
2792 let mut import_memory_info = vk::ImportMemoryWin32HandleInfoKHR::builder()
2793 .handle_type(vk_external_memory_type)
2794 .handle(**handle)
2795 .build();
2796
2797 let allocate_info =
2798 if let Some(dedicated_allocation_info) = &mut dedicated_allocation_info {
2799 vk::MemoryAllocateInfo::builder().push_next(dedicated_allocation_info)
2800 } else {
2801 vk::MemoryAllocateInfo::builder()
2802 }
2803 .push_next(&mut import_memory_info)
2804 .allocation_size(image_req.size)
2805 .memory_type_index(self.get_ash_memory_type_index(mem_type));
2806
2807 self.shared.raw.allocate_memory(&allocate_info, None)
2808 }
2809 hal::external_memory::PlatformMemoryType::Ptr => {
2810 let ptr = external_memory.ptr().unwrap();
2811                 let external_memory_extension = self.shared.extension_fns.external_memory_host.as_ref().expect("This function relies on `Feature::EXTERNAL_MEMORY`, but the feature is not enabled");
2812
2813 let vk_memory_bits = {
2814 let mut memory_ptr_properties =
2815 vk::MemoryHostPointerPropertiesEXT::builder().build();
2816 match external_memory_extension.get_memory_host_pointer_properties_ext(
2817 self.shared.raw.handle(),
2818 vk_external_memory_type,
2819 ptr.as_raw_ptr(),
2820 &mut memory_ptr_properties,
2821 ) {
2822 vk::Result::SUCCESS => (),
2823 vk::Result::ERROR_OUT_OF_HOST_MEMORY => {
2824 return Err(d::OutOfMemory::Host.into())
2825 }
2826 vk::Result::ERROR_INVALID_EXTERNAL_HANDLE_KHR => return Err(
2827 hal::external_memory::ExternalResourceError::InvalidExternalHandle,
2828 ),
2829 unexpected_error => {
2830 panic!("Unexpected error on `get_memory_host_pointer_properties_ext`: {:#?}", unexpected_error);
2831 }
2832 };
2833 memory_ptr_properties.memory_type_bits
2834 };
2835
2836 let mem_type = match (0..32)
2837 .into_iter()
2838 .find(|id| image_req.type_mask & type_mask & (1 << id) & vk_memory_bits != 0)
2839 {
2840 Some(id) => id.into(),
2841 None => return Err(hal::external_memory::ExternalResourceError::NoValidMemoryTypeId),
2842 };
2843
2844 let mut import_memory_info = vk::ImportMemoryHostPointerInfoEXT::builder()
2845 .handle_type(vk_external_memory_type)
2846 .host_pointer(**ptr)
2847 .build();
2848
2849 let allocate_info =
2850 if let Some(dedicated_allocation_info) = &mut dedicated_allocation_info {
2851 vk::MemoryAllocateInfo::builder().push_next(dedicated_allocation_info)
2852 } else {
2853 vk::MemoryAllocateInfo::builder()
2854 }
2855 .push_next(&mut import_memory_info)
2856 .allocation_size(image_req.size)
2857 .memory_type_index(self.get_ash_memory_type_index(mem_type));
2858
2859 self.shared.raw.allocate_memory(&allocate_info, None)
2860 }
2861 };
2862
2863 let memory = match result {
2864 Ok(memory) => n::Memory { raw: memory },
2865 Err(err) => {
2866 self.destroy_image(image);
2867 match err {
2868 vk::Result::ERROR_TOO_MANY_OBJECTS => {
2869 return Err(hal::external_memory::ExternalResourceError::TooManyObjects)
2870 }
2871 vk::Result::ERROR_OUT_OF_HOST_MEMORY => return Err(d::OutOfMemory::Host.into()),
2872 vk::Result::ERROR_OUT_OF_DEVICE_MEMORY => {
2873 return Err(d::OutOfMemory::Device.into())
2874 }
2875 vk::Result::ERROR_INVALID_EXTERNAL_HANDLE_KHR => {
2876 return Err(
2877 hal::external_memory::ExternalResourceError::InvalidExternalHandle,
2878 )
2879 }
2880 unexpected_error => {
2881 panic!(
2882 "Unexpected error on `allocate_memory`: {:#?}",
2883 unexpected_error
2884 );
2885 }
2886 }
2887 }
2888 };
2889
2890 if let Err(err) = self.bind_image_memory(&memory, 0, &mut image) {
2891 self.destroy_image(image);
2892 self.free_memory(memory);
2893 error!("Failed to `bind_image_memory`: {:#?}", err);
2894 return Err(match err {
2895 d::BindError::OutOfMemory(out_of_memory) => out_of_memory.into(),
2896 d::BindError::WrongMemory => {
2897 panic!("This error should never happen and it is likely a vulkan backend bug.")
2898 }
2899 d::BindError::OutOfBounds => {
2900 panic!("Since external memory use a dedicated allocation, this should never happen and it is likely a vulkan backend bug.")
2901 }
2902 });
2903 }
2904
2905 Ok((image, memory))
2906 }
2907
2908 unsafe fn export_memory(
2909 &self,
2910 external_memory_type: hal::external_memory::ExternalMemoryType,
2911 memory: &n::Memory,
2912 ) -> Result<hal::external_memory::PlatformMemory, hal::external_memory::ExternalMemoryExportError>
2913 {
2914 if self.shared.instance.external_memory_capabilities.is_none() {
2916 panic!("External memory not supported");
2917 };
2918
2919 let platform_memory_type: hal::external_memory::PlatformMemoryType =
2920 external_memory_type.clone().into();
2921
2922 let external_memory_type_flags: hal::external_memory::ExternalMemoryTypeFlags =
2923 external_memory_type.into();
2924 let vk_external_memory_type =
2925 vk::ExternalMemoryHandleTypeFlags::from_raw(external_memory_type_flags.bits());
2926
2927 match platform_memory_type {
2928 #[cfg(unix)]
2929 hal::external_memory::PlatformMemoryType::Fd => {
2930 let external_memory_extension = match self.shared.extension_fns.external_memory_fd {
2931 Some(ref functor) => functor,
2932 _ => {
2933 panic!("External memory fd not supported");
2934 }
2935 };
2936
2937 let memory_get_info = vk::MemoryGetFdInfoKHR::builder()
2938 .memory(memory.raw)
2939 .handle_type(vk_external_memory_type)
2940 .build();
2941
2942 let fd = match external_memory_extension.get_memory_fd(&memory_get_info) {
2943 Ok(fd) => hal::external_memory::Fd::from(fd),
2944 Err(vk::Result::ERROR_TOO_MANY_OBJECTS) => {
2945 return Err(hal::external_memory::ExternalMemoryExportError::TooManyObjects)
2946 }
2947 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
2948 return Err(
2949 hal::external_memory::ExternalMemoryExportError::OutOfHostMemory,
2950 )
2951 }
2952 unexpected_error => {
2953 panic!(
2954 "Unexpected error on `get_memory_fd`: {:#?}",
2955 unexpected_error
2956 );
2957 }
2958 };
2959 Ok(hal::external_memory::PlatformMemory::Fd(fd.into()))
2960 }
2961 #[cfg(windows)]
2962 hal::external_memory::PlatformMemoryType::Handle => {
2963                 let external_memory_extension = self.shared.extension_fns.external_memory_win32.as_ref().expect("This function relies on `Feature::EXTERNAL_MEMORY`, but the feature is not enabled");
2964
2965 let memory_get_info = vk::MemoryGetWin32HandleInfoKHR::builder()
2966 .memory(memory.raw)
2967 .handle_type(vk_external_memory_type)
2968 .build();
2969
2970 let mut handle = std::ptr::null_mut();
2971 match external_memory_extension.get_memory_win32_handle_khr(
2972 self.shared.raw.handle(),
2973 &memory_get_info,
2974 &mut handle,
2975 ) {
2976 vk::Result::SUCCESS => (),
2977 vk::Result::ERROR_TOO_MANY_OBJECTS => {
2978 return Err(hal::external_memory::ExternalMemoryExportError::TooManyObjects)
2979 }
2980 vk::Result::ERROR_OUT_OF_HOST_MEMORY => {
2981 return Err(
2982 hal::external_memory::ExternalMemoryExportError::OutOfHostMemory,
2983 )
2984 }
2985 unexpected_error => {
2986 panic!(
2987 "Unexpected error on `get_memory_win32_handle_khr`: {:#?}",
2988 unexpected_error
2989 );
2990 }
2991 }
2992 let handle = hal::external_memory::Handle::from(handle);
2993 Ok(hal::external_memory::PlatformMemory::Handle(handle.into()))
2994 }
2995 hal::external_memory::PlatformMemoryType::Ptr => {
2996 error!("Memory cannot be \"exported\" as host memory pointer. Use intead `Device::map_memory`.");
2997 Err(hal::external_memory::ExternalMemoryExportError::InvalidExternalHandle)
2998 }
2999 }
3000 }
3001
3002 #[allow(unused_variables)]
3005 unsafe fn drm_format_modifier(&self, image: &n::Image) -> Option<hal::format::DrmModifier> {
3006 #[cfg(any(target_os = "linux", target_os = "android"))]
3007 if let Some(ref extension) = self.shared.extension_fns.image_drm_format_modifier {
3008 let mut image_drm_format_modifier =
3009 vk::ImageDrmFormatModifierPropertiesEXT::builder().build();
3010 match extension.get_image_drm_format_modifier_properties_ext(
3011 self.shared.raw.handle(),
3012 image.raw,
3013 &mut image_drm_format_modifier,
3014 ) {
3015 vk::Result::SUCCESS => {
3016 return Some(image_drm_format_modifier.drm_format_modifier.into());
3017 }
3018 vk::Result::ERROR_OUT_OF_HOST_MEMORY => (),
3019 unexpected_error => {
3020 error!(
3021 "Unexpected error on `drm_format_modifier`: {:#?}",
3022 unexpected_error
3023 );
3024 }
3025 }
3026 }
3027
3028 None
3029 }
3030
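    // RenderDoc in-application API: frame captures are scoped to this device by passing its
    // raw Vulkan handle.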
3031 fn start_capture(&self) {
3032 unsafe {
3033 self.render_doc
3034 .start_frame_capture(self.shared.raw.handle().as_raw() as *mut _, ptr::null_mut())
3035 }
3036 }
3037
3038 fn stop_capture(&self) {
3039 unsafe {
3040 self.render_doc
3041 .end_frame_capture(self.shared.raw.handle().as_raw() as *mut _, ptr::null_mut())
3042 }
3043 }
3044}
3045
3046impl super::Device {
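    // Convert a Vulkan memory-type bitmask into the HAL-facing mask: only the types present in
    // `valid_ash_memory_types` are exposed to HAL, renumbered densely in order.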
3047 fn filter_memory_requirements(&self, ash_mask: u32) -> u32 {
3050 let mut hal_index = 0;
3051 let mut mask = 0;
3052 for ash_index in 0..32 {
3053 if self.valid_ash_memory_types & (1 << ash_index) != 0 {
3054 if ash_mask & (1 << ash_index) != 0 {
3055 mask |= 1 << hal_index;
3056 }
3057 hal_index += 1;
3058 }
3059 }
3060 mask
3061 }
3062
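    // Inverse of `filter_memory_requirements`: map the `hal_type`-th exposed memory type back to
    // its Vulkan index; panics if the id is out of range.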
3063 fn get_ash_memory_type_index(&self, hal_type: MemoryTypeId) -> u32 {
3064 let mut hal_count = hal_type.0;
3065 for ash_index in 0..32 {
3066 if self.valid_ash_memory_types & (1 << ash_index) != 0 {
3067 if hal_count == 0 {
3068 return ash_index;
3069 }
3070 hal_count -= 1;
3071 }
3072 }
3073 panic!("Unable to get Ash memory type for {:?}", hal_type);
3074 }
3075
3076 pub(crate) unsafe fn create_swapchain(
3077 &self,
3078 surface: &mut w::Surface,
3079 config: SwapchainConfig,
3080 provided_old_swapchain: Option<w::Swapchain>,
3081 ) -> Result<(w::Swapchain, Vec<n::Image>), hal::window::SwapchainError> {
3082 let functor = khr::Swapchain::new(&surface.raw.instance.inner, &self.shared.raw);
3083
3084 let old_swapchain = match provided_old_swapchain {
3085 Some(osc) => osc.raw,
3086 None => vk::SwapchainKHR::null(),
3087 };
3088
3089 let info = vk::SwapchainCreateInfoKHR::builder()
3090 .flags(vk::SwapchainCreateFlagsKHR::empty())
3091 .surface(surface.raw.handle)
3092 .min_image_count(config.image_count)
3093 .image_format(conv::map_format(config.format))
3094 .image_color_space(vk::ColorSpaceKHR::SRGB_NONLINEAR)
3095 .image_extent(vk::Extent2D {
3096 width: config.extent.width,
3097 height: config.extent.height,
3098 })
3099 .image_array_layers(1)
3100 .image_usage(conv::map_image_usage(config.image_usage))
3101 .image_sharing_mode(vk::SharingMode::EXCLUSIVE)
3102 .pre_transform(vk::SurfaceTransformFlagsKHR::IDENTITY)
3103 .composite_alpha(conv::map_composite_alpha_mode(config.composite_alpha_mode))
3104 .present_mode(conv::map_present_mode(config.present_mode))
3105 .clipped(true)
3106 .old_swapchain(old_swapchain);
3107
3108 let result = functor.create_swapchain(&info, None);
3109
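        // Passing `old_swapchain` retires it even if creation fails, so the retired handle is
        // destroyed here regardless of `result`.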
3110 if old_swapchain != vk::SwapchainKHR::null() {
3111 functor.destroy_swapchain(old_swapchain, None)
3112 }
3113
3114 let swapchain_raw = match result {
3115 Ok(swapchain_raw) => swapchain_raw,
3116 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
3117 return Err(d::OutOfMemory::Host.into());
3118 }
3119 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
3120 return Err(d::OutOfMemory::Device.into());
3121 }
3122 Err(vk::Result::ERROR_DEVICE_LOST) => return Err(d::DeviceLost.into()),
3123 Err(vk::Result::ERROR_SURFACE_LOST_KHR) => return Err(hal::window::SurfaceLost.into()),
3124 Err(vk::Result::ERROR_NATIVE_WINDOW_IN_USE_KHR) => {
3125 return Err(hal::window::SwapchainError::WindowInUse)
3126 }
3127 Err(other) => {
3128 error!("Unexpected result - driver bug? {:?}", other);
3129 return Err(hal::window::SwapchainError::Unknown);
3130 }
3131 };
3132
3133 let result = functor.get_swapchain_images(swapchain_raw);
3134
3135 let backbuffer_images = match result {
3136 Ok(backbuffer_images) => backbuffer_images,
3137 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
3138 return Err(d::OutOfMemory::Host.into());
3139 }
3140 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
3141 return Err(d::OutOfMemory::Device.into());
3142 }
3143 _ => unreachable!(),
3144 };
3145
3146 let extent = vk::Extent3D {
3147 width: config.extent.width,
3148 height: config.extent.height,
3149 depth: 1,
3150 };
3151 let swapchain = w::Swapchain {
3152 raw: swapchain_raw,
3153 functor,
3154 vendor_id: self.vendor_id,
3155 extent,
3156 };
3157
3158 let images = backbuffer_images
3159 .into_iter()
3160 .map(|image| n::Image {
3161 raw: image,
3162 ty: vk::ImageType::TYPE_2D,
3163 flags: vk::ImageCreateFlags::empty(),
3164 extent,
3165 })
3166 .collect();
3167
3168 Ok((swapchain, images))
3169 }
3170
3171 pub unsafe fn image_view_from_raw(
3172 &self,
3173 raw_image: vk::Image,
3174 view_type: vk::ImageViewType,
3175 format: format::Format,
3176 swizzle: format::Swizzle,
3177 usage: image::Usage,
3178 range: image::SubresourceRange,
3179 ) -> Result<n::ImageView, image::ViewCreationError> {
3180 let mut image_view_info;
3181 let mut info = vk::ImageViewCreateInfo::builder()
3182 .flags(vk::ImageViewCreateFlags::empty())
3183 .image(raw_image)
3184 .view_type(view_type)
3185 .format(conv::map_format(format))
3186 .components(conv::map_swizzle(swizzle))
3187 .subresource_range(conv::map_subresource_range(&range));
3188
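        // When image-view usage info is supported, attach it so the view is restricted to the
        // requested usage instead of inheriting every usage of the underlying image.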
3189 if self.shared.image_view_usage {
3190 image_view_info = vk::ImageViewUsageCreateInfo::builder()
3191 .usage(conv::map_image_usage(usage))
3192 .build();
3193 info = info.push_next(&mut image_view_info);
3194 }
3195
3196 let result = self.shared.raw.create_image_view(&info, None);
3197
3198 match result {
3199 Ok(raw) => Ok(n::ImageView {
3200 image: raw_image,
3201 raw,
3202 range,
3203 }),
3204 Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => Err(d::OutOfMemory::Host.into()),
3205 Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => Err(d::OutOfMemory::Device.into()),
3206 _ => unreachable!(),
3207 }
3208 }
3209}
3210
3211#[test]
3212fn test_send_sync() {
3213 fn foo<T: Send + Sync>() {}
3214 foo::<super::Device>()
3215}