use std::{
    fmt,
    ops::{Deref, DerefMut},
    ptr,
    time::Duration,
};

use skia_bindings::{self as sb, GrDirectContext, GrDirectContext_DirectContextID, SkRefCntBase};

#[cfg(feature = "d3d")]
use super::d3d;
#[cfg(feature = "gl")]
use super::gl;
#[cfg(feature = "vulkan")]
use super::vk;
use super::{
    BackendFormat, BackendRenderTarget, BackendTexture, ContextOptions, FlushInfo,
    MutableTextureState, PurgeResourceOptions, RecordingContext, SemaphoresSubmitted, SyncCpu,
};
use crate::{prelude::*, surfaces, Data, Image, Surface, TextureCompressionType};

#[repr(C)]
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct DirectContextId {
    id: u32,
}

native_transmutable!(
    GrDirectContext_DirectContextID,
    DirectContextId,
    direct_context_id_layout
);

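/// Reference-counted handle to Skia's `GrDirectContext`.
///
/// `DirectContext` dereferences to [`RecordingContext`], so all
/// recording-level queries are available through it as well.
///
/// A minimal end-to-end sketch; `make_context()` is a hypothetical
/// stand-in for one of the backend-specific constructors below:
///
/// ```ignore
/// let mut context: DirectContext = make_context();
/// // ... draw into surfaces created from this context ...
/// context.flush_and_submit();
/// ```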
pub type DirectContext = RCHandle<GrDirectContext>;

impl NativeRefCountedBase for GrDirectContext {
    type Base = SkRefCntBase;
}

impl Deref for DirectContext {
    type Target = RecordingContext;

    fn deref(&self) -> &Self::Target {
        unsafe { transmute_ref(self) }
    }
}

impl DerefMut for DirectContext {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { transmute_ref_mut(self) }
    }
}

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct ResourceCacheLimits {
    pub max_resources: usize,
    pub max_resource_bytes: usize,
}

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub struct ResourceCacheUsage {
    pub resource_count: usize,
    pub resource_bytes: usize,
}

impl fmt::Debug for DirectContext {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("DirectContext")
            .field("base", self as &RecordingContext)
            .field("resource_cache_limit", &self.resource_cache_limit())
            .field("resource_cache_usage", &self.resource_cache_usage())
            .field(
                "resource_cache_purgeable_bytes",
                &self.resource_cache_purgeable_bytes(),
            )
            .field(
                "supports_distance_field_text",
                &self.supports_distance_field_text(),
            )
            .finish()
    }
}

impl DirectContext {
    // Deprecated in Skia
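    /// Creates a [`DirectContext`] for an OpenGL backend.
    ///
    /// A minimal sketch, assuming a GL context is current on this thread so
    /// that `gl::Interface::new_native()` can resolve the GL entry points:
    ///
    /// ```ignore
    /// let interface = gl::Interface::new_native().expect("failed to load GL functions");
    /// let mut context = DirectContext::new_gl(interface, None)
    ///     .expect("failed to create a GL DirectContext");
    /// ```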
    #[cfg(feature = "gl")]
    pub fn new_gl<'a>(
        interface: impl Into<gl::Interface>,
        options: impl Into<Option<&'a ContextOptions>>,
    ) -> Option<DirectContext> {
        crate::gpu::direct_contexts::make_gl(interface, options)
    }

    // Deprecated in Skia
    #[cfg(feature = "vulkan")]
    pub fn new_vulkan<'a>(
        backend_context: &vk::BackendContext,
        options: impl Into<Option<&'a ContextOptions>>,
    ) -> Option<DirectContext> {
        crate::gpu::direct_contexts::make_vulkan(backend_context, options)
    }

    #[cfg(feature = "metal")]
    pub fn new_metal<'a>(
        backend: &crate::gpu::mtl::BackendContext,
        options: impl Into<Option<&'a ContextOptions>>,
    ) -> Option<DirectContext> {
        DirectContext::from_ptr(unsafe {
            sb::C_GrContext_MakeMetal(backend.native(), options.into().native_ptr_or_null())
        })
    }

    #[cfg(feature = "d3d")]
    #[allow(clippy::missing_safety_doc)]
    pub unsafe fn new_d3d<'a>(
        backend_context: &d3d::BackendContext,
        options: impl Into<Option<&'a ContextOptions>>,
    ) -> Option<DirectContext> {
        DirectContext::from_ptr(sb::C_GrDirectContext_MakeDirect3D(
            backend_context.native(),
            options.into().native_ptr_or_null(),
        ))
    }

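    /// Notifies the context that state in the underlying 3D API was changed
    /// outside of Skia, so that cached backend state gets re-sent on the next
    /// draw. Passing `None` resets all backend state.
    ///
    /// A minimal sketch; `context` is assumed to be a `&mut DirectContext`:
    ///
    /// ```ignore
    /// // After issuing raw OpenGL calls that modify GL state:
    /// context.reset(None);
    /// ```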
    pub fn reset(&mut self, backend_state: Option<u32>) -> &mut Self {
        unsafe {
            self.native_mut()
                .resetContext(backend_state.unwrap_or(sb::kAll_GrBackendState))
        }
        self
    }

    pub fn reset_gl_texture_bindings(&mut self) -> &mut Self {
        unsafe { self.native_mut().resetGLTextureBindings() }
        self
    }

    pub fn abandon(&mut self) -> &mut Self {
        unsafe {
            // self.native_mut().abandonContext()
            sb::GrDirectContext_abandonContext(self.native_mut() as *mut _ as _)
        }
        self
    }

    pub fn is_device_lost(&mut self) -> bool {
        unsafe { self.native_mut().isDeviceLost() }
    }

    // TODO: threadSafeProxy()

    pub fn oomed(&mut self) -> bool {
        unsafe { self.native_mut().oomed() }
    }

    pub fn release_resources_and_abandon(&mut self) -> &mut Self {
        unsafe {
            sb::GrDirectContext_releaseResourcesAndAbandonContext(self.native_mut() as *mut _ as _)
        }
        self
    }

    pub fn resource_cache_limit(&self) -> usize {
        unsafe { self.native().getResourceCacheLimit() }
    }

    pub fn resource_cache_usage(&self) -> ResourceCacheUsage {
        let mut resource_count = 0;
        let mut resource_bytes = 0;
        unsafe {
            self.native()
                .getResourceCacheUsage(&mut resource_count, &mut resource_bytes)
        }
        ResourceCacheUsage {
            resource_count: resource_count.try_into().unwrap(),
            resource_bytes,
        }
    }

    pub fn resource_cache_purgeable_bytes(&self) -> usize {
        unsafe { self.native().getResourceCachePurgeableBytes() }
    }

    pub fn set_resource_cache_limits(&mut self, limits: ResourceCacheLimits) {
        unsafe {
            self.native_mut().setResourceCacheLimits(
                limits.max_resources.try_into().unwrap(),
                limits.max_resource_bytes,
            )
        }
    }

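    /// Caps the GPU resource cache at `max_resource_bytes`.
    ///
    /// A minimal sketch, limiting the cache to 256 MiB; `context` is assumed
    /// to be a `&mut DirectContext`:
    ///
    /// ```ignore
    /// context.set_resource_cache_limit(256 * 1024 * 1024);
    /// assert_eq!(context.resource_cache_limit(), 256 * 1024 * 1024);
    /// ```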
    pub fn set_resource_cache_limit(&mut self, max_resource_bytes: usize) {
        unsafe { self.native_mut().setResourceCacheLimit(max_resource_bytes) }
    }

    pub fn free_gpu_resources(&mut self) -> &mut Self {
        unsafe { sb::GrDirectContext_freeGpuResources(self.native_mut() as *mut _ as _) }
        self
    }

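    /// Purges GPU resources that have not been used for `not_used` or longer,
    /// or that are otherwise marked for deletion.
    ///
    /// A minimal sketch, purging everything that has been idle for at least
    /// five seconds:
    ///
    /// ```ignore
    /// use std::time::Duration;
    ///
    /// context.perform_deferred_cleanup(Duration::from_secs(5), None);
    /// ```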
    pub fn perform_deferred_cleanup(
        &mut self,
        not_used: Duration,
        opts: impl Into<Option<PurgeResourceOptions>>,
    ) -> &mut Self {
        unsafe {
            sb::C_GrDirectContext_performDeferredCleanup(
                self.native_mut(),
                not_used.as_millis().try_into().unwrap(),
                opts.into().unwrap_or(PurgeResourceOptions::AllResources),
            )
        }
        self
    }

    pub fn purge_unlocked_resource_bytes(
        &mut self,
        bytes_to_purge: usize,
        prefer_scratch_resources: bool,
    ) -> &mut Self {
        unsafe {
            self.native_mut()
                .purgeUnlockedResources(bytes_to_purge, prefer_scratch_resources)
        }
        self
    }

    pub fn purge_unlocked_resources(&mut self, opts: PurgeResourceOptions) -> &mut Self {
        unsafe { self.native_mut().purgeUnlockedResources1(opts) }
        self
    }

    // TODO: wait()

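    /// Flushes all pending work to the GPU and submits it, like calling
    /// `flush(None)` followed by `submit(None)`.
    ///
    /// A typical end-of-frame sketch:
    ///
    /// ```ignore
    /// // ... draw into surfaces backed by this context ...
    /// context.flush_and_submit();
    /// ```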
    pub fn flush_and_submit(&mut self) -> &mut Self {
        unsafe { sb::C_GrDirectContext_flushAndSubmit(self.native_mut()) }
        self
    }

    pub fn flush_submit_and_sync_cpu(&mut self) -> &mut Self {
        self.flush(&FlushInfo::default());
        self.submit(SyncCpu::Yes);
        self
    }

    #[deprecated(since = "0.37.0", note = "Use flush()")]
    pub fn flush_with_info(&mut self, info: &FlushInfo) -> SemaphoresSubmitted {
        self.flush(info)
    }

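    /// Flushes recorded work to the GPU without submitting it; follow up with
    /// [`DirectContext::submit`] to hand the work to the driver.
    ///
    /// A minimal sketch using the default [`FlushInfo`]:
    ///
    /// ```ignore
    /// let _semaphores = context.flush(None);
    /// context.submit(None);
    /// ```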
    pub fn flush<'a>(&mut self, info: impl Into<Option<&'a FlushInfo>>) -> SemaphoresSubmitted {
        let n = self.native_mut();
        if let Some(info) = info.into() {
            unsafe { n.flush(info.native()) }
        } else {
            let info = FlushInfo::default();
            unsafe { n.flush(info.native()) }
        }
    }

    pub fn flush_image_with_info(
        &mut self,
        image: &Image,
        info: &FlushInfo,
    ) -> SemaphoresSubmitted {
        unsafe {
            sb::C_GrDirectContext_flushImageWithInfo(
                self.native_mut(),
                image.clone().into_ptr(),
                info.native(),
            )
        }
    }

    pub fn flush_image(&mut self, image: &Image) {
        unsafe { sb::C_GrDirectContext_flushImage(self.native_mut(), image.clone().into_ptr()) }
    }

    pub fn flush_and_submit_image(&mut self, image: &Image) {
        unsafe {
            sb::C_GrDirectContext_flushAndSubmitImage(self.native_mut(), image.clone().into_ptr())
        }
    }

    pub fn flush_surface_with_access(
        &mut self,
        surface: &mut Surface,
        access: surfaces::BackendSurfaceAccess,
        info: &FlushInfo,
    ) -> SemaphoresSubmitted {
        unsafe {
            self.native_mut()
                .flush3(surface.native_mut(), access, info.native())
        }
    }

    pub fn flush_surface_with_texture_state(
        &mut self,
        surface: &mut Surface,
        info: &FlushInfo,
        new_state: Option<&MutableTextureState>,
    ) -> SemaphoresSubmitted {
        unsafe {
            self.native_mut().flush4(
                surface.native_mut(),
                info.native(),
                new_state.native_ptr_or_null(),
            )
        }
    }

    pub fn flush_and_submit_surface(
        &mut self,
        surface: &mut Surface,
        sync_cpu: impl Into<Option<SyncCpu>>,
    ) {
        unsafe {
            self.native_mut()
                .flushAndSubmit1(surface.native_mut(), sync_cpu.into().unwrap_or(SyncCpu::No))
        }
    }

    pub fn flush_surface(&mut self, surface: &mut Surface) {
        unsafe { self.native_mut().flush5(surface.native_mut()) }
    }

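    /// Submits previously flushed work to the GPU; `SyncCpu::Yes` blocks
    /// until the GPU has finished. Returns `false` when there was nothing
    /// to submit.
    ///
    /// A minimal sketch that waits for the GPU:
    ///
    /// ```ignore
    /// context.flush(None);
    /// if !context.submit(SyncCpu::Yes) {
    ///     // nothing was flushed, so nothing was submitted
    /// }
    /// ```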
    pub fn submit(&mut self, sync_cpu: impl Into<Option<SyncCpu>>) -> bool {
        unsafe {
            self.native_mut()
                .submit(sync_cpu.into().unwrap_or(SyncCpu::No))
        }
    }

    pub fn check_async_work_completion(&mut self) {
        unsafe { self.native_mut().checkAsyncWorkCompletion() }
    }

    // TODO: dumpMemoryStatistics()

    pub fn supports_distance_field_text(&self) -> bool {
        unsafe { self.native().supportsDistanceFieldText() }
    }

    #[cfg(feature = "vulkan")]
    pub fn store_vk_pipeline_cache_data(&mut self) -> &mut Self {
        unsafe {
            self.native_mut().storeVkPipelineCacheData();
        }
        self
    }

    // TODO: wrap createBackendTexture (several variants)
    // introduced in m76, m77, and m79
    // extended in m84 with finishedProc and finishedContext
    // extended in m107 with label

    // TODO: wrap updateBackendTexture (several variants)
    // introduced in m84

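    /// Returns the [`BackendFormat`] this context uses for the given
    /// compressed texture type, or an invalid format if the compression
    /// type is unsupported.
    ///
    /// A minimal sketch; the `ETC2_RGB8_UNORM` variant name is assumed here:
    ///
    /// ```ignore
    /// let format = context.compressed_backend_format(TextureCompressionType::ETC2_RGB8_UNORM);
    /// ```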
    pub fn compressed_backend_format(&self, compression: TextureCompressionType) -> BackendFormat {
        let mut backend_format = BackendFormat::new_invalid();
        unsafe {
            sb::C_GrDirectContext_compressedBackendFormat(
                self.native(),
                compression,
                backend_format.native_mut(),
            )
        };
        backend_format
    }

    // TODO: wrap createCompressedBackendTexture (several variants)
    // introduced in m81
    // extended in m84 with finishedProc and finishedContext

    // TODO: wrap updateCompressedBackendTexture (two variants)
    // introduced in m86

    // TODO: add variant with GpuFinishedProc / GpuFinishedContext
    pub fn set_backend_texture_state(
        &mut self,
        backend_texture: &BackendTexture,
        state: &MutableTextureState,
    ) -> bool {
        self.set_backend_texture_state_and_return_previous(backend_texture, state)
            .is_some()
    }

    pub fn set_backend_texture_state_and_return_previous(
        &mut self,
        backend_texture: &BackendTexture,
        state: &MutableTextureState,
    ) -> Option<MutableTextureState> {
        let mut previous = MutableTextureState::default();
        unsafe {
            self.native_mut().setBackendTextureState(
                backend_texture.native(),
                state.native(),
                previous.native_mut(),
                None,
                ptr::null_mut(),
            )
        }
        .if_true_some(previous)
    }

    // TODO: add variant with GpuFinishedProc / GpuFinishedContext
    pub fn set_backend_render_target_state(
        &mut self,
        target: &BackendRenderTarget,
        state: &MutableTextureState,
    ) -> bool {
        self.set_backend_render_target_state_and_return_previous(target, state)
            .is_some()
    }

    pub fn set_backend_render_target_state_and_return_previous(
        &mut self,
        target: &BackendRenderTarget,
        state: &MutableTextureState,
    ) -> Option<MutableTextureState> {
        let mut previous = MutableTextureState::default();
        unsafe {
            self.native_mut().setBackendRenderTargetState(
                target.native(),
                state.native(),
                previous.native_mut(),
                None,
                ptr::null_mut(),
            )
        }
        .if_true_some(previous)
    }

    pub fn delete_backend_texture(&mut self, texture: &BackendTexture) {
        unsafe { self.native_mut().deleteBackendTexture(texture.native()) }
    }

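    /// Feeds a key/data pair, previously captured via Skia's shader
    /// persistent-cache hooks, back to the context so the shader can be
    /// compiled ahead of first use. Returns `false` if the blob is rejected.
    ///
    /// A minimal sketch, assuming `key_bytes` and `data_bytes` were produced
    /// by an earlier run:
    ///
    /// ```ignore
    /// let key = Data::new_copy(&key_bytes);
    /// let data = Data::new_copy(&data_bytes);
    /// if !context.precompile_shader(&key, &data) {
    ///     // the blob was rejected, e.g. produced by a different backend
    /// }
    /// ```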
    pub fn precompile_shader(&mut self, key: &Data, data: &Data) -> bool {
        unsafe {
            self.native_mut()
                .precompileShader(key.native(), data.native())
        }
    }

    pub fn id(&self) -> DirectContextId {
        let mut id = DirectContextId { id: 0 };
        unsafe { sb::C_GrDirectContext_directContextId(self.native(), id.native_mut()) }
        id
    }
}