1 | use super::*;
|
2 | use crate::{gl46 as native_gl, version::Version};
|
3 | use std::ffi::CStr;
|
4 | use std::ptr;
|
5 | use std::{collections::HashSet, ffi::CString, num::NonZeroU32};
|
6 |
|
/// GL implementation limits queried once at context creation and cached here.
#[derive(Default)]
struct Constants {
    // Value of `GL_MAX_LABEL_LENGTH`; set to 0 when debug support is absent
    // (see `from_loader_function_cstr`).
    max_label_length: i32,
}
|
11 |
|
/// Store a boxed callback (i.e., `Box<Box<dyn FnMut(...)>>`) as a raw pointer, so that it can be
/// referenced by the C API and later converted back into a `Box` and dropped.
///
/// We use a raw pointer here because `Box` aliasing rules are not fully defined, so we can't
/// guarantee that it's not undefined behavior to keep a `Box` here while it's used as a raw
/// pointer in the C API.
struct DebugCallbackRawPtr {
    // Type-erased pointer produced by `Box::into_raw`; owned by this struct
    // and reclaimed in its `Drop` impl.
    callback: *mut std::os::raw::c_void,
}
|
21 |
|
22 | impl Drop for DebugCallbackRawPtr {
|
23 | fn drop(&mut self) {
|
24 | unsafe {
|
25 | // Convert callback back into `Box` and drop it.
|
26 | let thin_ptr: Box> = Box::from_raw(self.callback as *mut DebugCallback);
|
27 | let callback: Box = *thin_ptr;
|
28 | drop(callback);
|
29 | }
|
30 | }
|
31 | }
|
32 |
|
/// A native (desktop / ES) OpenGL context wrapping loaded function pointers.
pub struct Context {
    // Loaded GL entry points.
    raw: native_gl::GlFns,
    // Extension names advertised by the driver, populated at creation.
    extensions: HashSet<String>,
    // Cached implementation limits, populated at creation.
    constants: Constants,
    // Parsed `GL_VERSION`.
    version: Version,
    // Keeps the installed debug callback alive; dropped with the context.
    debug_callback: Option<DebugCallbackRawPtr>,
}
|
40 |
|
impl Context {
    /// Builds a `Context` by resolving every GL entry point through
    /// `loader_function`, which maps a C function name to its address.
    ///
    /// # Safety
    ///
    /// A current GL context must be active on this thread and the pointers
    /// returned by `loader_function` must be valid for it; `GL_VERSION` is
    /// queried immediately.
    ///
    /// # Panics
    ///
    /// Panics if `GL_VERSION` cannot be read or parsed.
    pub unsafe fn from_loader_function_cstr<F>(mut loader_function: F) -> Self
    where
        F: FnMut(&CStr) -> *const std::os::raw::c_void,
    {
        let raw: native_gl::GlFns =
            native_gl::GlFns::load_with(|p: *const std::os::raw::c_char| {
                let c_str = std::ffi::CStr::from_ptr(p);
                loader_function(c_str) as *mut std::os::raw::c_void
            });

        // Retrieve and parse `GL_VERSION`
        let raw_string = raw.GetString(VERSION);

        if raw_string.is_null() {
            panic!("Reading GL_VERSION failed. Make sure there is a valid GL context currently active.")
        }

        let raw_version = std::ffi::CStr::from_ptr(raw_string as *const native_gl::GLchar)
            .to_str()
            .unwrap()
            .to_owned();
        let version = Version::parse(&raw_version).unwrap();

        // Setup extensions and constants after the context has been built
        let mut context = Self {
            raw,
            extensions: HashSet::new(),
            constants: Constants::default(),
            version,
            debug_callback: None,
        };

        // Use core-only functions to populate extension list
        if (context.version >= Version::new(3, 0, None, String::from("")))
            || (context.version >= Version::new_embedded(3, 0, String::from("")))
        {
            // GL / GL ES 3.0+: query each extension name by index.
            let num_extensions = context.get_parameter_i32(NUM_EXTENSIONS);
            for i in 0..num_extensions {
                let extension_name = context.get_parameter_indexed_string(EXTENSIONS, i as u32);
                context.extensions.insert(extension_name);
            }
        } else {
            // Fallback: legacy GL_EXTENSIONS is one space-separated string.
            context.extensions.extend(
                context
                    .get_parameter_string(EXTENSIONS)
                    .split(' ')
                    .map(|s| s.to_string()),
            );
        };

        // After the extensions are known, we can populate constants (including
        // constants that depend on extensions being enabled)
        context.constants.max_label_length = if context.supports_debug() {
            context.get_parameter_i32(MAX_LABEL_LENGTH)
        } else {
            0
        };

        context
    }

    /// Convenience wrapper over [`Self::from_loader_function_cstr`] for
    /// loaders that take `&str` names.
    ///
    /// # Safety
    ///
    /// Same requirements as [`Self::from_loader_function_cstr`].
    pub unsafe fn from_loader_function<F>(mut loader_function: F) -> Self
    where
        F: FnMut(&str) -> *const std::os::raw::c_void,
    {
        Self::from_loader_function_cstr(move |name| loader_function(name.to_str().unwrap()))
    }

    /// Creates a texture from an external GL name.
    ///
    /// This can be useful when a texture is created outside of glow (e.g. OpenXR surface) but glow
    /// still needs access to it for rendering.
    ///
    /// # Panics
    ///
    /// Panics if `gl_name` is zero.
    #[deprecated = "Use the NativeTexture constructor instead"]
    pub unsafe fn create_texture_from_gl_name(gl_name: native_gl::GLuint) -> NativeTexture {
        NativeTexture(non_zero_gl_name(gl_name))
    }

    /// Creates a framebuffer from an external GL name.
    ///
    /// This can be useful when a framebuffer is created outside of glow (e.g: via `surfman` or another
    /// crate that supports sharing of buffers between GL contexts), but glow needs to set it as a target.
    ///
    /// # Panics
    ///
    /// Panics if `gl_name` is zero.
    #[deprecated = "Use the NativeFramebuffer constructor instead"]
    pub unsafe fn create_framebuffer_from_gl_name(gl_name: native_gl::GLuint) -> NativeFramebuffer {
        NativeFramebuffer(non_zero_gl_name(gl_name))
    }
}
|
129 |
|
130 | impl std::fmt::Debug for Context {
|
131 | fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
132 | write!(f, "Native_GL_Context" )
|
133 | }
|
134 | }
|
135 |
|
136 | fn non_zero_gl_name(value: native_gl::GLuint) -> NonZeroU32 {
|
137 | NonZeroU32::new(value as u32).expect(msg:"expected non-zero GL name" )
|
138 | }
|
139 |
|
/// Non-zero GL shader object name.
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct NativeShader(pub NonZeroU32);

/// Non-zero GL program object name.
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct NativeProgram(pub NonZeroU32);

/// Non-zero GL buffer object name.
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct NativeBuffer(pub NonZeroU32);

/// Non-zero GL vertex array object name.
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct NativeVertexArray(pub NonZeroU32);

/// Non-zero GL texture object name.
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct NativeTexture(pub NonZeroU32);

/// Non-zero GL sampler object name.
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct NativeSampler(pub NonZeroU32);

/// GL fence sync object handle (an opaque pointer, may not be non-zero).
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct NativeFence(pub native_gl::GLsync);

/// Non-zero GL framebuffer object name.
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct NativeFramebuffer(pub NonZeroU32);

/// Non-zero GL renderbuffer object name.
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct NativeRenderbuffer(pub NonZeroU32);

/// Non-zero GL query object name.
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct NativeQuery(pub NonZeroU32);

/// GL uniform location (zero is a valid location, so no NonZero wrapper).
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct NativeUniformLocation(pub native_gl::GLuint);

/// Non-zero GL transform feedback object name.
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct NativeTransformFeedback(pub NonZeroU32);
|
175 |
|
176 | impl HasContext for Context {
|
    // Map every `HasContext` handle type onto the native newtype wrappers.
    type Shader = NativeShader;
    type Program = NativeProgram;
    type Buffer = NativeBuffer;
    type VertexArray = NativeVertexArray;
    type Texture = NativeTexture;
    type Sampler = NativeSampler;
    type Fence = NativeFence;
    type Framebuffer = NativeFramebuffer;
    type Renderbuffer = NativeRenderbuffer;
    type Query = NativeQuery;
    type UniformLocation = NativeUniformLocation;
    type TransformFeedback = NativeTransformFeedback;
|
189 |
|
    /// Returns the extension names collected at context creation.
    fn supported_extensions(&self) -> &HashSet<String> {
        &self.extensions
    }
|
193 |
|
194 | fn supports_debug(&self) -> bool {
|
195 | if self.extensions.contains("GL_KHR_debug" ) {
|
196 | // Supports extension (either GL or GL ES)
|
197 | true
|
198 | } else if self.version.is_embedded {
|
199 | // GL ES >= 3.2
|
200 | self.version.major == 3 && self.version.minor >= 2
|
201 | } else {
|
202 | // GL >= 4.3
|
203 | self.version.major == 4 && self.version.minor >= 3
|
204 | }
|
205 | }
|
206 |
|
    /// Returns the parsed `GL_VERSION` of this context.
    fn version(&self) -> &Version {
        &self.version
    }
|
210 |
|
211 | unsafe fn create_framebuffer(&self) -> Result<Self::Framebuffer, String> {
|
212 | let gl = &self.raw;
|
213 | let mut name = 0;
|
214 | gl.GenFramebuffers(1, &mut name);
|
215 | Ok(NativeFramebuffer(non_zero_gl_name(name)))
|
216 | }
|
217 |
|
218 | unsafe fn is_framebuffer(&self, framebuffer: Self::Framebuffer) -> bool {
|
219 | let gl = &self.raw;
|
220 | gl.IsFramebuffer(framebuffer.0.get()) != 0
|
221 | }
|
222 |
|
223 | unsafe fn create_query(&self) -> Result<Self::Query, String> {
|
224 | let gl = &self.raw;
|
225 | let mut name = 0;
|
226 | gl.GenQueries(1, &mut name);
|
227 | Ok(NativeQuery(non_zero_gl_name(name)))
|
228 | }
|
229 |
|
230 | unsafe fn create_renderbuffer(&self) -> Result<Self::Renderbuffer, String> {
|
231 | let gl = &self.raw;
|
232 | let mut name = 0;
|
233 | gl.GenRenderbuffers(1, &mut name);
|
234 | Ok(NativeRenderbuffer(non_zero_gl_name(name)))
|
235 | }
|
236 |
|
237 | unsafe fn is_renderbuffer(&self, renderbuffer: Self::Renderbuffer) -> bool {
|
238 | let gl = &self.raw;
|
239 | gl.IsRenderbuffer(renderbuffer.0.get()) != 0
|
240 | }
|
241 |
|
242 | unsafe fn create_sampler(&self) -> Result<Self::Sampler, String> {
|
243 | let gl = &self.raw;
|
244 | let mut name = 0;
|
245 | gl.GenSamplers(1, &mut name);
|
246 | Ok(NativeSampler(non_zero_gl_name(name)))
|
247 | }
|
248 |
|
249 | unsafe fn create_shader(&self, shader_type: u32) -> Result<Self::Shader, String> {
|
250 | let gl = &self.raw;
|
251 | Ok(NativeShader(non_zero_gl_name(
|
252 | gl.CreateShader(shader_type as u32),
|
253 | )))
|
254 | }
|
255 |
|
256 | unsafe fn is_shader(&self, shader: Self::Shader) -> bool {
|
257 | let gl = &self.raw;
|
258 | gl.IsShader(shader.0.get()) != 0
|
259 | }
|
260 |
|
261 | unsafe fn create_texture(&self) -> Result<Self::Texture, String> {
|
262 | let gl = &self.raw;
|
263 | let mut name = 0;
|
264 | gl.GenTextures(1, &mut name);
|
265 | Ok(NativeTexture(non_zero_gl_name(name)))
|
266 | }
|
267 |
|
268 | unsafe fn create_named_texture(&self, target: u32) -> Result<Self::Texture, String> {
|
269 | let gl = &self.raw;
|
270 | let mut name = 0;
|
271 | gl.CreateTextures(target, 1, &mut name);
|
272 | Ok(NativeTexture(non_zero_gl_name(name)))
|
273 | }
|
274 |
|
275 | unsafe fn is_texture(&self, texture: Self::Texture) -> bool {
|
276 | let gl = &self.raw;
|
277 | gl.IsTexture(texture.0.get()) != 0
|
278 | }
|
279 |
|
280 | unsafe fn delete_shader(&self, shader: Self::Shader) {
|
281 | let gl = &self.raw;
|
282 | gl.DeleteShader(shader.0.get());
|
283 | }
|
284 |
|
    /// Uploads `source` as the single source string of `shader`.
    ///
    /// An explicit byte length is passed, so `source` need not be
    /// NUL-terminated.
    unsafe fn shader_source(&self, shader: Self::Shader, source: &str) {
        let gl = &self.raw;
        gl.ShaderSource(
            shader.0.get(),
            1,
            &(source.as_ptr() as *const native_gl::GLchar),
            &(source.len() as native_gl::GLint),
        );
    }
|
294 |
|
295 | unsafe fn compile_shader(&self, shader: Self::Shader) {
|
296 | let gl = &self.raw;
|
297 | gl.CompileShader(shader.0.get());
|
298 | }
|
299 |
|
300 | unsafe fn get_shader_completion_status(&self, shader: Self::Shader) -> bool {
|
301 | let gl = &self.raw;
|
302 | let mut status = 0;
|
303 | gl.GetShaderiv(shader.0.get(), COMPLETION_STATUS, &mut status);
|
304 | 1 == status
|
305 | }
|
306 |
|
307 | unsafe fn get_shader_compile_status(&self, shader: Self::Shader) -> bool {
|
308 | let gl = &self.raw;
|
309 | let mut status = 0;
|
310 | gl.GetShaderiv(shader.0.get(), COMPILE_STATUS, &mut status);
|
311 | 1 == status
|
312 | }
|
313 |
|
    /// Returns the info log of `shader`, or an empty string if there is none.
    unsafe fn get_shader_info_log(&self, shader: Self::Shader) -> String {
        let gl = &self.raw;
        let mut length = 0;
        gl.GetShaderiv(shader.0.get(), INFO_LOG_LENGTH, &mut length);
        if length > 0 {
            // Fill a String with `length` NUL bytes so GL can write the log
            // directly into its buffer; then trim to the length GL reports
            // back (which excludes the terminating NUL).
            let mut log = String::with_capacity(length as usize);
            log.extend(std::iter::repeat('\0').take(length as usize));
            gl.GetShaderInfoLog(
                shader.0.get(),
                length,
                &mut length,
                (&log[..]).as_ptr() as *mut native_gl::GLchar,
            );
            log.truncate(length as usize);
            log
        } else {
            String::from("")
        }
    }
|
333 |
|
    /// Reads back the pixels of one texture level into `pixels`: either a
    /// client-memory slice or an offset into the bound pixel-pack buffer.
    unsafe fn get_tex_image(
        &self,
        target: u32,
        level: i32,
        format: u32,
        ty: u32,
        pixels: PixelPackData,
    ) {
        let gl = &self.raw;
        gl.GetTexImage(
            target,
            level,
            format,
            ty,
            match pixels {
                // Interpreted as a byte offset into the bound pack buffer.
                PixelPackData::BufferOffset(offset) => offset as *mut std::ffi::c_void,
                // Destination slice in client memory.
                PixelPackData::Slice(data) => data.as_mut_ptr() as *mut std::ffi::c_void,
            },
        );
    }
|
354 |
|
355 | unsafe fn create_program(&self) -> Result<Self::Program, String> {
|
356 | let gl = &self.raw;
|
357 | Ok(NativeProgram(non_zero_gl_name(gl.CreateProgram())))
|
358 | }
|
359 |
|
360 | unsafe fn is_program(&self, program: Self::Program) -> bool {
|
361 | let gl = &self.raw;
|
362 | gl.IsProgram(program.0.get()) != 0
|
363 | }
|
364 |
|
365 | unsafe fn delete_program(&self, program: Self::Program) {
|
366 | let gl = &self.raw;
|
367 | gl.DeleteProgram(program.0.get());
|
368 | }
|
369 |
|
370 | unsafe fn attach_shader(&self, program: Self::Program, shader: Self::Shader) {
|
371 | let gl = &self.raw;
|
372 | gl.AttachShader(program.0.get(), shader.0.get());
|
373 | }
|
374 |
|
375 | unsafe fn detach_shader(&self, program: Self::Program, shader: Self::Shader) {
|
376 | let gl = &self.raw;
|
377 | gl.DetachShader(program.0.get(), shader.0.get());
|
378 | }
|
379 |
|
380 | unsafe fn link_program(&self, program: Self::Program) {
|
381 | let gl = &self.raw;
|
382 | gl.LinkProgram(program.0.get());
|
383 | }
|
384 |
|
385 | unsafe fn get_program_completion_status(&self, program: Self::Program) -> bool {
|
386 | let gl = &self.raw;
|
387 | let mut status = 0;
|
388 | gl.GetProgramiv(program.0.get(), COMPLETION_STATUS, &mut status);
|
389 | 1 == status
|
390 | }
|
391 |
|
392 | unsafe fn get_program_link_status(&self, program: Self::Program) -> bool {
|
393 | let gl = &self.raw;
|
394 | let mut status = 0;
|
395 | gl.GetProgramiv(program.0.get(), LINK_STATUS, &mut status);
|
396 | 1 == status
|
397 | }
|
398 |
|
    /// Returns the info log of `program`, or an empty string if there is none.
    unsafe fn get_program_info_log(&self, program: Self::Program) -> String {
        let gl = &self.raw;
        let mut length = 0;
        gl.GetProgramiv(program.0.get(), INFO_LOG_LENGTH, &mut length);
        if length > 0 {
            // Fill a String with `length` NUL bytes so GL can write the log
            // directly into its buffer; then trim to the length GL reports
            // back (which excludes the terminating NUL).
            let mut log = String::with_capacity(length as usize);
            log.extend(std::iter::repeat('\0').take(length as usize));
            gl.GetProgramInfoLog(
                program.0.get(),
                length,
                &mut length,
                (&log[..]).as_ptr() as *mut native_gl::GLchar,
            );
            log.truncate(length as usize);
            log
        } else {
            String::from("")
        }
    }
|
418 |
|
    /// Queries integer properties of a program resource (`glGetProgramResourceiv`),
    /// returning one value per requested property entry.
    unsafe fn get_program_resource_i32(
        &self,
        program: Self::Program,
        interface: u32,
        index: u32,
        properties: &[u32],
    ) -> Vec<i32> {
        let gl = &self.raw;
        // query the number of output parameters first
        let mut length = 0i32;
        gl.GetProgramResourceiv(
            program.0.get(),
            interface,
            index,
            properties.len() as i32,
            properties.as_ptr(),
            0,
            &mut length,
            ptr::null_mut(),
        );
        // get the parameter values
        let mut params = vec![0i32; length as usize];
        gl.GetProgramResourceiv(
            program.0.get(),
            interface,
            index,
            properties.len() as i32,
            properties.as_ptr(),
            // Buffer size from the first call; GL writes at most this many.
            length,
            &mut length,
            params.as_mut_ptr(),
        );
        params
    }
|
453 |
|
454 | unsafe fn get_active_uniforms(&self, program: Self::Program) -> u32 {
|
455 | let gl = &self.raw;
|
456 | let mut count = 0;
|
457 | gl.GetProgramiv(program.0.get(), ACTIVE_UNIFORMS, &mut count);
|
458 | count as u32
|
459 | }
|
460 |
|
    /// Queries name, size and type of the active uniform at `index`.
    // NOTE(review): always returns `Some`, even for an out-of-range `index`
    // (GL would then leave the outputs zeroed) — confirm callers range-check
    // against `get_active_uniforms`.
    unsafe fn get_active_uniform(
        &self,
        program: Self::Program,
        index: u32,
    ) -> Option<ActiveUniform> {
        let gl = &self.raw;
        let mut uniform_max_size = 0;
        gl.GetProgramiv(
            program.0.get(),
            ACTIVE_UNIFORM_MAX_LENGTH,
            &mut uniform_max_size,
        );

        // NUL-filled String used as the output buffer for the uniform name,
        // trimmed afterwards to the written length.
        let mut name = String::with_capacity(uniform_max_size as usize);
        name.extend(std::iter::repeat('\0').take(uniform_max_size as usize));
        let mut length = 0;
        let mut size = 0;
        let mut utype = 0;
        gl.GetActiveUniform(
            program.0.get(),
            index,
            uniform_max_size,
            &mut length,
            &mut size,
            &mut utype,
            name.as_ptr() as *mut native_gl::GLchar,
        );
        name.truncate(length as usize);

        Some(ActiveUniform { size, utype, name })
    }
|
492 |
|
493 | unsafe fn use_program(&self, program: Option<Self::Program>) {
|
494 | let gl = &self.raw;
|
495 | gl.UseProgram(program.map(|p| p.0.get()).unwrap_or(0));
|
496 | }
|
497 |
|
498 | unsafe fn create_buffer(&self) -> Result<Self::Buffer, String> {
|
499 | let gl = &self.raw;
|
500 | let mut buffer = 0;
|
501 | gl.GenBuffers(1, &mut buffer);
|
502 | Ok(NativeBuffer(non_zero_gl_name(buffer)))
|
503 | }
|
504 |
|
505 | unsafe fn create_named_buffer(&self) -> Result<Self::Buffer, String> {
|
506 | let gl = &self.raw;
|
507 | let mut buffer = 0;
|
508 | gl.CreateBuffers(1, &mut buffer);
|
509 | Ok(NativeBuffer(non_zero_gl_name(buffer)))
|
510 | }
|
511 |
|
512 | unsafe fn is_buffer(&self, buffer: Self::Buffer) -> bool {
|
513 | let gl = &self.raw;
|
514 | gl.IsBuffer(buffer.0.get()) != 0
|
515 | }
|
516 |
|
517 | unsafe fn bind_buffer(&self, target: u32, buffer: Option<Self::Buffer>) {
|
518 | let gl = &self.raw;
|
519 | gl.BindBuffer(target, buffer.map(|b| b.0.get()).unwrap_or(0));
|
520 | }
|
521 |
|
522 | unsafe fn bind_buffer_base(&self, target: u32, index: u32, buffer: Option<Self::Buffer>) {
|
523 | let gl = &self.raw;
|
524 | gl.BindBufferBase(target, index, buffer.map(|b| b.0.get()).unwrap_or(0));
|
525 | }
|
526 |
|
527 | unsafe fn bind_buffer_range(
|
528 | &self,
|
529 | target: u32,
|
530 | index: u32,
|
531 | buffer: Option<Self::Buffer>,
|
532 | offset: i32,
|
533 | size: i32,
|
534 | ) {
|
535 | let gl = &self.raw;
|
536 | gl.BindBufferRange(
|
537 | target,
|
538 | index,
|
539 | buffer.map(|b| b.0.get()).unwrap_or(0),
|
540 | offset as isize,
|
541 | size as isize,
|
542 | );
|
543 | }
|
544 |
|
545 | unsafe fn bind_vertex_buffer(
|
546 | &self,
|
547 | binding_index: u32,
|
548 | buffer: Option<Buffer>,
|
549 | offset: i32,
|
550 | stride: i32,
|
551 | ) {
|
552 | let gl = &self.raw;
|
553 | gl.BindVertexBuffer(
|
554 | binding_index,
|
555 | buffer.map(|b| b.0.get()).unwrap_or(0),
|
556 | offset as isize,
|
557 | stride,
|
558 | );
|
559 | }
|
560 |
|
561 | unsafe fn bind_framebuffer(&self, target: u32, framebuffer: Option<Self::Framebuffer>) {
|
562 | let gl = &self.raw;
|
563 | gl.BindFramebuffer(target, framebuffer.map(|fb| fb.0.get()).unwrap_or(0));
|
564 | }
|
565 |
|
566 | unsafe fn bind_renderbuffer(&self, target: u32, renderbuffer: Option<Self::Renderbuffer>) {
|
567 | let gl = &self.raw;
|
568 | gl.BindRenderbuffer(target, renderbuffer.map(|rb| rb.0.get()).unwrap_or(0));
|
569 | }
|
570 |
|
571 | unsafe fn blit_framebuffer(
|
572 | &self,
|
573 | src_x0: i32,
|
574 | src_y0: i32,
|
575 | src_x1: i32,
|
576 | src_y1: i32,
|
577 | dst_x0: i32,
|
578 | dst_y0: i32,
|
579 | dst_x1: i32,
|
580 | dst_y1: i32,
|
581 | mask: u32,
|
582 | filter: u32,
|
583 | ) {
|
584 | let gl = &self.raw;
|
585 | gl.BlitFramebuffer(
|
586 | src_x0, src_y0, src_x1, src_y1, dst_x0, dst_y0, dst_x1, dst_y1, mask, filter,
|
587 | );
|
588 | }
|
589 |
|
590 | unsafe fn create_vertex_array(&self) -> Result<Self::VertexArray, String> {
|
591 | let gl = &self.raw;
|
592 | let mut vertex_array = 0;
|
593 | gl.GenVertexArrays(1, &mut vertex_array);
|
594 | Ok(NativeVertexArray(non_zero_gl_name(vertex_array)))
|
595 | }
|
596 |
|
597 | unsafe fn delete_vertex_array(&self, vertex_array: Self::VertexArray) {
|
598 | let gl = &self.raw;
|
599 | gl.DeleteVertexArrays(1, &vertex_array.0.get());
|
600 | }
|
601 |
|
602 | unsafe fn bind_vertex_array(&self, vertex_array: Option<Self::VertexArray>) {
|
603 | let gl = &self.raw;
|
604 | gl.BindVertexArray(vertex_array.map(|va| va.0.get()).unwrap_or(0));
|
605 | }
|
606 |
|
607 | unsafe fn clear_color(&self, red: f32, green: f32, blue: f32, alpha: f32) {
|
608 | let gl = &self.raw;
|
609 | gl.ClearColor(red, green, blue, alpha);
|
610 | }
|
611 |
|
    /// Reports whether `f64` depth/uniform precision entry points exist.
    // NOTE(review): hard-coded to `true`; the TODO below about OpenGL ES is
    // still open, so this may over-report on ES contexts — confirm.
    unsafe fn supports_f64_precision() -> bool {
        // TODO: Handle OpenGL ES
        true
    }
|
616 |
|
617 | unsafe fn clear_depth_f64(&self, depth: f64) {
|
618 | let gl = &self.raw;
|
619 | gl.ClearDepth(depth);
|
620 | }
|
621 |
|
622 | unsafe fn clear_depth_f32(&self, depth: f32) {
|
623 | let gl = &self.raw;
|
624 | gl.ClearDepthf(depth);
|
625 | }
|
626 |
|
627 | unsafe fn clear_stencil(&self, stencil: i32) {
|
628 | let gl = &self.raw;
|
629 | gl.ClearStencil(stencil);
|
630 | }
|
631 |
|
632 | unsafe fn clear(&self, mask: u32) {
|
633 | let gl = &self.raw;
|
634 | gl.Clear(mask);
|
635 | }
|
636 |
|
637 | unsafe fn patch_parameter_i32(&self, parameter: u32, value: i32) {
|
638 | let gl = &self.raw;
|
639 | gl.PatchParameteri(parameter, value);
|
640 | }
|
641 |
|
642 | unsafe fn pixel_store_i32(&self, parameter: u32, value: i32) {
|
643 | let gl = &self.raw;
|
644 | gl.PixelStorei(parameter, value);
|
645 | }
|
646 |
|
647 | unsafe fn pixel_store_bool(&self, parameter: u32, value: bool) {
|
648 | let gl = &self.raw;
|
649 | gl.PixelStorei(parameter, value as i32);
|
650 | }
|
651 |
|
652 | unsafe fn bind_frag_data_location(
|
653 | &self,
|
654 | program: Self::Program,
|
655 | color_number: u32,
|
656 | name: &str,
|
657 | ) {
|
658 | let gl = &self.raw;
|
659 | gl.BindFragDataLocation(
|
660 | program.0.get(),
|
661 | color_number,
|
662 | name.as_ptr() as *const native_gl::GLchar,
|
663 | );
|
664 | }
|
665 |
|
666 | unsafe fn buffer_data_size(&self, target: u32, size: i32, usage: u32) {
|
667 | let gl = &self.raw;
|
668 | gl.BufferData(target, size as isize, std::ptr::null(), usage);
|
669 | }
|
670 |
|
671 | unsafe fn buffer_data_u8_slice(&self, target: u32, data: &[u8], usage: u32) {
|
672 | let gl = &self.raw;
|
673 | gl.BufferData(
|
674 | target,
|
675 | data.len() as isize,
|
676 | data.as_ptr() as *const std::ffi::c_void,
|
677 | usage,
|
678 | );
|
679 | }
|
680 |
|
681 | unsafe fn named_buffer_data_u8_slice(&self, buffer: Self::Buffer, data: &[u8], usage: u32) {
|
682 | let gl = &self.raw;
|
683 | gl.NamedBufferData(
|
684 | buffer.0.get(),
|
685 | data.len() as isize,
|
686 | data.as_ptr() as *const std::ffi::c_void,
|
687 | usage,
|
688 | );
|
689 | }
|
690 |
|
691 | unsafe fn buffer_sub_data_u8_slice(&self, target: u32, offset: i32, src_data: &[u8]) {
|
692 | let gl = &self.raw;
|
693 | gl.BufferSubData(
|
694 | target,
|
695 | offset as isize,
|
696 | src_data.len() as isize,
|
697 | src_data.as_ptr() as *const std::ffi::c_void,
|
698 | );
|
699 | }
|
700 |
|
701 | unsafe fn get_buffer_sub_data(&self, target: u32, offset: i32, dst_data: &mut [u8]) {
|
702 | let gl = &self.raw;
|
703 | gl.GetBufferSubData(
|
704 | target,
|
705 | offset as isize,
|
706 | dst_data.len() as isize,
|
707 | dst_data.as_mut_ptr() as *mut std::ffi::c_void,
|
708 | );
|
709 | }
|
710 |
|
    /// Creates the immutable data store of the buffer bound to `target`,
    /// optionally initialized from `data`.
    unsafe fn buffer_storage(&self, target: u32, size: i32, data: Option<&[u8]>, flags: u32) {
        let gl = &self.raw;
        let size = size as isize;
        // `None` maps to a null pointer, i.e. an uninitialized store.
        let data = data.map(|p| p.as_ptr()).unwrap_or(std::ptr::null()) as *const std::ffi::c_void;
        // Prefer the core entry point; fall back to the EXT variant when the
        // core function was not resolved by the loader.
        if gl.BufferStorage_is_loaded() {
            gl.BufferStorage(target, size, data, flags);
        } else {
            gl.BufferStorageEXT(target, size, data, flags);
        }
    }
|
721 |
|
722 | unsafe fn check_framebuffer_status(&self, target: u32) -> u32 {
|
723 | let gl = &self.raw;
|
724 | gl.CheckFramebufferStatus(target)
|
725 | }
|
726 |
|
727 | unsafe fn clear_buffer_i32_slice(&self, target: u32, draw_buffer: u32, values: &[i32]) {
|
728 | let gl = &self.raw;
|
729 | gl.ClearBufferiv(target, draw_buffer as i32, values.as_ptr());
|
730 | }
|
731 |
|
732 | unsafe fn clear_buffer_u32_slice(&self, target: u32, draw_buffer: u32, values: &[u32]) {
|
733 | let gl = &self.raw;
|
734 | gl.ClearBufferuiv(target, draw_buffer as i32, values.as_ptr());
|
735 | }
|
736 |
|
737 | unsafe fn clear_buffer_f32_slice(&self, target: u32, draw_buffer: u32, values: &[f32]) {
|
738 | let gl = &self.raw;
|
739 | gl.ClearBufferfv(target, draw_buffer as i32, values.as_ptr());
|
740 | }
|
741 |
|
742 | unsafe fn clear_buffer_depth_stencil(
|
743 | &self,
|
744 | target: u32,
|
745 | draw_buffer: u32,
|
746 | depth: f32,
|
747 | stencil: i32,
|
748 | ) {
|
749 | let gl = &self.raw;
|
750 | gl.ClearBufferfi(target, draw_buffer as i32, depth, stencil);
|
751 | }
|
752 |
|
    /// Blocks the client until `fence` signals or `timeout` nanoseconds pass,
    /// returning the GL wait status code.
    // NOTE(review): `timeout as u64` sign-extends a negative i32 into an
    // enormous timeout — confirm callers never pass negative values.
    unsafe fn client_wait_sync(&self, fence: Self::Fence, flags: u32, timeout: i32) -> u32 {
        let gl = &self.raw;
        gl.ClientWaitSync(fence.0, flags, timeout as u64)
    }
|
757 |
|
758 | unsafe fn wait_sync(&self, fence: Self::Fence, flags: u32, timeout: u64) {
|
759 | let gl = &self.raw;
|
760 | gl.WaitSync(fence.0, flags, timeout)
|
761 | }
|
762 |
|
763 | unsafe fn copy_buffer_sub_data(
|
764 | &self,
|
765 | src_target: u32,
|
766 | dst_target: u32,
|
767 | src_offset: i32,
|
768 | dst_offset: i32,
|
769 | size: i32,
|
770 | ) {
|
771 | let gl = &self.raw;
|
772 | gl.CopyBufferSubData(
|
773 | src_target,
|
774 | dst_target,
|
775 | src_offset as isize,
|
776 | dst_offset as isize,
|
777 | size as isize,
|
778 | );
|
779 | }
|
780 |
|
    /// Copies a 3D region of texels from one texture to another without a
    /// framebuffer round-trip (`glCopyImageSubData`).
    unsafe fn copy_image_sub_data(
        &self,
        src_name: Self::Texture,
        src_target: u32,
        src_level: i32,
        src_x: i32,
        src_y: i32,
        src_z: i32,
        dst_name: Self::Texture,
        dst_target: u32,
        dst_level: i32,
        dst_x: i32,
        dst_y: i32,
        dst_z: i32,
        src_width: i32,
        src_height: i32,
        src_depth: i32,
    ) {
        let gl = &self.raw;
        gl.CopyImageSubData(
            src_name.0.get(),
            src_target,
            src_level,
            src_x,
            src_y,
            src_z,
            dst_name.0.get(),
            dst_target,
            dst_level,
            dst_x,
            dst_y,
            dst_z,
            src_width,
            src_height,
            src_depth,
        );
    }
|
818 |
|
819 | unsafe fn copy_tex_image_2d(
|
820 | &self,
|
821 | target: u32,
|
822 | level: i32,
|
823 | internal_format: u32,
|
824 | x: i32,
|
825 | y: i32,
|
826 | width: i32,
|
827 | height: i32,
|
828 | border: i32,
|
829 | ) {
|
830 | let gl = &self.raw;
|
831 | gl.CopyTexImage2D(target, level, internal_format, x, y, width, height, border);
|
832 | }
|
833 |
|
834 | unsafe fn copy_tex_sub_image_2d(
|
835 | &self,
|
836 | target: u32,
|
837 | level: i32,
|
838 | x_offset: i32,
|
839 | y_offset: i32,
|
840 | x: i32,
|
841 | y: i32,
|
842 | width: i32,
|
843 | height: i32,
|
844 | ) {
|
845 | let gl = &self.raw;
|
846 | gl.CopyTexSubImage2D(target, level, x_offset, y_offset, x, y, width, height);
|
847 | }
|
848 |
|
849 | unsafe fn copy_tex_sub_image_3d(
|
850 | &self,
|
851 | target: u32,
|
852 | level: i32,
|
853 | x_offset: i32,
|
854 | y_offset: i32,
|
855 | z_offset: i32,
|
856 | x: i32,
|
857 | y: i32,
|
858 | width: i32,
|
859 | height: i32,
|
860 | ) {
|
861 | let gl = &self.raw;
|
862 | gl.CopyTexSubImage3D(
|
863 | target, level, x_offset, y_offset, z_offset, x, y, width, height,
|
864 | );
|
865 | }
|
866 |
|
867 | unsafe fn delete_buffer(&self, buffer: Self::Buffer) {
|
868 | let gl = &self.raw;
|
869 | gl.DeleteBuffers(1, &buffer.0.get());
|
870 | }
|
871 |
|
872 | unsafe fn delete_framebuffer(&self, framebuffer: Self::Framebuffer) {
|
873 | let gl = &self.raw;
|
874 | gl.DeleteFramebuffers(1, &framebuffer.0.get());
|
875 | }
|
876 |
|
877 | unsafe fn delete_query(&self, query: Self::Query) {
|
878 | let gl = &self.raw;
|
879 | gl.DeleteQueries(1, &query.0.get());
|
880 | }
|
881 |
|
882 | unsafe fn delete_renderbuffer(&self, renderbuffer: Self::Renderbuffer) {
|
883 | let gl = &self.raw;
|
884 | gl.DeleteRenderbuffers(1, &renderbuffer.0.get());
|
885 | }
|
886 |
|
887 | unsafe fn delete_sampler(&self, sampler: Self::Sampler) {
|
888 | let gl = &self.raw;
|
889 | gl.DeleteSamplers(1, &sampler.0.get());
|
890 | }
|
891 |
|
892 | unsafe fn delete_sync(&self, fence: Self::Fence) {
|
893 | let gl = &self.raw;
|
894 | gl.DeleteSync(fence.0);
|
895 | }
|
896 |
|
897 | unsafe fn delete_texture(&self, texture: Self::Texture) {
|
898 | let gl = &self.raw;
|
899 | gl.DeleteTextures(1, &texture.0.get());
|
900 | }
|
901 |
|
902 | unsafe fn disable(&self, parameter: u32) {
|
903 | let gl = &self.raw;
|
904 | gl.Disable(parameter);
|
905 | }
|
906 |
|
907 | unsafe fn disable_draw_buffer(&self, parameter: u32, draw_buffer: u32) {
|
908 | let gl = &self.raw;
|
909 | gl.Disablei(parameter, draw_buffer);
|
910 | }
|
911 |
|
912 | unsafe fn disable_vertex_attrib_array(&self, index: u32) {
|
913 | let gl = &self.raw;
|
914 | gl.DisableVertexAttribArray(index);
|
915 | }
|
916 |
|
917 | unsafe fn dispatch_compute(&self, groups_x: u32, groups_y: u32, groups_z: u32) {
|
918 | let gl = &self.raw;
|
919 | gl.DispatchCompute(groups_x, groups_y, groups_z);
|
920 | }
|
921 |
|
922 | unsafe fn dispatch_compute_indirect(&self, offset: i32) {
|
923 | let gl = &self.raw;
|
924 | gl.DispatchComputeIndirect(offset as isize);
|
925 | }
|
926 |
|
927 | unsafe fn draw_arrays(&self, mode: u32, first: i32, count: i32) {
|
928 | let gl = &self.raw;
|
929 | gl.DrawArrays(mode as u32, first, count);
|
930 | }
|
931 |
|
932 | unsafe fn draw_arrays_instanced(&self, mode: u32, first: i32, count: i32, instance_count: i32) {
|
933 | let gl = &self.raw;
|
934 | gl.DrawArraysInstanced(mode as u32, first, count, instance_count);
|
935 | }
|
936 |
|
937 | unsafe fn draw_arrays_instanced_base_instance(
|
938 | &self,
|
939 | mode: u32,
|
940 | first: i32,
|
941 | count: i32,
|
942 | instance_count: i32,
|
943 | base_instance: u32,
|
944 | ) {
|
945 | let gl = &self.raw;
|
946 | gl.DrawArraysInstancedBaseInstance(
|
947 | mode as u32,
|
948 | first,
|
949 | count,
|
950 | instance_count,
|
951 | base_instance,
|
952 | );
|
953 | }
|
954 |
|
955 | unsafe fn draw_arrays_indirect_offset(&self, mode: u32, offset: i32) {
|
956 | let gl = &self.raw;
|
957 | gl.DrawArraysIndirect(mode, offset as *const std::ffi::c_void);
|
958 | }
|
959 |
|
960 | unsafe fn draw_buffer(&self, draw_buffer: u32) {
|
961 | let gl = &self.raw;
|
962 | gl.DrawBuffer(draw_buffer);
|
963 | }
|
964 |
|
965 | unsafe fn draw_buffers(&self, buffers: &[u32]) {
|
966 | let gl = &self.raw;
|
967 | gl.DrawBuffers(buffers.len() as i32, buffers.as_ptr());
|
968 | }
|
969 |
|
970 | unsafe fn draw_elements(&self, mode: u32, count: i32, element_type: u32, offset: i32) {
|
971 | let gl = &self.raw;
|
972 | gl.DrawElements(
|
973 | mode as u32,
|
974 | count,
|
975 | element_type as u32,
|
976 | offset as *const std::ffi::c_void,
|
977 | );
|
978 | }
|
979 |
|
980 | unsafe fn draw_elements_base_vertex(
|
981 | &self,
|
982 | mode: u32,
|
983 | count: i32,
|
984 | element_type: u32,
|
985 | offset: i32,
|
986 | base_vertex: i32,
|
987 | ) {
|
988 | let gl = &self.raw;
|
989 | gl.DrawElementsBaseVertex(
|
990 | mode as u32,
|
991 | count,
|
992 | element_type as u32,
|
993 | offset as *const std::ffi::c_void,
|
994 | base_vertex,
|
995 | );
|
996 | }
|
997 |
|
998 | unsafe fn draw_elements_instanced(
|
999 | &self,
|
1000 | mode: u32,
|
1001 | count: i32,
|
1002 | element_type: u32,
|
1003 | offset: i32,
|
1004 | instance_count: i32,
|
1005 | ) {
|
1006 | let gl = &self.raw;
|
1007 | gl.DrawElementsInstanced(
|
1008 | mode as u32,
|
1009 | count,
|
1010 | element_type as u32,
|
1011 | offset as *const std::ffi::c_void,
|
1012 | instance_count,
|
1013 | );
|
1014 | }
|
1015 |
|
1016 | unsafe fn draw_elements_instanced_base_vertex(
|
1017 | &self,
|
1018 | mode: u32,
|
1019 | count: i32,
|
1020 | element_type: u32,
|
1021 | offset: i32,
|
1022 | instance_count: i32,
|
1023 | base_vertex: i32,
|
1024 | ) {
|
1025 | let gl = &self.raw;
|
1026 | gl.DrawElementsInstancedBaseVertex(
|
1027 | mode as u32,
|
1028 | count,
|
1029 | element_type as u32,
|
1030 | offset as *const std::ffi::c_void,
|
1031 | instance_count,
|
1032 | base_vertex,
|
1033 | );
|
1034 | }
|
1035 |
|
1036 | unsafe fn draw_elements_instanced_base_vertex_base_instance(
|
1037 | &self,
|
1038 | mode: u32,
|
1039 | count: i32,
|
1040 | element_type: u32,
|
1041 | offset: i32,
|
1042 | instance_count: i32,
|
1043 | base_vertex: i32,
|
1044 | base_instance: u32,
|
1045 | ) {
|
1046 | let gl = &self.raw;
|
1047 | gl.DrawElementsInstancedBaseVertexBaseInstance(
|
1048 | mode as u32,
|
1049 | count,
|
1050 | element_type as u32,
|
1051 | offset as *const std::ffi::c_void,
|
1052 | instance_count,
|
1053 | base_vertex,
|
1054 | base_instance,
|
1055 | );
|
1056 | }
|
1057 |
|
1058 | unsafe fn draw_elements_indirect_offset(&self, mode: u32, element_type: u32, offset: i32) {
|
1059 | let gl = &self.raw;
|
1060 | gl.DrawElementsIndirect(mode, element_type, offset as *const std::ffi::c_void);
|
1061 | }
|
1062 |
|
1063 | unsafe fn enable(&self, parameter: u32) {
|
1064 | let gl = &self.raw;
|
1065 | gl.Enable(parameter);
|
1066 | }
|
1067 |
|
1068 | unsafe fn is_enabled(&self, parameter: u32) -> bool {
|
1069 | let gl = &self.raw;
|
1070 | gl.IsEnabled(parameter) != 0
|
1071 | }
|
1072 |
|
1073 | unsafe fn enable_draw_buffer(&self, parameter: u32, draw_buffer: u32) {
|
1074 | let gl = &self.raw;
|
1075 | gl.Enablei(parameter, draw_buffer);
|
1076 | }
|
1077 |
|
1078 | unsafe fn enable_vertex_array_attrib(&self, vao: Self::VertexArray, index: u32) {
|
1079 | let gl = &self.raw;
|
1080 | gl.EnableVertexArrayAttrib(vao.0.get(), index);
|
1081 | }
|
1082 |
|
1083 | unsafe fn enable_vertex_attrib_array(&self, index: u32) {
|
1084 | let gl = &self.raw;
|
1085 | gl.EnableVertexAttribArray(index);
|
1086 | }
|
1087 |
|
1088 | unsafe fn flush(&self) {
|
1089 | let gl = &self.raw;
|
1090 | gl.Flush();
|
1091 | }
|
1092 |
|
1093 | unsafe fn framebuffer_renderbuffer(
|
1094 | &self,
|
1095 | target: u32,
|
1096 | attachment: u32,
|
1097 | renderbuffer_target: u32,
|
1098 | renderbuffer: Option<Self::Renderbuffer>,
|
1099 | ) {
|
1100 | let gl = &self.raw;
|
1101 | gl.FramebufferRenderbuffer(
|
1102 | target,
|
1103 | attachment,
|
1104 | renderbuffer_target,
|
1105 | renderbuffer.map(|rb| rb.0.get()).unwrap_or(0),
|
1106 | );
|
1107 | }
|
1108 |
|
1109 | unsafe fn framebuffer_texture(
|
1110 | &self,
|
1111 | target: u32,
|
1112 | attachment: u32,
|
1113 | texture: Option<Self::Texture>,
|
1114 | level: i32,
|
1115 | ) {
|
1116 | let gl = &self.raw;
|
1117 | gl.FramebufferTexture(
|
1118 | target,
|
1119 | attachment,
|
1120 | texture.map(|t| t.0.get()).unwrap_or(0),
|
1121 | level,
|
1122 | );
|
1123 | }
|
1124 |
|
1125 | unsafe fn framebuffer_texture_2d(
|
1126 | &self,
|
1127 | target: u32,
|
1128 | attachment: u32,
|
1129 | texture_target: u32,
|
1130 | texture: Option<Self::Texture>,
|
1131 | level: i32,
|
1132 | ) {
|
1133 | let gl = &self.raw;
|
1134 | gl.FramebufferTexture2D(
|
1135 | target,
|
1136 | attachment,
|
1137 | texture_target,
|
1138 | texture.map(|t| t.0.get()).unwrap_or(0),
|
1139 | level,
|
1140 | );
|
1141 | }
|
1142 |
|
1143 | unsafe fn framebuffer_texture_3d(
|
1144 | &self,
|
1145 | target: u32,
|
1146 | attachment: u32,
|
1147 | texture_target: u32,
|
1148 | texture: Option<Self::Texture>,
|
1149 | level: i32,
|
1150 | layer: i32,
|
1151 | ) {
|
1152 | let gl = &self.raw;
|
1153 | gl.FramebufferTexture3D(
|
1154 | target,
|
1155 | attachment,
|
1156 | texture_target,
|
1157 | texture.map(|t| t.0.get()).unwrap_or(0),
|
1158 | level,
|
1159 | layer,
|
1160 | );
|
1161 | }
|
1162 |
|
1163 | unsafe fn framebuffer_texture_layer(
|
1164 | &self,
|
1165 | target: u32,
|
1166 | attachment: u32,
|
1167 | texture: Option<Self::Texture>,
|
1168 | level: i32,
|
1169 | layer: i32,
|
1170 | ) {
|
1171 | let gl = &self.raw;
|
1172 | gl.FramebufferTextureLayer(
|
1173 | target,
|
1174 | attachment,
|
1175 | texture.map(|t| t.0.get()).unwrap_or(0),
|
1176 | level,
|
1177 | layer,
|
1178 | );
|
1179 | }
|
1180 |
|
1181 | unsafe fn front_face(&self, value: u32) {
|
1182 | let gl = &self.raw;
|
1183 | gl.FrontFace(value as u32);
|
1184 | }
|
1185 |
|
1186 | unsafe fn get_error(&self) -> u32 {
|
1187 | let gl = &self.raw;
|
1188 | gl.GetError()
|
1189 | }
|
1190 |
|
1191 | unsafe fn get_tex_parameter_i32(&self, target: u32, parameter: u32) -> i32 {
|
1192 | let gl = &self.raw;
|
1193 | let mut value = 0;
|
1194 | gl.GetTexParameteriv(target, parameter, &mut value);
|
1195 | value
|
1196 | }
|
1197 |
|
1198 | unsafe fn get_buffer_parameter_i32(&self, target: u32, parameter: u32) -> i32 {
|
1199 | let gl = &self.raw;
|
1200 | let mut value = 0;
|
1201 | gl.GetBufferParameteriv(target, parameter, &mut value);
|
1202 | value
|
1203 | }
|
1204 |
|
1205 | unsafe fn get_parameter_i32(&self, parameter: u32) -> i32 {
|
1206 | let gl = &self.raw;
|
1207 | let mut value = 0;
|
1208 | gl.GetIntegerv(parameter, &mut value);
|
1209 | value
|
1210 | }
|
1211 |
|
1212 | unsafe fn get_parameter_i32_slice(&self, parameter: u32, out: &mut [i32]) {
|
1213 | let gl = &self.raw;
|
1214 | gl.GetIntegerv(parameter, &mut out[0]);
|
1215 | }
|
1216 |
|
1217 | unsafe fn get_parameter_f32(&self, parameter: u32) -> f32 {
|
1218 | let gl = &self.raw;
|
1219 | let mut value: f32 = 0.0;
|
1220 | gl.GetFloatv(parameter, &mut value);
|
1221 | value
|
1222 | }
|
1223 |
|
1224 | unsafe fn get_parameter_f32_slice(&self, parameter: u32, out: &mut [f32]) {
|
1225 | let gl = &self.raw;
|
1226 | gl.GetFloatv(parameter, &mut out[0]);
|
1227 | }
|
1228 |
|
1229 | unsafe fn get_parameter_indexed_i32(&self, parameter: u32, index: u32) -> i32 {
|
1230 | let gl = &self.raw;
|
1231 | let mut value = 0;
|
1232 | gl.GetIntegeri_v(parameter, index, &mut value);
|
1233 | value
|
1234 | }
|
1235 |
|
1236 | unsafe fn get_parameter_indexed_string(&self, parameter: u32, index: u32) -> String {
|
1237 | let gl = &self.raw;
|
1238 | let raw_ptr = gl.GetStringi(parameter, index);
|
1239 | std::ffi::CStr::from_ptr(raw_ptr as *const native_gl::GLchar)
|
1240 | .to_str()
|
1241 | .unwrap()
|
1242 | .to_owned()
|
1243 | }
|
1244 |
|
1245 | unsafe fn get_parameter_string(&self, parameter: u32) -> String {
|
1246 | let gl = &self.raw;
|
1247 | let raw_ptr = gl.GetString(parameter);
|
1248 | if raw_ptr.is_null() {
|
1249 | panic!(
|
1250 | "Get parameter string 0x {:X} failed. Maybe your GL context version is too outdated." ,
|
1251 | parameter
|
1252 | )
|
1253 | }
|
1254 | std::ffi::CStr::from_ptr(raw_ptr as *const native_gl::GLchar)
|
1255 | .to_str()
|
1256 | .unwrap()
|
1257 | .to_owned()
|
1258 | }
|
1259 |
|
1260 | unsafe fn get_uniform_location(
|
1261 | &self,
|
1262 | program: Self::Program,
|
1263 | name: &str,
|
1264 | ) -> Option<Self::UniformLocation> {
|
1265 | let gl = &self.raw;
|
1266 | let name = CString::new(name).unwrap();
|
1267 | let uniform_location =
|
1268 | gl.GetUniformLocation(program.0.get(), name.as_ptr() as *const native_gl::GLchar);
|
1269 | if uniform_location < 0 {
|
1270 | None
|
1271 | } else {
|
1272 | Some(NativeUniformLocation(uniform_location as u32))
|
1273 | }
|
1274 | }
|
1275 |
|
1276 | unsafe fn get_attrib_location(&self, program: Self::Program, name: &str) -> Option<u32> {
|
1277 | let gl = &self.raw;
|
1278 | let name = CString::new(name).unwrap();
|
1279 | let attrib_location =
|
1280 | gl.GetAttribLocation(program.0.get(), name.as_ptr() as *const native_gl::GLchar);
|
1281 | if attrib_location < 0 {
|
1282 | None
|
1283 | } else {
|
1284 | Some(attrib_location as u32)
|
1285 | }
|
1286 | }
|
1287 |
|
1288 | unsafe fn bind_attrib_location(&self, program: Self::Program, index: u32, name: &str) {
|
1289 | let gl = &self.raw;
|
1290 | let name = CString::new(name).unwrap();
|
1291 | gl.BindAttribLocation(
|
1292 | program.0.get(),
|
1293 | index,
|
1294 | name.as_ptr() as *const native_gl::GLchar,
|
1295 | );
|
1296 | }
|
1297 |
|
1298 | unsafe fn get_active_attributes(&self, program: Self::Program) -> u32 {
|
1299 | let gl = &self.raw;
|
1300 | let mut count = 0;
|
1301 | gl.GetProgramiv(program.0.get(), ACTIVE_ATTRIBUTES, &mut count);
|
1302 | count as u32
|
1303 | }
|
1304 |
|
1305 | unsafe fn get_active_attribute(
|
1306 | &self,
|
1307 | program: Self::Program,
|
1308 | index: u32,
|
1309 | ) -> Option<ActiveAttribute> {
|
1310 | let gl = &self.raw;
|
1311 | let mut attribute_max_size = 0;
|
1312 | gl.GetProgramiv(
|
1313 | program.0.get(),
|
1314 | ACTIVE_ATTRIBUTE_MAX_LENGTH,
|
1315 | &mut attribute_max_size,
|
1316 | );
|
1317 | let mut name = String::with_capacity(attribute_max_size as usize);
|
1318 | name.extend(std::iter::repeat(' \0' ).take(attribute_max_size as usize));
|
1319 | let mut length = 0;
|
1320 | let mut size = 0;
|
1321 | let mut atype = 0;
|
1322 | gl.GetActiveAttrib(
|
1323 | program.0.get(),
|
1324 | index,
|
1325 | attribute_max_size,
|
1326 | &mut length,
|
1327 | &mut size,
|
1328 | &mut atype,
|
1329 | name.as_ptr() as *mut native_gl::GLchar,
|
1330 | );
|
1331 |
|
1332 | name.truncate(length as usize);
|
1333 |
|
1334 | Some(ActiveAttribute { name, size, atype })
|
1335 | }
|
1336 |
|
1337 | unsafe fn get_sync_status(&self, fence: Self::Fence) -> u32 {
|
1338 | let gl = &self.raw;
|
1339 | let mut len = 0;
|
1340 | let mut values = [UNSIGNALED as i32];
|
1341 | gl.GetSynciv(
|
1342 | fence.0,
|
1343 | SYNC_STATUS,
|
1344 | values.len() as i32,
|
1345 | &mut len,
|
1346 | values.as_mut_ptr(),
|
1347 | );
|
1348 | values[0] as u32
|
1349 | }
|
1350 |
|
1351 | unsafe fn is_sync(&self, fence: Self::Fence) -> bool {
|
1352 | let gl = &self.raw;
|
1353 | 1 == gl.IsSync(fence.0)
|
1354 | }
|
1355 |
|
1356 | unsafe fn renderbuffer_storage(
|
1357 | &self,
|
1358 | target: u32,
|
1359 | internal_format: u32,
|
1360 | width: i32,
|
1361 | height: i32,
|
1362 | ) {
|
1363 | let gl = &self.raw;
|
1364 | gl.RenderbufferStorage(target, internal_format, width, height);
|
1365 | }
|
1366 |
|
1367 | unsafe fn renderbuffer_storage_multisample(
|
1368 | &self,
|
1369 | target: u32,
|
1370 | samples: i32,
|
1371 | internal_format: u32,
|
1372 | width: i32,
|
1373 | height: i32,
|
1374 | ) {
|
1375 | let gl = &self.raw;
|
1376 | gl.RenderbufferStorageMultisample(target, samples, internal_format, width, height);
|
1377 | }
|
1378 |
|
1379 | unsafe fn sampler_parameter_f32(&self, sampler: Self::Sampler, name: u32, value: f32) {
|
1380 | let gl = &self.raw;
|
1381 | gl.SamplerParameterf(sampler.0.get(), name, value);
|
1382 | }
|
1383 |
|
1384 | unsafe fn sampler_parameter_f32_slice(&self, sampler: Self::Sampler, name: u32, value: &[f32]) {
|
1385 | let gl = &self.raw;
|
1386 | gl.SamplerParameterfv(sampler.0.get(), name, value.as_ptr());
|
1387 | }
|
1388 |
|
1389 | unsafe fn sampler_parameter_i32(&self, sampler: Self::Sampler, name: u32, value: i32) {
|
1390 | let gl = &self.raw;
|
1391 | gl.SamplerParameteri(sampler.0.get(), name, value);
|
1392 | }
|
1393 |
|
1394 | unsafe fn generate_mipmap(&self, target: u32) {
|
1395 | let gl = &self.raw;
|
1396 | gl.GenerateMipmap(target);
|
1397 | }
|
1398 |
|
1399 | unsafe fn generate_texture_mipmap(&self, texture: Self::Texture) {
|
1400 | let gl = &self.raw;
|
1401 | gl.GenerateTextureMipmap(texture.0.get());
|
1402 | }
|
1403 |
|
1404 | unsafe fn tex_image_1d(
|
1405 | &self,
|
1406 | target: u32,
|
1407 | level: i32,
|
1408 | internal_format: i32,
|
1409 | width: i32,
|
1410 | border: i32,
|
1411 | format: u32,
|
1412 | ty: u32,
|
1413 | pixels: Option<&[u8]>,
|
1414 | ) {
|
1415 | let gl = &self.raw;
|
1416 | gl.TexImage1D(
|
1417 | target,
|
1418 | level,
|
1419 | internal_format,
|
1420 | width,
|
1421 | border,
|
1422 | format,
|
1423 | ty,
|
1424 | pixels.map(|p| p.as_ptr()).unwrap_or(std::ptr::null()) as *const std::ffi::c_void,
|
1425 | );
|
1426 | }
|
1427 |
|
1428 | unsafe fn compressed_tex_image_1d(
|
1429 | &self,
|
1430 | target: u32,
|
1431 | level: i32,
|
1432 | internal_format: i32,
|
1433 | width: i32,
|
1434 | border: i32,
|
1435 | image_size: i32,
|
1436 | pixels: &[u8],
|
1437 | ) {
|
1438 | let gl = &self.raw;
|
1439 | gl.CompressedTexImage1D(
|
1440 | target,
|
1441 | level,
|
1442 | internal_format as u32,
|
1443 | width,
|
1444 | border,
|
1445 | image_size,
|
1446 | pixels.as_ptr() as *const std::ffi::c_void,
|
1447 | );
|
1448 | }
|
1449 |
|
1450 | unsafe fn tex_image_2d(
|
1451 | &self,
|
1452 | target: u32,
|
1453 | level: i32,
|
1454 | internal_format: i32,
|
1455 | width: i32,
|
1456 | height: i32,
|
1457 | border: i32,
|
1458 | format: u32,
|
1459 | ty: u32,
|
1460 | pixels: Option<&[u8]>,
|
1461 | ) {
|
1462 | let gl = &self.raw;
|
1463 | gl.TexImage2D(
|
1464 | target,
|
1465 | level,
|
1466 | internal_format,
|
1467 | width,
|
1468 | height,
|
1469 | border,
|
1470 | format,
|
1471 | ty,
|
1472 | pixels.map(|p| p.as_ptr()).unwrap_or(std::ptr::null()) as *const std::ffi::c_void,
|
1473 | );
|
1474 | }
|
1475 |
|
1476 | unsafe fn tex_image_2d_multisample(
|
1477 | &self,
|
1478 | target: u32,
|
1479 | samples: i32,
|
1480 | internal_format: i32,
|
1481 | width: i32,
|
1482 | height: i32,
|
1483 | fixed_sample_locations: bool,
|
1484 | ) {
|
1485 | let gl = &self.raw;
|
1486 | gl.TexImage2DMultisample(
|
1487 | target,
|
1488 | samples,
|
1489 | internal_format as u32,
|
1490 | width,
|
1491 | height,
|
1492 | if fixed_sample_locations { 1 } else { 0 },
|
1493 | );
|
1494 | }
|
1495 |
|
1496 | unsafe fn compressed_tex_image_2d(
|
1497 | &self,
|
1498 | target: u32,
|
1499 | level: i32,
|
1500 | internal_format: i32,
|
1501 | width: i32,
|
1502 | height: i32,
|
1503 | border: i32,
|
1504 | image_size: i32,
|
1505 | pixels: &[u8],
|
1506 | ) {
|
1507 | let gl = &self.raw;
|
1508 | gl.CompressedTexImage2D(
|
1509 | target,
|
1510 | level,
|
1511 | internal_format as u32,
|
1512 | width,
|
1513 | height,
|
1514 | border,
|
1515 | image_size,
|
1516 | pixels.as_ptr() as *const std::ffi::c_void,
|
1517 | );
|
1518 | }
|
1519 |
|
1520 | unsafe fn tex_image_3d(
|
1521 | &self,
|
1522 | target: u32,
|
1523 | level: i32,
|
1524 | internal_format: i32,
|
1525 | width: i32,
|
1526 | height: i32,
|
1527 | depth: i32,
|
1528 | border: i32,
|
1529 | format: u32,
|
1530 | ty: u32,
|
1531 | pixels: Option<&[u8]>,
|
1532 | ) {
|
1533 | let gl = &self.raw;
|
1534 | gl.TexImage3D(
|
1535 | target,
|
1536 | level,
|
1537 | internal_format,
|
1538 | width,
|
1539 | height,
|
1540 | depth,
|
1541 | border,
|
1542 | format,
|
1543 | ty,
|
1544 | pixels.map(|p| p.as_ptr()).unwrap_or(std::ptr::null()) as *const std::ffi::c_void,
|
1545 | );
|
1546 | }
|
1547 |
|
1548 | unsafe fn compressed_tex_image_3d(
|
1549 | &self,
|
1550 | target: u32,
|
1551 | level: i32,
|
1552 | internal_format: i32,
|
1553 | width: i32,
|
1554 | height: i32,
|
1555 | depth: i32,
|
1556 | border: i32,
|
1557 | image_size: i32,
|
1558 | pixels: &[u8],
|
1559 | ) {
|
1560 | let gl = &self.raw;
|
1561 | gl.CompressedTexImage3D(
|
1562 | target,
|
1563 | level,
|
1564 | internal_format as u32,
|
1565 | width,
|
1566 | height,
|
1567 | depth,
|
1568 | border,
|
1569 | image_size,
|
1570 | pixels.as_ptr() as *const std::ffi::c_void,
|
1571 | );
|
1572 | }
|
1573 |
|
1574 | unsafe fn tex_storage_1d(&self, target: u32, levels: i32, internal_format: u32, width: i32) {
|
1575 | let gl = &self.raw;
|
1576 | gl.TexStorage1D(target, levels, internal_format, width);
|
1577 | }
|
1578 |
|
1579 | unsafe fn tex_storage_2d(
|
1580 | &self,
|
1581 | target: u32,
|
1582 | levels: i32,
|
1583 | internal_format: u32,
|
1584 | width: i32,
|
1585 | height: i32,
|
1586 | ) {
|
1587 | let gl = &self.raw;
|
1588 | gl.TexStorage2D(target, levels, internal_format, width, height);
|
1589 | }
|
1590 |
|
1591 | unsafe fn tex_storage_2d_multisample(
|
1592 | &self,
|
1593 | target: u32,
|
1594 | samples: i32,
|
1595 | internal_format: u32,
|
1596 | width: i32,
|
1597 | height: i32,
|
1598 | fixed_sample_locations: bool,
|
1599 | ) {
|
1600 | let gl = &self.raw;
|
1601 | gl.TexStorage2DMultisample(
|
1602 | target,
|
1603 | samples,
|
1604 | internal_format,
|
1605 | width,
|
1606 | height,
|
1607 | if fixed_sample_locations { 1 } else { 0 },
|
1608 | );
|
1609 | }
|
1610 |
|
1611 | unsafe fn tex_storage_3d(
|
1612 | &self,
|
1613 | target: u32,
|
1614 | levels: i32,
|
1615 | internal_format: u32,
|
1616 | width: i32,
|
1617 | height: i32,
|
1618 | depth: i32,
|
1619 | ) {
|
1620 | let gl = &self.raw;
|
1621 | gl.TexStorage3D(target, levels, internal_format, width, height, depth);
|
1622 | }
|
1623 |
|
1624 | unsafe fn texture_storage_3d(
|
1625 | &self,
|
1626 | texture: Self::Texture,
|
1627 | levels: i32,
|
1628 | internal_format: u32,
|
1629 | width: i32,
|
1630 | height: i32,
|
1631 | depth: i32,
|
1632 | ) {
|
1633 | let gl = &self.raw;
|
1634 | gl.TextureStorage3D(
|
1635 | texture.0.get(),
|
1636 | levels,
|
1637 | internal_format,
|
1638 | width,
|
1639 | height,
|
1640 | depth,
|
1641 | );
|
1642 | }
|
1643 |
|
1644 | unsafe fn get_uniform_i32(
|
1645 | &self,
|
1646 | program: Self::Program,
|
1647 | location: &Self::UniformLocation,
|
1648 | v: &mut [i32],
|
1649 | ) {
|
1650 | let gl = &self.raw;
|
1651 | gl.GetUniformiv(
|
1652 | program.0.get() as u32,
|
1653 | location.0 as i32,
|
1654 | v.as_mut_ptr() as *mut i32,
|
1655 | )
|
1656 | }
|
1657 |
|
1658 | unsafe fn get_uniform_f32(
|
1659 | &self,
|
1660 | program: Self::Program,
|
1661 | location: &Self::UniformLocation,
|
1662 | v: &mut [f32],
|
1663 | ) {
|
1664 | let gl = &self.raw;
|
1665 | gl.GetUniformfv(
|
1666 | program.0.get() as u32,
|
1667 | location.0 as i32,
|
1668 | v.as_mut_ptr() as *mut f32,
|
1669 | )
|
1670 | }
|
1671 |
|
1672 | unsafe fn uniform_1_i32(&self, location: Option<&Self::UniformLocation>, x: i32) {
|
1673 | let gl = &self.raw;
|
1674 | if let Some(loc) = location {
|
1675 | gl.Uniform1i(loc.0 as i32, x);
|
1676 | }
|
1677 | }
|
1678 |
|
1679 | unsafe fn uniform_2_i32(&self, location: Option<&Self::UniformLocation>, x: i32, y: i32) {
|
1680 | let gl = &self.raw;
|
1681 | if let Some(loc) = location {
|
1682 | gl.Uniform2i(loc.0 as i32, x, y);
|
1683 | }
|
1684 | }
|
1685 |
|
1686 | unsafe fn uniform_3_i32(
|
1687 | &self,
|
1688 | location: Option<&Self::UniformLocation>,
|
1689 | x: i32,
|
1690 | y: i32,
|
1691 | z: i32,
|
1692 | ) {
|
1693 | let gl = &self.raw;
|
1694 | if let Some(loc) = location {
|
1695 | gl.Uniform3i(loc.0 as i32, x, y, z);
|
1696 | }
|
1697 | }
|
1698 |
|
1699 | unsafe fn uniform_4_i32(
|
1700 | &self,
|
1701 | location: Option<&Self::UniformLocation>,
|
1702 | x: i32,
|
1703 | y: i32,
|
1704 | z: i32,
|
1705 | w: i32,
|
1706 | ) {
|
1707 | let gl = &self.raw;
|
1708 | if let Some(loc) = location {
|
1709 | gl.Uniform4i(loc.0 as i32, x, y, z, w);
|
1710 | }
|
1711 | }
|
1712 |
|
1713 | unsafe fn uniform_1_i32_slice(&self, location: Option<&Self::UniformLocation>, v: &[i32]) {
|
1714 | let gl = &self.raw;
|
1715 | if let Some(loc) = location {
|
1716 | gl.Uniform1iv(loc.0 as i32, v.len() as i32, v.as_ptr());
|
1717 | }
|
1718 | }
|
1719 |
|
1720 | unsafe fn uniform_2_i32_slice(&self, location: Option<&Self::UniformLocation>, v: &[i32]) {
|
1721 | let gl = &self.raw;
|
1722 | if let Some(loc) = location {
|
1723 | gl.Uniform2iv(loc.0 as i32, v.len() as i32 / 2, v.as_ptr());
|
1724 | }
|
1725 | }
|
1726 |
|
1727 | unsafe fn uniform_3_i32_slice(&self, location: Option<&Self::UniformLocation>, v: &[i32]) {
|
1728 | let gl = &self.raw;
|
1729 | if let Some(loc) = location {
|
1730 | gl.Uniform3iv(loc.0 as i32, v.len() as i32 / 3, v.as_ptr());
|
1731 | }
|
1732 | }
|
1733 |
|
1734 | unsafe fn uniform_4_i32_slice(&self, location: Option<&Self::UniformLocation>, v: &[i32]) {
|
1735 | let gl = &self.raw;
|
1736 | if let Some(loc) = location {
|
1737 | gl.Uniform4iv(loc.0 as i32, v.len() as i32 / 4, v.as_ptr());
|
1738 | }
|
1739 | }
|
1740 |
|
1741 | unsafe fn uniform_1_u32(&self, location: Option<&Self::UniformLocation>, x: u32) {
|
1742 | let gl = &self.raw;
|
1743 | if let Some(loc) = location {
|
1744 | gl.Uniform1ui(loc.0 as i32, x);
|
1745 | }
|
1746 | }
|
1747 |
|
1748 | unsafe fn uniform_2_u32(&self, location: Option<&Self::UniformLocation>, x: u32, y: u32) {
|
1749 | let gl = &self.raw;
|
1750 | if let Some(loc) = location {
|
1751 | gl.Uniform2ui(loc.0 as i32, x, y);
|
1752 | }
|
1753 | }
|
1754 |
|
1755 | unsafe fn uniform_3_u32(
|
1756 | &self,
|
1757 | location: Option<&Self::UniformLocation>,
|
1758 | x: u32,
|
1759 | y: u32,
|
1760 | z: u32,
|
1761 | ) {
|
1762 | let gl = &self.raw;
|
1763 | if let Some(loc) = location {
|
1764 | gl.Uniform3ui(loc.0 as i32, x, y, z);
|
1765 | }
|
1766 | }
|
1767 |
|
1768 | unsafe fn uniform_4_u32(
|
1769 | &self,
|
1770 | location: Option<&Self::UniformLocation>,
|
1771 | x: u32,
|
1772 | y: u32,
|
1773 | z: u32,
|
1774 | w: u32,
|
1775 | ) {
|
1776 | let gl = &self.raw;
|
1777 | if let Some(loc) = location {
|
1778 | gl.Uniform4ui(loc.0 as i32, x, y, z, w);
|
1779 | }
|
1780 | }
|
1781 |
|
1782 | unsafe fn uniform_1_u32_slice(&self, location: Option<&Self::UniformLocation>, v: &[u32]) {
|
1783 | let gl = &self.raw;
|
1784 | if let Some(loc) = location {
|
1785 | gl.Uniform1uiv(loc.0 as i32, v.len() as i32, v.as_ptr());
|
1786 | }
|
1787 | }
|
1788 |
|
1789 | unsafe fn uniform_2_u32_slice(&self, location: Option<&Self::UniformLocation>, v: &[u32]) {
|
1790 | let gl = &self.raw;
|
1791 | if let Some(loc) = location {
|
1792 | gl.Uniform2uiv(loc.0 as i32, v.len() as i32 / 2, v.as_ptr());
|
1793 | }
|
1794 | }
|
1795 |
|
1796 | unsafe fn uniform_3_u32_slice(&self, location: Option<&Self::UniformLocation>, v: &[u32]) {
|
1797 | let gl = &self.raw;
|
1798 | if let Some(loc) = location {
|
1799 | gl.Uniform3uiv(loc.0 as i32, v.len() as i32 / 3, v.as_ptr());
|
1800 | }
|
1801 | }
|
1802 |
|
1803 | unsafe fn uniform_4_u32_slice(&self, location: Option<&Self::UniformLocation>, v: &[u32]) {
|
1804 | let gl = &self.raw;
|
1805 | if let Some(loc) = location {
|
1806 | gl.Uniform4uiv(loc.0 as i32, v.len() as i32 / 4, v.as_ptr());
|
1807 | }
|
1808 | }
|
1809 |
|
1810 | unsafe fn uniform_1_f32(&self, location: Option<&Self::UniformLocation>, x: f32) {
|
1811 | let gl = &self.raw;
|
1812 | if let Some(loc) = location {
|
1813 | gl.Uniform1f(loc.0 as i32, x);
|
1814 | }
|
1815 | }
|
1816 |
|
1817 | unsafe fn uniform_2_f32(&self, location: Option<&Self::UniformLocation>, x: f32, y: f32) {
|
1818 | let gl = &self.raw;
|
1819 | if let Some(loc) = location {
|
1820 | gl.Uniform2f(loc.0 as i32, x, y);
|
1821 | }
|
1822 | }
|
1823 |
|
1824 | unsafe fn uniform_3_f32(
|
1825 | &self,
|
1826 | location: Option<&Self::UniformLocation>,
|
1827 | x: f32,
|
1828 | y: f32,
|
1829 | z: f32,
|
1830 | ) {
|
1831 | let gl = &self.raw;
|
1832 | if let Some(loc) = location {
|
1833 | gl.Uniform3f(loc.0 as i32, x, y, z);
|
1834 | }
|
1835 | }
|
1836 |
|
1837 | unsafe fn uniform_4_f32(
|
1838 | &self,
|
1839 | location: Option<&Self::UniformLocation>,
|
1840 | x: f32,
|
1841 | y: f32,
|
1842 | z: f32,
|
1843 | w: f32,
|
1844 | ) {
|
1845 | let gl = &self.raw;
|
1846 | if let Some(loc) = location {
|
1847 | gl.Uniform4f(loc.0 as i32, x, y, z, w);
|
1848 | }
|
1849 | }
|
1850 |
|
1851 | unsafe fn uniform_1_f32_slice(&self, location: Option<&Self::UniformLocation>, v: &[f32]) {
|
1852 | let gl = &self.raw;
|
1853 | if let Some(loc) = location {
|
1854 | gl.Uniform1fv(loc.0 as i32, v.len() as i32, v.as_ptr());
|
1855 | }
|
1856 | }
|
1857 |
|
1858 | unsafe fn uniform_2_f32_slice(&self, location: Option<&Self::UniformLocation>, v: &[f32]) {
|
1859 | let gl = &self.raw;
|
1860 | if let Some(loc) = location {
|
1861 | gl.Uniform2fv(loc.0 as i32, v.len() as i32 / 2, v.as_ptr());
|
1862 | }
|
1863 | }
|
1864 |
|
1865 | unsafe fn uniform_3_f32_slice(&self, location: Option<&Self::UniformLocation>, v: &[f32]) {
|
1866 | let gl = &self.raw;
|
1867 | if let Some(loc) = location {
|
1868 | gl.Uniform3fv(loc.0 as i32, v.len() as i32 / 3, v.as_ptr());
|
1869 | }
|
1870 | }
|
1871 |
|
1872 | unsafe fn uniform_4_f32_slice(&self, location: Option<&Self::UniformLocation>, v: &[f32]) {
|
1873 | let gl = &self.raw;
|
1874 | if let Some(loc) = location {
|
1875 | gl.Uniform4fv(loc.0 as i32, v.len() as i32 / 4, v.as_ptr());
|
1876 | }
|
1877 | }
|
1878 |
|
1879 | unsafe fn uniform_matrix_2_f32_slice(
|
1880 | &self,
|
1881 | location: Option<&Self::UniformLocation>,
|
1882 | transpose: bool,
|
1883 | v: &[f32],
|
1884 | ) {
|
1885 | let gl = &self.raw;
|
1886 | if let Some(loc) = location {
|
1887 | gl.UniformMatrix2fv(
|
1888 | loc.0 as i32,
|
1889 | v.len() as i32 / 4,
|
1890 | transpose as u8,
|
1891 | v.as_ptr(),
|
1892 | );
|
1893 | }
|
1894 | }
|
1895 |
|
1896 | unsafe fn uniform_matrix_2x3_f32_slice(
|
1897 | &self,
|
1898 | location: Option<&Self::UniformLocation>,
|
1899 | transpose: bool,
|
1900 | v: &[f32],
|
1901 | ) {
|
1902 | let gl = &self.raw;
|
1903 | if let Some(loc) = location {
|
1904 | gl.UniformMatrix2x3fv(
|
1905 | loc.0 as i32,
|
1906 | v.len() as i32 / 6,
|
1907 | transpose as u8,
|
1908 | v.as_ptr(),
|
1909 | );
|
1910 | }
|
1911 | }
|
1912 |
|
1913 | unsafe fn uniform_matrix_2x4_f32_slice(
|
1914 | &self,
|
1915 | location: Option<&Self::UniformLocation>,
|
1916 | transpose: bool,
|
1917 | v: &[f32],
|
1918 | ) {
|
1919 | let gl = &self.raw;
|
1920 | if let Some(loc) = location {
|
1921 | gl.UniformMatrix2x4fv(
|
1922 | loc.0 as i32,
|
1923 | v.len() as i32 / 8,
|
1924 | transpose as u8,
|
1925 | v.as_ptr(),
|
1926 | );
|
1927 | }
|
1928 | }
|
1929 |
|
1930 | unsafe fn uniform_matrix_3x2_f32_slice(
|
1931 | &self,
|
1932 | location: Option<&Self::UniformLocation>,
|
1933 | transpose: bool,
|
1934 | v: &[f32],
|
1935 | ) {
|
1936 | let gl = &self.raw;
|
1937 | if let Some(loc) = location {
|
1938 | gl.UniformMatrix3x2fv(
|
1939 | loc.0 as i32,
|
1940 | v.len() as i32 / 6,
|
1941 | transpose as u8,
|
1942 | v.as_ptr(),
|
1943 | );
|
1944 | }
|
1945 | }
|
1946 |
|
1947 | unsafe fn uniform_matrix_3_f32_slice(
|
1948 | &self,
|
1949 | location: Option<&Self::UniformLocation>,
|
1950 | transpose: bool,
|
1951 | v: &[f32],
|
1952 | ) {
|
1953 | let gl = &self.raw;
|
1954 | if let Some(loc) = location {
|
1955 | gl.UniformMatrix3fv(
|
1956 | loc.0 as i32,
|
1957 | v.len() as i32 / 9,
|
1958 | transpose as u8,
|
1959 | v.as_ptr(),
|
1960 | );
|
1961 | }
|
1962 | }
|
1963 |
|
1964 | unsafe fn uniform_matrix_3x4_f32_slice(
|
1965 | &self,
|
1966 | location: Option<&Self::UniformLocation>,
|
1967 | transpose: bool,
|
1968 | v: &[f32],
|
1969 | ) {
|
1970 | let gl = &self.raw;
|
1971 | if let Some(loc) = location {
|
1972 | gl.UniformMatrix3x4fv(
|
1973 | loc.0 as i32,
|
1974 | v.len() as i32 / 12,
|
1975 | transpose as u8,
|
1976 | v.as_ptr(),
|
1977 | );
|
1978 | }
|
1979 | }
|
1980 |
|
1981 | unsafe fn uniform_matrix_4x2_f32_slice(
|
1982 | &self,
|
1983 | location: Option<&Self::UniformLocation>,
|
1984 | transpose: bool,
|
1985 | v: &[f32],
|
1986 | ) {
|
1987 | let gl = &self.raw;
|
1988 | if let Some(loc) = location {
|
1989 | gl.UniformMatrix4x2fv(
|
1990 | loc.0 as i32,
|
1991 | v.len() as i32 / 8,
|
1992 | transpose as u8,
|
1993 | v.as_ptr(),
|
1994 | );
|
1995 | }
|
1996 | }
|
1997 |
|
1998 | unsafe fn uniform_matrix_4x3_f32_slice(
|
1999 | &self,
|
2000 | location: Option<&Self::UniformLocation>,
|
2001 | transpose: bool,
|
2002 | v: &[f32],
|
2003 | ) {
|
2004 | let gl = &self.raw;
|
2005 | if let Some(loc) = location {
|
2006 | gl.UniformMatrix4x3fv(
|
2007 | loc.0 as i32,
|
2008 | v.len() as i32 / 12,
|
2009 | transpose as u8,
|
2010 | v.as_ptr(),
|
2011 | );
|
2012 | }
|
2013 | }
|
2014 |
|
2015 | unsafe fn uniform_matrix_4_f32_slice(
|
2016 | &self,
|
2017 | location: Option<&Self::UniformLocation>,
|
2018 | transpose: bool,
|
2019 | v: &[f32],
|
2020 | ) {
|
2021 | let gl = &self.raw;
|
2022 | if let Some(loc) = location {
|
2023 | gl.UniformMatrix4fv(
|
2024 | loc.0 as i32,
|
2025 | v.len() as i32 / 16,
|
2026 | transpose as u8,
|
2027 | v.as_ptr(),
|
2028 | );
|
2029 | }
|
2030 | }
|
2031 |
|
2032 | unsafe fn unmap_buffer(&self, target: u32) {
|
2033 | let gl = &self.raw;
|
2034 | gl.UnmapBuffer(target);
|
2035 | }
|
2036 |
|
2037 | unsafe fn cull_face(&self, value: u32) {
|
2038 | let gl = &self.raw;
|
2039 | gl.CullFace(value as u32);
|
2040 | }
|
2041 |
|
2042 | unsafe fn color_mask(&self, red: bool, green: bool, blue: bool, alpha: bool) {
|
2043 | let gl = &self.raw;
|
2044 | gl.ColorMask(red as u8, green as u8, blue as u8, alpha as u8);
|
2045 | }
|
2046 |
|
2047 | unsafe fn color_mask_draw_buffer(
|
2048 | &self,
|
2049 | draw_buffer: u32,
|
2050 | red: bool,
|
2051 | green: bool,
|
2052 | blue: bool,
|
2053 | alpha: bool,
|
2054 | ) {
|
2055 | let gl = &self.raw;
|
2056 | gl.ColorMaski(draw_buffer, red as u8, green as u8, blue as u8, alpha as u8);
|
2057 | }
|
2058 |
|
2059 | unsafe fn depth_mask(&self, value: bool) {
|
2060 | let gl = &self.raw;
|
2061 | gl.DepthMask(value as u8);
|
2062 | }
|
2063 |
|
2064 | unsafe fn blend_color(&self, red: f32, green: f32, blue: f32, alpha: f32) {
|
2065 | let gl = &self.raw;
|
2066 | gl.BlendColor(red, green, blue, alpha);
|
2067 | }
|
2068 |
|
2069 | unsafe fn line_width(&self, width: f32) {
|
2070 | let gl = &self.raw;
|
2071 | gl.LineWidth(width);
|
2072 | }
|
2073 |
|
2074 | unsafe fn map_buffer_range(
|
2075 | &self,
|
2076 | target: u32,
|
2077 | offset: i32,
|
2078 | length: i32,
|
2079 | access: u32,
|
2080 | ) -> *mut u8 {
|
2081 | let gl = &self.raw;
|
2082 | gl.MapBufferRange(target, offset as isize, length as isize, access) as *mut u8
|
2083 | }
|
2084 |
|
2085 | unsafe fn flush_mapped_buffer_range(&self, target: u32, offset: i32, length: i32) {
|
2086 | let gl = &self.raw;
|
2087 | gl.FlushMappedBufferRange(target, offset as isize, length as isize)
|
2088 | }
|
2089 |
|
2090 | unsafe fn invalidate_buffer_sub_data(&self, target: u32, offset: i32, length: i32) {
|
2091 | let gl = &self.raw;
|
2092 | gl.InvalidateBufferSubData(target, offset as isize, length as isize)
|
2093 | }
|
2094 |
|
2095 | unsafe fn invalidate_framebuffer(&self, target: u32, attachments: &[u32]) {
|
2096 | let gl = &self.raw;
|
2097 | gl.InvalidateFramebuffer(target, attachments.len() as i32, attachments.as_ptr());
|
2098 | }
|
2099 |
|
2100 | unsafe fn polygon_offset(&self, factor: f32, units: f32) {
|
2101 | let gl = &self.raw;
|
2102 | gl.PolygonOffset(factor, units);
|
2103 | }
|
2104 |
|
2105 | unsafe fn polygon_mode(&self, face: u32, mode: u32) {
|
2106 | let gl = &self.raw;
|
2107 | gl.PolygonMode(face as u32, mode as u32);
|
2108 | }
|
2109 |
|
2110 | unsafe fn finish(&self) {
|
2111 | let gl = &self.raw;
|
2112 | gl.Finish();
|
2113 | }
|
2114 |
|
2115 | unsafe fn bind_texture(&self, target: u32, texture: Option<Self::Texture>) {
|
2116 | let gl = &self.raw;
|
2117 | gl.BindTexture(target, texture.map(|t| t.0.get()).unwrap_or(0));
|
2118 | }
|
2119 |
|
2120 | unsafe fn bind_sampler(&self, unit: u32, sampler: Option<Self::Sampler>) {
|
2121 | let gl = &self.raw;
|
2122 | gl.BindSampler(unit, sampler.map(|s| s.0.get()).unwrap_or(0));
|
2123 | }
|
2124 |
|
2125 | unsafe fn active_texture(&self, unit: u32) {
|
2126 | let gl = &self.raw;
|
2127 | gl.ActiveTexture(unit);
|
2128 | }
|
2129 |
|
2130 | unsafe fn fence_sync(&self, condition: u32, flags: u32) -> Result<Self::Fence, String> {
|
2131 | let gl = &self.raw;
|
2132 | Ok(NativeFence(gl.FenceSync(condition as u32, flags)))
|
2133 | }
|
2134 |
|
2135 | unsafe fn tex_parameter_f32(&self, target: u32, parameter: u32, value: f32) {
|
2136 | let gl = &self.raw;
|
2137 | gl.TexParameterf(target, parameter, value);
|
2138 | }
|
2139 |
|
2140 | unsafe fn tex_parameter_i32(&self, target: u32, parameter: u32, value: i32) {
|
2141 | let gl = &self.raw;
|
2142 | gl.TexParameteri(target, parameter, value);
|
2143 | }
|
2144 |
|
2145 | unsafe fn texture_parameter_i32(&self, texture: Self::Texture, parameter: u32, value: i32) {
|
2146 | let gl = &self.raw;
|
2147 | gl.TextureParameteri(texture.0.get(), parameter, value);
|
2148 | }
|
2149 |
|
2150 | unsafe fn tex_parameter_f32_slice(&self, target: u32, parameter: u32, values: &[f32]) {
|
2151 | let gl = &self.raw;
|
2152 | gl.TexParameterfv(target, parameter, values.as_ptr());
|
2153 | }
|
2154 |
|
2155 | unsafe fn tex_parameter_i32_slice(&self, target: u32, parameter: u32, values: &[i32]) {
|
2156 | let gl = &self.raw;
|
2157 | gl.TexParameteriv(target, parameter, values.as_ptr());
|
2158 | }
|
2159 |
|
2160 | unsafe fn tex_sub_image_2d(
|
2161 | &self,
|
2162 | target: u32,
|
2163 | level: i32,
|
2164 | x_offset: i32,
|
2165 | y_offset: i32,
|
2166 | width: i32,
|
2167 | height: i32,
|
2168 | format: u32,
|
2169 | ty: u32,
|
2170 | pixels: PixelUnpackData,
|
2171 | ) {
|
2172 | let gl = &self.raw;
|
2173 | gl.TexSubImage2D(
|
2174 | target,
|
2175 | level,
|
2176 | x_offset,
|
2177 | y_offset,
|
2178 | width,
|
2179 | height,
|
2180 | format,
|
2181 | ty,
|
2182 | match pixels {
|
2183 | PixelUnpackData::BufferOffset(offset) => offset as *const std::ffi::c_void,
|
2184 | PixelUnpackData::Slice(data) => data.as_ptr() as *const std::ffi::c_void,
|
2185 | },
|
2186 | );
|
2187 | }
|
2188 |
|
2189 | unsafe fn compressed_tex_sub_image_2d(
|
2190 | &self,
|
2191 | target: u32,
|
2192 | level: i32,
|
2193 | x_offset: i32,
|
2194 | y_offset: i32,
|
2195 | width: i32,
|
2196 | height: i32,
|
2197 | format: u32,
|
2198 | pixels: CompressedPixelUnpackData,
|
2199 | ) {
|
2200 | let gl = &self.raw;
|
2201 | let (data, image_size) = match pixels {
|
2202 | CompressedPixelUnpackData::BufferRange(ref range) => (
|
2203 | range.start as *const std::ffi::c_void,
|
2204 | (range.end - range.start) as i32,
|
2205 | ),
|
2206 | CompressedPixelUnpackData::Slice(data) => {
|
2207 | (data.as_ptr() as *const std::ffi::c_void, data.len() as i32)
|
2208 | }
|
2209 | };
|
2210 |
|
2211 | gl.CompressedTexSubImage2D(
|
2212 | target, level, x_offset, y_offset, width, height, format, image_size, data,
|
2213 | );
|
2214 | }
|
2215 |
|
2216 | unsafe fn tex_sub_image_3d(
|
2217 | &self,
|
2218 | target: u32,
|
2219 | level: i32,
|
2220 | x_offset: i32,
|
2221 | y_offset: i32,
|
2222 | z_offset: i32,
|
2223 | width: i32,
|
2224 | height: i32,
|
2225 | depth: i32,
|
2226 | format: u32,
|
2227 | ty: u32,
|
2228 | pixels: PixelUnpackData,
|
2229 | ) {
|
2230 | let gl = &self.raw;
|
2231 | gl.TexSubImage3D(
|
2232 | target,
|
2233 | level,
|
2234 | x_offset,
|
2235 | y_offset,
|
2236 | z_offset,
|
2237 | width,
|
2238 | height,
|
2239 | depth,
|
2240 | format,
|
2241 | ty,
|
2242 | match pixels {
|
2243 | PixelUnpackData::BufferOffset(offset) => offset as *const std::ffi::c_void,
|
2244 | PixelUnpackData::Slice(data) => data.as_ptr() as *const std::ffi::c_void,
|
2245 | },
|
2246 | );
|
2247 | }
|
2248 |
|
2249 | unsafe fn texture_sub_image_3d(
|
2250 | &self,
|
2251 | texture: Self::Texture,
|
2252 | level: i32,
|
2253 | x_offset: i32,
|
2254 | y_offset: i32,
|
2255 | z_offset: i32,
|
2256 | width: i32,
|
2257 | height: i32,
|
2258 | depth: i32,
|
2259 | format: u32,
|
2260 | ty: u32,
|
2261 | pixels: PixelUnpackData,
|
2262 | ) {
|
2263 | let gl = &self.raw;
|
2264 | gl.TextureSubImage3D(
|
2265 | texture.0.get(),
|
2266 | level,
|
2267 | x_offset,
|
2268 | y_offset,
|
2269 | z_offset,
|
2270 | width,
|
2271 | height,
|
2272 | depth,
|
2273 | format,
|
2274 | ty,
|
2275 | match pixels {
|
2276 | PixelUnpackData::BufferOffset(offset) => offset as *const std::ffi::c_void,
|
2277 | PixelUnpackData::Slice(data) => data.as_ptr() as *const std::ffi::c_void,
|
2278 | },
|
2279 | );
|
2280 | }
|
2281 |
|
2282 | unsafe fn compressed_tex_sub_image_3d(
|
2283 | &self,
|
2284 | target: u32,
|
2285 | level: i32,
|
2286 | x_offset: i32,
|
2287 | y_offset: i32,
|
2288 | z_offset: i32,
|
2289 | width: i32,
|
2290 | height: i32,
|
2291 | depth: i32,
|
2292 | format: u32,
|
2293 | pixels: CompressedPixelUnpackData,
|
2294 | ) {
|
2295 | let gl = &self.raw;
|
2296 | let (data, image_size) = match pixels {
|
2297 | CompressedPixelUnpackData::BufferRange(ref range) => (
|
2298 | range.start as *const std::ffi::c_void,
|
2299 | (range.end - range.start) as i32,
|
2300 | ),
|
2301 | CompressedPixelUnpackData::Slice(data) => {
|
2302 | (data.as_ptr() as *const std::ffi::c_void, data.len() as i32)
|
2303 | }
|
2304 | };
|
2305 |
|
2306 | gl.CompressedTexSubImage3D(
|
2307 | target, level, x_offset, y_offset, z_offset, width, height, depth, format, image_size,
|
2308 | data,
|
2309 | );
|
2310 | }
|
2311 |
|
2312 | unsafe fn depth_func(&self, func: u32) {
|
2313 | let gl = &self.raw;
|
2314 | gl.DepthFunc(func as u32);
|
2315 | }
|
2316 |
|
2317 | unsafe fn depth_range_f32(&self, near: f32, far: f32) {
|
2318 | let gl = &self.raw;
|
2319 | gl.DepthRangef(near, far);
|
2320 | }
|
2321 |
|
2322 | unsafe fn depth_range_f64(&self, near: f64, far: f64) {
|
2323 | let gl = &self.raw;
|
2324 | gl.DepthRange(near, far);
|
2325 | }
|
2326 |
|
2327 | unsafe fn depth_range_f64_slice(&self, first: u32, count: i32, values: &[[f64; 2]]) {
|
2328 | let gl = &self.raw;
|
2329 | gl.DepthRangeArrayv(first, count, values.as_ptr() as *const f64);
|
2330 | }
|
2331 |
|
2332 | unsafe fn scissor(&self, x: i32, y: i32, width: i32, height: i32) {
|
2333 | let gl = &self.raw;
|
2334 | gl.Scissor(x, y, width, height);
|
2335 | }
|
2336 |
|
2337 | unsafe fn scissor_slice(&self, first: u32, count: i32, scissors: &[[i32; 4]]) {
|
2338 | let gl = &self.raw;
|
2339 | gl.ScissorArrayv(first, count, scissors.as_ptr() as *const i32);
|
2340 | }
|
2341 |
|
2342 | unsafe fn vertex_array_attrib_binding_f32(
|
2343 | &self,
|
2344 | vao: Self::VertexArray,
|
2345 | index: u32,
|
2346 | binding_index: u32,
|
2347 | ) {
|
2348 | let gl = &self.raw;
|
2349 | gl.VertexArrayAttribBinding(vao.0.get(), index, binding_index);
|
2350 | }
|
2351 |
|
2352 | unsafe fn vertex_array_attrib_format_f32(
|
2353 | &self,
|
2354 | vao: Self::VertexArray,
|
2355 | index: u32,
|
2356 | size: i32,
|
2357 | data_type: u32,
|
2358 | normalized: bool,
|
2359 | relative_offset: u32,
|
2360 | ) {
|
2361 | let gl = &self.raw;
|
2362 | gl.VertexArrayAttribFormat(
|
2363 | vao.0.get(),
|
2364 | index,
|
2365 | size,
|
2366 | data_type,
|
2367 | normalized as u8,
|
2368 | relative_offset,
|
2369 | );
|
2370 | }
|
2371 |
|
2372 | unsafe fn vertex_array_attrib_format_i32(
|
2373 | &self,
|
2374 | vao: Self::VertexArray,
|
2375 | index: u32,
|
2376 | size: i32,
|
2377 | data_type: u32,
|
2378 | relative_offset: u32,
|
2379 | ) {
|
2380 | let gl = &self.raw;
|
2381 | gl.VertexArrayAttribIFormat(vao.0.get(), index, size, data_type, relative_offset);
|
2382 | }
|
2383 |
|
2384 | unsafe fn vertex_array_element_buffer(
|
2385 | &self,
|
2386 | vao: Self::VertexArray,
|
2387 | buffer: Option<Self::Buffer>,
|
2388 | ) {
|
2389 | let gl = &self.raw;
|
2390 | gl.VertexArrayElementBuffer(vao.0.get(), buffer.map(|b| b.0.get()).unwrap_or(0));
|
2391 | }
|
2392 |
|
2393 | unsafe fn vertex_array_vertex_buffer(
|
2394 | &self,
|
2395 | vao: Self::VertexArray,
|
2396 | binding_index: u32,
|
2397 | buffer: Option<Self::Buffer>,
|
2398 | offset: i32,
|
2399 | stride: i32,
|
2400 | ) {
|
2401 | let gl = &self.raw;
|
2402 | gl.VertexArrayVertexBuffer(
|
2403 | vao.0.get(),
|
2404 | binding_index,
|
2405 | buffer.map(|b| b.0.get()).unwrap_or(0),
|
2406 | offset as isize,
|
2407 | stride,
|
2408 | );
|
2409 | }
|
2410 |
|
2411 | unsafe fn vertex_attrib_divisor(&self, index: u32, divisor: u32) {
|
2412 | let gl = &self.raw;
|
2413 | gl.VertexAttribDivisor(index, divisor);
|
2414 | }
|
2415 |
|
2416 | unsafe fn vertex_attrib_pointer_f32(
|
2417 | &self,
|
2418 | index: u32,
|
2419 | size: i32,
|
2420 | data_type: u32,
|
2421 | normalized: bool,
|
2422 | stride: i32,
|
2423 | offset: i32,
|
2424 | ) {
|
2425 | let gl = &self.raw;
|
2426 | gl.VertexAttribPointer(
|
2427 | index,
|
2428 | size,
|
2429 | data_type,
|
2430 | normalized as u8,
|
2431 | stride,
|
2432 | offset as *const std::ffi::c_void,
|
2433 | );
|
2434 | }
|
2435 |
|
2436 | unsafe fn vertex_attrib_pointer_i32(
|
2437 | &self,
|
2438 | index: u32,
|
2439 | size: i32,
|
2440 | data_type: u32,
|
2441 | stride: i32,
|
2442 | offset: i32,
|
2443 | ) {
|
2444 | let gl = &self.raw;
|
2445 | gl.VertexAttribIPointer(
|
2446 | index,
|
2447 | size,
|
2448 | data_type,
|
2449 | stride,
|
2450 | offset as *const std::ffi::c_void,
|
2451 | );
|
2452 | }
|
2453 |
|
2454 | unsafe fn vertex_attrib_pointer_f64(
|
2455 | &self,
|
2456 | index: u32,
|
2457 | size: i32,
|
2458 | data_type: u32,
|
2459 | stride: i32,
|
2460 | offset: i32,
|
2461 | ) {
|
2462 | let gl = &self.raw;
|
2463 | gl.VertexAttribLPointer(
|
2464 | index,
|
2465 | size,
|
2466 | data_type,
|
2467 | stride,
|
2468 | offset as *const std::ffi::c_void,
|
2469 | );
|
2470 | }
|
2471 |
|
2472 | unsafe fn vertex_attrib_format_f32(
|
2473 | &self,
|
2474 | index: u32,
|
2475 | size: i32,
|
2476 | data_type: u32,
|
2477 | normalized: bool,
|
2478 | relative_offset: u32,
|
2479 | ) {
|
2480 | let gl = &self.raw;
|
2481 | gl.VertexAttribFormat(index, size, data_type, normalized as u8, relative_offset);
|
2482 | }
|
2483 |
|
2484 | unsafe fn vertex_attrib_format_i32(
|
2485 | &self,
|
2486 | index: u32,
|
2487 | size: i32,
|
2488 | data_type: u32,
|
2489 | relative_offset: u32,
|
2490 | ) {
|
2491 | let gl = &self.raw;
|
2492 | gl.VertexAttribIFormat(index, size, data_type, relative_offset);
|
2493 | }
|
2494 |
|
2495 | unsafe fn vertex_attrib_1_f32(&self, index: u32, x: f32) {
|
2496 | let gl = &self.raw;
|
2497 | gl.VertexAttrib1f(index, x);
|
2498 | }
|
2499 |
|
2500 | unsafe fn vertex_attrib_2_f32(&self, index: u32, x: f32, y: f32) {
|
2501 | let gl = &self.raw;
|
2502 | gl.VertexAttrib2f(index, x, y);
|
2503 | }
|
2504 |
|
2505 | unsafe fn vertex_attrib_3_f32(&self, index: u32, x: f32, y: f32, z: f32) {
|
2506 | let gl = &self.raw;
|
2507 | gl.VertexAttrib3f(index, x, y, z);
|
2508 | }
|
2509 |
|
2510 | unsafe fn vertex_attrib_4_f32(&self, index: u32, x: f32, y: f32, z: f32, w: f32) {
|
2511 | let gl = &self.raw;
|
2512 | gl.VertexAttrib4f(index, x, y, z, w);
|
2513 | }
|
2514 |
|
2515 | unsafe fn vertex_attrib_1_f32_slice(&self, index: u32, v: &[f32]) {
|
2516 | let gl = &self.raw;
|
2517 | gl.VertexAttrib1fv(index, v.as_ptr());
|
2518 | }
|
2519 |
|
2520 | unsafe fn vertex_attrib_2_f32_slice(&self, index: u32, v: &[f32]) {
|
2521 | let gl = &self.raw;
|
2522 | gl.VertexAttrib2fv(index, v.as_ptr());
|
2523 | }
|
2524 |
|
2525 | unsafe fn vertex_attrib_3_f32_slice(&self, index: u32, v: &[f32]) {
|
2526 | let gl = &self.raw;
|
2527 | gl.VertexAttrib3fv(index, v.as_ptr());
|
2528 | }
|
2529 |
|
2530 | unsafe fn vertex_attrib_4_f32_slice(&self, index: u32, v: &[f32]) {
|
2531 | let gl = &self.raw;
|
2532 | gl.VertexAttrib4fv(index, v.as_ptr());
|
2533 | }
|
2534 |
|
2535 | unsafe fn vertex_attrib_binding(&self, attrib_index: u32, binding_index: u32) {
|
2536 | let gl = &self.raw;
|
2537 | gl.VertexAttribBinding(attrib_index, binding_index);
|
2538 | }
|
2539 |
|
2540 | unsafe fn vertex_binding_divisor(&self, binding_index: u32, divisor: u32) {
|
2541 | let gl = &self.raw;
|
2542 | gl.VertexBindingDivisor(binding_index, divisor);
|
2543 | }
|
2544 |
|
2545 | unsafe fn viewport(&self, x: i32, y: i32, width: i32, height: i32) {
|
2546 | let gl = &self.raw;
|
2547 | gl.Viewport(x, y, width, height);
|
2548 | }
|
2549 |
|
2550 | unsafe fn viewport_f32_slice(&self, first: u32, count: i32, values: &[[f32; 4]]) {
|
2551 | let gl = &self.raw;
|
2552 | gl.ViewportArrayv(first, count, values.as_ptr() as *const f32);
|
2553 | }
|
2554 |
|
2555 | unsafe fn blend_equation(&self, mode: u32) {
|
2556 | let gl = &self.raw;
|
2557 | gl.BlendEquation(mode as u32);
|
2558 | }
|
2559 |
|
2560 | unsafe fn blend_equation_draw_buffer(&self, draw_buffer: u32, mode: u32) {
|
2561 | let gl = &self.raw;
|
2562 | gl.BlendEquationi(draw_buffer, mode as u32);
|
2563 | }
|
2564 |
|
2565 | unsafe fn blend_equation_separate(&self, mode_rgb: u32, mode_alpha: u32) {
|
2566 | let gl = &self.raw;
|
2567 | gl.BlendEquationSeparate(mode_rgb as u32, mode_alpha as u32);
|
2568 | }
|
2569 |
|
2570 | unsafe fn blend_equation_separate_draw_buffer(
|
2571 | &self,
|
2572 | draw_buffer: u32,
|
2573 | mode_rgb: u32,
|
2574 | mode_alpha: u32,
|
2575 | ) {
|
2576 | let gl = &self.raw;
|
2577 | gl.BlendEquationSeparatei(draw_buffer, mode_rgb as u32, mode_alpha as u32);
|
2578 | }
|
2579 |
|
2580 | unsafe fn blend_func(&self, src: u32, dst: u32) {
|
2581 | let gl = &self.raw;
|
2582 | gl.BlendFunc(src as u32, dst as u32);
|
2583 | }
|
2584 |
|
2585 | unsafe fn blend_func_draw_buffer(&self, draw_buffer: u32, src: u32, dst: u32) {
|
2586 | let gl = &self.raw;
|
2587 | gl.BlendFunci(draw_buffer, src as u32, dst as u32);
|
2588 | }
|
2589 |
|
2590 | unsafe fn blend_func_separate(
|
2591 | &self,
|
2592 | src_rgb: u32,
|
2593 | dst_rgb: u32,
|
2594 | src_alpha: u32,
|
2595 | dst_alpha: u32,
|
2596 | ) {
|
2597 | let gl = &self.raw;
|
2598 | gl.BlendFuncSeparate(
|
2599 | src_rgb as u32,
|
2600 | dst_rgb as u32,
|
2601 | src_alpha as u32,
|
2602 | dst_alpha as u32,
|
2603 | );
|
2604 | }
|
2605 |
|
2606 | unsafe fn blend_func_separate_draw_buffer(
|
2607 | &self,
|
2608 | draw_buffer: u32,
|
2609 | src_rgb: u32,
|
2610 | dst_rgb: u32,
|
2611 | src_alpha: u32,
|
2612 | dst_alpha: u32,
|
2613 | ) {
|
2614 | let gl = &self.raw;
|
2615 | gl.BlendFuncSeparatei(
|
2616 | draw_buffer,
|
2617 | src_rgb as u32,
|
2618 | dst_rgb as u32,
|
2619 | src_alpha as u32,
|
2620 | dst_alpha as u32,
|
2621 | );
|
2622 | }
|
2623 |
|
2624 | unsafe fn stencil_func(&self, func: u32, reference: i32, mask: u32) {
|
2625 | let gl = &self.raw;
|
2626 | gl.StencilFunc(func as u32, reference, mask);
|
2627 | }
|
2628 |
|
2629 | unsafe fn stencil_func_separate(&self, face: u32, func: u32, reference: i32, mask: u32) {
|
2630 | let gl = &self.raw;
|
2631 | gl.StencilFuncSeparate(face as u32, func as u32, reference, mask);
|
2632 | }
|
2633 |
|
2634 | unsafe fn stencil_mask(&self, mask: u32) {
|
2635 | let gl = &self.raw;
|
2636 | gl.StencilMask(mask);
|
2637 | }
|
2638 |
|
2639 | unsafe fn stencil_mask_separate(&self, face: u32, mask: u32) {
|
2640 | let gl = &self.raw;
|
2641 | gl.StencilMaskSeparate(face as u32, mask);
|
2642 | }
|
2643 |
|
2644 | unsafe fn stencil_op(&self, stencil_fail: u32, depth_fail: u32, pass: u32) {
|
2645 | let gl = &self.raw;
|
2646 | gl.StencilOp(stencil_fail as u32, depth_fail as u32, pass as u32);
|
2647 | }
|
2648 |
|
2649 | unsafe fn stencil_op_separate(&self, face: u32, stencil_fail: u32, depth_fail: u32, pass: u32) {
|
2650 | let gl = &self.raw;
|
2651 | gl.StencilOpSeparate(
|
2652 | face as u32,
|
2653 | stencil_fail as u32,
|
2654 | depth_fail as u32,
|
2655 | pass as u32,
|
2656 | );
|
2657 | }
|
2658 |
|
2659 | unsafe fn debug_message_control(
|
2660 | &self,
|
2661 | source: u32,
|
2662 | msg_type: u32,
|
2663 | severity: u32,
|
2664 | ids: &[u32],
|
2665 | enabled: bool,
|
2666 | ) {
|
2667 | let gl = &self.raw;
|
2668 |
|
2669 | let ids_ptr = if ids.is_empty() {
|
2670 | std::ptr::null()
|
2671 | } else {
|
2672 | ids.as_ptr()
|
2673 | };
|
2674 |
|
2675 | gl.DebugMessageControl(
|
2676 | source,
|
2677 | msg_type,
|
2678 | severity,
|
2679 | ids.len() as i32,
|
2680 | ids_ptr,
|
2681 | enabled as u8,
|
2682 | );
|
2683 | }
|
2684 |
|
2685 | unsafe fn debug_message_insert<S>(
|
2686 | &self,
|
2687 | source: u32,
|
2688 | msg_type: u32,
|
2689 | id: u32,
|
2690 | severity: u32,
|
2691 | msg: S,
|
2692 | ) where
|
2693 | S: AsRef<str>,
|
2694 | {
|
2695 | let gl = &self.raw;
|
2696 | let message = msg.as_ref().as_bytes();
|
2697 | let length = message.len() as i32;
|
2698 | gl.DebugMessageInsert(
|
2699 | source,
|
2700 | msg_type,
|
2701 | id,
|
2702 | severity,
|
2703 | length,
|
2704 | message.as_ptr() as *const native_gl::GLchar,
|
2705 | );
|
2706 | }
|
2707 |
|
    /// Register `callback` to receive GL debug messages.
    ///
    /// The closure is double-boxed: the inner `Box<dyn FnMut(...)>` is the
    /// trait object, and the outer `Box` gives us a *thin* pointer we can hand
    /// to the C API as `user_param`. Ownership of that pointer is recorded in
    /// `self.debug_callback` so it is reclaimed and dropped in `Drop`.
    ///
    /// # Panics
    /// Panics if a callback has already been registered; re-registering would
    /// leak the previous boxed closure.
    unsafe fn debug_message_callback<F>(&mut self, callback: F)
    where
        F: FnMut(u32, u32, u32, u32, &str) + 'static,
    {
        match self.debug_callback {
            Some(_) => {
                panic!("Debug callback already set" );
            }
            None => {
                // Inner box: erase the closure to a `DebugCallback` trait object.
                let trait_object: DebugCallback = Box::new(callback);
                // Outer box: wrap the (fat) trait object so the raw pointer is thin.
                let thin_ptr = Box::new(trait_object);
                let raw_ptr = Box::into_raw(thin_ptr) as *mut _ as *mut std::ffi::c_void;

                let gl = &self.raw;

                if gl.DebugMessageCallback_is_loaded() {
                    gl.DebugMessageCallback(Some(raw_debug_message_callback), raw_ptr);
                } else {
                    // Fallback to extension
                    gl.DebugMessageCallbackKHR(Some(raw_debug_message_callback), raw_ptr);
                }

                // Store the raw pointer so `Drop for DebugCallbackRawPtr` can
                // rebuild the `Box` and free the closure.
                self.debug_callback = Some(DebugCallbackRawPtr { callback: raw_ptr });
            }
        }
    }
|
2734 |
|
    /// Fetch up to `count` entries from the GL debug message log
    /// (`glGetDebugMessageLog`) and parse them into owned entries.
    unsafe fn get_debug_message_log(&self, count: u32) -> Vec<DebugMessageLogEntry> {
        let ct = count as usize;
        // Per-message attribute arrays; GL writes one element per returned message.
        let mut sources = Vec::with_capacity(ct);
        let mut types = Vec::with_capacity(ct);
        let mut ids = Vec::with_capacity(ct);
        let mut severities = Vec::with_capacity(ct);
        let mut lengths = Vec::with_capacity(ct);
        // One shared text buffer; each message may use up to
        // MAX_DEBUG_MESSAGE_LENGTH bytes including its NUL terminator.
        let buf_size = (count * MAX_DEBUG_MESSAGE_LENGTH) as i32;
        let mut message_log = Vec::with_capacity(buf_size as usize);

        let gl = &self.raw;
        // Returns the number of messages actually written (may be < count).
        let received = gl.GetDebugMessageLog(
            count,
            buf_size,
            sources.as_mut_ptr(),
            types.as_mut_ptr(),
            ids.as_mut_ptr(),
            severities.as_mut_ptr(),
            lengths.as_mut_ptr(),
            message_log.as_mut_ptr(),
        ) as usize;

        // SAFETY-relevant: lengths are set to what GL reported it wrote.
        // NOTE(review): `message_log` is extended to the full capacity, not
        // just the bytes GL wrote — bytes past the written region are
        // uninitialized but are never read because `offset` only advances by
        // the reported per-message lengths. Confirm this invariant holds.
        sources.set_len(received);
        types.set_len(received);
        ids.set_len(received);
        severities.set_len(received);
        lengths.set_len(received);
        message_log.set_len(buf_size as usize);

        // Walk the packed, NUL-terminated strings; `lengths[i]` includes the
        // terminator, so it is exactly the stride to the next message.
        let mut entries = Vec::new();
        let mut offset = 0;
        for i in 0..received {
            let message =
                std::ffi::CStr::from_ptr(message_log[offset..].as_ptr()).to_string_lossy();
            offset += lengths[i] as usize;
            entries.push(DebugMessageLogEntry {
                source: sources[i],
                msg_type: types[i],
                id: ids[i],
                severity: severities[i],
                message: message.to_string(),
            });
        }

        entries
    }
|
2781 |
|
2782 | unsafe fn push_debug_group<S>(&self, source: u32, id: u32, message: S)
|
2783 | where
|
2784 | S: AsRef<str>,
|
2785 | {
|
2786 | let gl = &self.raw;
|
2787 | let msg = message.as_ref().as_bytes();
|
2788 | let length = msg.len() as i32;
|
2789 | gl.PushDebugGroup(source, id, length, msg.as_ptr() as *const native_gl::GLchar);
|
2790 | }
|
2791 |
|
2792 | unsafe fn pop_debug_group(&self) {
|
2793 | let gl = &self.raw;
|
2794 | gl.PopDebugGroup();
|
2795 | }
|
2796 |
|
2797 | unsafe fn object_label<S>(&self, identifier: u32, name: u32, label: Option<S>)
|
2798 | where
|
2799 | S: AsRef<str>,
|
2800 | {
|
2801 | let gl = &self.raw;
|
2802 |
|
2803 | match label {
|
2804 | Some(l) => {
|
2805 | let lbl = l.as_ref().as_bytes();
|
2806 | let length = lbl.len() as i32;
|
2807 | gl.ObjectLabel(
|
2808 | identifier,
|
2809 | name,
|
2810 | length,
|
2811 | lbl.as_ptr() as *const native_gl::GLchar,
|
2812 | );
|
2813 | }
|
2814 | None => gl.ObjectLabel(identifier, name, 0, std::ptr::null()),
|
2815 | }
|
2816 | }
|
2817 |
|
    /// Retrieve the debug label of a GL object (`glGetObjectLabel`).
    ///
    /// # Panics
    /// Panics if the driver returns a label that is not valid UTF-8.
    unsafe fn get_object_label(&self, identifier: u32, name: u32) -> String {
        let gl = &self.raw;
        // GL writes the label length (excluding the NUL terminator) here.
        let mut len = 0;
        // Sized from GL_MAX_LABEL_LENGTH queried at context creation.
        let mut label_buf = Vec::with_capacity(self.constants.max_label_length as usize);
        gl.GetObjectLabel(
            identifier,
            name,
            self.constants.max_label_length,
            &mut len,
            label_buf.as_mut_ptr(),
        );
        // SAFETY-relevant: GL initialized `len` bytes plus a NUL terminator;
        // `CStr::from_ptr` below reads up to and including that terminator,
        // which lies within the reserved capacity.
        label_buf.set_len(len as usize);
        std::ffi::CStr::from_ptr(label_buf.as_ptr())
            .to_str()
            .unwrap()
            .to_owned()
    }
|
2835 |
|
2836 | unsafe fn object_ptr_label<S>(&self, sync: Self::Fence, label: Option<S>)
|
2837 | where
|
2838 | S: AsRef<str>,
|
2839 | {
|
2840 | let gl = &self.raw;
|
2841 |
|
2842 | match label {
|
2843 | Some(l) => {
|
2844 | let lbl = l.as_ref().as_bytes();
|
2845 | let length = lbl.len() as i32;
|
2846 | gl.ObjectPtrLabel(
|
2847 | sync.0 as *mut std::ffi::c_void,
|
2848 | length,
|
2849 | lbl.as_ptr() as *const native_gl::GLchar,
|
2850 | );
|
2851 | }
|
2852 | None => gl.ObjectPtrLabel(sync.0 as *mut std::ffi::c_void, 0, std::ptr::null()),
|
2853 | }
|
2854 | }
|
2855 |
|
    /// Retrieve the debug label of a sync object (`glGetObjectPtrLabel`).
    ///
    /// # Panics
    /// Panics if the driver returns a label that is not valid UTF-8.
    unsafe fn get_object_ptr_label(&self, sync: Self::Fence) -> String {
        let gl = &self.raw;
        // GL writes the label length (excluding the NUL terminator) here.
        let mut len = 0;
        // Sized from GL_MAX_LABEL_LENGTH queried at context creation.
        let mut label_buf = Vec::with_capacity(self.constants.max_label_length as usize);
        gl.GetObjectPtrLabel(
            sync.0 as *mut std::ffi::c_void,
            self.constants.max_label_length,
            &mut len,
            label_buf.as_mut_ptr(),
        );
        // SAFETY-relevant: GL initialized `len` bytes plus a NUL terminator
        // within the reserved capacity; `CStr::from_ptr` stops at that NUL.
        label_buf.set_len(len as usize);
        std::ffi::CStr::from_ptr(label_buf.as_ptr())
            .to_str()
            .unwrap()
            .to_owned()
    }
|
2872 |
|
2873 | unsafe fn get_uniform_block_index(&self, program: Self::Program, name: &str) -> Option<u32> {
|
2874 | let gl = &self.raw;
|
2875 | let name = CString::new(name).unwrap();
|
2876 | let index = gl.GetUniformBlockIndex(program.0.get(), name.as_ptr());
|
2877 | if index == INVALID_INDEX {
|
2878 | None
|
2879 | } else {
|
2880 | Some(index)
|
2881 | }
|
2882 | }
|
2883 |
|
2884 | unsafe fn uniform_block_binding(&self, program: Self::Program, index: u32, binding: u32) {
|
2885 | let gl = &self.raw;
|
2886 | gl.UniformBlockBinding(program.0.get(), index, binding);
|
2887 | }
|
2888 |
|
2889 | unsafe fn get_shader_storage_block_index(
|
2890 | &self,
|
2891 | program: Self::Program,
|
2892 | name: &str,
|
2893 | ) -> Option<u32> {
|
2894 | let gl = &self.raw;
|
2895 | let name = CString::new(name).unwrap();
|
2896 | let index =
|
2897 | gl.GetProgramResourceIndex(program.0.get(), SHADER_STORAGE_BLOCK, name.as_ptr());
|
2898 | if index == INVALID_INDEX {
|
2899 | None
|
2900 | } else {
|
2901 | Some(index)
|
2902 | }
|
2903 | }
|
2904 |
|
2905 | unsafe fn shader_storage_block_binding(
|
2906 | &self,
|
2907 | program: Self::Program,
|
2908 | index: u32,
|
2909 | binding: u32,
|
2910 | ) {
|
2911 | let gl = &self.raw;
|
2912 | gl.ShaderStorageBlockBinding(program.0.get(), index, binding);
|
2913 | }
|
2914 |
|
2915 | unsafe fn read_buffer(&self, src: u32) {
|
2916 | let gl = &self.raw;
|
2917 | gl.ReadBuffer(src);
|
2918 | }
|
2919 |
|
2920 | unsafe fn read_pixels(
|
2921 | &self,
|
2922 | x: i32,
|
2923 | y: i32,
|
2924 | width: i32,
|
2925 | height: i32,
|
2926 | format: u32,
|
2927 | gltype: u32,
|
2928 | pixels: PixelPackData,
|
2929 | ) {
|
2930 | let gl = &self.raw;
|
2931 | gl.ReadPixels(
|
2932 | x,
|
2933 | y,
|
2934 | width,
|
2935 | height,
|
2936 | format,
|
2937 | gltype,
|
2938 | match pixels {
|
2939 | PixelPackData::BufferOffset(offset) => offset as *mut std::ffi::c_void,
|
2940 | PixelPackData::Slice(data) => data.as_mut_ptr() as *mut std::ffi::c_void,
|
2941 | },
|
2942 | );
|
2943 | }
|
2944 |
|
2945 | unsafe fn begin_query(&self, target: u32, query: Self::Query) {
|
2946 | let gl = &self.raw;
|
2947 | gl.BeginQuery(target, query.0.get());
|
2948 | }
|
2949 |
|
2950 | unsafe fn end_query(&self, target: u32) {
|
2951 | let gl = &self.raw;
|
2952 | gl.EndQuery(target);
|
2953 | }
|
2954 |
|
2955 | unsafe fn query_counter(&self, query: Self::Query, target: u32) {
|
2956 | let gl = &self.raw;
|
2957 | gl.QueryCounter(query.0.get(), target);
|
2958 | }
|
2959 |
|
2960 | unsafe fn get_query_parameter_u32(&self, query: Self::Query, parameter: u32) -> u32 {
|
2961 | let gl = &self.raw;
|
2962 | let mut value = 0;
|
2963 | gl.GetQueryObjectuiv(query.0.get(), parameter, &mut value);
|
2964 | value
|
2965 | }
|
2966 |
|
2967 | unsafe fn get_query_parameter_u64_with_offset(
|
2968 | &self,
|
2969 | query: Self::Query,
|
2970 | parameter: u32,
|
2971 | offset: usize,
|
2972 | ) {
|
2973 | let gl = &self.raw;
|
2974 | gl.GetQueryObjectui64v(query.0.get(), parameter, offset as *mut _);
|
2975 | }
|
2976 |
|
2977 | unsafe fn create_transform_feedback(&self) -> Result<Self::TransformFeedback, String> {
|
2978 | let gl = &self.raw;
|
2979 | let mut name = 0;
|
2980 | gl.GenTransformFeedbacks(1, &mut name);
|
2981 | Ok(NativeTransformFeedback(non_zero_gl_name(name)))
|
2982 | }
|
2983 |
|
2984 | unsafe fn delete_transform_feedback(&self, transform_feedback: Self::TransformFeedback) {
|
2985 | let gl = &self.raw;
|
2986 | gl.DeleteTransformFeedbacks(1, &transform_feedback.0.get());
|
2987 | }
|
2988 |
|
2989 | unsafe fn bind_transform_feedback(
|
2990 | &self,
|
2991 | target: u32,
|
2992 | transform_feedback: Option<Self::TransformFeedback>,
|
2993 | ) {
|
2994 | let gl = &self.raw;
|
2995 | gl.BindTransformFeedback(target, transform_feedback.map(|tf| tf.0.get()).unwrap_or(0));
|
2996 | }
|
2997 |
|
2998 | unsafe fn begin_transform_feedback(&self, primitive_mode: u32) {
|
2999 | let gl = &self.raw;
|
3000 | gl.BeginTransformFeedback(primitive_mode);
|
3001 | }
|
3002 |
|
3003 | unsafe fn end_transform_feedback(&self) {
|
3004 | let gl = &self.raw;
|
3005 | gl.EndTransformFeedback();
|
3006 | }
|
3007 |
|
3008 | unsafe fn pause_transform_feedback(&self) {
|
3009 | let gl = &self.raw;
|
3010 | gl.PauseTransformFeedback();
|
3011 | }
|
3012 |
|
3013 | unsafe fn resume_transform_feedback(&self) {
|
3014 | let gl = &self.raw;
|
3015 | gl.ResumeTransformFeedback();
|
3016 | }
|
3017 |
|
3018 | unsafe fn transform_feedback_varyings(
|
3019 | &self,
|
3020 | program: Self::Program,
|
3021 | varyings: &[&str],
|
3022 | buffer_mode: u32,
|
3023 | ) {
|
3024 | let gl = &self.raw;
|
3025 |
|
3026 | let strings: Vec<CString> = varyings
|
3027 | .iter()
|
3028 | .copied()
|
3029 | .map(CString::new)
|
3030 | .collect::<Result<_, _>>()
|
3031 | .unwrap();
|
3032 | let varyings: Vec<_> = strings.iter().map(|c_str| c_str.as_ptr()).collect();
|
3033 |
|
3034 | gl.TransformFeedbackVaryings(
|
3035 | program.0.get(),
|
3036 | varyings.len() as i32,
|
3037 | varyings.as_ptr(),
|
3038 | buffer_mode,
|
3039 | );
|
3040 | }
|
3041 |
|
3042 | unsafe fn get_transform_feedback_varying(
|
3043 | &self,
|
3044 | program: Self::Program,
|
3045 | index: u32,
|
3046 | ) -> Option<ActiveTransformFeedback> {
|
3047 | let gl = &self.raw;
|
3048 |
|
3049 | const buf_size: usize = 256;
|
3050 | const bytes: [u8; buf_size] = [0; buf_size];
|
3051 |
|
3052 | let size: i32 = 0;
|
3053 | let tftype: u32 = 0;
|
3054 | let c_name = CString::new(bytes.to_vec()).unwrap();
|
3055 | let c_name_buf = c_name.into_raw();
|
3056 |
|
3057 | gl.GetTransformFeedbackVarying(
|
3058 | program.0.get(),
|
3059 | index,
|
3060 | buf_size as i32,
|
3061 | std::ptr::null_mut(),
|
3062 | size as *mut i32,
|
3063 | tftype as *mut u32,
|
3064 | c_name_buf,
|
3065 | );
|
3066 |
|
3067 | let name = CString::from_raw(c_name_buf).into_string().unwrap();
|
3068 |
|
3069 | Some(ActiveTransformFeedback { size, tftype, name })
|
3070 | }
|
3071 |
|
3072 | unsafe fn memory_barrier(&self, barriers: u32) {
|
3073 | let gl = &self.raw;
|
3074 | gl.MemoryBarrier(barriers);
|
3075 | }
|
3076 |
|
3077 | unsafe fn memory_barrier_by_region(&self, barriers: u32) {
|
3078 | let gl = &self.raw;
|
3079 | gl.MemoryBarrierByRegion(barriers);
|
3080 | }
|
3081 |
|
3082 | unsafe fn bind_image_texture(
|
3083 | &self,
|
3084 | unit: u32,
|
3085 | texture: Self::Texture,
|
3086 | level: i32,
|
3087 | layered: bool,
|
3088 | layer: i32,
|
3089 | access: u32,
|
3090 | format: u32,
|
3091 | ) {
|
3092 | let gl = &self.raw;
|
3093 | gl.BindImageTexture(
|
3094 | unit,
|
3095 | texture.0.get(),
|
3096 | level,
|
3097 | layered as u8,
|
3098 | layer,
|
3099 | access,
|
3100 | format,
|
3101 | );
|
3102 | }
|
3103 | unsafe fn get_active_uniform_block_parameter_i32(
|
3104 | &self,
|
3105 | program: Self::Program,
|
3106 | uniform_block_index: u32,
|
3107 | parameter: u32,
|
3108 | ) -> i32 {
|
3109 | let gl = &self.raw;
|
3110 | let mut value = 0;
|
3111 | gl.GetActiveUniformBlockiv(program.0.get(), uniform_block_index, parameter, &mut value);
|
3112 | value
|
3113 | }
|
3114 |
|
3115 | unsafe fn get_active_uniform_block_parameter_i32_slice(
|
3116 | &self,
|
3117 | program: Self::Program,
|
3118 | uniform_block_index: u32,
|
3119 | parameter: u32,
|
3120 | out: &mut [i32],
|
3121 | ) {
|
3122 | let gl = &self.raw;
|
3123 | gl.GetActiveUniformBlockiv(
|
3124 | program.0.get(),
|
3125 | uniform_block_index,
|
3126 | parameter,
|
3127 | out.as_mut_ptr(),
|
3128 | );
|
3129 | }
|
    /// Return the name of the uniform block at `uniform_block_index`
    /// (`glGetActiveUniformBlockName`), or an empty string if the driver
    /// reports a zero-length name.
    ///
    /// # Panics
    /// Panics if the returned bytes are not NUL-terminated valid UTF-8.
    unsafe fn get_active_uniform_block_name(
        &self,
        program: Self::Program,
        uniform_block_index: u32,
    ) -> String {
        let gl = &self.raw;

        // Probe for the length of the name of the uniform block, and, failing
        // that, fall back to allocating a buffer that is 256 bytes long. This
        // should be good enough for pretty much all contexts, including faulty
        // or partially faulty ones.
        let len = self.get_active_uniform_block_parameter_i32(
            program,
            uniform_block_index,
            crate::UNIFORM_BLOCK_NAME_LENGTH,
        );
        // A pending GL error means the probe result is unreliable — use the
        // fixed fallback size instead.
        let len = if gl.GetError() == crate::NO_ERROR && len > 0 {
            len as usize
        } else {
            256
        };

        let mut buffer = vec![0; len];
        // GL writes the name length here, excluding the NUL terminator.
        let mut length = 0;
        gl.GetActiveUniformBlockName(
            program.0.get(),
            uniform_block_index,
            buffer.len() as _,
            &mut length,
            buffer.as_mut_ptr(),
        );

        if length > 0 {
            assert_eq!(
                std::mem::size_of::<u8>(),
                std::mem::size_of::<native_gl::GLchar>(),
                "This operation is only safe in systems in which the length of \
                a GLchar is the same as that of an u8"
            );
            assert_eq!(
                std::mem::align_of::<u8>(),
                std::mem::align_of::<native_gl::GLchar>(),
                "This operation is only safe in systems in which the alignment \
                of a GLchar is the same as that of an u8"
            );
            // Reinterpret the GLchar buffer as bytes, including the NUL
            // terminator (hence `length + 1`), clamped to the buffer size.
            let buffer = std::slice::from_raw_parts(
                buffer.as_ptr() as *const u8,
                (length as usize + 1).min(buffer.len()),
            );

            let name = CStr::from_bytes_with_nul(&buffer[..])
                .unwrap()
                .to_str()
                .unwrap()
                .to_owned();

            name
        } else {
            String::from("" )
        }
    }
|
3191 |
|
3192 | unsafe fn max_shader_compiler_threads(&self, count: u32) {
|
3193 | let gl = &self.raw;
|
3194 | if gl.MaxShaderCompilerThreadsKHR_is_loaded() {
|
3195 | gl.MaxShaderCompilerThreadsKHR(count);
|
3196 | } else {
|
3197 | gl.MaxShaderCompilerThreadsARB(count);
|
3198 | }
|
3199 | }
|
3200 | }
|
3201 |
|
3202 | extern "system" fn raw_debug_message_callback(
|
3203 | source: u32,
|
3204 | gltype: u32,
|
3205 | id: u32,
|
3206 | severity: u32,
|
3207 | length: i32,
|
3208 | message: *const native_gl::GLchar,
|
3209 | user_param: *mut std::ffi::c_void,
|
3210 | ) {
|
3211 | let _result: Result<(), Box> = std::panic::catch_unwind(move || unsafe {
|
3212 | let callback: &mut DebugCallback = &mut *(user_param as *mut DebugCallback);
|
3213 | let slice: &[u8] = std::slice::from_raw_parts(data:message as *const u8, len:length as usize);
|
3214 | let msg: &str = std::str::from_utf8(slice).unwrap();
|
3215 | (callback)(source, gltype, id, severity, msg);
|
3216 | });
|
3217 | }
|
3218 | |