// Take a look at the license at the top of the repository in the LICENSE file.

use std::{i32, mem, ptr};

use glib::translate::{from_glib, from_glib_full, IntoGlib, ToGlibPtr};
6
7#[doc(alias = "gst_video_convert_sample")]
8pub fn convert_sample(
9 sample: &gst::Sample,
10 caps: &gst::Caps,
11 timeout: gst::ClockTime,
12) -> Result<gst::Sample, glib::Error> {
13 skip_assert_initialized!();
14 unsafe {
15 let mut error: *mut GError = ptr::null_mut();
16 let ret: *mut GstSample = ffi::gst_video_convert_sample(
17 sample:sample.to_glib_none().0,
18 to_caps:caps.to_glib_none().0,
19 timeout:timeout.into_glib(),
20 &mut error,
21 );
22
23 if error.is_null() {
24 Ok(from_glib_full(ptr:ret))
25 } else {
26 Err(from_glib_full(ptr:error))
27 }
28 }
29}
30
31pub fn convert_sample_async<F>(
32 sample: &gst::Sample,
33 caps: &gst::Caps,
34 timeout: Option<gst::ClockTime>,
35 func: F,
36) where
37 F: FnOnce(Result<gst::Sample, glib::Error>) + Send + 'static,
38{
39 skip_assert_initialized!();
40 unsafe { convert_sample_async_unsafe(sample, caps, timeout, func) }
41}
42
43pub fn convert_sample_async_local<F>(
44 sample: &gst::Sample,
45 caps: &gst::Caps,
46 timeout: Option<gst::ClockTime>,
47 func: F,
48) where
49 F: FnOnce(Result<gst::Sample, glib::Error>) + 'static,
50{
51 skip_assert_initialized!();
52 unsafe {
53 let ctx: MainContext = glib::MainContext::ref_thread_default();
54 let _acquire: MainContextAcquireGuard<'_> = ctx
55 .acquire()
56 .expect(msg:"thread default main context already acquired by another thread");
57
58 let func: ThreadGuard = glib::thread_guard::ThreadGuard::new(func);
59
60 convert_sample_async_unsafe(sample, caps, timeout, func:move |res: Result| (func.into_inner())(res))
61 }
62}
63
64unsafe fn convert_sample_async_unsafe<F>(
65 sample: &gst::Sample,
66 caps: &gst::Caps,
67 timeout: Option<gst::ClockTime>,
68 func: F,
69) where
70 F: FnOnce(Result<gst::Sample, glib::Error>) + 'static,
71{
72 unsafe extern "C" fn convert_sample_async_trampoline<F>(
73 sample: *mut gst::ffi::GstSample,
74 error: *mut glib::ffi::GError,
75 user_data: glib::ffi::gpointer,
76 ) where
77 F: FnOnce(Result<gst::Sample, glib::Error>) + 'static,
78 {
79 let callback: &mut Option<F> = &mut *(user_data as *mut Option<F>);
80 let callback = callback.take().unwrap();
81
82 if error.is_null() {
83 callback(Ok(from_glib_full(sample)))
84 } else {
85 callback(Err(from_glib_full(error)))
86 }
87 }
88 unsafe extern "C" fn convert_sample_async_free<F>(user_data: glib::ffi::gpointer)
89 where
90 F: FnOnce(Result<gst::Sample, glib::Error>) + 'static,
91 {
92 let _: Box<Option<F>> = Box::from_raw(user_data as *mut _);
93 }
94
95 let user_data: Box<Option<F>> = Box::new(Some(func));
96
97 ffi::gst_video_convert_sample_async(
98 sample.to_glib_none().0,
99 caps.to_glib_none().0,
100 timeout.into_glib(),
101 Some(convert_sample_async_trampoline::<F>),
102 Box::into_raw(user_data) as glib::ffi::gpointer,
103 Some(convert_sample_async_free::<F>),
104 );
105}
106
107pub fn convert_sample_future(
108 sample: &gst::Sample,
109 caps: &gst::Caps,
110 timeout: Option<gst::ClockTime>,
111) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<gst::Sample, glib::Error>> + 'static>>
112{
113 skip_assert_initialized!();
114
115 use futures_channel::oneshot;
116
117 let (sender, receiver) = oneshot::channel();
118
119 let sample = sample.clone();
120 let caps = caps.clone();
121 let future = async move {
122 assert!(
123 glib::MainContext::ref_thread_default().is_owner(),
124 "Spawning futures only allowed if the thread is owning the MainContext"
125 );
126
127 convert_sample_async(&sample, &caps, timeout, move |res| {
128 let _ = sender.send(res);
129 });
130
131 receiver
132 .await
133 .expect("Sender dropped before callback was called")
134 };
135
136 Box::pin(future)
137}
138
139#[doc(alias = "gst_video_calculate_display_ratio")]
140pub fn calculate_display_ratio(
141 video_width: u32,
142 video_height: u32,
143 video_par: gst::Fraction,
144 display_par: gst::Fraction,
145) -> Option<gst::Fraction> {
146 skip_assert_initialized!();
147
148 unsafe {
149 let mut dar_n = mem::MaybeUninit::uninit();
150 let mut dar_d = mem::MaybeUninit::uninit();
151
152 let res: bool = from_glib(ffi::gst_video_calculate_display_ratio(
153 dar_n.as_mut_ptr(),
154 dar_d.as_mut_ptr(),
155 video_width,
156 video_height,
157 video_par.numer() as u32,
158 video_par.denom() as u32,
159 display_par.numer() as u32,
160 display_par.denom() as u32,
161 ));
162 if res {
163 Some(gst::Fraction::new(
164 dar_n.assume_init() as i32,
165 dar_d.assume_init() as i32,
166 ))
167 } else {
168 None
169 }
170 }
171}
172
173#[doc(alias = "gst_video_guess_framerate")]
174pub fn guess_framerate(duration: gst::ClockTime) -> Option<gst::Fraction> {
175 skip_assert_initialized!();
176
177 unsafe {
178 let mut dest_n: MaybeUninit = mem::MaybeUninit::uninit();
179 let mut dest_d: MaybeUninit = mem::MaybeUninit::uninit();
180 let res: bool = from_glib(val:ffi::gst_video_guess_framerate(
181 duration:duration.into_glib(),
182 dest_n:dest_n.as_mut_ptr(),
183 dest_d:dest_d.as_mut_ptr(),
184 ));
185 if res {
186 Some(gst::Fraction::new(
187 num:dest_n.assume_init(),
188 den:dest_d.assume_init(),
189 ))
190 } else {
191 None
192 }
193 }
194}
195
#[cfg(feature = "v1_22")]
#[cfg_attr(docsrs, doc(cfg(feature = "v1_22")))]
#[doc(alias = "gst_video_is_common_aspect_ratio")]
/// Returns `true` if the given `width` × `height` with pixel aspect ratio
/// `par` corresponds to a commonly used display aspect ratio.
pub fn is_common_aspect_ratio(width: u32, height: u32, par: gst::Fraction) -> bool {
    skip_assert_initialized!();

    unsafe {
        from_glib(ffi::gst_video_is_common_aspect_ratio(
            width as i32,
            height as i32,
            par.numer(),
            par.denom(),
        ))
    }
}
211
212pub fn video_make_raw_caps(
213 formats: &[crate::VideoFormat],
214) -> crate::VideoCapsBuilder<gst::caps::NoFeature> {
215 skip_assert_initialized!();
216
217 let formats: impl Iterator = formats.iter().copied().map(|f: VideoFormat| match f {
218 crate::VideoFormat::Encoded => panic!("Invalid encoded format"),
219 crate::VideoFormat::Unknown => panic!("Invalid unknown format"),
220 _ => f,
221 });
222
223 crate::VideoCapsBuilder::new().format_list(formats)
224}
225
#[cfg(test)]
mod tests {
    use std::sync::{Arc, Mutex};

    use super::*;

    #[test]
    fn test_convert_sample_async() {
        gst::init().unwrap();

        let l = glib::MainLoop::new(None, false);

        // Build a 320x240 RGBA buffer with a fixed per-pixel pattern.
        let mut in_buffer = gst::Buffer::with_size(320 * 240 * 4).unwrap();
        {
            let buffer = in_buffer.get_mut().unwrap();
            let mut data = buffer.map_writable().unwrap();

            for p in data.as_mut_slice().chunks_mut(4) {
                p[0] = 63;
                p[1] = 127;
                p[2] = 191;
                p[3] = 255;
            }
        }
        let in_caps = crate::VideoInfo::builder(crate::VideoFormat::Rgba, 320, 240)
            .build()
            .unwrap()
            .to_caps()
            .unwrap();
        let sample = gst::Sample::builder()
            .buffer(&in_buffer)
            .caps(&in_caps)
            .build();

        // Convert RGBA -> ABGR, which simply reverses the byte order.
        let out_caps = crate::VideoInfo::builder(crate::VideoFormat::Abgr, 320, 240)
            .build()
            .unwrap()
            .to_caps()
            .unwrap();

        let l_clone = l.clone();
        let res_store = Arc::new(Mutex::new(None));
        let res_store_clone = res_store.clone();
        convert_sample_async(&sample, &out_caps, gst::ClockTime::NONE, move |res| {
            *res_store_clone.lock().unwrap() = Some(res);
            l_clone.quit();
        });

        l.run();

        let res = res_store.lock().unwrap().take().unwrap();
        let res = res.unwrap();

        let converted_out_caps = res.caps().unwrap();
        assert_eq!(out_caps.as_ref(), converted_out_caps);
        let out_buffer = res.buffer().unwrap();
        {
            let data = out_buffer.map_readable().unwrap();

            // Each RGBA pixel (63,127,191,255) must come out byte-reversed.
            for p in data.as_slice().chunks(4) {
                assert_eq!(p, &[255, 191, 127, 63]);
            }
        }
    }

    #[test]
    fn video_caps() {
        gst::init().unwrap();

        let caps =
            video_make_raw_caps(&[crate::VideoFormat::Nv12, crate::VideoFormat::Nv16]).build();
        assert_eq!(caps.to_string(), "video/x-raw, format=(string){ NV12, NV16 }, width=(int)[ 1, 2147483647 ], height=(int)[ 1, 2147483647 ], framerate=(fraction)[ 0/1, 2147483647/1 ]");

        #[cfg(feature = "v1_18")]
        {
            /* video_make_raw_caps() is a re-implementation so ensure it returns the same caps as the C API */
            let c_caps = unsafe {
                let formats: Vec<ffi::GstVideoFormat> =
                    [crate::VideoFormat::Nv12, crate::VideoFormat::Nv16]
                        .iter()
                        .map(|f| f.into_glib())
                        .collect();
                let caps = ffi::gst_video_make_raw_caps(formats.as_ptr(), formats.len() as u32);
                gst::Caps::from_glib_full(caps)
            };
            assert_eq!(caps, c_caps);
        }

        let caps = video_make_raw_caps(&[crate::VideoFormat::Nv12, crate::VideoFormat::Nv16])
            .width(800)
            .height(600)
            .framerate((30, 1).into())
            .build();
        assert_eq!(caps.to_string(), "video/x-raw, format=(string){ NV12, NV16 }, width=(int)800, height=(int)600, framerate=(fraction)30/1");
    }

    #[test]
    #[should_panic(expected = "Invalid encoded format")]
    fn video_caps_encoded() {
        gst::init().unwrap();
        let _caps = video_make_raw_caps(&[crate::VideoFormat::Encoded]);
    }

    #[test]
    #[should_panic(expected = "Invalid unknown format")]
    fn video_caps_unknown() {
        gst::init().unwrap();
        let _caps = video_make_raw_caps(&[crate::VideoFormat::Unknown]);
    }
}