// Take a look at the license at the top of the repository in the LICENSE file.

use std::{mem, ptr};

use crate::ffi;
use glib::translate::{from_glib, from_glib_full, IntoGlib, ToGlibPtr};
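
/// Converts `sample` into a sample matching `caps`, blocking for at most
/// `timeout`. This wraps `gst_video_convert_sample()`, which can convert raw
/// video frames between raw formats or encode them into image formats such as
/// JPEG or PNG, depending on the requested caps.
///
/// A minimal usage sketch (not part of the original sources; assumes `sample`
/// already holds a raw video frame and GStreamer is initialized):
///
/// ```ignore
/// let jpeg_caps = gst::Caps::builder("image/jpeg").build();
/// let jpeg_sample =
///     convert_sample(&sample, &jpeg_caps, gst::ClockTime::from_seconds(5))
///         .expect("conversion failed");
/// ```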
#[doc(alias = "gst_video_convert_sample")]
pub fn convert_sample(
    sample: &gst::Sample,
    caps: &gst::Caps,
    timeout: gst::ClockTime,
) -> Result<gst::Sample, glib::Error> {
    skip_assert_initialized!();
    unsafe {
        let mut error = ptr::null_mut();
        let ret = ffi::gst_video_convert_sample(
            sample.to_glib_none().0,
            caps.to_glib_none().0,
            timeout.into_glib(),
            &mut error,
        );

        if error.is_null() {
            Ok(from_glib_full(ret))
        } else {
            Err(from_glib_full(error))
        }
    }
}
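
/// Converts `sample` into a sample matching `caps` asynchronously and calls
/// `func` with the result once the conversion has finished or failed. The
/// closure must be `Send` because the result may be delivered from a different
/// thread; see [`convert_sample_async_local`] for a variant without that
/// requirement.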
pub fn convert_sample_async<F>(
    sample: &gst::Sample,
    caps: &gst::Caps,
    timeout: Option<gst::ClockTime>,
    func: F,
) where
    F: FnOnce(Result<gst::Sample, glib::Error>) + Send + 'static,
{
    skip_assert_initialized!();
    unsafe { convert_sample_async_unsafe(sample, caps, timeout, func) }
}
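
/// Variant of [`convert_sample_async`] for closures that are not `Send`.
/// It acquires the current thread-default [`glib::MainContext`] (panicking if
/// another thread has already acquired it) and wraps the closure in a
/// `ThreadGuard` so it is only ever invoked on the calling thread.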
pub fn convert_sample_async_local<F>(
    sample: &gst::Sample,
    caps: &gst::Caps,
    timeout: Option<gst::ClockTime>,
    func: F,
) where
    F: FnOnce(Result<gst::Sample, glib::Error>) + 'static,
{
    skip_assert_initialized!();
    unsafe {
        let ctx = glib::MainContext::ref_thread_default();
        let _acquire = ctx
            .acquire()
            .expect("thread default main context already acquired by another thread");

        let func = glib::thread_guard::ThreadGuard::new(func);

        convert_sample_async_unsafe(sample, caps, timeout, move |res| (func.into_inner())(res))
    }
}
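
// Shared implementation for the async variants: the closure is boxed and handed to
// `gst_video_convert_sample_async` as user data, invoked at most once via the
// trampoline and released through the destroy notify.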
unsafe fn convert_sample_async_unsafe<F>(
    sample: &gst::Sample,
    caps: &gst::Caps,
    timeout: Option<gst::ClockTime>,
    func: F,
) where
    F: FnOnce(Result<gst::Sample, glib::Error>) + 'static,
{
    unsafe extern "C" fn convert_sample_async_trampoline<F>(
        sample: *mut gst::ffi::GstSample,
        error: *mut glib::ffi::GError,
        user_data: glib::ffi::gpointer,
    ) where
        F: FnOnce(Result<gst::Sample, glib::Error>) + 'static,
    {
        // Take the closure out of the user data so it can only be called once.
        let callback: &mut Option<F> = &mut *(user_data as *mut Option<F>);
        let callback = callback.take().unwrap();

        if error.is_null() {
            callback(Ok(from_glib_full(sample)))
        } else {
            callback(Err(from_glib_full(error)))
        }
    }

    unsafe extern "C" fn convert_sample_async_free<F>(user_data: glib::ffi::gpointer)
    where
        F: FnOnce(Result<gst::Sample, glib::Error>) + 'static,
    {
        // Reconstruct the `Box` so the (possibly already consumed) closure is dropped.
        let _: Box<Option<F>> = Box::from_raw(user_data as *mut _);
    }

    let user_data: Box<Option<F>> = Box::new(Some(func));

    ffi::gst_video_convert_sample_async(
        sample.to_glib_none().0,
        caps.to_glib_none().0,
        timeout.into_glib(),
        Some(convert_sample_async_trampoline::<F>),
        Box::into_raw(user_data) as glib::ffi::gpointer,
        Some(convert_sample_async_free::<F>),
    );
}
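
/// Future-based variant of [`convert_sample_async`]: returns a future that
/// resolves to the converted sample or the conversion error. The future must
/// be awaited on a thread that owns its thread-default [`glib::MainContext`],
/// otherwise it panics.
///
/// A minimal usage sketch (not part of the original sources; assumes `sample`
/// and `out_caps` are already set up and GStreamer is initialized):
///
/// ```ignore
/// let res = glib::MainContext::default().block_on(convert_sample_future(
///     &sample,
///     &out_caps,
///     gst::ClockTime::NONE,
/// ));
/// ```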
pub fn convert_sample_future(
    sample: &gst::Sample,
    caps: &gst::Caps,
    timeout: Option<gst::ClockTime>,
) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<gst::Sample, glib::Error>> + 'static>>
{
    skip_assert_initialized!();

    use futures_channel::oneshot;

    let (sender, receiver) = oneshot::channel();

    let sample = sample.clone();
    let caps = caps.clone();
    let future = async move {
        assert!(
            glib::MainContext::ref_thread_default().is_owner(),
            "Spawning futures only allowed if the thread is owning the MainContext"
        );

        convert_sample_async(&sample, &caps, timeout, move |res| {
            let _ = sender.send(res);
        });

        receiver
            .await
            .expect("Sender dropped before callback was called")
    };

    Box::pin(future)
}
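
/// Calculates the display ratio for a `video_width` x `video_height` frame
/// with the given video and display pixel aspect ratios, mirroring
/// `gst_video_calculate_display_ratio()`. Returns `None` if the ratio could
/// not be calculated (for example on overflow).
///
/// A hypothetical example (not part of the original sources): square pixels
/// on a square-pixel display give the plain width:height ratio.
///
/// ```ignore
/// // 1280x720 with 1:1 pixel aspect ratios reduces to a 16:9 display ratio.
/// let dar = calculate_display_ratio(1280, 720, (1, 1).into(), (1, 1).into());
/// assert_eq!(dar, Some(gst::Fraction::new(16, 9)));
/// ```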
#[doc(alias = "gst_video_calculate_display_ratio")]
pub fn calculate_display_ratio(
    video_width: u32,
    video_height: u32,
    video_par: gst::Fraction,
    display_par: gst::Fraction,
) -> Option<gst::Fraction> {
    skip_assert_initialized!();

    unsafe {
        let mut dar_n = mem::MaybeUninit::uninit();
        let mut dar_d = mem::MaybeUninit::uninit();

        let res: bool = from_glib(ffi::gst_video_calculate_display_ratio(
            dar_n.as_mut_ptr(),
            dar_d.as_mut_ptr(),
            video_width,
            video_height,
            video_par.numer() as u32,
            video_par.denom() as u32,
            display_par.numer() as u32,
            display_par.denom() as u32,
        ));
        if res {
            Some(gst::Fraction::new(
                dar_n.assume_init() as i32,
                dar_d.assume_init() as i32,
            ))
        } else {
            None
        }
    }
}
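
/// Guesses a common video framerate from the duration of a single frame,
/// mirroring `gst_video_guess_framerate()`. Returns `None` if no close match
/// against the well-known framerates was found.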
#[doc(alias = "gst_video_guess_framerate")]
pub fn guess_framerate(duration: gst::ClockTime) -> Option<gst::Fraction> {
    skip_assert_initialized!();

    unsafe {
        let mut dest_n = mem::MaybeUninit::uninit();
        let mut dest_d = mem::MaybeUninit::uninit();
        let res: bool = from_glib(ffi::gst_video_guess_framerate(
            duration.into_glib(),
            dest_n.as_mut_ptr(),
            dest_d.as_mut_ptr(),
        ));
        if res {
            Some(gst::Fraction::new(
                dest_n.assume_init(),
                dest_d.assume_init(),
            ))
        } else {
            None
        }
    }
}
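
/// Returns `true` if a video of the given width, height and pixel aspect ratio
/// has a "common" display aspect ratio (such as 4:3 or 16:9), as reported by
/// `gst_video_is_common_aspect_ratio()`.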
#[cfg(feature = "v1_22")]
#[cfg_attr(docsrs, doc(cfg(feature = "v1_22")))]
#[doc(alias = "gst_video_is_common_aspect_ratio")]
pub fn is_common_aspect_ratio(width: u32, height: u32, par: gst::Fraction) -> bool {
    skip_assert_initialized!();

    unsafe {
        from_glib(ffi::gst_video_is_common_aspect_ratio(
            width as i32,
            height as i32,
            par.numer(),
            par.denom(),
        ))
    }
}
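
/// Returns a [`crate::VideoCapsBuilder`] preconfigured with a `video/x-raw`
/// format list containing the given formats, mirroring
/// `gst_video_make_raw_caps()`. Panics if the list contains
/// [`crate::VideoFormat::Encoded`] or [`crate::VideoFormat::Unknown`];
/// see the tests below for the resulting caps strings.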
pub fn video_make_raw_caps(
    formats: &[crate::VideoFormat],
) -> crate::VideoCapsBuilder<gst::caps::NoFeature> {
    skip_assert_initialized!();

    let formats = formats.iter().copied().map(|f| match f {
        crate::VideoFormat::Encoded => panic!("Invalid encoded format"),
        crate::VideoFormat::Unknown => panic!("Invalid unknown format"),
        _ => f,
    });

    crate::VideoCapsBuilder::new().format_list(formats)
}

#[cfg(test)]
mod tests {
    use std::sync::{Arc, Mutex};

    use super::*;

    #[test]
    fn test_convert_sample_async() {
        gst::init().unwrap();

        let l = glib::MainLoop::new(None, false);

        let mut in_buffer = gst::Buffer::with_size(320 * 240 * 4).unwrap();
        {
            let buffer = in_buffer.get_mut().unwrap();
            let mut data = buffer.map_writable().unwrap();

            for p in data.as_mut_slice().chunks_mut(4) {
                p[0] = 63;
                p[1] = 127;
                p[2] = 191;
                p[3] = 255;
            }
        }
        let in_caps = crate::VideoInfo::builder(crate::VideoFormat::Rgba, 320, 240)
            .build()
            .unwrap()
            .to_caps()
            .unwrap();
        let sample = gst::Sample::builder()
            .buffer(&in_buffer)
            .caps(&in_caps)
            .build();

        let out_caps = crate::VideoInfo::builder(crate::VideoFormat::Abgr, 320, 240)
            .build()
            .unwrap()
            .to_caps()
            .unwrap();

        let l_clone = l.clone();
        let res_store = Arc::new(Mutex::new(None));
        let res_store_clone = res_store.clone();
        convert_sample_async(&sample, &out_caps, gst::ClockTime::NONE, move |res| {
            *res_store_clone.lock().unwrap() = Some(res);
            l_clone.quit();
        });

        l.run();

        let res = res_store.lock().unwrap().take().unwrap();
        let res = res.unwrap();

        let converted_out_caps = res.caps().unwrap();
        assert_eq!(out_caps.as_ref(), converted_out_caps);
        let out_buffer = res.buffer().unwrap();
        {
            let data = out_buffer.map_readable().unwrap();

            for p in data.as_slice().chunks(4) {
                assert_eq!(p, &[255, 191, 127, 63]);
            }
        }
    }

    #[test]
    fn video_caps() {
        gst::init().unwrap();

        let caps =
            video_make_raw_caps(&[crate::VideoFormat::Nv12, crate::VideoFormat::Nv16]).build();
        assert_eq!(caps.to_string(), "video/x-raw, format=(string){ NV12, NV16 }, width=(int)[ 1, 2147483647 ], height=(int)[ 1, 2147483647 ], framerate=(fraction)[ 0/1, 2147483647/1 ]");

        #[cfg(feature = "v1_18")]
        {
            /* video_make_raw_caps() is a re-implementation so ensure it returns the same caps as the C API */
            let c_caps = unsafe {
                let formats: Vec<ffi::GstVideoFormat> =
                    [crate::VideoFormat::Nv12, crate::VideoFormat::Nv16]
                        .iter()
                        .map(|f| f.into_glib())
                        .collect();
                let caps = ffi::gst_video_make_raw_caps(formats.as_ptr(), formats.len() as u32);
                gst::Caps::from_glib_full(caps)
            };
            assert_eq!(caps, c_caps);
        }

        let caps = video_make_raw_caps(&[crate::VideoFormat::Nv12, crate::VideoFormat::Nv16])
            .width(800)
            .height(600)
            .framerate((30, 1).into())
            .build();
        assert_eq!(caps.to_string(), "video/x-raw, format=(string){ NV12, NV16 }, width=(int)800, height=(int)600, framerate=(fraction)30/1");
    }

    #[test]
    #[should_panic(expected = "Invalid encoded format")]
    fn video_caps_encoded() {
        gst::init().unwrap();
        let _caps = video_make_raw_caps(&[crate::VideoFormat::Encoded]);
    }

    #[test]
    #[should_panic(expected = "Invalid unknown format")]
    fn video_caps_unknown() {
        gst::init().unwrap();
        let _caps = video_make_raw_caps(&[crate::VideoFormat::Unknown]);
    }
}