1 | /* GStreamer |
2 | * Copyright (C) 2008 David Schleef <ds@schleef.org> |
3 | * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>. |
4 | * Copyright (C) 2011 Nokia Corporation. All rights reserved. |
5 | * Contact: Stefan Kost <stefan.kost@nokia.com> |
6 | * Copyright (C) 2012 Collabora Ltd. |
7 | * Author : Edward Hervey <edward@collabora.com> |
8 | * |
9 | * This library is free software; you can redistribute it and/or |
10 | * modify it under the terms of the GNU Library General Public |
11 | * License as published by the Free Software Foundation; either |
12 | * version 2 of the License, or (at your option) any later version. |
13 | * |
14 | * This library is distributed in the hope that it will be useful, |
15 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
16 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
17 | * Library General Public License for more details. |
18 | * |
19 | * You should have received a copy of the GNU Library General Public |
20 | * License along with this library; if not, write to the |
21 | * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, |
22 | * Boston, MA 02110-1301, USA. |
23 | */ |
24 | |
25 | #ifndef _GST_VIDEO_DECODER_H_ |
26 | #define _GST_VIDEO_DECODER_H_ |
27 | |
28 | #include <gst/base/gstadapter.h> |
29 | #include <gst/video/gstvideoutils.h> |
30 | |
31 | G_BEGIN_DECLS |
32 | |
33 | #define GST_TYPE_VIDEO_DECODER \ |
34 | (gst_video_decoder_get_type()) |
35 | #define GST_VIDEO_DECODER(obj) \ |
36 | (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_DECODER,GstVideoDecoder)) |
37 | #define GST_VIDEO_DECODER_CLASS(klass) \ |
38 | (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_DECODER,GstVideoDecoderClass)) |
39 | #define GST_VIDEO_DECODER_GET_CLASS(obj) \ |
40 | (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_VIDEO_DECODER,GstVideoDecoderClass)) |
41 | #define GST_IS_VIDEO_DECODER(obj) \ |
42 | (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_DECODER)) |
43 | #define GST_IS_VIDEO_DECODER_CLASS(klass) \ |
44 | (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_DECODER)) |
45 | #define GST_VIDEO_DECODER_CAST(obj) ((GstVideoDecoder *)(obj)) |
46 | |
47 | /** |
48 | * GST_VIDEO_DECODER_SINK_NAME: |
49 | * |
 * The name of the template for the sink pad.
51 | */ |
52 | #define GST_VIDEO_DECODER_SINK_NAME "sink" |
53 | /** |
54 | * GST_VIDEO_DECODER_SRC_NAME: |
55 | * |
 * The name of the template for the source pad.
57 | */ |
58 | #define GST_VIDEO_DECODER_SRC_NAME "src" |
59 | |
60 | /** |
61 | * GST_VIDEO_DECODER_SRC_PAD: |
62 | * @obj: a #GstVideoDecoder |
63 | * |
64 | * Gives the pointer to the source #GstPad object of the element. |
65 | */ |
66 | #define GST_VIDEO_DECODER_SRC_PAD(obj) (((GstVideoDecoder *) (obj))->srcpad) |
67 | |
68 | /** |
69 | * GST_VIDEO_DECODER_SINK_PAD: |
70 | * @obj: a #GstVideoDecoder |
71 | * |
72 | * Gives the pointer to the sink #GstPad object of the element. |
73 | */ |
74 | #define GST_VIDEO_DECODER_SINK_PAD(obj) (((GstVideoDecoder *) (obj))->sinkpad) |
75 | /** |
76 | * GST_VIDEO_DECODER_FLOW_NEED_DATA: |
77 | * |
78 | * Returned while parsing to indicate more data is needed. |
79 | **/ |
80 | #define GST_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS |
81 | |
82 | /** |
83 | * GST_VIDEO_DECODER_INPUT_SEGMENT: |
84 | * @obj: base decoder instance |
85 | * |
 * Gives the input segment of the element.
87 | */ |
88 | #define GST_VIDEO_DECODER_INPUT_SEGMENT(obj) (GST_VIDEO_DECODER_CAST (obj)->input_segment) |
89 | |
90 | /** |
91 | * GST_VIDEO_DECODER_OUTPUT_SEGMENT: |
92 | * @obj: base decoder instance |
93 | * |
 * Gives the output segment of the element.
95 | */ |
96 | #define GST_VIDEO_DECODER_OUTPUT_SEGMENT(obj) (GST_VIDEO_DECODER_CAST (obj)->output_segment) |
97 | |
98 | /** |
99 | * GST_VIDEO_DECODER_STREAM_LOCK: |
100 | * @decoder: video decoder instance |
101 | * |
102 | * Obtain a lock to protect the decoder function from concurrent access. |
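 *
 * As a minimal illustrative sketch, a subclass entry point that is not
 * already called with the stream lock held (for instance a custom task
 * touching decoding state) would bracket that access like this:
 * |[<!-- language="C" -->
 * GST_VIDEO_DECODER_STREAM_LOCK (decoder);
 * // ... access or modify pending frames, output state, ... (placeholder)
 * GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
 * ]|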
103 | */ |
104 | #define GST_VIDEO_DECODER_STREAM_LOCK(decoder) g_rec_mutex_lock (&GST_VIDEO_DECODER (decoder)->stream_lock) |
105 | |
106 | /** |
107 | * GST_VIDEO_DECODER_STREAM_UNLOCK: |
108 | * @decoder: video decoder instance |
109 | * |
110 | * Release the lock that protects the decoder function from concurrent access. |
111 | */ |
112 | #define GST_VIDEO_DECODER_STREAM_UNLOCK(decoder) g_rec_mutex_unlock (&GST_VIDEO_DECODER (decoder)->stream_lock) |
113 | |
114 | typedef struct _GstVideoDecoder GstVideoDecoder; |
115 | typedef struct _GstVideoDecoderClass GstVideoDecoderClass; |
116 | typedef struct _GstVideoDecoderPrivate GstVideoDecoderPrivate; |
117 | |
118 | |
/* do not use this one, use the GST_VIDEO_DECODER_ERROR macro below */
120 | |
121 | GST_VIDEO_API |
122 | GstFlowReturn _gst_video_decoder_error (GstVideoDecoder *dec, gint weight, |
123 | GQuark domain, gint code, |
124 | gchar *txt, gchar *debug, |
125 | const gchar *file, const gchar *function, |
126 | gint line); |
127 | |
128 | /** |
129 | * GST_VIDEO_DECODER_ERROR: |
130 | * @el: the base video decoder element that generates the error |
131 | * @w: element defined weight of the error, added to error count |
132 | * @domain: like CORE, LIBRARY, RESOURCE or STREAM (see #gstreamer-GstGError) |
133 | * @code: error code defined for that domain (see #gstreamer-GstGError) |
134 | * @text: the message to display (format string and args enclosed in |
135 | * parentheses) |
136 | * @debug: debugging information for the message (format string and args |
137 | * enclosed in parentheses) |
138 | * @ret: variable to receive return value |
139 | * |
 * Utility macro that video decoder elements can use when they encounter
141 | * a data processing error that may be fatal for the current "data unit" but |
142 | * need not prevent subsequent decoding. Such errors are counted and if there |
143 | * are too many, as configured in the context's max_errors, the pipeline will |
144 | * post an error message and the application will be requested to stop further |
145 | * media processing. Otherwise, it is considered a "glitch" and only a warning |
146 | * is logged. In either case, @ret is set to the proper value to |
147 | * return to upstream/caller (indicating either GST_FLOW_ERROR or GST_FLOW_OK). |
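 *
 * A minimal illustrative use from within a subclass (the error strings and
 * the surrounding failure handling are placeholders):
 * |[<!-- language="C" -->
 * GstFlowReturn ret;
 *
 * GST_VIDEO_DECODER_ERROR (decoder, 1, STREAM, DECODE,
 *     ("Failed to decode frame"), ("corrupt unit in bitstream"), ret);
 * // ret is now GST_FLOW_OK (tolerated glitch) or GST_FLOW_ERROR (too many errors)
 * return ret;
 * ]|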
148 | */ |
149 | #define GST_VIDEO_DECODER_ERROR(el, w, domain, code, text, debug, ret) \ |
150 | G_STMT_START { \ |
151 | gchar *__txt = _gst_element_error_printf text; \ |
152 | gchar *__dbg = _gst_element_error_printf debug; \ |
153 | GstVideoDecoder *__dec = GST_VIDEO_DECODER (el); \ |
154 | ret = _gst_video_decoder_error (__dec, w, GST_ ## domain ## _ERROR, \ |
155 | GST_ ## domain ## _ERROR_ ## code, __txt, __dbg, __FILE__, \ |
156 | GST_FUNCTION, __LINE__); \ |
157 | } G_STMT_END |
158 | |
159 | /** |
160 | * GST_VIDEO_DECODER_MAX_ERRORS: |
161 | * |
162 | * Default maximum number of errors tolerated before signaling error. |
163 | */ |
164 | #define GST_VIDEO_DECODER_MAX_ERRORS 10 |
165 | |
166 | |
167 | /** |
168 | * GstVideoDecoder: |
169 | * |
170 | * The opaque #GstVideoDecoder data structure. |
171 | */ |
172 | struct _GstVideoDecoder |
173 | { |
174 | /*< private >*/ |
175 | GstElement element; |
176 | |
177 | /*< protected >*/ |
178 | GstPad *sinkpad; |
179 | GstPad *srcpad; |
180 | |
181 | /* protects all data processing, i.e. is locked |
182 | * in the chain function, finish_frame and when |
183 | * processing serialized events */ |
184 | GRecMutex stream_lock; |
185 | |
186 | /* MT-protected (with STREAM_LOCK) */ |
187 | GstSegment input_segment; |
188 | GstSegment output_segment; |
189 | |
190 | GstVideoDecoderPrivate *priv; |
191 | |
192 | /*< private >*/ |
193 | gpointer padding[GST_PADDING_LARGE]; |
194 | }; |
195 | |
196 | /** |
197 | * GstVideoDecoderClass: |
198 | * @open: Optional. |
199 | * Called when the element changes to GST_STATE_READY. |
200 | * Allows opening external resources. |
201 | * @close: Optional. |
202 | * Called when the element changes to GST_STATE_NULL. |
203 | * Allows closing external resources. |
204 | * @start: Optional. |
205 | * Called when the element starts processing. |
206 | * Allows opening external resources. |
207 | * @stop: Optional. |
208 | * Called when the element stops processing. |
209 | * Allows closing external resources. |
210 | * @set_format: Notifies subclass of incoming data format (caps). |
211 | * @parse: Required for non-packetized input. |
212 | * Allows chopping incoming data into manageable units (frames) |
213 | * for subsequent decoding. |
214 | * @reset: Optional. |
215 | * Allows subclass (decoder) to perform post-seek semantics reset. |
216 | * Deprecated. |
 * @handle_frame: Provides the input data frame to the subclass. In subframe mode, the subclass needs
 *                    to take ownership of @GstVideoCodecFrame.input_buffer, as it will be modified
 *                    by the base class when the next subframe buffer is received.
 * @finish:         Optional.
 *                  Called to request the subclass to dispatch any remaining pending
 *                  data at EOS. Subclasses may refuse to decode new data afterwards.
 * @drain:          Optional.
 *                  Called to request the subclass to decode any data it can at this
 *                  point, knowing that more data may arrive afterwards (e.g. at a
 *                  segment end). Subclasses should be prepared to handle new data
 *                  afterwards, or seamless segment processing will break. Since: 1.6
228 | * @sink_event: Optional. |
229 | * Event handler on the sink pad. This function should return |
230 | * TRUE if the event was handled and should be discarded |
231 | * (i.e. not unref'ed). |
232 | * Subclasses should chain up to the parent implementation to |
233 | * invoke the default handler. |
234 | * @src_event: Optional. |
235 | * Event handler on the source pad. This function should return |
236 | * TRUE if the event was handled and should be discarded |
237 | * (i.e. not unref'ed). |
238 | * Subclasses should chain up to the parent implementation to |
239 | * invoke the default handler. |
240 | * @negotiate: Optional. |
241 | * Negotiate with downstream and configure buffer pools, etc. |
242 | * Subclasses should chain up to the parent implementation to |
243 | * invoke the default handler. |
244 | * @decide_allocation: Optional. |
245 | * Setup the allocation parameters for allocating output |
246 | * buffers. The passed in query contains the result of the |
247 | * downstream allocation query. |
248 | * Subclasses should chain up to the parent implementation to |
249 | * invoke the default handler. |
250 | * @propose_allocation: Optional. |
251 | * Propose buffer allocation parameters for upstream elements. |
252 | * Subclasses should chain up to the parent implementation to |
253 | * invoke the default handler. |
254 | * @flush: Optional. |
255 | * Flush all remaining data from the decoder without |
256 | * pushing it downstream. Since: 1.2 |
257 | * @sink_query: Optional. |
258 | * Query handler on the sink pad. This function should |
259 | * return TRUE if the query could be performed. Subclasses |
260 | * should chain up to the parent implementation to invoke the |
261 | * default handler. Since: 1.4 |
262 | * @src_query: Optional. |
263 | * Query handler on the source pad. This function should |
264 | * return TRUE if the query could be performed. Subclasses |
265 | * should chain up to the parent implementation to invoke the |
266 | * default handler. Since: 1.4 |
267 | * @getcaps: Optional. |
268 | * Allows for a custom sink getcaps implementation. |
269 | * If not implemented, default returns |
270 | * gst_video_decoder_proxy_getcaps |
271 | * applied to sink template caps. |
272 | * @transform_meta: Optional. Transform the metadata on the input buffer to the |
273 | * output buffer. By default this method is copies all meta without |
274 | * tags and meta with only the "video" tag. subclasses can |
275 | * implement this method and return %TRUE if the metadata is to be |
276 | * copied. Since: 1.6 |
277 | * |
278 | * Subclasses can override any of the available virtual methods or not, as |
 * needed. At minimum @handle_frame needs to be overridden, and @set_format
 * likely needs to be as well. If non-packetized input is supported or expected,
281 | * @parse needs to be overridden as well. |
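 *
 * As a minimal illustrative sketch (the MyDecoder type and the my_decoder_*
 * callbacks are hypothetical), a subclass typically hooks up its
 * implementations in class_init:
 * |[<!-- language="C" -->
 * static void
 * my_decoder_class_init (MyDecoderClass * klass)
 * {
 *   GstVideoDecoderClass *video_decoder_class = GST_VIDEO_DECODER_CLASS (klass);
 *
 *   video_decoder_class->set_format = my_decoder_set_format;
 *   video_decoder_class->handle_frame = my_decoder_handle_frame;
 *   // only needed when non-packetized input should be supported
 *   video_decoder_class->parse = my_decoder_parse;
 * }
 * ]|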
282 | */ |
283 | struct _GstVideoDecoderClass |
284 | { |
285 | /*< private >*/ |
286 | GstElementClass element_class; |
287 | |
288 | /*< public >*/ |
289 | gboolean (*open) (GstVideoDecoder *decoder); |
290 | |
291 | gboolean (*close) (GstVideoDecoder *decoder); |
292 | |
293 | gboolean (*start) (GstVideoDecoder *decoder); |
294 | |
295 | gboolean (*stop) (GstVideoDecoder *decoder); |
296 | |
297 | GstFlowReturn (*parse) (GstVideoDecoder *decoder, |
298 | GstVideoCodecFrame *frame, |
299 | GstAdapter *adapter, |
300 | gboolean at_eos); |
301 | |
302 | gboolean (*set_format) (GstVideoDecoder *decoder, |
303 | GstVideoCodecState * state); |
304 | |
305 | gboolean (*reset) (GstVideoDecoder *decoder, |
306 | gboolean hard); |
307 | |
308 | GstFlowReturn (*finish) (GstVideoDecoder *decoder); |
309 | |
310 | /** |
311 | * GstVideoDecoderClass::handle_frame: |
312 | * @decoder: The #GstVideoDecoder |
313 | * @frame: (transfer full): The frame to handle |
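   *
   * A minimal illustrative sketch of an implementation (my_decoder_decode()
   * is a hypothetical helper; the output state is assumed to have been
   * configured from ::set_format):
   * |[<!-- language="C" -->
   * static GstFlowReturn
   * my_decoder_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
   * {
   *   GstFlowReturn ret;
   *
   *   ret = gst_video_decoder_allocate_output_frame (decoder, frame);
   *   if (ret != GST_FLOW_OK) {
   *     gst_video_decoder_release_frame (decoder, frame);
   *     return ret;
   *   }
   *
   *   if (!my_decoder_decode (decoder, frame->input_buffer, frame->output_buffer))
   *     return gst_video_decoder_drop_frame (decoder, frame);
   *
   *   // hands ownership of the frame back to the base class
   *   return gst_video_decoder_finish_frame (decoder, frame);
   * }
   * ]|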
314 | */ |
315 | GstFlowReturn (*handle_frame) (GstVideoDecoder *decoder, |
316 | GstVideoCodecFrame *frame); |
317 | |
318 | gboolean (*sink_event) (GstVideoDecoder *decoder, |
319 | GstEvent *event); |
320 | |
321 | gboolean (*src_event) (GstVideoDecoder *decoder, |
322 | GstEvent *event); |
323 | |
324 | gboolean (*negotiate) (GstVideoDecoder *decoder); |
325 | |
326 | gboolean (*decide_allocation) (GstVideoDecoder *decoder, GstQuery *query); |
327 | |
328 | gboolean (*propose_allocation) (GstVideoDecoder *decoder, GstQuery * query); |
329 | |
330 | gboolean (*flush) (GstVideoDecoder *decoder); |
331 | |
332 | gboolean (*sink_query) (GstVideoDecoder *decoder, |
333 | GstQuery *query); |
334 | |
335 | gboolean (*src_query) (GstVideoDecoder *decoder, |
336 | GstQuery *query); |
337 | |
338 | GstCaps* (*getcaps) (GstVideoDecoder *decoder, |
339 | GstCaps *filter); |
340 | |
341 | GstFlowReturn (*drain) (GstVideoDecoder *decoder); |
342 | |
343 | gboolean (*transform_meta) (GstVideoDecoder *decoder, |
344 | GstVideoCodecFrame *frame, |
345 | GstMeta * meta); |
346 | |
347 | /** |
348 | * GstVideoDecoderClass::handle_missing_data: |
349 | * @decoder: The #GstVideoDecoder |
350 | * @timestamp: Timestamp of the missing data |
351 | * @duration: Duration of the missing data |
352 | * |
353 | * Returns: %TRUE if the decoder should be drained afterwards. |
354 | * |
355 | * Since: 1.20 |
356 | */ |
357 | gboolean (*handle_missing_data) (GstVideoDecoder *decoder, |
358 | GstClockTime timestamp, |
359 | GstClockTime duration); |
360 | |
361 | /*< private >*/ |
362 | gpointer padding[GST_PADDING_LARGE-7]; |
363 | }; |
364 | |
365 | /** |
366 | * GstVideoDecoderRequestSyncPointFlags: |
367 | * @GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT: discard all following |
368 | * input until the next sync point. |
369 | * @GST_VIDEO_DECODER_REQUEST_SYNC_POINT_CORRUPT_OUTPUT: discard all following |
370 | * output until the next sync point. |
371 | * |
372 | * Flags to be used in combination with gst_video_decoder_request_sync_point(). |
373 | * See the function documentation for more details. |
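 *
 * As an illustrative sketch, a subclass that detects a broken reference
 * frame might request a new sync point and discard input until it arrives:
 * |[<!-- language="C" -->
 * gst_video_decoder_request_sync_point (decoder, frame,
 *     GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT);
 * ]|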
374 | * |
375 | * Since: 1.20 |
376 | */ |
377 | typedef enum { |
378 | GST_VIDEO_DECODER_REQUEST_SYNC_POINT_DISCARD_INPUT = (1<<0), |
379 | GST_VIDEO_DECODER_REQUEST_SYNC_POINT_CORRUPT_OUTPUT = (1<<1), |
380 | } GstVideoDecoderRequestSyncPointFlags; |
381 | |
382 | GST_VIDEO_API |
383 | GType gst_video_decoder_get_type (void); |
384 | |
385 | /* Context parameters */ |
386 | |
387 | GST_VIDEO_API |
388 | void gst_video_decoder_set_packetized (GstVideoDecoder * decoder, |
389 | gboolean packetized); |
390 | |
391 | GST_VIDEO_API |
392 | gboolean gst_video_decoder_get_packetized (GstVideoDecoder * decoder); |
393 | |
394 | GST_VIDEO_API |
395 | void gst_video_decoder_set_subframe_mode (GstVideoDecoder * decoder, |
396 | gboolean subframe_mode); |
397 | |
398 | GST_VIDEO_API |
399 | gboolean gst_video_decoder_get_subframe_mode (GstVideoDecoder * decoder); |
400 | |
401 | GST_VIDEO_API |
402 | guint gst_video_decoder_get_input_subframe_index (GstVideoDecoder * decoder, GstVideoCodecFrame * frame); |
403 | |
404 | GST_VIDEO_API |
405 | guint gst_video_decoder_get_processed_subframe_index (GstVideoDecoder * decoder, GstVideoCodecFrame * frame); |
406 | |
407 | GST_VIDEO_API |
408 | void gst_video_decoder_set_estimate_rate (GstVideoDecoder * dec, |
409 | gboolean enabled); |
410 | |
411 | GST_VIDEO_API |
412 | gint gst_video_decoder_get_estimate_rate (GstVideoDecoder * dec); |
413 | |
414 | GST_VIDEO_API |
415 | void gst_video_decoder_set_max_errors (GstVideoDecoder * dec, |
416 | gint num); |
417 | |
418 | GST_VIDEO_API |
419 | gint gst_video_decoder_get_max_errors (GstVideoDecoder * dec); |
420 | |
421 | GST_VIDEO_API |
422 | void gst_video_decoder_set_needs_format (GstVideoDecoder * dec, |
423 | gboolean enabled); |
424 | |
425 | GST_VIDEO_API |
426 | gboolean gst_video_decoder_get_needs_format (GstVideoDecoder * dec); |
427 | |
428 | GST_VIDEO_API |
429 | void gst_video_decoder_set_needs_sync_point (GstVideoDecoder * dec, |
430 | gboolean enabled); |
431 | |
432 | GST_VIDEO_API |
433 | gboolean gst_video_decoder_get_needs_sync_point (GstVideoDecoder * dec); |
434 | |
435 | GST_VIDEO_API |
436 | void gst_video_decoder_set_latency (GstVideoDecoder *decoder, |
437 | GstClockTime min_latency, |
438 | GstClockTime max_latency); |
439 | |
440 | GST_VIDEO_API |
441 | void gst_video_decoder_get_latency (GstVideoDecoder *decoder, |
442 | GstClockTime *min_latency, |
443 | GstClockTime *max_latency); |
444 | |
445 | GST_VIDEO_API |
446 | void gst_video_decoder_get_allocator (GstVideoDecoder *decoder, |
447 | GstAllocator **allocator, |
448 | GstAllocationParams *params); |
449 | |
450 | GST_VIDEO_API |
451 | GstBufferPool *gst_video_decoder_get_buffer_pool (GstVideoDecoder *decoder); |
452 | |
453 | /* Object methods */ |
454 | |
455 | GST_VIDEO_API |
456 | GstVideoCodecFrame *gst_video_decoder_get_frame (GstVideoDecoder *decoder, |
457 | int frame_number); |
458 | |
459 | GST_VIDEO_API |
460 | GstVideoCodecFrame *gst_video_decoder_get_oldest_frame (GstVideoDecoder *decoder); |
461 | |
462 | GST_VIDEO_API |
463 | GList * gst_video_decoder_get_frames (GstVideoDecoder *decoder); |
464 | |
465 | /* Parsing related methods */ |
466 | |
467 | GST_VIDEO_API |
468 | void gst_video_decoder_add_to_frame (GstVideoDecoder *decoder, |
469 | int n_bytes); |
470 | |
471 | GST_VIDEO_API |
472 | GstFlowReturn gst_video_decoder_have_frame (GstVideoDecoder *decoder); |
473 | |
474 | GST_VIDEO_API |
475 | GstFlowReturn gst_video_decoder_have_last_subframe (GstVideoDecoder *decoder, |
476 | GstVideoCodecFrame * frame); |
477 | |
478 | GST_VIDEO_API |
479 | gsize gst_video_decoder_get_pending_frame_size (GstVideoDecoder *decoder); |
480 | |
481 | GST_VIDEO_API |
482 | GstBuffer *gst_video_decoder_allocate_output_buffer (GstVideoDecoder * decoder); |
483 | |
484 | GST_VIDEO_API |
485 | GstFlowReturn gst_video_decoder_allocate_output_frame_with_params (GstVideoDecoder *decoder, |
486 | GstVideoCodecFrame * frame, |
487 | GstBufferPoolAcquireParams *params); |
488 | |
489 | GST_VIDEO_API |
490 | GstFlowReturn gst_video_decoder_allocate_output_frame (GstVideoDecoder *decoder, |
491 | GstVideoCodecFrame *frame); |
492 | |
493 | GST_VIDEO_API |
494 | GstVideoCodecState *gst_video_decoder_set_output_state (GstVideoDecoder *decoder, |
495 | GstVideoFormat fmt, guint width, guint height, |
496 | GstVideoCodecState *reference); |
497 | |
498 | GST_VIDEO_API |
499 | GstVideoCodecState *gst_video_decoder_set_interlaced_output_state (GstVideoDecoder *decoder, |
500 | GstVideoFormat fmt, GstVideoInterlaceMode interlace_mode, |
501 | guint width, guint height, GstVideoCodecState *reference); |
502 | |
503 | GST_VIDEO_API |
504 | GstVideoCodecState *gst_video_decoder_get_output_state (GstVideoDecoder *decoder); |
505 | |
506 | GST_VIDEO_API |
507 | gboolean gst_video_decoder_negotiate (GstVideoDecoder * decoder); |
508 | |
509 | GST_VIDEO_API |
510 | GstClockTimeDiff gst_video_decoder_get_max_decode_time (GstVideoDecoder *decoder, |
511 | GstVideoCodecFrame *frame); |
512 | |
513 | GST_VIDEO_API |
514 | gdouble gst_video_decoder_get_qos_proportion (GstVideoDecoder * decoder); |
515 | |
516 | GST_VIDEO_API |
517 | GstFlowReturn gst_video_decoder_finish_frame (GstVideoDecoder *decoder, |
518 | GstVideoCodecFrame *frame); |
519 | GST_VIDEO_API |
520 | GstFlowReturn gst_video_decoder_finish_subframe (GstVideoDecoder *decoder, |
521 | GstVideoCodecFrame *frame); |
522 | |
523 | GST_VIDEO_API |
524 | GstFlowReturn gst_video_decoder_drop_frame (GstVideoDecoder *dec, |
525 | GstVideoCodecFrame *frame); |
526 | GST_VIDEO_API |
527 | GstFlowReturn gst_video_decoder_drop_subframe (GstVideoDecoder *dec, |
528 | GstVideoCodecFrame *frame); |
529 | |
530 | GST_VIDEO_API |
531 | void gst_video_decoder_request_sync_point (GstVideoDecoder *dec, |
532 | GstVideoCodecFrame *frame, |
533 | GstVideoDecoderRequestSyncPointFlags flags); |
534 | |
535 | GST_VIDEO_API |
536 | void gst_video_decoder_release_frame (GstVideoDecoder * dec, |
537 | GstVideoCodecFrame * frame); |
538 | |
539 | GST_VIDEO_API |
540 | void gst_video_decoder_merge_tags (GstVideoDecoder *decoder, |
541 | const GstTagList *tags, |
542 | GstTagMergeMode mode); |
543 | |
544 | GST_VIDEO_API |
545 | GstCaps * gst_video_decoder_proxy_getcaps (GstVideoDecoder * decoder, |
546 | GstCaps * caps, |
547 | GstCaps * filter); |
548 | |
549 | GST_VIDEO_API |
550 | void gst_video_decoder_set_use_default_pad_acceptcaps (GstVideoDecoder * decoder, |
551 | gboolean use); |
552 | |
553 | G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoDecoder, gst_object_unref) |
554 | |
555 | G_END_DECLS |
556 | |
557 | #endif |
558 | |
559 | |