1 | /* GStreamer |
2 | * Copyright (C) 2008 David Schleef <ds@schleef.org> |
3 | * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>. |
4 | * Copyright (C) 2011 Nokia Corporation. All rights reserved. |
5 | * Contact: Stefan Kost <stefan.kost@nokia.com> |
6 | * Copyright (C) 2012 Collabora Ltd. |
7 | * Author : Edward Hervey <edward@collabora.com> |
8 | * |
9 | * This library is free software; you can redistribute it and/or |
10 | * modify it under the terms of the GNU Library General Public |
11 | * License as published by the Free Software Foundation; either |
12 | * version 2 of the License, or (at your option) any later version. |
13 | * |
14 | * This library is distributed in the hope that it will be useful, |
15 | * but WITHOUT ANY WARRANTY; without even the implied warranty of |
16 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU |
17 | * Library General Public License for more details. |
18 | * |
19 | * You should have received a copy of the GNU Library General Public |
20 | * License along with this library; if not, write to the |
21 | * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, |
22 | * Boston, MA 02110-1301, USA. |
23 | */ |
24 | |
25 | #ifndef _GST_VIDEO_ENCODER_H_ |
26 | #define _GST_VIDEO_ENCODER_H_ |
27 | |
28 | #include <gst/video/gstvideoutils.h> |
29 | |
30 | G_BEGIN_DECLS |
31 | |
32 | #define GST_TYPE_VIDEO_ENCODER \ |
33 | (gst_video_encoder_get_type()) |
34 | #define GST_VIDEO_ENCODER(obj) \ |
35 | (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_ENCODER,GstVideoEncoder)) |
36 | #define GST_VIDEO_ENCODER_CLASS(klass) \ |
37 | (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_ENCODER,GstVideoEncoderClass)) |
38 | #define GST_VIDEO_ENCODER_GET_CLASS(obj) \ |
39 | (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_VIDEO_ENCODER,GstVideoEncoderClass)) |
40 | #define GST_IS_VIDEO_ENCODER(obj) \ |
41 | (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_ENCODER)) |
42 | #define GST_IS_VIDEO_ENCODER_CLASS(klass) \ |
43 | (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_ENCODER)) |
44 | #define GST_VIDEO_ENCODER_CAST(enc) ((GstVideoEncoder*)enc) |
45 | |
46 | /** |
47 | * GST_VIDEO_ENCODER_SINK_NAME: |
48 | * |
 * The name of the template for the sink pad.
50 | */ |
51 | #define GST_VIDEO_ENCODER_SINK_NAME "sink" |
52 | /** |
53 | * GST_VIDEO_ENCODER_SRC_NAME: |
54 | * |
 * The name of the template for the source pad.
56 | */ |
57 | #define GST_VIDEO_ENCODER_SRC_NAME "src" |
58 | |
59 | /** |
60 | * GST_VIDEO_ENCODER_SRC_PAD: |
61 | * @obj: a #GstVideoEncoder |
62 | * |
63 | * Gives the pointer to the source #GstPad object of the element. |
64 | */ |
65 | #define GST_VIDEO_ENCODER_SRC_PAD(obj) (((GstVideoEncoder *) (obj))->srcpad) |
66 | |
67 | /** |
68 | * GST_VIDEO_ENCODER_SINK_PAD: |
69 | * @obj: a #GstVideoEncoder |
70 | * |
71 | * Gives the pointer to the sink #GstPad object of the element. |
72 | */ |
73 | #define GST_VIDEO_ENCODER_SINK_PAD(obj) (((GstVideoEncoder *) (obj))->sinkpad) |
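
/* Illustrative sketch (the my_enc_* name is hypothetical): the pad accessor
 * macros are convenient when a subclass needs direct pad access, e.g. to
 * inspect what the downstream peer of the source pad can currently accept
 * before picking output caps.
 *
 * |[<!-- language="C" -->
 * static GstCaps *
 * my_enc_query_downstream_caps (GstVideoEncoder * encoder)
 * {
 *   // Returns (transfer full) caps allowed downstream, or NULL if unlinked
 *   return gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));
 * }
 * ]|
 */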
74 | |
75 | /** |
76 | * GST_VIDEO_ENCODER_FLOW_NEED_DATA: |
77 | * |
78 | * Returned while parsing to indicate more data is needed. |
79 | **/ |
80 | #define GST_VIDEO_ENCODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS |
81 | |
82 | /** |
83 | * GST_VIDEO_ENCODER_FLOW_DROPPED: |
84 | * |
85 | * Returned when the event/buffer should be dropped. |
86 | * |
 * Deprecated: since 1.8. Use gst_video_encoder_finish_frame() with
 * a %NULL frame->output_buffer to drop the frame instead.
89 | */ |
90 | #ifndef GST_DISABLE_DEPRECATED |
91 | #define GST_VIDEO_ENCODER_FLOW_DROPPED GST_FLOW_CUSTOM_SUCCESS_1 |
92 | #endif |
93 | |
94 | /** |
95 | * GST_VIDEO_ENCODER_INPUT_SEGMENT: |
 * @obj: a #GstVideoEncoder
 *
 * Gives the input segment of the element.
99 | */ |
100 | #define GST_VIDEO_ENCODER_INPUT_SEGMENT(obj) (GST_VIDEO_ENCODER_CAST (obj)->input_segment) |
101 | |
102 | /** |
103 | * GST_VIDEO_ENCODER_OUTPUT_SEGMENT: |
 * @obj: a #GstVideoEncoder
 *
 * Gives the output segment of the element.
107 | */ |
108 | #define GST_VIDEO_ENCODER_OUTPUT_SEGMENT(obj) (GST_VIDEO_ENCODER_CAST (obj)->output_segment) |
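
/* Illustrative sketch (hypothetical helper, assuming a TIME segment and the
 * stream lock held, as in @handle_frame): the input segment can be used to
 * check whether an incoming frame's timestamps fall inside the currently
 * configured segment before spending time encoding it.
 *
 * |[<!-- language="C" -->
 * static gboolean
 * my_enc_frame_in_segment (GstVideoEncoder * encoder, GstVideoCodecFrame * frame)
 * {
 *   GstSegment *segment = &GST_VIDEO_ENCODER_INPUT_SEGMENT (encoder);
 *   guint64 stop = GST_CLOCK_TIME_NONE;
 *
 *   if (!GST_CLOCK_TIME_IS_VALID (frame->pts))
 *     return TRUE;
 *
 *   if (GST_CLOCK_TIME_IS_VALID (frame->duration))
 *     stop = frame->pts + frame->duration;
 *
 *   return gst_segment_clip (segment, GST_FORMAT_TIME, frame->pts, stop,
 *       NULL, NULL);
 * }
 * ]|
 */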
109 | |
110 | /** |
111 | * GST_VIDEO_ENCODER_STREAM_LOCK: |
112 | * @encoder: video encoder instance |
113 | * |
114 | * Obtain a lock to protect the encoder function from concurrent access. |
115 | */ |
116 | #define GST_VIDEO_ENCODER_STREAM_LOCK(encoder) g_rec_mutex_lock (&GST_VIDEO_ENCODER (encoder)->stream_lock) |
117 | |
118 | /** |
119 | * GST_VIDEO_ENCODER_STREAM_UNLOCK: |
120 | * @encoder: video encoder instance |
121 | * |
122 | * Release the lock that protects the encoder function from concurrent access. |
123 | */ |
124 | #define GST_VIDEO_ENCODER_STREAM_UNLOCK(encoder) g_rec_mutex_unlock (&GST_VIDEO_ENCODER (encoder)->stream_lock) |
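
/* Illustrative sketch (hypothetical property setter): the stream lock
 * serializes all data processing, so helpers invoked from other threads
 * (property setters, timeouts, ...) should take it before touching any
 * codec state shared with @handle_frame.
 *
 * |[<!-- language="C" -->
 * static void
 * my_enc_update_bitrate (GstVideoEncoder * encoder, guint bitrate)
 * {
 *   GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
 *   // Reconfigure the (hypothetical) internal codec while no buffer or
 *   // serialized event is being processed concurrently, e.g.:
 *   // my_codec_set_bitrate (((MyEnc *) encoder)->codec, bitrate);
 *   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
 * }
 * ]|
 */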
125 | |
126 | typedef struct _GstVideoEncoder GstVideoEncoder; |
127 | typedef struct _GstVideoEncoderPrivate GstVideoEncoderPrivate; |
128 | typedef struct _GstVideoEncoderClass GstVideoEncoderClass; |
129 | |
130 | /** |
131 | * GstVideoEncoder: |
132 | * |
133 | * The opaque #GstVideoEncoder data structure. |
134 | */ |
135 | struct _GstVideoEncoder |
136 | { |
137 | /*< private >*/ |
138 | GstElement element; |
139 | |
140 | /*< protected >*/ |
141 | GstPad *sinkpad; |
142 | GstPad *srcpad; |
143 | |
144 | /* protects all data processing, i.e. is locked |
145 | * in the chain function, finish_frame and when |
146 | * processing serialized events */ |
147 | GRecMutex stream_lock; |
148 | |
149 | /* MT-protected (with STREAM_LOCK) */ |
150 | GstSegment input_segment; |
151 | GstSegment output_segment; |
152 | |
153 | /*< private >*/ |
154 | GstVideoEncoderPrivate *priv; |
155 | |
156 | gpointer padding[GST_PADDING_LARGE]; |
157 | }; |
158 | |
159 | /** |
160 | * GstVideoEncoderClass: |
161 | * @open: Optional. |
162 | * Called when the element changes to GST_STATE_READY. |
163 | * Allows opening external resources. |
164 | * @close: Optional. |
165 | * Called when the element changes to GST_STATE_NULL. |
166 | * Allows closing external resources. |
167 | * @start: Optional. |
168 | * Called when the element starts processing. |
169 | * Allows opening external resources. |
170 | * @stop: Optional. |
171 | * Called when the element stops processing. |
172 | * Allows closing external resources. |
173 | * @set_format: Optional. |
174 | * Notifies subclass of incoming data format. |
175 | * GstVideoCodecState fields have already been |
176 | * set according to provided caps. |
177 | * @handle_frame: Provides input frame to subclass. |
178 | * @reset: Optional. |
179 | * Allows subclass (encoder) to perform post-seek semantics reset. |
180 | * Deprecated. |
181 | * @finish: Optional. |
182 | * Called to request subclass to dispatch any pending remaining |
183 | * data (e.g. at EOS). |
184 | * @pre_push: Optional. |
185 | * Allows subclass to push frame downstream in whatever |
186 | * shape or form it deems appropriate. If not provided, |
 * the encoded frame data is simply pushed downstream.
188 | * @getcaps: Optional. |
189 | * Allows for a custom sink getcaps implementation (e.g. |
190 | * for multichannel input specification). If not implemented, |
 * the default implementation returns gst_video_encoder_proxy_getcaps()
 * applied to the sink template caps.
193 | * @sink_event: Optional. |
194 | * Event handler on the sink pad. This function should return |
195 | * TRUE if the event was handled and should be discarded |
196 | * (i.e. not unref'ed). |
197 | * Subclasses should chain up to the parent implementation to |
198 | * invoke the default handler. |
199 | * @src_event: Optional. |
200 | * Event handler on the source pad. This function should return |
201 | * TRUE if the event was handled and should be discarded |
202 | * (i.e. not unref'ed). |
203 | * Subclasses should chain up to the parent implementation to |
204 | * invoke the default handler. |
205 | * @negotiate: Optional. |
206 | * Negotiate with downstream and configure buffer pools, etc. |
207 | * Subclasses should chain up to the parent implementation to |
208 | * invoke the default handler. |
209 | * @decide_allocation: Optional. |
210 | * Setup the allocation parameters for allocating output |
211 | * buffers. The passed in query contains the result of the |
212 | * downstream allocation query. |
213 | * Subclasses should chain up to the parent implementation to |
214 | * invoke the default handler. |
215 | * @propose_allocation: Optional. |
216 | * Propose buffer allocation parameters for upstream elements. |
217 | * Subclasses should chain up to the parent implementation to |
218 | * invoke the default handler. |
219 | * @flush: Optional. |
220 | * Flush all remaining data from the encoder without |
221 | * pushing it downstream. Since: 1.2 |
222 | * @sink_query: Optional. |
223 | * Query handler on the sink pad. This function should |
224 | * return TRUE if the query could be performed. Subclasses |
225 | * should chain up to the parent implementation to invoke the |
226 | * default handler. Since: 1.4 |
227 | * @src_query: Optional. |
228 | * Query handler on the source pad. This function should |
229 | * return TRUE if the query could be performed. Subclasses |
230 | * should chain up to the parent implementation to invoke the |
231 | * default handler. Since: 1.4 |
232 | * @transform_meta: Optional. Transform the metadata on the input buffer to the |
 * output buffer. By default this method copies all meta without
 * tags and meta with only the "video" tag. Subclasses can
235 | * implement this method and return %TRUE if the metadata is to be |
236 | * copied. Since: 1.6 |
237 | * |
238 | * Subclasses can override any of the available virtual methods or not, as |
239 | * needed. At minimum @handle_frame needs to be overridden, and @set_format |
 * and @getcaps are likely needed as well.
241 | */ |
242 | struct _GstVideoEncoderClass |
243 | { |
244 | /*< private >*/ |
245 | GstElementClass element_class; |
246 | |
247 | /*< public >*/ |
248 | /* virtual methods for subclasses */ |
249 | gboolean (*open) (GstVideoEncoder *encoder); |
250 | |
251 | gboolean (*close) (GstVideoEncoder *encoder); |
252 | |
253 | gboolean (*start) (GstVideoEncoder *encoder); |
254 | |
255 | gboolean (*stop) (GstVideoEncoder *encoder); |
256 | |
257 | gboolean (*set_format) (GstVideoEncoder *encoder, |
258 | GstVideoCodecState *state); |
259 | |
260 | GstFlowReturn (*handle_frame) (GstVideoEncoder *encoder, |
261 | GstVideoCodecFrame *frame); |
262 | |
263 | gboolean (*reset) (GstVideoEncoder *encoder, |
264 | gboolean hard); |
265 | |
266 | GstFlowReturn (*finish) (GstVideoEncoder *encoder); |
267 | |
268 | GstFlowReturn (*pre_push) (GstVideoEncoder *encoder, |
269 | GstVideoCodecFrame *frame); |
270 | |
271 | GstCaps * (*getcaps) (GstVideoEncoder *enc, |
272 | GstCaps *filter); |
273 | |
274 | gboolean (*sink_event) (GstVideoEncoder *encoder, |
275 | GstEvent *event); |
276 | |
277 | gboolean (*src_event) (GstVideoEncoder *encoder, |
278 | GstEvent *event); |
279 | |
280 | gboolean (*negotiate) (GstVideoEncoder *encoder); |
281 | |
282 | gboolean (*decide_allocation) (GstVideoEncoder *encoder, GstQuery *query); |
283 | |
284 | gboolean (*propose_allocation) (GstVideoEncoder * encoder, |
285 | GstQuery * query); |
286 | gboolean (*flush) (GstVideoEncoder *encoder); |
287 | |
288 | gboolean (*sink_query) (GstVideoEncoder *encoder, |
289 | GstQuery *query); |
290 | |
291 | gboolean (*src_query) (GstVideoEncoder *encoder, |
292 | GstQuery *query); |
293 | |
294 | gboolean (*transform_meta) (GstVideoEncoder *encoder, |
295 | GstVideoCodecFrame *frame, |
296 | GstMeta * meta); |
297 | |
298 | /*< private >*/ |
299 | gpointer _gst_reserved[GST_PADDING_LARGE-4]; |
300 | }; |
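
/* A minimal subclass skeleton, given as an illustrative sketch only: the
 * MyEnc / my_enc_* names and the "video/x-my-codec" caps are hypothetical,
 * and pad templates, element metadata, finalize and error handling are
 * omitted for brevity. It shows the typical wiring: store the negotiated
 * input state in @set_format and configure output caps with
 * gst_video_encoder_set_output_state(), then hand each encoded frame back
 * with gst_video_encoder_finish_frame() from @handle_frame.
 *
 * |[<!-- language="C" -->
 * typedef struct _MyEnc
 * {
 *   GstVideoEncoder parent;
 *   GstVideoCodecState *input_state;
 * } MyEnc;
 *
 * typedef struct _MyEncClass
 * {
 *   GstVideoEncoderClass parent_class;
 * } MyEncClass;
 *
 * G_DEFINE_TYPE (MyEnc, my_enc, GST_TYPE_VIDEO_ENCODER);
 *
 * static gboolean
 * my_enc_set_format (GstVideoEncoder * encoder, GstVideoCodecState * state)
 * {
 *   MyEnc *self = (MyEnc *) encoder;
 *   GstVideoCodecState *output_state;
 *
 *   // Keep the negotiated input state around for later use
 *   if (self->input_state)
 *     gst_video_codec_state_unref (self->input_state);
 *   self->input_state = gst_video_codec_state_ref (state);
 *
 *   // Configure the (hypothetical) encoded output caps; the returned
 *   // output state is not needed here, so drop its reference
 *   output_state = gst_video_encoder_set_output_state (encoder,
 *       gst_caps_new_empty_simple ("video/x-my-codec"), state);
 *   gst_video_codec_state_unref (output_state);
 *
 *   return TRUE;
 * }
 *
 * static GstFlowReturn
 * my_enc_handle_frame (GstVideoEncoder * encoder, GstVideoCodecFrame * frame)
 * {
 *   GstFlowReturn ret;
 *
 *   // A real encoder would compress frame->input_buffer here; this sketch
 *   // only allocates an (empty) output buffer and pushes the frame out
 *   ret = gst_video_encoder_allocate_output_frame (encoder, frame, 4096);
 *   if (ret != GST_FLOW_OK) {
 *     gst_video_codec_frame_unref (frame);
 *     return ret;
 *   }
 *
 *   GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
 *   return gst_video_encoder_finish_frame (encoder, frame);
 * }
 *
 * static void
 * my_enc_class_init (MyEncClass * klass)
 * {
 *   GstVideoEncoderClass *venc_class = GST_VIDEO_ENCODER_CLASS (klass);
 *
 *   // Pad templates named "sink" and "src" plus element metadata would
 *   // also be registered here in a real element
 *   venc_class->set_format = my_enc_set_format;
 *   venc_class->handle_frame = my_enc_handle_frame;
 * }
 *
 * static void
 * my_enc_init (MyEnc * self)
 * {
 *   self->input_state = NULL;
 * }
 * ]|
 */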
301 | |
302 | GST_VIDEO_API |
303 | GType gst_video_encoder_get_type (void); |
304 | |
305 | GST_VIDEO_API |
306 | GstVideoCodecState* gst_video_encoder_get_output_state (GstVideoEncoder *encoder); |
307 | |
308 | GST_VIDEO_API |
309 | GstVideoCodecState* gst_video_encoder_set_output_state (GstVideoEncoder * encoder, |
310 | GstCaps * caps, |
311 | GstVideoCodecState * reference); |
312 | |
313 | GST_VIDEO_API |
314 | gboolean gst_video_encoder_negotiate (GstVideoEncoder * encoder); |
315 | |
316 | GST_VIDEO_API |
317 | GstVideoCodecFrame* gst_video_encoder_get_frame (GstVideoEncoder *encoder, |
318 | int frame_number); |
319 | |
320 | GST_VIDEO_API |
321 | GstVideoCodecFrame* gst_video_encoder_get_oldest_frame (GstVideoEncoder *encoder); |
322 | |
323 | GST_VIDEO_API |
324 | GList * gst_video_encoder_get_frames (GstVideoEncoder *encoder); |
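
/* Illustrative sketch (hypothetical @finish implementation): at EOS a
 * subclass typically drains whatever its codec still holds. The list and
 * frame references returned by gst_video_encoder_get_frames() are owned by
 * the caller, while gst_video_encoder_finish_frame() consumes the reference
 * passed to it, hence the extra ref below.
 *
 * |[<!-- language="C" -->
 * static GstFlowReturn
 * my_enc_finish (GstVideoEncoder * encoder)
 * {
 *   GList *pending = gst_video_encoder_get_frames (encoder);
 *   GList *l;
 *   GstFlowReturn ret = GST_FLOW_OK;
 *
 *   for (l = pending; l != NULL; l = l->next) {
 *     GstVideoCodecFrame *frame = l->data;
 *
 *     // A real encoder would flush its codec and attach the resulting
 *     // bitstream to frame->output_buffer first; finishing with a NULL
 *     // output_buffer drops the frame instead
 *     ret = gst_video_encoder_finish_frame (encoder,
 *         gst_video_codec_frame_ref (frame));
 *     if (ret != GST_FLOW_OK)
 *       break;
 *   }
 *
 *   g_list_free_full (pending, (GDestroyNotify) gst_video_codec_frame_unref);
 *   return ret;
 * }
 * ]|
 */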
325 | |
326 | GST_VIDEO_API |
327 | GstBuffer * gst_video_encoder_allocate_output_buffer (GstVideoEncoder * encoder, |
328 | gsize size); |
329 | |
330 | GST_VIDEO_API |
331 | GstFlowReturn gst_video_encoder_allocate_output_frame (GstVideoEncoder *encoder, |
332 | GstVideoCodecFrame *frame, |
333 | gsize size); |
334 | |
335 | GST_VIDEO_API |
336 | GstFlowReturn gst_video_encoder_finish_frame (GstVideoEncoder *encoder, |
337 | GstVideoCodecFrame *frame); |
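
/* Illustrative sketch (hypothetical helper; assumes it is called with the
 * frame reference owned by @handle_frame): the usual flow is to allocate an
 * output buffer for the frame, fill it with the encoded bitstream, mark
 * keyframes as sync points and hand the frame back with
 * gst_video_encoder_finish_frame(); leaving frame->output_buffer NULL drops
 * the frame instead.
 *
 * |[<!-- language="C" -->
 * static GstFlowReturn
 * my_enc_output_frame (GstVideoEncoder * encoder, GstVideoCodecFrame * frame,
 *     const guint8 * bitstream, gsize bitstream_size, gboolean keyframe)
 * {
 *   GstFlowReturn ret;
 *
 *   if (bitstream_size == 0) {
 *     // Nothing was produced for this input: drop it (output_buffer stays NULL)
 *     return gst_video_encoder_finish_frame (encoder, frame);
 *   }
 *
 *   ret = gst_video_encoder_allocate_output_frame (encoder, frame, bitstream_size);
 *   if (ret != GST_FLOW_OK) {
 *     gst_video_codec_frame_unref (frame);
 *     return ret;
 *   }
 *
 *   gst_buffer_fill (frame->output_buffer, 0, bitstream, bitstream_size);
 *   if (keyframe)
 *     GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
 *
 *   return gst_video_encoder_finish_frame (encoder, frame);
 * }
 * ]|
 */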
338 | |
339 | GST_VIDEO_API |
340 | GstFlowReturn gst_video_encoder_finish_subframe (GstVideoEncoder * encoder, |
341 | GstVideoCodecFrame * frame); |
342 | |
343 | GST_VIDEO_API |
344 | GstCaps * gst_video_encoder_proxy_getcaps (GstVideoEncoder * enc, |
345 | GstCaps * caps, |
346 | GstCaps * filter); |
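
/* Illustrative sketch (the supported-format list is hypothetical): a
 * subclass that only accepts a few raw formats can implement @getcaps by
 * handing its restriction caps to gst_video_encoder_proxy_getcaps(), which
 * combines them with the sink pad template and downstream constraints.
 *
 * |[<!-- language="C" -->
 * static GstCaps *
 * my_enc_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
 * {
 *   GstCaps *supported, *result;
 *
 *   supported = gst_caps_from_string ("video/x-raw, format=(string){ I420, NV12 }");
 *   result = gst_video_encoder_proxy_getcaps (encoder, supported, filter);
 *   gst_caps_unref (supported);
 *
 *   return result;
 * }
 * ]|
 */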
347 | |
348 | GST_VIDEO_API |
349 | void gst_video_encoder_set_latency (GstVideoEncoder *encoder, |
350 | GstClockTime min_latency, |
351 | GstClockTime max_latency); |
352 | |
353 | GST_VIDEO_API |
354 | void gst_video_encoder_get_latency (GstVideoEncoder *encoder, |
355 | GstClockTime *min_latency, |
356 | GstClockTime *max_latency); |
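
/* Illustrative sketch (the two-frame lookahead is an assumption): encoders
 * that buffer a known number of frames should report that as latency once
 * the input framerate is known, typically from @set_format; live pipelines
 * rely on this value.
 *
 * |[<!-- language="C" -->
 * static void
 * my_enc_report_latency (GstVideoEncoder * encoder, GstVideoCodecState * state)
 * {
 *   GstVideoInfo *info = &state->info;
 *
 *   if (GST_VIDEO_INFO_FPS_N (info) > 0) {
 *     GstClockTime frame_dur = gst_util_uint64_scale_int (GST_SECOND,
 *         GST_VIDEO_INFO_FPS_D (info), GST_VIDEO_INFO_FPS_N (info));
 *
 *     // Assumed lookahead of two frames
 *     gst_video_encoder_set_latency (encoder, 2 * frame_dur, 2 * frame_dur);
 *   } else {
 *     // Unknown framerate: no minimum, unbound maximum
 *     gst_video_encoder_set_latency (encoder, 0, GST_CLOCK_TIME_NONE);
 *   }
 * }
 * ]|
 */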
357 | |
358 | GST_VIDEO_API |
void                  gst_video_encoder_set_headers (GstVideoEncoder *encoder,
                                                     GList *headers);
361 | |
362 | GST_VIDEO_API |
363 | void gst_video_encoder_merge_tags (GstVideoEncoder *encoder, |
364 | const GstTagList *tags, |
365 | GstTagMergeMode mode); |
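
/* Illustrative sketch (codec name and bitrate are hypothetical): subclasses
 * usually advertise encoder metadata such as the codec name and nominal
 * bitrate through the tag merging helper rather than posting tag events
 * themselves.
 *
 * |[<!-- language="C" -->
 * static void
 * my_enc_send_tags (GstVideoEncoder * encoder, guint bitrate)
 * {
 *   GstTagList *tags = gst_tag_list_new (GST_TAG_ENCODER, "my-codec",
 *       GST_TAG_NOMINAL_BITRATE, bitrate, NULL);
 *
 *   gst_video_encoder_merge_tags (encoder, tags, GST_TAG_MERGE_REPLACE);
 *   gst_tag_list_unref (tags);
 * }
 * ]|
 */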
366 | |
367 | GST_VIDEO_API |
368 | void gst_video_encoder_get_allocator (GstVideoEncoder *encoder, |
369 | GstAllocator **allocator, |
370 | GstAllocationParams *params); |
371 | |
372 | GST_VIDEO_API |
373 | void gst_video_encoder_set_min_pts(GstVideoEncoder *encoder, GstClockTime min_pts); |
374 | |
375 | GST_VIDEO_API |
376 | void gst_video_encoder_set_qos_enabled (GstVideoEncoder * encoder, gboolean enabled); |
377 | |
378 | GST_VIDEO_API |
379 | gboolean gst_video_encoder_is_qos_enabled (GstVideoEncoder * encoder); |
380 | |
381 | GST_VIDEO_API |
382 | GstClockTimeDiff gst_video_encoder_get_max_encode_time (GstVideoEncoder *encoder, GstVideoCodecFrame * frame); |
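
/* Illustrative sketch (the drop policy is an assumption; the exact reaction
 * is up to the element): with QoS enabled, a subclass can consult the
 * remaining encode budget for a frame and treat a negative value as "this
 * frame can no longer arrive in time".
 *
 * |[<!-- language="C" -->
 * static gboolean
 * my_enc_frame_is_late (GstVideoEncoder * encoder, GstVideoCodecFrame * frame)
 * {
 *   if (!gst_video_encoder_is_qos_enabled (encoder))
 *     return FALSE;
 *
 *   // Negative maximum encode time: the deadline has already passed
 *   return gst_video_encoder_get_max_encode_time (encoder, frame) < 0;
 * }
 * ]|
 */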
383 | |
384 | GST_VIDEO_API |
385 | void gst_video_encoder_set_min_force_key_unit_interval (GstVideoEncoder * encoder, |
386 | GstClockTime interval); |
387 | GST_VIDEO_API |
388 | GstClockTime gst_video_encoder_get_min_force_key_unit_interval (GstVideoEncoder * encoder); |
389 | |
390 | G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoEncoder, gst_object_unref) |
391 | |
392 | G_END_DECLS |
393 | |
#endif /* _GST_VIDEO_ENCODER_H_ */