/* GStreamer
 * Copyright (C) 2008 David Schleef <ds@schleef.org>
 * Copyright (C) 2012 Collabora Ltd.
 *   Author : Edward Hervey <edward@collabora.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifndef __GST_VIDEO_H__
#include <gst/video/video.h>
#endif

#ifndef _GST_VIDEO_UTILS_H_
#define _GST_VIDEO_UTILS_H_

#include <gst/gst.h>
#include <gst/video/video-prelude.h>
#include <gst/video/video-hdr.h>

G_BEGIN_DECLS
#define GST_TYPE_VIDEO_CODEC_STATE \
  (gst_video_codec_state_get_type())

#define GST_TYPE_VIDEO_CODEC_FRAME \
  (gst_video_codec_frame_get_type())

typedef struct _GstVideoCodecState GstVideoCodecState;
typedef struct _GstVideoCodecFrame GstVideoCodecFrame;

/**
 * GstVideoCodecState:
 * @info: The #GstVideoInfo describing the stream
 * @caps: The #GstCaps used in the caps negotiation of the pad.
 * @codec_data: a #GstBuffer corresponding to the
 *     'codec_data' field of a stream, or NULL.
 * @allocation_caps: The #GstCaps for allocation query and pool
 *     negotiation. Since: 1.10
 * @mastering_display_info: Mastering display color volume information
 *     (HDR metadata) for the stream. Since: 1.20
 * @content_light_level: Content light level information for the stream.
 *     Since: 1.20
 *
 * Structure representing the state of an incoming or outgoing video
 * stream for encoders and decoders.
 *
 * Decoders and encoders will receive such a state through their
 * respective @set_format vmethods.
 *
 * Decoders and encoders can set the downstream state by using the
 * gst_video_decoder_set_output_state() or
 * gst_video_encoder_set_output_state() methods.
 */
/**
 * GstVideoCodecState.mastering_display_info:
 *
 * Mastering display color volume information (HDR metadata) for the stream.
 *
 * Since: 1.20
 */
/**
 * GstVideoCodecState.content_light_level:
 *
 * Content light level information for the stream.
 *
 * Since: 1.20
 */
struct _GstVideoCodecState
{
  /*< private >*/
  gint ref_count;

  /*< public >*/
  GstVideoInfo info;

  GstCaps *caps;

  GstBuffer *codec_data;

  GstCaps *allocation_caps;

  GstVideoMasteringDisplayInfo *mastering_display_info;
  GstVideoContentLightLevel *content_light_level;

  /*< private >*/
  gpointer padding[GST_PADDING_LARGE - 3];
};
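
/* A minimal usage sketch (not part of this header's API surface): how a
 * decoder subclass's set_format() vfunc, as described above, might derive an
 * output state from the input state. The my_dec_set_format() name and the
 * choice of I420 at the input resolution are illustrative assumptions; the
 * gst_video_decoder_set_output_state() call is the base-class method the
 * documentation above refers to.
 *
 *   static gboolean
 *   my_dec_set_format (GstVideoDecoder * dec, GstVideoCodecState * state)
 *   {
 *     GstVideoCodecState *output_state;
 *
 *     // The returned state is owned by the caller and must be unreffed
 *     output_state = gst_video_decoder_set_output_state (dec,
 *         GST_VIDEO_FORMAT_I420, state->info.width, state->info.height,
 *         state);
 *     gst_video_codec_state_unref (output_state);
 *     return TRUE;
 *   }
 */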

/**
 * GstVideoCodecFrameFlags:
 * @GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY: is the frame only meant to be decoded
 * @GST_VIDEO_CODEC_FRAME_FLAG_SYNC_POINT: is the frame a synchronization point (keyframe)
 * @GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME: should the output frame be made a keyframe
 * @GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME_HEADERS: should the encoder output stream headers
 * @GST_VIDEO_CODEC_FRAME_FLAG_CORRUPTED: the buffer data is corrupted (Since: 1.20)
 *
 * Flags for #GstVideoCodecFrame
 */
typedef enum
{
  GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY = (1<<0),
  GST_VIDEO_CODEC_FRAME_FLAG_SYNC_POINT = (1<<1),
  GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME = (1<<2),
  GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME_HEADERS = (1<<3),
  /**
   * GST_VIDEO_CODEC_FRAME_FLAG_CORRUPTED:
   *
   * The buffer data is corrupted.
   *
   * Since: 1.20
   */
  GST_VIDEO_CODEC_FRAME_FLAG_CORRUPTED = (1<<4),
} GstVideoCodecFrameFlags;

/**
 * GST_VIDEO_CODEC_FRAME_FLAGS:
 * @frame: a #GstVideoCodecFrame
 *
 * The entire set of flags for the @frame
 */
#define GST_VIDEO_CODEC_FRAME_FLAGS(frame) ((frame)->flags)

/**
 * GST_VIDEO_CODEC_FRAME_FLAG_IS_SET:
 * @frame: a #GstVideoCodecFrame
 * @flag: a flag to check for
 *
 * Checks whether the given @flag is set
 */
#define GST_VIDEO_CODEC_FRAME_FLAG_IS_SET(frame,flag) !!(GST_VIDEO_CODEC_FRAME_FLAGS(frame) & (flag))

/**
 * GST_VIDEO_CODEC_FRAME_FLAG_SET:
 * @frame: a #GstVideoCodecFrame
 * @flag: Flag to set, can be any number of bits in guint32.
 *
 * This macro sets the given bits
 */
#define GST_VIDEO_CODEC_FRAME_FLAG_SET(frame,flag) (GST_VIDEO_CODEC_FRAME_FLAGS(frame) |= (flag))

/**
 * GST_VIDEO_CODEC_FRAME_FLAG_UNSET:
 * @frame: a #GstVideoCodecFrame
 * @flag: Flag to unset
 *
 * This macro unsets the given bits.
 */
#define GST_VIDEO_CODEC_FRAME_FLAG_UNSET(frame,flag) (GST_VIDEO_CODEC_FRAME_FLAGS(frame) &= ~(flag))

/**
 * GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY:
 * @frame: a #GstVideoCodecFrame
 *
 * Tests if the buffer should only be decoded but not sent downstream.
 */
#define GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY(frame) (GST_VIDEO_CODEC_FRAME_FLAG_IS_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY))

/**
 * GST_VIDEO_CODEC_FRAME_SET_DECODE_ONLY:
 * @frame: a #GstVideoCodecFrame
 *
 * Sets the buffer to not be sent downstream.
 *
 * Decoder implementations can use this if they have frames that
 * are not meant to be displayed.
 *
 * Encoder implementations can safely ignore this field.
 */
#define GST_VIDEO_CODEC_FRAME_SET_DECODE_ONLY(frame) (GST_VIDEO_CODEC_FRAME_FLAG_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY))

/**
 * GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT:
 * @frame: a #GstVideoCodecFrame
 *
 * Tests if the frame is a synchronization point (like a keyframe).
 *
 * Decoder implementations can use this to detect keyframes.
 */
#define GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT(frame) (GST_VIDEO_CODEC_FRAME_FLAG_IS_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_SYNC_POINT))

/**
 * GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT:
 * @frame: a #GstVideoCodecFrame
 *
 * Sets the frame to be a synchronization point (like a keyframe).
 *
 * Encoder implementations should set this accordingly.
 *
 * Decoders implementing parsing features should set this when they
 * detect such a synchronization point.
 */
#define GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT(frame) (GST_VIDEO_CODEC_FRAME_FLAG_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_SYNC_POINT))
#define GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT(frame) (GST_VIDEO_CODEC_FRAME_FLAG_UNSET(frame, GST_VIDEO_CODEC_FRAME_FLAG_SYNC_POINT))


/**
 * GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME:
 * @frame: a #GstVideoCodecFrame
 *
 * Tests if the frame must be encoded as a keyframe. Applies only to
 * frames provided to encoders. Decoders can safely ignore this field.
 */
#define GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME(frame) (GST_VIDEO_CODEC_FRAME_FLAG_IS_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME))
#define GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME(frame) (GST_VIDEO_CODEC_FRAME_FLAG_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME))
#define GST_VIDEO_CODEC_FRAME_UNSET_FORCE_KEYFRAME(frame) (GST_VIDEO_CODEC_FRAME_FLAG_UNSET(frame, GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME))

/**
 * GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME_HEADERS:
 * @frame: a #GstVideoCodecFrame
 *
 * Tests if the encoder should output stream headers before outputting the
 * resulting encoded buffer for the given frame.
 *
 * Applies only to frames provided to encoders. Decoders can safely
 * ignore this field.
 */
#define GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME_HEADERS(frame) (GST_VIDEO_CODEC_FRAME_FLAG_IS_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME_HEADERS))
#define GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME_HEADERS(frame) (GST_VIDEO_CODEC_FRAME_FLAG_SET(frame, GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME_HEADERS))
#define GST_VIDEO_CODEC_FRAME_UNSET_FORCE_KEYFRAME_HEADERS(frame) (GST_VIDEO_CODEC_FRAME_FLAG_UNSET(frame, GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME_HEADERS))
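
/* A hedged sketch of how these macros are typically consulted and updated by
 * an encoder subclass inside its handle_frame() implementation. The
 * my_enc_request_keyframe() helper and the encoded_frame_is_keyframe result
 * are hypothetical; only the macros themselves come from this header.
 *
 *   if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame))
 *     my_enc_request_keyframe (enc);        // force a keyframe on the codec
 *
 *   // ...encode frame->input_buffer with the actual codec...
 *
 *   if (encoded_frame_is_keyframe)          // hypothetical result flag
 *     GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
 */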

/**
 * GstVideoCodecFrame:
 * @pts: Presentation timestamp
 * @dts: Decoding timestamp
 * @duration: Duration of the frame
 * @system_frame_number: Unique identifier for the frame. Use this if you need
 *     to get hold of the frame later (like when data is being decoded).
 *     Typical usage in decoders is to set this on the opaque value provided
 *     to the library and get back the frame using gst_video_decoder_get_frame()
 * @distance_from_sync: Distance in frames from the last synchronization point.
 * @input_buffer: the input #GstBuffer that created this frame. The buffer is owned
 *     by the frame and references to the frame instead of the buffer should
 *     be kept.
 * @output_buffer: the output #GstBuffer. Implementations should set this either
 *     directly, or by using the
 *     gst_video_decoder_allocate_output_frame() or
 *     gst_video_decoder_allocate_output_buffer() methods. The buffer is
 *     owned by the frame and references to the frame instead of the
 *     buffer should be kept.
 * @deadline: Running time when the frame will be used.
 *
 * A #GstVideoCodecFrame represents a video frame both in raw and
 * encoded form.
 */
struct _GstVideoCodecFrame
{
  /*< private >*/
  gint ref_count;
  guint32 flags;

  /*< public >*/
  guint32 system_frame_number; /* ED */

  /*< private >*/
  guint32 decode_frame_number; /* ED */
  guint32 presentation_frame_number; /* ED */

  /*< public >*/
  GstClockTime dts; /* ED */
  GstClockTime pts; /* ED */
  GstClockTime duration; /* ED */

  int distance_from_sync; /* ED */

  GstBuffer *input_buffer; /* ED */
  GstBuffer *output_buffer; /* ED */

  GstClockTime deadline; /* D */

  /*< private >*/

  /* Events that should be pushed downstream *before*
   * the next output_buffer */
  /* FIXME 2.0: Use a GQueue or similar */
  GList *events; /* ED */

  gpointer user_data;
  GDestroyNotify user_data_destroy_notify;

  union {
    struct {
      /*< private >*/
      GstClockTime ts;
      GstClockTime ts2;
      guint num_subframes;
      guint subframes_processed;
    } ABI;
    gpointer padding[GST_PADDING_LARGE];
  } abidata;
};
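
/* A minimal sketch (assumptions noted) of how a decoder subclass typically
 * uses these fields in its handle_frame() vfunc: read the compressed data
 * from input_buffer, let the base class allocate output_buffer, then hand
 * the frame back. my_dec_handle_frame() and the "...decode..." steps are
 * placeholders; the gst_video_decoder_* calls are the base-class API named
 * in the documentation above.
 *
 *   static GstFlowReturn
 *   my_dec_handle_frame (GstVideoDecoder * dec, GstVideoCodecFrame * frame)
 *   {
 *     GstMapInfo map;
 *     GstFlowReturn ret;
 *
 *     gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ);
 *     // ...decode map.data / map.size with the actual codec...
 *     gst_buffer_unmap (frame->input_buffer, &map);
 *
 *     ret = gst_video_decoder_allocate_output_frame (dec, frame);
 *     if (ret != GST_FLOW_OK) {
 *       gst_video_decoder_drop_frame (dec, frame);
 *       return ret;
 *     }
 *     // ...write decoded pixels into frame->output_buffer...
 *     return gst_video_decoder_finish_frame (dec, frame);
 *   }
 */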

/* GstVideoCodecState */

GST_VIDEO_API
GType gst_video_codec_state_get_type (void);

GST_VIDEO_API
GstVideoCodecState *gst_video_codec_state_ref (GstVideoCodecState * state);

GST_VIDEO_API
void gst_video_codec_state_unref (GstVideoCodecState * state);


/* GstVideoCodecFrame */

GST_VIDEO_API
GType gst_video_codec_frame_get_type (void);

GST_VIDEO_API
GstVideoCodecFrame *gst_video_codec_frame_ref (GstVideoCodecFrame * frame);

GST_VIDEO_API
void gst_video_codec_frame_unref (GstVideoCodecFrame * frame);
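
/* Sketch: keeping a frame retrievable across an asynchronous decode, as
 * suggested by the system_frame_number documentation above. The hardware
 * session and callback names are hypothetical; gst_video_decoder_get_frame()
 * and gst_video_decoder_finish_frame() are base-class API.
 *
 *   // When submitting: remember the identifier, not the pointer
 *   submit_to_hw (session, frame->input_buffer, frame->system_frame_number);
 *
 *   // In the completion callback: look the frame up again and finish it
 *   GstVideoCodecFrame *frame =
 *       gst_video_decoder_get_frame (dec, completed_frame_number);
 *   if (frame != NULL)
 *     gst_video_decoder_finish_frame (dec, frame);
 */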

GST_VIDEO_API
void gst_video_codec_frame_set_user_data (GstVideoCodecFrame *frame,
                                          gpointer user_data,
                                          GDestroyNotify notify);

GST_VIDEO_API
gpointer gst_video_codec_frame_get_user_data (GstVideoCodecFrame *frame);
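
/* Sketch: attaching per-frame, implementation-private data with the two
 * functions above. MyFrameCtx is a hypothetical subclass structure; the
 * destroy notify runs when the frame is freed or the user data is replaced.
 *
 *   MyFrameCtx *ctx = g_new0 (MyFrameCtx, 1);
 *   gst_video_codec_frame_set_user_data (frame, ctx, (GDestroyNotify) g_free);
 *   // ...later, retrieve it again...
 *   MyFrameCtx *again = gst_video_codec_frame_get_user_data (frame);
 */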

G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoCodecFrame, gst_video_codec_frame_unref)

G_DEFINE_AUTOPTR_CLEANUP_FUNC(GstVideoCodecState, gst_video_codec_state_unref)
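
/* Sketch: with the cleanup functions registered above, g_autoptr() can manage
 * these reference-counted types. Assumes GLib auto-cleanup support and a
 * GstVideoDecoder "dec" in scope; gst_video_decoder_get_output_state() is
 * base-class API.
 *
 *   {
 *     g_autoptr(GstVideoCodecState) state =
 *         gst_video_decoder_get_output_state (dec);
 *     if (state != NULL)
 *       GST_INFO ("output: %dx%d", state->info.width, state->info.height);
 *   }  // state is unreffed automatically here
 */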

G_END_DECLS

#endif