1/*
2 * Copyright (c) 2007-2011 Intel Corporation. All Rights Reserved.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the
6 * "Software"), to deal in the Software without restriction, including
7 * without limitation the rights to use, copy, modify, merge, publish,
8 * distribute, sub license, and/or sell copies of the Software, and to
9 * permit persons to whom the Software is furnished to do so, subject to
10 * the following conditions:
11 *
12 * The above copyright notice and this permission notice (including the
13 * next paragraph) shall be included in all copies or substantial portions
14 * of the Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
17 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
19 * IN NO EVENT SHALL INTEL AND/OR ITS SUPPLIERS BE LIABLE FOR
20 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
21 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
22 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
23 */
24
25/**
26 * \file va_vpp.h
27 * \brief The video processing API
28 *
29 * This file contains the \ref api_vpp "Video processing API".
30 */
31
32#ifndef VA_VPP_H
33#define VA_VPP_H
34
35#ifdef __cplusplus
36extern "C" {
37#endif
38
39/**
40 * \defgroup api_vpp Video processing API
41 *
42 * @{
43 *
44 * The video processing API uses the same paradigm as for decoding:
45 * - Query for supported filters;
46 * - Set up a video processing pipeline;
47 * - Send video processing parameters through VA buffers.
48 *
49 * \section api_vpp_caps Query for supported filters
50 *
51 * Checking whether video processing is supported can be performed
52 * with vaQueryConfigEntrypoints() and the profile argument set to
53 * #VAProfileNone. If video processing is supported, then the list of
54 * returned entry-points will include #VAEntrypointVideoProc.
55 *
56 * \code
57 * VAEntrypoint *entrypoints;
58 * int i, num_entrypoints, supportsVideoProcessing = 0;
59 *
60 * num_entrypoints = vaMaxNumEntrypoints();
 * entrypoints = malloc(num_entrypoints * sizeof(entrypoints[0]));
62 * vaQueryConfigEntrypoints(va_dpy, VAProfileNone,
63 * entrypoints, &num_entrypoints);
64 *
65 * for (i = 0; !supportsVideoProcessing && i < num_entrypoints; i++) {
66 * if (entrypoints[i] == VAEntrypointVideoProc)
67 * supportsVideoProcessing = 1;
68 * }
69 * \endcode
70 *
71 * Then, the vaQueryVideoProcFilters() function is used to query the
72 * list of video processing filters.
73 *
74 * \code
75 * VAProcFilterType filters[VAProcFilterCount];
76 * unsigned int num_filters = VAProcFilterCount;
77 *
78 * // num_filters shall be initialized to the length of the array
 * vaQueryVideoProcFilters(va_dpy, vpp_ctx, filters, &num_filters);
80 * \endcode
81 *
82 * Finally, individual filter capabilities can be checked with
83 * vaQueryVideoProcFilterCaps().
84 *
85 * \code
86 * VAProcFilterCap denoise_caps;
87 * unsigned int num_denoise_caps = 1;
88 * vaQueryVideoProcFilterCaps(va_dpy, vpp_ctx,
89 * VAProcFilterNoiseReduction,
90 * &denoise_caps, &num_denoise_caps
91 * );
92 *
93 * VAProcFilterCapDeinterlacing deinterlacing_caps[VAProcDeinterlacingCount];
94 * unsigned int num_deinterlacing_caps = VAProcDeinterlacingCount;
95 * vaQueryVideoProcFilterCaps(va_dpy, vpp_ctx,
96 * VAProcFilterDeinterlacing,
97 * &deinterlacing_caps, &num_deinterlacing_caps
98 * );
99 * \endcode
100 *
101 * \section api_vpp_setup Set up a video processing pipeline
102 *
 * A video processing pipeline buffer is created for each source
 * surface we want to process. However, buffers holding filter
 * parameters can be created once and reused. The rationale is to avoid
 * repeated creation/destruction of filter buffers, and filter
 * parameters generally do not change from frame to frame. For example,
 * this makes it possible to implement a checkerboard of videos where
 * the same filters are applied to each video source.
110 *
111 * The general control flow is demonstrated by the following pseudo-code:
112 * \code
113 * // Create filters
114 * VABufferID denoise_filter, deint_filter;
115 * VABufferID filter_bufs[VAProcFilterCount];
116 * unsigned int num_filter_bufs;
117 *
118 * for (i = 0; i < num_filters; i++) {
119 * switch (filters[i]) {
120 * case VAProcFilterNoiseReduction: { // Noise reduction filter
121 * VAProcFilterParameterBuffer denoise;
122 * denoise.type = VAProcFilterNoiseReduction;
123 * denoise.value = 0.5;
124 * vaCreateBuffer(va_dpy, vpp_ctx,
125 * VAProcFilterParameterBufferType, sizeof(denoise), 1,
126 * &denoise, &denoise_filter
127 * );
128 * filter_bufs[num_filter_bufs++] = denoise_filter;
129 * break;
130 * }
131 *
132 * case VAProcFilterDeinterlacing: // Motion-adaptive deinterlacing
133 * for (j = 0; j < num_deinterlacing_caps; j++) {
134 * VAProcFilterCapDeinterlacing * const cap = &deinterlacing_caps[j];
135 * if (cap->type != VAProcDeinterlacingMotionAdaptive)
136 * continue;
137 *
138 * VAProcFilterParameterBufferDeinterlacing deint;
139 * deint.type = VAProcFilterDeinterlacing;
140 * deint.algorithm = VAProcDeinterlacingMotionAdaptive;
141 * vaCreateBuffer(va_dpy, vpp_ctx,
142 * VAProcFilterParameterBufferType, sizeof(deint), 1,
143 * &deint, &deint_filter
144 * );
145 * filter_bufs[num_filter_bufs++] = deint_filter;
146 * }
147 * }
148 * }
149 * \endcode
150 *
 * Once the video processing pipeline is set up, the caller shall check the
 * implied capabilities and requirements with vaQueryVideoProcPipelineCaps().
 * This function can be used to determine the number of reference frames
 * needed by the specified deinterlacing algorithm, the supported color
 * standards, etc.
 * \code
 * // Query pipeline capabilities
158 * VAProcPipelineCaps pipeline_caps;
159 * VASurfaceID *forward_references;
160 * unsigned int num_forward_references;
161 * VASurfaceID *backward_references;
162 * unsigned int num_backward_references;
163 * VAProcColorStandardType in_color_standards[VAProcColorStandardCount];
164 * VAProcColorStandardType out_color_standards[VAProcColorStandardCount];
165 *
 * pipeline_caps.input_color_standards = in_color_standards;
 * pipeline_caps.num_input_color_standards = ARRAY_ELEMS(in_color_standards);
 * pipeline_caps.output_color_standards = out_color_standards;
 * pipeline_caps.num_output_color_standards = ARRAY_ELEMS(out_color_standards);
170 * vaQueryVideoProcPipelineCaps(va_dpy, vpp_ctx,
171 * filter_bufs, num_filter_bufs,
172 * &pipeline_caps
173 * );
174 *
175 * num_forward_references = pipeline_caps.num_forward_references;
176 * forward_references =
 *     malloc(num_forward_references * sizeof(VASurfaceID));
178 * num_backward_references = pipeline_caps.num_backward_references;
179 * backward_references =
180 * malloc(num_backward_references * sizeof(VASurfaceID));
181 * \endcode
182 *
183 * \section api_vpp_submit Send video processing parameters through VA buffers
184 *
185 * Video processing pipeline parameters are submitted for each source
 * surface to process. Video filter parameters can also change per surface,
 * e.g. the list of reference frames used for deinterlacing.
188 *
189 * \code
190 * foreach (iteration) {
191 * vaBeginPicture(va_dpy, vpp_ctx, vpp_surface);
192 * foreach (surface) {
193 * VARectangle output_region;
194 * VABufferID pipeline_buf;
195 * VAProcPipelineParameterBuffer *pipeline_param;
196 *
197 * vaCreateBuffer(va_dpy, vpp_ctx,
 *     VAProcPipelineParameterBufferType, sizeof(*pipeline_param), 1,
199 * NULL, &pipeline_buf
200 * );
201 *
202 * // Setup output region for this surface
203 * // e.g. upper left corner for the first surface
204 * output_region.x = BORDER;
205 * output_region.y = BORDER;
206 * output_region.width =
207 * (vpp_surface_width - (Nx_surfaces + 1) * BORDER) / Nx_surfaces;
208 * output_region.height =
209 * (vpp_surface_height - (Ny_surfaces + 1) * BORDER) / Ny_surfaces;
210 *
211 * vaMapBuffer(va_dpy, pipeline_buf, &pipeline_param);
212 * pipeline_param->surface = surface;
213 * pipeline_param->surface_region = NULL;
214 * pipeline_param->output_region = &output_region;
215 * pipeline_param->output_background_color = 0;
216 * if (first surface to render)
217 * pipeline_param->output_background_color = 0xff000000; // black
218 * pipeline_param->filter_flags = VA_FILTER_SCALING_HQ;
219 * pipeline_param->filters = filter_bufs;
220 * pipeline_param->num_filters = num_filter_bufs;
 *
 * // Update reference frames for deinterlacing, if necessary
 * pipeline_param->forward_references = forward_references;
 * pipeline_param->num_forward_references = num_forward_references_used;
 * pipeline_param->backward_references = backward_references;
 * pipeline_param->num_backward_references = num_backward_references_used;
 * vaUnmapBuffer(va_dpy, pipeline_buf);
228 *
229 * // Apply filters
230 * vaRenderPicture(va_dpy, vpp_ctx, &pipeline_buf, 1);
231 * }
232 * vaEndPicture(va_dpy, vpp_ctx);
233 * }
234 * \endcode
235 */
236
237/** \brief Video filter types. */
238typedef enum _VAProcFilterType {
239 VAProcFilterNone = 0,
240 /** \brief Noise reduction filter. */
241 VAProcFilterNoiseReduction,
242 /** \brief Deinterlacing filter. */
243 VAProcFilterDeinterlacing,
244 /** \brief Sharpening filter. */
245 VAProcFilterSharpening,
246 /** \brief Color balance parameters. */
247 VAProcFilterColorBalance,
248 /** \brief Skin Tone Enhancement. */
249 VAProcFilterSkinToneEnhancement,
250 /** \brief Total Color Correction. */
251 VAProcFilterTotalColorCorrection,
252 /** \brief Human Vision System(HVS) Noise reduction filter. */
253 VAProcFilterHVSNoiseReduction,
254 /** \brief High Dynamic Range Tone Mapping. */
255 VAProcFilterHighDynamicRangeToneMapping,
256 /** \brief Three-Dimensional Look Up Table (3DLUT). */
257 VAProcFilter3DLUT,
258 /** \brief Number of video filters. */
259 VAProcFilterCount
260} VAProcFilterType;
261
262/** \brief Deinterlacing types. */
263typedef enum _VAProcDeinterlacingType {
264 VAProcDeinterlacingNone = 0,
265 /** \brief Bob deinterlacing algorithm. */
266 VAProcDeinterlacingBob,
267 /** \brief Weave deinterlacing algorithm. */
268 VAProcDeinterlacingWeave,
269 /** \brief Motion adaptive deinterlacing algorithm. */
270 VAProcDeinterlacingMotionAdaptive,
271 /** \brief Motion compensated deinterlacing algorithm. */
272 VAProcDeinterlacingMotionCompensated,
273 /** \brief Number of deinterlacing algorithms. */
274 VAProcDeinterlacingCount
275} VAProcDeinterlacingType;
276
277/** \brief Color balance types. */
278typedef enum _VAProcColorBalanceType {
279 VAProcColorBalanceNone = 0,
280 /** \brief Hue. */
281 VAProcColorBalanceHue,
282 /** \brief Saturation. */
283 VAProcColorBalanceSaturation,
284 /** \brief Brightness. */
285 VAProcColorBalanceBrightness,
286 /** \brief Contrast. */
287 VAProcColorBalanceContrast,
288 /** \brief Automatically adjusted saturation. */
289 VAProcColorBalanceAutoSaturation,
290 /** \brief Automatically adjusted brightness. */
291 VAProcColorBalanceAutoBrightness,
292 /** \brief Automatically adjusted contrast. */
293 VAProcColorBalanceAutoContrast,
294 /** \brief Number of color balance attributes. */
295 VAProcColorBalanceCount
296} VAProcColorBalanceType;
297
298/** \brief Color standard types.
299 *
300 * These define a set of color properties corresponding to particular
301 * video standards.
302 *
 * Where matrix_coefficients is specified, it applies only to YUV data;
 * RGB data always uses the identity matrix (matrix_coefficients = 0).
305 */
306typedef enum _VAProcColorStandardType {
307 VAProcColorStandardNone = 0,
308 /** \brief ITU-R BT.601.
309 *
310 * It is unspecified whether this will use 525-line or 625-line values;
311 * specify the colour primaries and matrix coefficients explicitly if
312 * it is known which one is required.
313 *
314 * Equivalent to:
315 * colour_primaries = 5 or 6
316 * transfer_characteristics = 6
317 * matrix_coefficients = 5 or 6
318 */
319 VAProcColorStandardBT601,
320 /** \brief ITU-R BT.709.
321 *
322 * Equivalent to:
323 * colour_primaries = 1
324 * transfer_characteristics = 1
325 * matrix_coefficients = 1
326 */
327 VAProcColorStandardBT709,
328 /** \brief ITU-R BT.470-2 System M.
329 *
330 * Equivalent to:
331 * colour_primaries = 4
332 * transfer_characteristics = 4
333 * matrix_coefficients = 4
334 */
335 VAProcColorStandardBT470M,
336 /** \brief ITU-R BT.470-2 System B, G.
337 *
338 * Equivalent to:
339 * colour_primaries = 5
340 * transfer_characteristics = 5
341 * matrix_coefficients = 5
342 */
343 VAProcColorStandardBT470BG,
344 /** \brief SMPTE-170M.
345 *
346 * Equivalent to:
347 * colour_primaries = 6
348 * transfer_characteristics = 6
349 * matrix_coefficients = 6
350 */
351 VAProcColorStandardSMPTE170M,
352 /** \brief SMPTE-240M.
353 *
354 * Equivalent to:
355 * colour_primaries = 7
356 * transfer_characteristics = 7
357 * matrix_coefficients = 7
358 */
359 VAProcColorStandardSMPTE240M,
360 /** \brief Generic film.
361 *
362 * Equivalent to:
363 * colour_primaries = 8
364 * transfer_characteristics = 1
365 * matrix_coefficients = 1
366 */
367 VAProcColorStandardGenericFilm,
368 /** \brief sRGB.
369 *
370 * Equivalent to:
371 * colour_primaries = 1
372 * transfer_characteristics = 13
373 * matrix_coefficients = 0
374 */
375 VAProcColorStandardSRGB,
376 /** \brief stRGB.
377 *
378 * ???
379 */
380 VAProcColorStandardSTRGB,
381 /** \brief xvYCC601.
382 *
383 * Equivalent to:
384 * colour_primaries = 1
385 * transfer_characteristics = 11
386 * matrix_coefficients = 5
387 */
388 VAProcColorStandardXVYCC601,
389 /** \brief xvYCC709.
390 *
391 * Equivalent to:
392 * colour_primaries = 1
393 * transfer_characteristics = 11
394 * matrix_coefficients = 1
395 */
396 VAProcColorStandardXVYCC709,
397 /** \brief ITU-R BT.2020.
398 *
399 * Equivalent to:
400 * colour_primaries = 9
401 * transfer_characteristics = 14
402 * matrix_coefficients = 9
403 */
404 VAProcColorStandardBT2020,
405 /** \brief Explicitly specified color properties.
406 *
407 * Use corresponding color properties section.
408 * For example, HDR10 content:
409 * colour_primaries = 9 (BT2020)
410 * transfer_characteristics = 16 (SMPTE ST2084)
411 * matrix_coefficients = 9
412 */
413 VAProcColorStandardExplicit,
414 /** \brief Number of color standards. */
415 VAProcColorStandardCount
416} VAProcColorStandardType;
417
418/** \brief Total color correction types. */
419typedef enum _VAProcTotalColorCorrectionType {
420 VAProcTotalColorCorrectionNone = 0,
421 /** \brief Red Saturation. */
422 VAProcTotalColorCorrectionRed,
423 /** \brief Green Saturation. */
424 VAProcTotalColorCorrectionGreen,
425 /** \brief Blue Saturation. */
426 VAProcTotalColorCorrectionBlue,
427 /** \brief Cyan Saturation. */
428 VAProcTotalColorCorrectionCyan,
429 /** \brief Magenta Saturation. */
430 VAProcTotalColorCorrectionMagenta,
431 /** \brief Yellow Saturation. */
432 VAProcTotalColorCorrectionYellow,
433 /** \brief Number of color correction attributes. */
434 VAProcTotalColorCorrectionCount
435} VAProcTotalColorCorrectionType;
436
437/** \brief High Dynamic Range Metadata types. */
438typedef enum _VAProcHighDynamicRangeMetadataType {
439 VAProcHighDynamicRangeMetadataNone = 0,
440 /** \brief Metadata type for HDR10. */
441 VAProcHighDynamicRangeMetadataHDR10,
442 /** \brief Number of Metadata type. */
443 VAProcHighDynamicRangeMetadataTypeCount
444} VAProcHighDynamicRangeMetadataType;
445
446/** \brief Video Processing Mode. */
447typedef enum _VAProcMode {
448 /**
449 * \brief Default Mode.
 * In this mode, the driver selects the appropriate pipeline mode,
 * e.g. one that balances power and performance.
452 */
453 VAProcDefaultMode = 0,
454 /**
455 * \brief Power Saving Mode.
 * In this mode, the pipeline is optimized for power saving.
457 */
458 VAProcPowerSavingMode,
459 /**
460 * \brief Performance Mode.
 * In this mode, the pipeline is optimized for performance.
462 */
463 VAProcPerformanceMode
464} VAProcMode;
465
466/** @name Video blending flags */
467/**@{*/
468/** \brief Global alpha blending. */
469#define VA_BLEND_GLOBAL_ALPHA 0x0001
470/** \brief Premultiplied alpha blending (RGBA surfaces only). */
471#define VA_BLEND_PREMULTIPLIED_ALPHA 0x0002
472/** \brief Luma color key (YUV surfaces only). */
473#define VA_BLEND_LUMA_KEY 0x0010
474/**@}*/
475
476/** \brief Video blending state definition. */
477typedef struct _VABlendState {
478 /** \brief Video blending flags. */
479 unsigned int flags;
480 /**
481 * \brief Global alpha value.
482 *
 * Valid if \ref flags has VA_BLEND_GLOBAL_ALPHA.
484 * Valid range is 0.0 to 1.0 inclusive.
485 */
486 float global_alpha;
487 /**
488 * \brief Minimum luma value.
489 *
 * Valid if \ref flags has VA_BLEND_LUMA_KEY.
491 * Valid range is 0.0 to 1.0 inclusive.
492 * \ref min_luma shall be set to a sensible value lower than \ref max_luma.
493 */
494 float min_luma;
495 /**
496 * \brief Maximum luma value.
497 *
 * Valid if \ref flags has VA_BLEND_LUMA_KEY.
499 * Valid range is 0.0 to 1.0 inclusive.
500 * \ref max_luma shall be set to a sensible value larger than \ref min_luma.
501 */
502 float max_luma;
503} VABlendState;
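
/*
 * Informative example: a minimal sketch of luma keying with #VABlendState.
 * The key range below is an illustrative assumption, and pipeline_param is
 * assumed to be a mapped VAProcPipelineParameterBuffer as in the examples
 * at the top of this file.
 *
 *     VABlendState blend;
 *     blend.flags    = VA_BLEND_LUMA_KEY;
 *     blend.min_luma = 0.0f;  // luma key range, illustrative values
 *     blend.max_luma = 0.1f;
 *     // The structure shall remain live until vaEndPicture().
 *     pipeline_param->blend_state = &blend;
 */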
504
505/** @name Video pipeline flags */
506/**@{*/
507/** \brief Specifies whether to apply subpictures when processing a surface. */
508#define VA_PROC_PIPELINE_SUBPICTURES 0x00000001
509/**
510 * \brief Specifies whether to apply power or performance
511 * optimizations to a pipeline.
512 *
 * When processing several surfaces, it may be necessary to prioritize
 * certain pipelines over others. This flag is only a hint to the
 * video processor, which may for example omit certain filters to save
 * power. Typically, this flag could be used with video surfaces
 * decoded from a secondary bitstream.
518 */
519#define VA_PROC_PIPELINE_FAST 0x00000002
520/**@}*/
521
522/** @name Video filter flags */
523/**@{*/
524/** \brief Specifies whether the filter shall be present in the pipeline. */
525#define VA_PROC_FILTER_MANDATORY 0x00000001
526/**@}*/
527
528/** @name Pipeline end flags */
529/**@{*/
/** \brief Specifies that this pipeline is the last one. */
531#define VA_PIPELINE_FLAG_END 0x00000004
532/**@}*/
533
534/** @name Chroma Siting flag */
535/**@{*/
/** Vertical chroma siting occupies bits 0-1 and horizontal chroma siting occupies
 * bits 2-3; the chroma siting value is (vertical flag | horizontal flag). */
538#define VA_CHROMA_SITING_UNKNOWN 0x00
539/** \brief Chroma samples are co-sited vertically on the top with the luma samples. */
540#define VA_CHROMA_SITING_VERTICAL_TOP 0x01
541/** \brief Chroma samples are not co-sited vertically with the luma samples. */
542#define VA_CHROMA_SITING_VERTICAL_CENTER 0x02
543/** \brief Chroma samples are co-sited vertically on the bottom with the luma samples. */
544#define VA_CHROMA_SITING_VERTICAL_BOTTOM 0x03
545/** \brief Chroma samples are co-sited horizontally on the left with the luma samples. */
546#define VA_CHROMA_SITING_HORIZONTAL_LEFT 0x04
547/** \brief Chroma samples are not co-sited horizontally with the luma samples. */
548#define VA_CHROMA_SITING_HORIZONTAL_CENTER 0x08
549/**@}*/
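
/*
 * Informative example: chroma siting is expressed as the OR of one vertical
 * and one horizontal flag. The MPEG-2-style siting below (horizontally
 * co-sited with the left luma sample, vertically centered) is only an
 * illustration; pipeline_param is assumed to be a mapped
 * VAProcPipelineParameterBuffer.
 *
 *     pipeline_param->input_color_properties.chroma_sample_location =
 *         VA_CHROMA_SITING_VERTICAL_CENTER | VA_CHROMA_SITING_HORIZONTAL_LEFT;
 */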
550
551/**
 * This indicates whether the color-space conversion uses full range or reduced range.
 * VA_SOURCE_RANGE_FULL (full range): Y/Cb/Cr is in [0, 255]. It is mainly used
 * for JPEG/JFIF formats. Combined with the BT601 flag, it means that the
 * JPEG/JFIF color-space conversion matrix is used.
 * VA_SOURCE_RANGE_REDUCED (reduced range): Y is in [16, 235] and Cb/Cr is in [16, 240].
 * It is mainly used for the YUV->RGB color-space conversion in SDTV/HDTV/UHDTV.
558 */
559#define VA_SOURCE_RANGE_UNKNOWN 0
560#define VA_SOURCE_RANGE_REDUCED 1
561#define VA_SOURCE_RANGE_FULL 2
562
563/** @name Tone Mapping flags multiple HDR mode*/
564/**@{*/
565/** \brief Tone Mapping from HDR content to HDR display. */
566#define VA_TONE_MAPPING_HDR_TO_HDR 0x0001
567/** \brief Tone Mapping from HDR content to SDR display. */
568#define VA_TONE_MAPPING_HDR_TO_SDR 0x0002
569/** \brief Tone Mapping from HDR content to EDR display. */
570#define VA_TONE_MAPPING_HDR_TO_EDR 0x0004
571/** \brief Tone Mapping from SDR content to HDR display. */
572#define VA_TONE_MAPPING_SDR_TO_HDR 0x0008
573/**@}*/
574
575/** \brief Video processing pipeline capabilities. */
576typedef struct _VAProcPipelineCaps {
577 /** \brief Pipeline flags. See VAProcPipelineParameterBuffer::pipeline_flags. */
578 uint32_t pipeline_flags;
579 /** \brief Extra filter flags. See VAProcPipelineParameterBuffer::filter_flags. */
580 uint32_t filter_flags;
581 /** \brief Number of forward reference frames that are needed. */
582 uint32_t num_forward_references;
583 /** \brief Number of backward reference frames that are needed. */
584 uint32_t num_backward_references;
585 /** \brief List of color standards supported on input. */
586 VAProcColorStandardType *input_color_standards;
587 /** \brief Number of elements in \ref input_color_standards array. */
588 uint32_t num_input_color_standards;
589 /** \brief List of color standards supported on output. */
590 VAProcColorStandardType *output_color_standards;
591 /** \brief Number of elements in \ref output_color_standards array. */
592 uint32_t num_output_color_standards;
593
594 /**
595 * \brief Rotation flags.
596 *
597 * For each rotation angle supported by the underlying hardware,
598 * the corresponding bit is set in \ref rotation_flags. See
599 * "Rotation angles" for a description of rotation angles.
600 *
601 * A value of 0 means the underlying hardware does not support any
602 * rotation. Otherwise, a check for a specific rotation angle can be
603 * performed as follows:
604 *
605 * \code
606 * VAProcPipelineCaps pipeline_caps;
607 * ...
608 * vaQueryVideoProcPipelineCaps(va_dpy, vpp_ctx,
609 * filter_bufs, num_filter_bufs,
610 * &pipeline_caps
611 * );
612 * ...
613 * if (pipeline_caps.rotation_flags & (1 << VA_ROTATION_xxx)) {
614 * // Clockwise rotation by xxx degrees is supported
615 * ...
616 * }
617 * \endcode
618 */
619 uint32_t rotation_flags;
620 /** \brief Blend flags. See "Video blending flags". */
621 uint32_t blend_flags;
622 /**
623 * \brief Mirroring flags.
624 *
625 * For each mirroring direction supported by the underlying hardware,
626 * the corresponding bit is set in \ref mirror_flags. See
627 * "Mirroring directions" for a description of mirroring directions.
628 *
629 */
630 uint32_t mirror_flags;
631 /** \brief Number of additional output surfaces supported by the pipeline */
632 uint32_t num_additional_outputs;
633
634 /** \brief Number of elements in \ref input_pixel_format array. */
635 uint32_t num_input_pixel_formats;
636 /** \brief List of input pixel formats in fourcc. */
637 uint32_t *input_pixel_format;
638 /** \brief Number of elements in \ref output_pixel_format array. */
639 uint32_t num_output_pixel_formats;
640 /** \brief List of output pixel formats in fourcc. */
641 uint32_t *output_pixel_format;
642
643 /** \brief Max supported input width in pixels. */
644 uint32_t max_input_width;
645 /** \brief Max supported input height in pixels. */
646 uint32_t max_input_height;
647 /** \brief Min supported input width in pixels. */
648 uint32_t min_input_width;
649 /** \brief Min supported input height in pixels. */
650 uint32_t min_input_height;
651
652 /** \brief Max supported output width in pixels. */
653 uint32_t max_output_width;
654 /** \brief Max supported output height in pixels. */
655 uint32_t max_output_height;
656 /** \brief Min supported output width in pixels. */
657 uint32_t min_output_width;
658 /** \brief Min supported output height in pixels. */
659 uint32_t min_output_height;
660 /** \brief Reserved bytes for future use, must be zero */
661#if defined(__AMD64__) || defined(__x86_64__) || defined(__amd64__) || defined(__LP64__)
662 uint32_t va_reserved[VA_PADDING_HIGH - 2];
663#else
664 uint32_t va_reserved[VA_PADDING_HIGH];
665#endif
666} VAProcPipelineCaps;
667
668/** \brief Specification of values supported by the filter. */
669typedef struct _VAProcFilterValueRange {
670 /** \brief Minimum value supported, inclusive. */
671 float min_value;
672 /** \brief Maximum value supported, inclusive. */
673 float max_value;
674 /** \brief Default value. */
675 float default_value;
676 /** \brief Step value that alters the filter behaviour in a sensible way. */
677 float step;
678
679 /** \brief Reserved bytes for future use, must be zero */
680 uint32_t va_reserved[VA_PADDING_LOW];
681} VAProcFilterValueRange;
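
/*
 * Informative example: clamping a requested filter value to the supported
 * range. sharpen_cap is assumed to be a #VAProcFilterCap previously filled in
 * by vaQueryVideoProcFilterCaps() for VAProcFilterSharpening, and
 * requested_value is an illustrative input.
 *
 *     float value = requested_value;
 *     if (value < sharpen_cap.range.min_value)
 *         value = sharpen_cap.range.min_value;
 *     if (value > sharpen_cap.range.max_value)
 *         value = sharpen_cap.range.max_value;
 */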
682
683typedef struct _VAProcColorProperties {
    /** Chroma sample location. \c VA_CHROMA_SITING_VERTICAL_XXX | \c VA_CHROMA_SITING_HORIZONTAL_XXX */
685 uint8_t chroma_sample_location;
686 /** Color range. \c VA_SOURCE_RANGE_XXX*/
687 uint8_t color_range;
688 /** Colour primaries.
689 *
690 * See ISO/IEC 23001-8 or ITU H.273, section 8.1 and table 2.
691 * Only used if the color standard in use is \c VAColorStandardExplicit.
 * The table below lists typical colour primaries for reference.
693 * ---------------------------------------------------------------------------------
694 * | Value | Primaries | Informative Remark |
695 * --------------------------------------------------------------------------------
696 * | 1 |primary x y |Rec.ITU-R BT.709-5 |
697 * | |green 0.300 0.600 |IEC 61966-2-1(sRGB or sYCC) |
698 * | |blue 0.150 0.060 | |
699 * | |red 0.640 0.330 | |
700 * | |whiteD65 0.3127 0.3290 | |
701 * ---------------------------------------------------------------------------------
702 * | 6 |primary x y |Rec.ITU-R BT.601-6 525 |
703 * | |green 0.310 0.595 | |
704 * | |blue 0.155 0.070 | |
705 * | |red 0.630 0.340 | |
706 * | |whiteD65 0.3127 0.3290 | |
707 * ---------------------------------------------------------------------------------
708 * | 9 |primary x y |Rec.ITU-R BT.2020 |
709 * | |green 0.170 0.797 | |
710 * | |blue 0.131 0.046 | |
711 * | |red 0.708 0.292 | |
712 * | |whiteD65 0.3127 0.3290 | |
713 * ---------------------------------------------------------------------------------
714 */
715 uint8_t colour_primaries;
716 /** Transfer characteristics.
717 *
718 * See ISO/IEC 23001-8 or ITU H.273, section 8.2 and table 3.
719 * Only used if the color standard in use is \c VAColorStandardExplicit.
 * The table below lists typical transfer characteristics for reference.
721 * -----------------------------------------------------------
722 * | Value | Informative Remark |
723 * -----------------------------------------------------------
724 * | 1 |Rec.ITU-R BT.709-5 |
725 * | |colour gamut system |
726 * -----------------------------------------------------------
727 * | 4 |Assumed display gamma 2.2 |
728 * -----------------------------------------------------------
729 * | 6 |Rec.ITU-R BT.601-6 525 or 625 |
730 * -----------------------------------------------------------
731 * | 8 |Linear transfer characteristics |
732 * -----------------------------------------------------------
733 * | 13 |IEC 61966-2-1(sRGB or sYCC) |
734 * -----------------------------------------------------------
735 * | 14,15 |Rec.ITU-R BT.2020 |
736 * -----------------------------------------------------------
737 * | 16 |SMPTE ST 2084 for 10,12,14 and 16bit system |
738 * -----------------------------------------------------------
739 */
740 uint8_t transfer_characteristics;
741 /** Matrix coefficients.
742 *
743 * See ISO/IEC 23001-8 or ITU H.273, section 8.3 and table 4.
744 * Only used if the color standard in use is \c VAColorStandardExplicit.
745 */
746 uint8_t matrix_coefficients;
747 /** Reserved bytes for future use, must be zero. */
748 uint8_t reserved[3];
749} VAProcColorProperties;
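
/*
 * Informative example: describing HDR10 input explicitly. The values follow
 * the tables above and the VAProcColorStandardExplicit description (BT.2020
 * primaries, SMPTE ST 2084 transfer, BT.2020 matrix); pipeline_param is
 * assumed to be a mapped VAProcPipelineParameterBuffer.
 *
 *     pipeline_param->surface_color_standard = VAProcColorStandardExplicit;
 *     pipeline_param->input_color_properties.colour_primaries         = 9;
 *     pipeline_param->input_color_properties.transfer_characteristics = 16;
 *     pipeline_param->input_color_properties.matrix_coefficients      = 9;
 *     pipeline_param->input_color_properties.color_range = VA_SOURCE_RANGE_REDUCED;
 */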
750
751/** \brief Describes High Dynamic Range Meta Data for HDR10.
752 *
 * Specifies the colour volume (the colour primaries, white point and luminance range) of
 * a display considered to be the mastering display for the associated video content, e.g.
 * the colour volume of a display that was used for viewing while authoring the video content.
756 * See ITU-T H.265 D.3.27 Mastering display colour volume SEI message semantics.
757 *
758 * Specifies upper bounds for the nominal light level of the content. See ITU-T H.265 D.3.35
759 * Content light level information SEI message semantics.
760 *
761 * This structure can be used to indicate the HDR10 metadata for 1) the content which was authored;
762 * 2) the display on which the content will be presented. If it is for display, max_content_light_level
763 * and max_pic_average_light_level are ignored.
764 */
765typedef struct _VAHdrMetaDataHDR10 {
766 /**
767 * \brief X chromaticity coordinate of the mastering display.
768 *
769 * Index value c equal to 0 should correspond to the green primary.
770 * Index value c equal to 1 should correspond to the blue primary.
771 * Index value c equal to 2 should correspond to the red primary.
772 * The value for display_primaries_x shall be in the range of 0 to 50000 inclusive.
773 */
774 uint16_t display_primaries_x[3];
775 /**
776 * \brief Y chromaticity coordinate of the mastering display.
777 *
778 * Index value c equal to 0 should correspond to the green primary.
779 * Index value c equal to 1 should correspond to the blue primary.
780 * Index value c equal to 2 should correspond to the red primary.
781 * The value for display_primaries_y shall be in the range of 0 to 50000 inclusive.
782 */
783 uint16_t display_primaries_y[3];
784 /**
785 * \brief X chromaticity coordinate of the white point of the mastering display.
786 *
787 * The value for white_point_x shall be in the range of 0 to 50000 inclusive.
788 */
789 uint16_t white_point_x;
790 /**
791 * \brief Y chromaticity coordinate of the white point of the mastering display.
792 *
793 * The value for white_point_y shall be in the range of 0 to 50000 inclusive.
794 */
795 uint16_t white_point_y;
796 /**
797 * \brief The maximum display luminance of the mastering display.
798 *
799 * The value is in units of 0.0001 candelas per square metre.
800 */
801 uint32_t max_display_mastering_luminance;
802 /**
 * \brief The minimum display luminance of the mastering display.
804 *
805 * The value is in units of 0.0001 candelas per square metre.
806 */
807 uint32_t min_display_mastering_luminance;
808 /**
809 * \brief The maximum content light level (MaxCLL).
810 *
 * The value is in units of 1 candela per square metre.
812 */
813 uint16_t max_content_light_level;
814 /**
815 * \brief The maximum picture average light level (MaxFALL).
816 *
 * The value is in units of 1 candela per square metre.
818 */
819 uint16_t max_pic_average_light_level;
    /** Reserved */
821 uint16_t reserved[VA_PADDING_HIGH];
822} VAHdrMetaDataHDR10;
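
/*
 * Informative example: typical HDR10 mastering display metadata for BT.2020
 * primaries with a 1000 cd/m2 peak and 0.01 cd/m2 minimum luminance. All
 * values are illustrative only.
 *
 *     VAHdrMetaDataHDR10 hdr10;
 *     hdr10.display_primaries_x[0] = 8500;   // green
 *     hdr10.display_primaries_y[0] = 39850;
 *     hdr10.display_primaries_x[1] = 6550;   // blue
 *     hdr10.display_primaries_y[1] = 2300;
 *     hdr10.display_primaries_x[2] = 35400;  // red
 *     hdr10.display_primaries_y[2] = 14600;
 *     hdr10.white_point_x = 15635;           // D65
 *     hdr10.white_point_y = 16450;
 *     hdr10.max_display_mastering_luminance = 10000000; // 1000 cd/m2
 *     hdr10.min_display_mastering_luminance = 100;      // 0.01 cd/m2
 *     hdr10.max_content_light_level     = 1000;
 *     hdr10.max_pic_average_light_level = 400;
 */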
823
824/** \brief Capabilities specification for the High Dynamic Range filter. */
825typedef struct _VAProcFilterCapHighDynamicRange {
826 /** \brief high dynamic range type. */
827 VAProcHighDynamicRangeMetadataType metadata_type;
828 /**
829 * \brief flag for high dynamic range tone mapping
830 *
831 * The flag is the combination of VA_TONE_MAPPING_XXX_TO_XXX.
832 * It could be VA_TONE_MAPPING_HDR_TO_HDR | VA_TONE_MAPPING_HDR_TO_SDR.
 * SDR content to SDR display is always supported by default since it is the legacy path.
834 */
835 uint16_t caps_flag;
836 /** \brief Reserved bytes for future use, must be zero */
837 uint16_t va_reserved[VA_PADDING_HIGH];
838} VAProcFilterCapHighDynamicRange;
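
/*
 * Informative example: a sketch of checking whether HDR10 to SDR tone mapping
 * is supported. va_dpy and vpp_ctx are assumed to exist as in the examples at
 * the top of this file.
 *
 *     VAProcFilterCapHighDynamicRange hdr_caps[VAProcHighDynamicRangeMetadataTypeCount];
 *     unsigned int num_hdr_caps = VAProcHighDynamicRangeMetadataTypeCount;
 *     unsigned int i;
 *
 *     vaQueryVideoProcFilterCaps(va_dpy, vpp_ctx,
 *         VAProcFilterHighDynamicRangeToneMapping,
 *         hdr_caps, &num_hdr_caps
 *     );
 *     for (i = 0; i < num_hdr_caps; i++) {
 *         if (hdr_caps[i].metadata_type == VAProcHighDynamicRangeMetadataHDR10 &&
 *             (hdr_caps[i].caps_flag & VA_TONE_MAPPING_HDR_TO_SDR)) {
 *             // HDR10 to SDR tone mapping is available
 *         }
 *     }
 */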
839
840/** \brief High Dynamic Range Meta Data. */
841typedef struct _VAHdrMetaData {
842 /** \brief high dynamic range metadata type, HDR10 etc. */
843 VAProcHighDynamicRangeMetadataType metadata_type;
844 /**
845 * \brief Pointer to high dynamic range metadata.
846 *
847 * The pointer could point to VAHdrMetaDataHDR10 or other HDR meta data.
848 */
849 void* metadata;
850 /**
851 * \brief Size of high dynamic range metadata.
852 */
853 uint32_t metadata_size;
854 /** \brief Reserved bytes for future use, must be zero */
855 uint32_t reserved[VA_PADDING_LOW];
856} VAHdrMetaData;
857
858/**
859 * \brief Video processing pipeline configuration.
860 *
861 * This buffer defines a video processing pipeline. The actual filters to
862 * be applied are provided in the \c filters field, they can be re-used
863 * in other processing pipelines.
864 *
865 * The target surface is specified by the \c render_target argument of
866 * \c vaBeginPicture(). The general usage model is described as follows:
867 * - \c vaBeginPicture(): specify the target surface that receives the
868 * processed output;
869 * - \c vaRenderPicture(): specify a surface to be processed and composed
870 * into the \c render_target. Use as many \c vaRenderPicture() calls as
 *   there are surfaces to compose;
872 * - \c vaEndPicture(): tell the driver to start processing the surfaces
873 * with the requested filters.
874 *
875 * If a filter (e.g. noise reduction) needs to be applied with different
876 * values for multiple surfaces, the application needs to create as many
877 * filter parameter buffers as necessary. i.e. the filter parameters shall
878 * not change between two calls to \c vaRenderPicture().
879 *
880 * For composition usage models, the first surface to process will generally
881 * use an opaque background color, i.e. \c output_background_color set with
882 * the most significant byte set to \c 0xff. For instance, \c 0xff000000 for
883 * a black background. Then, subsequent surfaces would use a transparent
884 * background color.
885 */
886typedef struct _VAProcPipelineParameterBuffer {
887 /**
888 * \brief Source surface ID.
889 *
890 * ID of the source surface to process. If subpictures are associated
891 * with the video surfaces then they shall be rendered to the target
892 * surface, if the #VA_PROC_PIPELINE_SUBPICTURES pipeline flag is set.
893 */
894 VASurfaceID surface;
895 /**
896 * \brief Region within the source surface to be processed.
897 *
898 * Pointer to a #VARectangle defining the region within the source
899 * surface to be processed. If NULL, \c surface_region implies the
900 * whole surface.
901 */
902 const VARectangle *surface_region;
903 /**
904 * \brief Requested input color standard.
905 *
906 * Color properties are implicitly converted throughout the processing
907 * pipeline. The video processor chooses the best moment to apply
908 * this conversion. The set of supported color standards for input shall
909 * be queried with vaQueryVideoProcPipelineCaps().
910 *
911 * If this is set to VAProcColorStandardExplicit, the color properties
912 * are specified explicitly in surface_color_properties instead.
913 */
914 VAProcColorStandardType surface_color_standard;
915 /**
916 * \brief Region within the output surface.
917 *
918 * Pointer to a #VARectangle defining the region within the output
919 * surface that receives the processed pixels. If NULL, \c output_region
920 * implies the whole surface.
921 *
922 * Note that any pixels residing outside the specified region will
923 * be filled in with the \ref output_background_color.
924 */
925 const VARectangle *output_region;
926 /**
927 * \brief Background color.
928 *
929 * Background color used to fill in pixels that reside outside of the
930 * specified \ref output_region. The color is specified in ARGB format:
931 * [31:24] alpha, [23:16] red, [15:8] green, [7:0] blue.
932 *
933 * Unless the alpha value is zero or the \ref output_region represents
934 * the whole target surface size, implementations shall not render the
935 * source surface to the target surface directly. Rather, in order to
936 * maintain the exact semantics of \ref output_background_color, the
937 * driver shall use a temporary surface and fill it in with the
938 * appropriate background color. Next, the driver will blend this
939 * temporary surface into the target surface.
940 */
941 uint32_t output_background_color;
942 /**
943 * \brief Requested output color standard.
944 *
945 * If this is set to VAProcColorStandardExplicit, the color properties
946 * are specified explicitly in output_color_properties instead.
947 */
948 VAProcColorStandardType output_color_standard;
949 /**
 * \brief Pipeline flags. See "Video pipeline flags".
 *
 * Flags to control the pipeline, e.g. whether to apply subpictures
 * or not, or to notify the driver that it may apply power
 * optimizations, should this be needed.
955 */
956 uint32_t pipeline_flags;
957 /**
958 * \brief Extra filter flags. See vaPutSurface() flags.
959 *
960 * Filter flags are used as a fast path, wherever possible, to use
961 * vaPutSurface() flags instead of explicit filter parameter buffers.
962 *
963 * Allowed filter flags API-wise. Use vaQueryVideoProcPipelineCaps()
964 * to check for implementation details:
965 * - Bob-deinterlacing: \c VA_FRAME_PICTURE, \c VA_TOP_FIELD,
966 * \c VA_BOTTOM_FIELD. Note that any deinterlacing filter
967 * (#VAProcFilterDeinterlacing) will override those flags.
968 * - Color space conversion: \c VA_SRC_BT601, \c VA_SRC_BT709,
969 * \c VA_SRC_SMPTE_240.
970 * - Scaling: \c VA_FILTER_SCALING_DEFAULT, \c VA_FILTER_SCALING_FAST,
971 * \c VA_FILTER_SCALING_HQ, \c VA_FILTER_SCALING_NL_ANAMORPHIC.
972 * - Interpolation Method: \c VA_FILTER_INTERPOLATION_DEFAULT,
973 * \c VA_FILTER_INTERPOLATION_NEAREST_NEIGHBOR,
974 * \c VA_FILTER_INTERPOLATION_BILINEAR, \c VA_FILTER_INTERPOLATION_ADVANCED.
975 */
976 uint32_t filter_flags;
977 /**
978 * \brief Array of filters to apply to the surface.
979 *
980 * The list of filters shall be ordered in the same way the driver expects
981 * them. i.e. as was returned from vaQueryVideoProcFilters().
982 * Otherwise, a #VA_STATUS_ERROR_INVALID_FILTER_CHAIN is returned
983 * from vaRenderPicture() with this buffer.
984 *
985 * #VA_STATUS_ERROR_UNSUPPORTED_FILTER is returned if the list
986 * contains an unsupported filter.
987 *
988 */
989 VABufferID *filters;
990 /** \brief Actual number of filters. */
991 uint32_t num_filters;
992 /** \brief Array of forward reference frames (past frames). */
993 VASurfaceID *forward_references;
994 /** \brief Number of forward reference frames that were supplied. */
995 uint32_t num_forward_references;
996 /** \brief Array of backward reference frames (future frames). */
997 VASurfaceID *backward_references;
998 /** \brief Number of backward reference frames that were supplied. */
999 uint32_t num_backward_references;
1000 /**
1001 * \brief Rotation state. See rotation angles.
1002 *
1003 * The rotation angle is clockwise. There is no specific rotation
 * center for this operation. Rather, the source \ref surface is
1005 * first rotated by the specified angle and then scaled to fit the
1006 * \ref output_region.
1007 *
1008 * This means that the top-left hand corner (0,0) of the output
1009 * (rotated) surface is expressed as follows:
1010 * - \ref VA_ROTATION_NONE: (0,0) is the top left corner of the
1011 * source surface -- no rotation is performed ;
1012 * - \ref VA_ROTATION_90: (0,0) is the bottom-left corner of the
1013 * source surface ;
1014 * - \ref VA_ROTATION_180: (0,0) is the bottom-right corner of the
1015 * source surface -- the surface is flipped around the X axis ;
1016 * - \ref VA_ROTATION_270: (0,0) is the top-right corner of the
1017 * source surface.
1018 *
1019 * Check VAProcPipelineCaps::rotation_flags first prior to
 * defining a specific rotation angle. Otherwise, the hardware may
 * simply ignore this variable if it does not support any
 * rotation.
1023 */
1024 uint32_t rotation_state;
1025 /**
1026 * \brief blending state. See "Video blending state definition".
1027 *
1028 * If \ref blend_state is NULL, then default operation mode depends
1029 * on the source \ref surface format:
1030 * - RGB: per-pixel alpha blending ;
 * - YUV: no blending, i.e. override the underlying pixels.
1032 *
1033 * Otherwise, \ref blend_state is a pointer to a #VABlendState
1034 * structure that shall be live until vaEndPicture().
1035 *
1036 * Implementation note: the driver is responsible for checking the
1037 * blend state flags against the actual source \ref surface format.
1038 * e.g. premultiplied alpha blending is only applicable to RGB
1039 * surfaces, and luma keying is only applicable to YUV surfaces.
1040 * If a mismatch occurs, then #VA_STATUS_ERROR_INVALID_BLEND_STATE
1041 * is returned.
1042 */
1043 const VABlendState *blend_state;
1044 /**
 * \brief Mirroring state. See "Mirroring directions".
1046 *
1047 * Mirroring of an image can be performed either along the
1048 * horizontal or vertical axis. It is assumed that the rotation
1049 * operation is always performed before the mirroring operation.
1050 */
1051 uint32_t mirror_state;
1052 /** \brief Array of additional output surfaces. */
1053 VASurfaceID *additional_outputs;
1054 /** \brief Number of additional output surfaces. */
1055 uint32_t num_additional_outputs;
1056 /**
 * \brief Input surface flags.
1058 *
1059 * bit0~3: Surface sample type
1060 * - 0000: Progressive --> VA_FRAME_PICTURE
1061 * - 0001: Single Top Field --> VA_TOP_FIELD
1062 * - 0010: Single Bottom Field --> VA_BOTTOM_FIELD
1063 * - 0100: Interleaved Top Field First --> VA_TOP_FIELD_FIRST
1064 * - 1000: Interleaved Bottom Field First --> VA_BOTTOM_FIELD_FIRST
1065 *
1066 * For interlaced scaling, examples as follow:
1067 * - 1. Interleaved to Interleaved (Suppose input is top field first)
1068 * -- set input_surface_flag as VA_TOP_FIELD_FIRST
1069 * -- set output_surface_flag as VA_TOP_FIELD_FIRST
 * - 2. Interleaved to Field (Suppose input is top field first; see also the informative sketch after this structure)
 * An interleaved frame needs to be passed twice.
1072 * First cycle to get the first field:
1073 * -- set input_surface_flag as VA_TOP_FIELD_FIRST
1074 * -- set output_surface_flag as VA_TOP_FIELD
1075 * Second cycle to get the second field:
1076 * -- set input_surface_flag as VA_TOP_FIELD_FIRST
1077 * -- set output_surface_flag as VA_BOTTOM_FIELD
1078 * - 3. Field to Interleaved (Suppose first field is top field)
1079 * -- create two surfaces, one for top field, the other for bottom field
1080 * -- set surface with the first field surface id
1081 * -- set backward_reference with the second field surface id
1082 * -- set input_surface_flag as VA_TOP_FIELD
1083 * -- set output_surface_flag as VA_TOP_FIELD_FIRST
1084 * - 4. Field to Field:
1085 * -- set flag according to each frame.
1086 *
1087 * bit31: Surface encryption
1088 * - 0: non-protected
1089 * - 1: protected
1090 *
1091 * bit4~30 for future
1092 */
1093 uint32_t input_surface_flag;
1094 /**
 * \brief Output surface flags.
1096 *
1097 * bit0~3: Surface sample type
1098 * - 0000: Progressive --> VA_FRAME_PICTURE
1099 * - 0001: Top Field --> VA_TOP_FIELD
1100 * - 0010: Bottom Field --> VA_BOTTOM_FIELD
1101 * - 0100: Top Field First --> VA_TOP_FIELD_FIRST
1102 * - 1000: Bottom Field First --> VA_BOTTOM_FIELD_FIRST
1103 *
1104 * bit31: Surface encryption
1105 * - 0: non-protected
1106 * - 1: protected
1107 *
1108 * bit4~30 for future
1109 */
1110 uint32_t output_surface_flag;
1111 /**
1112 * \brief Input Color Properties. See "VAProcColorProperties".
1113 */
1114 VAProcColorProperties input_color_properties;
1115 /**
1116 * \brief Output Color Properties. See "VAProcColorProperties".
1117 */
1118 VAProcColorProperties output_color_properties;
1119 /**
1120 * \brief Processing mode. See "VAProcMode".
1121 */
1122 VAProcMode processing_mode;
1123 /**
1124 * \brief Output High Dynamic Metadata.
1125 *
 * If \ref output_hdr_metadata is NULL, the output defaults to SDR.
1127 */
1128 VAHdrMetaData *output_hdr_metadata;
1129
1130 /** \brief Reserved bytes for future use, must be zero */
1131#if defined(__AMD64__) || defined(__x86_64__) || defined(__amd64__)|| defined(__LP64__)
1132 uint32_t va_reserved[VA_PADDING_LARGE - 16];
1133#else
1134 uint32_t va_reserved[VA_PADDING_LARGE - 13];
1135#endif
1136} VAProcPipelineParameterBuffer;
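
/*
 * Informative example: interleaved-to-field interlaced scaling (case 2 of the
 * input_surface_flag documentation above), assuming a top-field-first input.
 * Each input frame is submitted twice, once per output field; the surrounding
 * vaBeginPicture()/vaRenderPicture()/vaEndPicture() calls are as in the
 * examples at the top of this file.
 *
 *     // First pass: output the top field
 *     pipeline_param->input_surface_flag  = VA_TOP_FIELD_FIRST;
 *     pipeline_param->output_surface_flag = VA_TOP_FIELD;
 *
 *     // Second pass: output the bottom field
 *     pipeline_param->input_surface_flag  = VA_TOP_FIELD_FIRST;
 *     pipeline_param->output_surface_flag = VA_BOTTOM_FIELD;
 */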
1137
1138/**
1139 * \brief Filter parameter buffer base.
1140 *
1141 * This is a helper structure used by driver implementations only.
1142 * Users are not supposed to allocate filter parameter buffers of this
1143 * type.
1144 */
1145typedef struct _VAProcFilterParameterBufferBase {
1146 /** \brief Filter type. */
1147 VAProcFilterType type;
1148} VAProcFilterParameterBufferBase;
1149
1150/**
1151 * \brief Default filter parametrization.
1152 *
1153 * Unless there is a filter-specific parameter buffer,
1154 * #VAProcFilterParameterBuffer is the default type to use.
1155 */
1156typedef struct _VAProcFilterParameterBuffer {
1157 /** \brief Filter type. */
1158 VAProcFilterType type;
1159 /** \brief Value. */
1160 float value;
1161
1162 /** \brief Reserved bytes for future use, must be zero */
1163 uint32_t va_reserved[VA_PADDING_LOW];
1164} VAProcFilterParameterBuffer;
1165
1166/** @name De-interlacing flags */
1167/**@{*/
1168/**
1169 * \brief Bottom field first in the input frame.
1170 * if this is not set then assumes top field first.
1171 */
1172#define VA_DEINTERLACING_BOTTOM_FIELD_FIRST 0x0001
1173/**
1174 * \brief Bottom field used in deinterlacing.
1175 * if this is not set then assumes top field is used.
1176 */
1177#define VA_DEINTERLACING_BOTTOM_FIELD 0x0002
1178/**
1179 * \brief A single field is stored in the input frame.
1180 * if this is not set then assumes the frame contains two interleaved fields.
1181 */
1182#define VA_DEINTERLACING_ONE_FIELD 0x0004
1183/**
1184 * \brief Film Mode Detection is enabled. If enabled, driver performs inverse
1185 * of various pulldowns, such as 3:2 pulldown.
1186 * if this is not set then assumes FMD is disabled.
1187 */
1188#define VA_DEINTERLACING_FMD_ENABLE 0x0008
1189
// Scene change parameter for ADI on Linux: if enabled, the driver uses spatial DI (Bob) instead of ADI; if not, the old ADI behavior is used.
// The input stream is TFF (set flags = 0); SRC0,1,2,3 are interlaced frames (top + bottom fields), DSTs are progressive frames.
1192//30i->30p
1193//SRC0 -> BOBDI, no reference, set flag = 0, output DST0
1194//SRC1 -> ADI, reference frame=SRC0, set flags = 0, call VP, output DST1
1195//SRC2 -> ADI, reference frame=SRC1, set flags = 0x0010(decimal 16), call VP, output DST2(T4)
1196//SRC3 -> ADI, reference frame=SRC2, set flags = 0, call VP, output DST3
1197//30i->60p
1198//SRC0 -> BOBDI, no reference, set flag = 0, output DST0
1199//SRC0 -> BOBDI, no reference, set flag =0x0002, output DST1
1200
1201//SRC1 -> ADI, reference frame =SRC0, set flags = 0, call VP, output DST2
1202//SRC1 -> ADI, reference frame =SRC0, set flags = 0x0012(decimal18), call VP, output DST3(B3)
1203
1204//SRC2 -> ADI, reference frame =SRC1, set flags = 0x0010(decimal 16), call VP, output DST4(T4)
1205//SRC2 -> ADI, reference frame =SRC1, set flags = 0x0002, call VP, output DST5
1206
1207//SRC3 -> ADI, reference frame =SRC2, set flags = 0, call VP, output DST6
1208//SRC3 -> ADI, reference frame =SRC1, set flags = 0x0002, call VP, output DST7
1209
1210#define VA_DEINTERLACING_SCD_ENABLE 0x0010
1211
1212/**@}*/
1213
1214/** \brief Deinterlacing filter parametrization. */
1215typedef struct _VAProcFilterParameterBufferDeinterlacing {
1216 /** \brief Filter type. Shall be set to #VAProcFilterDeinterlacing. */
1217 VAProcFilterType type;
1218 /** \brief Deinterlacing algorithm. */
1219 VAProcDeinterlacingType algorithm;
1220 /** \brief Deinterlacing flags. */
1221 uint32_t flags;
1222
1223 /** \brief Reserved bytes for future use, must be zero */
1224 uint32_t va_reserved[VA_PADDING_LOW];
1225} VAProcFilterParameterBufferDeinterlacing;
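
/*
 * Informative example: double-rate (30i to 60p) motion-adaptive deinterlacing
 * of a top-field-first stream, following the flag usage described above. The
 * first output of each frame is rendered with flags 0, the second one with
 * VA_DEINTERLACING_BOTTOM_FIELD; second_field and deint_filter are
 * illustrative names.
 *
 *     VAProcFilterParameterBufferDeinterlacing deint;
 *     deint.type      = VAProcFilterDeinterlacing;
 *     deint.algorithm = VAProcDeinterlacingMotionAdaptive;
 *     deint.flags     = second_field ? VA_DEINTERLACING_BOTTOM_FIELD : 0;
 *     vaCreateBuffer(va_dpy, vpp_ctx,
 *         VAProcFilterParameterBufferType, sizeof(deint), 1,
 *         &deint, &deint_filter
 *     );
 */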
1226
1227/**
1228 * \brief Color balance filter parametrization.
1229 *
1230 * This buffer defines color balance attributes. A VA buffer can hold
1231 * several color balance attributes by creating a VA buffer of desired
1232 * number of elements. This can be achieved by the following pseudo-code:
1233 *
1234 * \code
1235 * enum { kHue, kSaturation, kBrightness, kContrast };
1236 *
1237 * // Initial color balance parameters
1238 * static const VAProcFilterParameterBufferColorBalance colorBalanceParams[4] =
1239 * {
1240 * [kHue] =
1241 * { VAProcFilterColorBalance, VAProcColorBalanceHue, 0.5 },
1242 * [kSaturation] =
1243 * { VAProcFilterColorBalance, VAProcColorBalanceSaturation, 0.5 },
1244 * [kBrightness] =
1245 * { VAProcFilterColorBalance, VAProcColorBalanceBrightness, 0.5 },
 * [kContrast] =
 * { VAProcFilterColorBalance, VAProcColorBalanceContrast, 0.5 }
1248 * };
1249 *
1250 * // Create buffer
1251 * VABufferID colorBalanceBuffer;
1252 * vaCreateBuffer(va_dpy, vpp_ctx,
 * VAProcFilterParameterBufferType, sizeof(colorBalanceParams[0]), 4,
1254 * colorBalanceParams,
1255 * &colorBalanceBuffer
1256 * );
1257 *
1258 * VAProcFilterParameterBufferColorBalance *pColorBalanceParam;
1259 * vaMapBuffer(va_dpy, colorBalanceBuffer, &pColorBalanceParam);
1260 * {
1261 * // Change brightness only
 * pColorBalanceParam[kBrightness].value = 0.75;
1263 * }
1264 * vaUnmapBuffer(va_dpy, colorBalanceBuffer);
1265 * \endcode
1266 */
1267typedef struct _VAProcFilterParameterBufferColorBalance {
1268 /** \brief Filter type. Shall be set to #VAProcFilterColorBalance. */
1269 VAProcFilterType type;
1270 /** \brief Color balance attribute. */
1271 VAProcColorBalanceType attrib;
1272 /**
1273 * \brief Color balance value.
1274 *
1275 * Special case for automatically adjusted attributes. e.g.
1276 * #VAProcColorBalanceAutoSaturation,
1277 * #VAProcColorBalanceAutoBrightness,
1278 * #VAProcColorBalanceAutoContrast.
1279 * - If \ref value is \c 1.0 +/- \c FLT_EPSILON, the attribute is
1280 * automatically adjusted and overrides any other attribute of
1281 * the same type that would have been set explicitly;
1282 * - If \ref value is \c 0.0 +/- \c FLT_EPSILON, the attribute is
 *   disabled and the other attribute of the same type is used instead.
1284 */
1285 float value;
1286
1287 /** \brief Reserved bytes for future use, must be zero */
1288 uint32_t va_reserved[VA_PADDING_LOW];
1289} VAProcFilterParameterBufferColorBalance;
1290
1291/** \brief Total color correction filter parametrization. */
1292typedef struct _VAProcFilterParameterBufferTotalColorCorrection {
1293 /** \brief Filter type. Shall be set to #VAProcFilterTotalColorCorrection. */
1294 VAProcFilterType type;
1295 /** \brief Color to correct. */
1296 VAProcTotalColorCorrectionType attrib;
1297 /** \brief Color correction value. */
1298 float value;
1299} VAProcFilterParameterBufferTotalColorCorrection;
1300
1301/** @name Video Processing Human Vision System (HVS) Denoise Mode.*/
1302/**@{*/
1303/**
1304 * \brief Default Mode.
 * The driver selects the appropriate denoise mode.
1306 */
1307#define VA_PROC_HVS_DENOISE_DEFAULT 0x0000
1308/**
1309 * \brief Auto BDRate Mode.
 * Indicates automatic BD-rate improvement in pre-processing (such as before video encoding); the \c strength field is ignored.
1311 */
1312#define VA_PROC_HVS_DENOISE_AUTO_BDRATE 0x0001
1313/**
1314 * \brief Auto Subjective Mode.
 * Indicates automatic subjective quality improvement in pre-processing (such as before video encoding); the \c strength field is ignored.
1316 */
1317#define VA_PROC_HVS_DENOISE_AUTO_SUBJECTIVE 0x0002
1318/**
1319 * \brief Manual Mode.
 * Indicates manual mode, which allows adjusting the denoise strength manually (the \c strength field must be set explicitly).
1321 */
1322#define VA_PROC_HVS_DENOISE_MANUAL 0x0003
1323/**@}*/
1324
1325/** \brief Human Vision System(HVS) Noise reduction filter parametrization. */
1326typedef struct _VAProcFilterParameterBufferHVSNoiseReduction {
1327 /** \brief Filter type. Shall be set to #VAProcFilterHVSNoiseReduction. */
1328 VAProcFilterType type;
1329 /** \brief QP for encoding, used for HVS Denoise */
1330 uint16_t qp;
1331 /**
1332 * \brief QP to Noise Reduction Strength Mode, used for Human Vision System Based Noise Reduction.
1333 * Controls Noise Reduction strength of conservative and aggressive mode.
 * It is an integer in the range [0, 16].
 * Value 0 turns Noise Reduction off completely;
 * value 16 is the most aggressive Noise Reduction mode;
 * value 10 is the default.
1338 */
1339 uint16_t strength;
1340 /**
1341 * \brief HVS Denoise Mode which controls denoise method.
1342 * It is a value of VA_PROC_HVS_DENOISE_xxx.
1343 * Please see the definition of VA_PROC_HVS_DENOISE_xxx.
1344 */
1345 uint16_t mode;
1346 /** \brief Reserved bytes for future use, must be zero */
1347 uint16_t va_reserved[VA_PADDING_HIGH - 1];
1348} VAProcFilterParameterBufferHVSNoiseReduction;
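
/*
 * Informative example: a sketch of a manual-strength HVS denoise filter used
 * before encoding. The QP and strength values are illustrative only.
 *
 *     VAProcFilterParameterBufferHVSNoiseReduction hvs;
 *     VABufferID hvs_filter;
 *
 *     hvs.type     = VAProcFilterHVSNoiseReduction;
 *     hvs.mode     = VA_PROC_HVS_DENOISE_MANUAL;
 *     hvs.strength = 10;  // default strength
 *     hvs.qp       = 27;  // QP of the subsequent encode pass
 *     vaCreateBuffer(va_dpy, vpp_ctx,
 *         VAProcFilterParameterBufferType, sizeof(hvs), 1,
 *         &hvs, &hvs_filter
 *     );
 */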
1349
1350/** \brief High Dynamic Range(HDR) Tone Mapping filter parametrization. */
1351typedef struct _VAProcFilterParameterBufferHDRToneMapping {
1352 /** \brief Filter type. Shall be set to #VAProcFilterHighDynamicRangeToneMapping.*/
1353 VAProcFilterType type;
1354 /**
1355 * \brief High Dynamic Range metadata, could be HDR10 etc.
1356 *
 * This metadata is mainly for the input surface. Given that dynamic metadata changes
 * on a frame-by-frame or scene-by-scene basis for HDR10+, the metadata for the input
 * and the output are kept separate.
1360 */
1361 VAHdrMetaData data;
1362 /** \brief Reserved bytes for future use, must be zero */
1363 uint32_t va_reserved[VA_PADDING_HIGH];
1364} VAProcFilterParameterBufferHDRToneMapping;
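
/*
 * Informative example: a sketch of creating an HDR10 tone mapping filter
 * buffer, with hdr10 filled in as in the #VAHdrMetaDataHDR10 example above.
 *
 *     VAProcFilterParameterBufferHDRToneMapping hdr_tm;
 *     VABufferID hdr_tm_filter;
 *
 *     hdr_tm.type               = VAProcFilterHighDynamicRangeToneMapping;
 *     hdr_tm.data.metadata_type = VAProcHighDynamicRangeMetadataHDR10;
 *     hdr_tm.data.metadata      = &hdr10;
 *     hdr_tm.data.metadata_size = sizeof(hdr10);
 *     vaCreateBuffer(va_dpy, vpp_ctx,
 *         VAProcFilterParameterBufferType, sizeof(hdr_tm), 1,
 *         &hdr_tm, &hdr_tm_filter
 *     );
 */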
1365
1366/** @name 3DLUT Channel Layout and Mapping */
1367/**@{*/
1368/** \brief 3DLUT Channel Layout is unknown. */
1369#define VA_3DLUT_CHANNEL_UNKNOWN 0x00000000
1370/** \brief 3DLUT Channel Layout is R, G, B, the default layout. Map RGB to RGB. */
1371#define VA_3DLUT_CHANNEL_RGB_RGB 0x00000001
1372/** \brief 3DLUT Channel Layout is Y, U, V. Map YUV to RGB. */
1373#define VA_3DLUT_CHANNEL_YUV_RGB 0x00000002
1374/** \brief 3DLUT Channel Layout is V, U, Y. Map VUY to RGB. */
1375#define VA_3DLUT_CHANNEL_VUY_RGB 0x00000004
1376/**@}*/
1377
1378/**
1379 * \brief 3DLUT filter parametrization.
1380 *
1381 * 3DLUT (Three Dimensional Look Up Table) is often used when converting an image or a video frame
1382 * from one color representation to another, for example, when converting log and gamma encodings,
1383 * changing the color space, applying a color correction, changing the dynamic range, gamut mapping etc.
1384 *
 * This buffer defines 3DLUT attributes and memory layout. A typical 3DLUT has a fixed number of points
 * (lut_size) per dimension, and the memory layout is a 3-dimensional array 3dlut[stride_0][stride_1][stride_2]
 * (lut_size shall be smaller than stride_0/1/2).
1388 *
 * The API user should query the hardware capabilities through #VAProcFilterCap3DLUT to get the 3DLUT
 * attributes the hardware supports, and use only those attributes. For example, the query could return
 * caps with 3dlut[33][33][64] (lut_size = 33, lut_stride[0/1/2] = 33/33/64). The API user shall not
 * use attributes the hardware cannot support.
1393 *
1394 * 3DLUT is usually used to transform input RGB/YUV values in one color space to output RGB values in another
1395 * color space. Based on 1) the format and color space of VPP input and output and 2) 3DLUT memory layout and
1396 * channel mapping, driver will enable some color space conversion implicitly if needed. For example, the input of
1397 * VPP is P010 format in BT2020 color space, the output of VPP is NV12 in BT709 color space and the 3DLUT channel
 * mapping is VA_3DLUT_CHANNEL_RGB_RGB, the driver could build the data pipeline as P010(BT2020)->RGB(BT2020)
 * ->3DLUT(BT709)->NV12(BT709). Please note the color space limitation of the 3DLUT filter: the color space of
 * the 3DLUT filter input data needs to be the same as the input data of VPP; the color space of the 3DLUT filter
 * output data needs to be the same as the output data of VPP; the format has no such limitation.
1402 */
1403typedef struct _VAProcFilterParameterBuffer3DLUT {
1404 /** \brief Filter type. Shall be set to #VAProcFilter3DLUT.*/
1405 VAProcFilterType type;
1406
1407 /** \brief lut_surface contains 3DLUT data in the 3DLUT memory layout, must be linear */
1408 VASurfaceID lut_surface;
1409 /**
1410 * \brief lut_size is the number of valid points on every dimension of the three dimensional look up table.
 * The size of the LUT (lut_size) shall be the same for every dimension of the three dimensional look up table.
1412 * The size of LUT (lut_size) shall be smaller than lut_stride[0/1/2].
1413 */
1414 uint16_t lut_size;
1415 /**
 * \brief lut_stride is the number of points in every dimension of the three dimensional look up table.
 * The three dimensions can have different strides, lut3d[lut_stride[0]][lut_stride[1]][lut_stride[2]].
 * Valid points start from 0; the range of valid points is [0, lut_size-1] for every dimension.
1419 */
1420 uint16_t lut_stride[3];
1421 /** \brief bit_depth is the number of bits for every channel R, G or B (or Y, U, V) */
1422 uint16_t bit_depth;
1423 /** \brief num_channel is the number of channels */
1424 uint16_t num_channel;
1425
1426 /** \brief channel_mapping defines the mapping of input and output channels, could be one of VA_3DLUT_CHANNEL_XXX*/
1427 uint32_t channel_mapping;
1428
1429 /** \brief reserved bytes for future use, must be zero */
1430 uint32_t va_reserved[VA_PADDING_HIGH];
1431} VAProcFilterParameterBuffer3DLUT;
1432
1433/** \brief Capabilities specification for the 3DLUT filter. */
1434typedef struct _VAProcFilterCap3DLUT {
1435 /** \brief lut_size is the number of valid points on every dimension of the three dimensional look up table. */
1436 uint16_t lut_size;
1437 /** \brief lut_stride are the number of points on every dimension of the three dimensional look up table. lut3d[lut_stride[0]][lut_stride[1]][lut_stride[2]]*/
1438 uint16_t lut_stride[3];
1439 /** \brief bit_depth is the number of bits for every channel R, G or B (or Y, U, V) */
1440 uint16_t bit_depth;
1441 /** \brief num_channel is the number of channels */
1442 uint16_t num_channel;
1443 /** \brief channel_mapping defines the mapping of channels, could be some combination of VA_3DLUT_CHANNEL_XXX*/
1444 uint32_t channel_mapping;
1445
1446 /** \brief Reserved bytes for future use, must be zero */
1447 uint32_t va_reserved[VA_PADDING_HIGH];
1448} VAProcFilterCap3DLUT;
1449
1450/**
1451 * \brief Default filter cap specification (single range value).
1452 *
1453 * Unless there is a filter-specific cap structure, #VAProcFilterCap is the
1454 * default type to use for output caps from vaQueryVideoProcFilterCaps().
1455 */
1456typedef struct _VAProcFilterCap {
1457 /** \brief Range of supported values for the filter. */
1458 VAProcFilterValueRange range;
1459
1460 /** \brief Reserved bytes for future use, must be zero */
1461 uint32_t va_reserved[VA_PADDING_LOW];
1462} VAProcFilterCap;
1463
1464/** \brief Capabilities specification for the deinterlacing filter. */
1465typedef struct _VAProcFilterCapDeinterlacing {
1466 /** \brief Deinterlacing algorithm. */
1467 VAProcDeinterlacingType type;
1468
1469 /** \brief Reserved bytes for future use, must be zero */
1470 uint32_t va_reserved[VA_PADDING_LOW];
1471} VAProcFilterCapDeinterlacing;
1472
1473/** \brief Capabilities specification for the color balance filter. */
1474typedef struct _VAProcFilterCapColorBalance {
1475 /** \brief Color balance operation. */
1476 VAProcColorBalanceType type;
1477 /** \brief Range of supported values for the specified operation. */
1478 VAProcFilterValueRange range;
1479
1480 /** \brief Reserved bytes for future use, must be zero */
1481 uint32_t va_reserved[VA_PADDING_LOW];
1482} VAProcFilterCapColorBalance;
1483
1484/** \brief Capabilities specification for the Total Color Correction filter. */
1485typedef struct _VAProcFilterCapTotalColorCorrection {
1486 /** \brief Color to correct. */
1487 VAProcTotalColorCorrectionType type;
1488 /** \brief Range of supported values for the specified color. */
1489 VAProcFilterValueRange range;
1490} VAProcFilterCapTotalColorCorrection;
1491
1492/**
1493 * \brief Queries video processing filters.
1494 *
1495 * This function returns the list of video processing filters supported
1496 * by the driver. The \c filters array is allocated by the user and
1497 * \c num_filters shall be initialized to the number of allocated
1498 * elements in that array. Upon successful return, the actual number
1499 * of filters will be overwritten into \c num_filters. Otherwise,
1500 * \c VA_STATUS_ERROR_MAX_NUM_EXCEEDED is returned and \c num_filters
1501 * is adjusted to the number of elements that would be returned if enough
1502 * space was available.
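 *
 * A minimal sketch of handling the truncation case is shown below; the initial
 * array size of 4 is arbitrary.
 *
 * \code
 * VAProcFilterType small_filters[4];
 * unsigned int num = 4;
 * VAStatus status = vaQueryVideoProcFilters(va_dpy, vpp_ctx, small_filters, &num);
 * if (status == VA_STATUS_ERROR_MAX_NUM_EXCEEDED) {
 *     // num now holds the number of elements the driver would have returned
 *     VAProcFilterType *all_filters = malloc(num * sizeof(all_filters[0]));
 *     status = vaQueryVideoProcFilters(va_dpy, vpp_ctx, all_filters, &num);
 * }
 * \endcode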
 *
 * The list of video processing filters supported by the driver shall
 * be ordered in the way they can be iteratively applied. This is needed
 * both for correctness, i.e. some filters would not make sense if
 * applied at the beginning of the pipeline, and for performance,
 * since some filters can be applied in a single pass (e.g. noise
 * reduction + deinterlacing).
 *
 * @param[in] dpy                the VA display
 * @param[in] context            the video processing context
 * @param[out] filters           the output array of #VAProcFilterType elements
 * @param[in,out] num_filters    the number of elements allocated on input,
 *      the number of elements actually filled in on output
 */
VAStatus
vaQueryVideoProcFilters(
    VADisplay dpy,
    VAContextID context,
    VAProcFilterType *filters,
    unsigned int *num_filters
);

/**
 * \brief Queries video filter capabilities.
 *
 * This function returns the list of capabilities supported by the driver
 * for a specific video filter. The \c filter_caps array is allocated by
 * the user and \c num_filter_caps shall be initialized to the number
 * of allocated elements in that array. Upon successful return, the
 * actual number of filter capabilities will be overwritten into
 * \c num_filter_caps. Otherwise, \c VA_STATUS_ERROR_MAX_NUM_EXCEEDED is
 * returned and \c num_filter_caps is adjusted to the number of elements
 * that would be returned if enough space was available.
 *
 * @param[in] dpy                the VA display
 * @param[in] context            the video processing context
 * @param[in] type               the video filter type
 * @param[out] filter_caps       the output array of #VAProcFilterCap elements
 * @param[in,out] num_filter_caps the number of elements allocated on input,
 *      the number of elements actually filled in on output
 */
VAStatus
vaQueryVideoProcFilterCaps(
    VADisplay dpy,
    VAContextID context,
    VAProcFilterType type,
    void *filter_caps,
    unsigned int *num_filter_caps
);

/**
 * \brief Queries video processing pipeline capabilities.
 *
 * This function returns the video processing pipeline capabilities. The
 * \c filters array defines the video processing pipeline and is an array
 * of buffers holding filter parameters.
 *
 * Note: the #VAProcPipelineCaps structure contains user-provided arrays.
 * If non-NULL, the corresponding \c num_* fields shall be filled in on
 * input with the number of elements allocated. Upon successful return,
 * the actual number of elements will be overwritten into the \c num_*
 * fields. Otherwise, \c VA_STATUS_ERROR_MAX_NUM_EXCEEDED is returned
 * and \c num_* fields are adjusted to the number of elements that would
 * be returned if enough space was available.
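 *
 * A minimal sketch of querying pipeline capabilities for a pipeline made of a
 * single filter is shown below; \c noisered_buf is assumed to be a previously
 * created filter parameter buffer (e.g. a #VAProcFilterParameterBuffer for
 * noise reduction).
 *
 * \code
 * VABufferID filter_bufs[1] = { noisered_buf };
 * VAProcPipelineCaps pipeline_caps;
 * memset(&pipeline_caps, 0, sizeof(pipeline_caps));
 * vaQueryVideoProcPipelineCaps(va_dpy, vpp_ctx,
 *     filter_bufs, 1, &pipeline_caps
 * );
 * \endcode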
 *
 * @param[in] dpy                the VA display
 * @param[in] context            the video processing context
 * @param[in] filters            the array of VA buffers defining the video
 *      processing pipeline
 * @param[in] num_filters        the number of elements in filters
 * @param[in,out] pipeline_caps  the video processing pipeline capabilities
 */
VAStatus
vaQueryVideoProcPipelineCaps(
    VADisplay dpy,
    VAContextID context,
    VABufferID *filters,
    unsigned int num_filters,
    VAProcPipelineCaps *pipeline_caps
);

/**@}*/

#ifdef __cplusplus
}
#endif

#endif /* VA_VPP_H */