FFmpeg
hwcontext_vdpau.c
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#include <stdint.h>
#include <string.h>

#include <vdpau/vdpau.h>

#include "buffer.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "hwcontext_vdpau.h"
#include "mem.h"
#include "pixfmt.h"
#include "pixdesc.h"

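/*
 * Private state used by this backend: per-device VDPAU entry points and,
 * for each supported chroma type, the list of pixel formats usable for
 * uploads/downloads; the frames context keeps the subset matching its
 * sw_format.
 */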
typedef struct VDPAUDeviceContext {
    VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
    VdpVideoSurfaceGetBitsYCbCr                     *get_data;
    VdpVideoSurfacePutBitsYCbCr                     *put_data;
    VdpVideoSurfaceCreate                           *surf_create;
    VdpVideoSurfaceDestroy                          *surf_destroy;

    enum AVPixelFormat *pix_fmts[3];
    int              nb_pix_fmts[3];
} VDPAUDeviceContext;

typedef struct VDPAUFramesContext {
    VdpVideoSurfaceGetBitsYCbCr *get_data;
    VdpVideoSurfacePutBitsYCbCr *put_data;
    VdpChromaType                chroma_type;
    int                          chroma_idx;

    const enum AVPixelFormat    *pix_fmts;
    int                          nb_pix_fmts;
} VDPAUFramesContext;

typedef struct VDPAUPixFmtMap {
    VdpYCbCrFormat     vdpau_fmt;
    enum AVPixelFormat pix_fmt;
} VDPAUPixFmtMap;

static const VDPAUPixFmtMap pix_fmts_420[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
    { 0,                     AV_PIX_FMT_NONE,   },
};

static const VDPAUPixFmtMap pix_fmts_422[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
    { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
    { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
    { 0,                     AV_PIX_FMT_NONE,   },
};

static const VDPAUPixFmtMap pix_fmts_444[] = {
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV444P },
    { 0,                     AV_PIX_FMT_NONE,   },
};

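/*
 * Associates each VDPAU chroma type with the sw_format used when allocating
 * frame pools and with its VdpYCbCrFormat <-> AVPixelFormat candidate map.
 */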
static const struct {
    VdpChromaType chroma_type;
    enum AVPixelFormat frames_sw_format;
    const VDPAUPixFmtMap *map;
} vdpau_pix_fmts[] = {
    { VDP_CHROMA_TYPE_420, AV_PIX_FMT_YUV420P, pix_fmts_420 },
    { VDP_CHROMA_TYPE_422, AV_PIX_FMT_YUV422P, pix_fmts_422 },
    { VDP_CHROMA_TYPE_444, AV_PIX_FMT_YUV444P, pix_fmts_444 },
};

static int count_pixfmts(const VDPAUPixFmtMap *map)
{
    int count = 0;
    while (map->pix_fmt != AV_PIX_FMT_NONE) {
        map++;
        count++;
    }
    return count;
}
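
/*
 * Probe VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities for every entry of
 * every per-chroma map, keeping only the formats the driver reports as
 * supported; each resulting list is terminated by AV_PIX_FMT_NONE.
 */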

static int vdpau_init_pixmfts(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext    *priv = ctx->internal->priv;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) {
        const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map;
        int nb_pix_fmts;

        nb_pix_fmts = count_pixfmts(map);
        priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1, sizeof(*priv->pix_fmts[i]));
        if (!priv->pix_fmts[i])
            return AVERROR(ENOMEM);

        nb_pix_fmts = 0;
        while (map->pix_fmt != AV_PIX_FMT_NONE) {
            VdpBool supported;
            VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type,
                                                    map->vdpau_fmt, &supported);
            if (err == VDP_STATUS_OK && supported)
                priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
            map++;
        }
        priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE;
        priv->nb_pix_fmts[i]             = nb_pix_fmts;
    }

    return 0;
}

#define GET_CALLBACK(id, result)                                            \
do {                                                                        \
    void *tmp;                                                              \
    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                 \
    if (err != VDP_STATUS_OK) {                                             \
        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n"); \
        return AVERROR_UNKNOWN;                                             \
    }                                                                       \
    result = tmp;                                                           \
} while (0)
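
/*
 * Device init: resolve the surface-related VDPAU entry points through
 * get_proc_address and query which transfer formats the device supports.
 */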

static int vdpau_device_init(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDeviceContext   *priv  = ctx->internal->priv;
    VdpStatus             err;
    int                   ret;

    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
                 priv->get_transfer_caps);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, priv->get_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR, priv->put_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE,           priv->surf_create);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY,          priv->surf_destroy);

    ret = vdpau_init_pixmfts(ctx);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the supported pixel formats\n");
        return ret;
    }

    return 0;
}

static void vdpau_device_uninit(AVHWDeviceContext *ctx)
{
    VDPAUDeviceContext *priv = ctx->internal->priv;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++)
        av_freep(&priv->pix_fmts[i]);
}

static int vdpau_frames_get_constraints(AVHWDeviceContext *ctx,
                                        const void *hwconfig,
                                        AVHWFramesConstraints *constraints)
{
    VDPAUDeviceContext *priv = ctx->internal->priv;
    int nb_sw_formats = 0;
    int i;

    constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(vdpau_pix_fmts) + 1,
                                                    sizeof(*constraints->valid_sw_formats));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (priv->nb_pix_fmts[i] > 1)
            constraints->valid_sw_formats[nb_sw_formats++] = vdpau_pix_fmts[i].frames_sw_format;
    }
    constraints->valid_sw_formats[nb_sw_formats] = AV_PIX_FMT_NONE;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_VDPAU;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}
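
/*
 * Pool buffers do not wrap real memory: each AVBufferRef carries a
 * VdpVideoSurface handle cast into its data pointer, released with
 * VdpVideoSurfaceDestroy when the buffer is freed.
 */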

static void vdpau_buffer_free(void *opaque, uint8_t *data)
{
    AVHWFramesContext          *ctx = opaque;
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VdpVideoSurface            surf = (VdpVideoSurface)(uintptr_t)data;

    device_priv->surf_destroy(surf);
}

static AVBufferRef *vdpau_pool_alloc(void *opaque, int size)
{
    AVHWFramesContext             *ctx = opaque;
    VDPAUFramesContext           *priv = ctx->internal->priv;
    AVVDPAUDeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    VDPAUDeviceContext    *device_priv = ctx->device_ctx->internal->priv;

    AVBufferRef *ret;
    VdpVideoSurface surf;
    VdpStatus err;

    err = device_priv->surf_create(device_hwctx->device, priv->chroma_type,
                                   ctx->width, ctx->height, &surf);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error allocating a VDPAU video surface\n");
        return NULL;
    }

    ret = av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf),
                           vdpau_buffer_free, ctx, AV_BUFFER_FLAG_READONLY);
    if (!ret) {
        device_priv->surf_destroy(surf);
        return NULL;
    }

    return ret;
}

static int vdpau_frames_init(AVHWFramesContext *ctx)
{
    VDPAUDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    VDPAUFramesContext        *priv = ctx->internal->priv;

    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (vdpau_pix_fmts[i].frames_sw_format == ctx->sw_format) {
            priv->chroma_type = vdpau_pix_fmts[i].chroma_type;
            priv->chroma_idx  = i;
            priv->pix_fmts    = device_priv->pix_fmts[i];
            priv->nb_pix_fmts = device_priv->nb_pix_fmts[i];
            break;
        }
    }
    if (priv->nb_pix_fmts < 2) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported sw format: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(VdpVideoSurface), ctx,
                                                            vdpau_pool_alloc, NULL);
        if (!ctx->internal->pool_internal)
            return AVERROR(ENOMEM);
    }

    priv->get_data = device_priv->get_data;
    priv->put_data = device_priv->put_data;

    return 0;
}
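
/* Exported frames carry the VdpVideoSurface handle in data[3], as required
 * for AV_PIX_FMT_VDPAU. */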

static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[3] = frame->buf[0]->data;
    frame->format  = AV_PIX_FMT_VDPAU;
    frame->width   = ctx->width;
    frame->height  = ctx->height;

    return 0;
}

static int vdpau_transfer_get_formats(AVHWFramesContext *ctx,
                                      enum AVHWFrameTransferDirection dir,
                                      enum AVPixelFormat **formats)
{
    VDPAUFramesContext *priv = ctx->internal->priv;

    enum AVPixelFormat *fmts;

    if (priv->nb_pix_fmts == 1) {
        av_log(ctx, AV_LOG_ERROR,
               "No target formats are supported for this chroma type\n");
        return AVERROR(ENOSYS);
    }

    fmts = av_malloc_array(priv->nb_pix_fmts, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    memcpy(fmts, priv->pix_fmts, sizeof(*fmts) * (priv->nb_pix_fmts));
    *formats = fmts;

    return 0;
}
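
/*
 * Download path (surface -> system memory) via VdpVideoSurfaceGetBitsYCbCr.
 * For YV12 the U and V plane pointers are swapped, since VDPAU's YV12 plane
 * order is the reverse of the corresponding planar AVPixelFormat.
 */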

static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
                                    const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)src->data[3];

    void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(data) && dst->data[i]; i++) {
        data[i] = dst->data[i];
        if (dst->linesize[i] < 0 || dst->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   dst->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = dst->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == dst->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported target pixel format: %s\n",
               av_get_pix_fmt_name(dst->format));
        return AVERROR(EINVAL);
    }

    if (vdpau_format == VDP_YCBCR_FORMAT_YV12)
        FFSWAP(void*, data[1], data[2]);

    err = priv->get_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
                                  const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->internal->priv;
    VdpVideoSurface     surf = (VdpVideoSurface)(uintptr_t)dst->data[3];

    const void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(data) && src->data[i]; i++) {
        data[i] = src->data[i];
        if (src->linesize[i] < 0 || src->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   src->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = src->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == src->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported source pixel format: %s\n",
               av_get_pix_fmt_name(src->format));
        return AVERROR(EINVAL);
    }

    if (vdpau_format == VDP_YCBCR_FORMAT_YV12)
        FFSWAP(const void*, data[1], data[2]);

    err = priv->put_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error uploading the data to a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}
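
/*
 * X11-only device creation: open the requested display (or $DISPLAY when the
 * device string is NULL) and create a VDPAU device on its default screen.
 */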

#if HAVE_VDPAU_X11
#include <vdpau/vdpau_x11.h>
#include <X11/Xlib.h>

typedef struct VDPAUDevicePriv {
    VdpDeviceDestroy *device_destroy;
    Display          *dpy;
} VDPAUDevicePriv;

static void vdpau_device_free(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDevicePriv       *priv = ctx->user_opaque;

    if (priv->device_destroy)
        priv->device_destroy(hwctx->device);
    if (priv->dpy)
        XCloseDisplay(priv->dpy);
    av_freep(&priv);
}

static int vdpau_device_create(AVHWDeviceContext *ctx, const char *device,
                               AVDictionary *opts, int flags)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;

    VDPAUDevicePriv *priv;
    VdpStatus err;
    VdpGetInformationString *get_information_string;
    const char *display, *vendor;

    priv = av_mallocz(sizeof(*priv));
    if (!priv)
        return AVERROR(ENOMEM);

    ctx->user_opaque = priv;
    ctx->free        = vdpau_device_free;

    priv->dpy = XOpenDisplay(device);
    if (!priv->dpy) {
        av_log(ctx, AV_LOG_ERROR, "Cannot open the X11 display %s.\n",
               XDisplayName(device));
        return AVERROR_UNKNOWN;
    }
    display = XDisplayString(priv->dpy);

    err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
                                &hwctx->device, &hwctx->get_proc_address);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "VDPAU device creation on X11 display %s failed.\n",
               display);
        return AVERROR_UNKNOWN;
    }

    GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
    GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY,         priv->device_destroy);

    get_information_string(&vendor);
    av_log(ctx, AV_LOG_VERBOSE, "Successfully created a VDPAU device (%s) on "
           "X11 display %s\n", vendor, display);

    return 0;
}
#endif
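
/*
 * Backend descriptor through which libavutil's generic hwcontext code
 * dispatches into the callbacks above.
 */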

const HWContextType ff_hwcontext_type_vdpau = {
    .type                   = AV_HWDEVICE_TYPE_VDPAU,
    .name                   = "VDPAU",

    .device_hwctx_size      = sizeof(AVVDPAUDeviceContext),
    .device_priv_size       = sizeof(VDPAUDeviceContext),
    .frames_priv_size       = sizeof(VDPAUFramesContext),

#if HAVE_VDPAU_X11
    .device_create          = vdpau_device_create,
#endif
    .device_init            = vdpau_device_init,
    .device_uninit          = vdpau_device_uninit,
    .frames_get_constraints = vdpau_frames_get_constraints,
    .frames_init            = vdpau_frames_init,
    .frames_get_buffer      = vdpau_get_buffer,
    .transfer_get_formats   = vdpau_transfer_get_formats,
    .transfer_data_to       = vdpau_transfer_data_to,
    .transfer_data_from     = vdpau_transfer_data_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU, AV_PIX_FMT_NONE },
};
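
For orientation, below is a minimal usage sketch (not part of hwcontext_vdpau.c) showing how the backend above is driven through libavutil's public hwcontext API. The function name vdpau_roundtrip_example, the NULL display string, the 1920x1080 size and the YUV420P sw_format are illustrative choices only.

#include <libavutil/buffer.h>
#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>
#include <libavutil/pixfmt.h>

static int vdpau_roundtrip_example(void)
{
    AVBufferRef *device_ref = NULL, *frames_ref = NULL;
    AVFrame *hw = NULL, *sw = NULL;
    int ret;

    /* Create the VDPAU device; a NULL device string means the default
     * X11 display, ending up in vdpau_device_create() above. */
    ret = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VDPAU,
                                 NULL, NULL, 0);
    if (ret < 0)
        return ret;

    /* Allocate and configure a frames context; av_hwframe_ctx_init()
     * invokes vdpau_frames_init() and sets up the surface pool. */
    frames_ref = av_hwframe_ctx_alloc(device_ref);
    if (!frames_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    {
        AVHWFramesContext *fc = (AVHWFramesContext*)frames_ref->data;
        fc->format    = AV_PIX_FMT_VDPAU;
        fc->sw_format = AV_PIX_FMT_YUV420P; /* selects VDP_CHROMA_TYPE_420 */
        fc->width     = 1920;
        fc->height    = 1080;
    }
    ret = av_hwframe_ctx_init(frames_ref);
    if (ret < 0)
        goto fail;

    hw = av_frame_alloc();
    sw = av_frame_alloc();
    if (!hw || !sw) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    /* Get a surface from the pool (vdpau_get_buffer()) and download it to
     * system memory (vdpau_transfer_data_from()).  Leaving sw->format unset
     * lets libavutil pick the first format reported by
     * vdpau_transfer_get_formats(). */
    ret = av_hwframe_get_buffer(frames_ref, hw, 0);
    if (ret < 0)
        goto fail;
    ret = av_hwframe_transfer_data(sw, hw, 0);

fail:
    av_frame_free(&sw);
    av_frame_free(&hw);
    av_buffer_unref(&frames_ref);
    av_buffer_unref(&device_ref);
    return ret;
}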