FFmpeg
amfenc.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
19 #include "config.h"
20 
21 #include "libavutil/avassert.h"
22 #include "libavutil/imgutils.h"
23 #include "libavutil/hwcontext.h"
25 #if CONFIG_D3D11VA
27 #endif
28 #if CONFIG_DXVA2
29 #define COBJMACROS
31 #endif
32 #include "libavutil/mem.h"
33 #include "libavutil/pixdesc.h"
34 #include "libavutil/time.h"
35 
36 #include "amfenc.h"
37 #include "encode.h"
38 
39 #define AMF_AV_FRAME_REF L"av_frame_ref"
40 #define PTS_PROP L"PtsProp"
41 
42 #if CONFIG_D3D11VA
43 #include <d3d11.h>
44 #endif
45 
46 #ifdef _WIN32
47 #include "compat/w32dlfcn.h"
48 #else
49 #include <dlfcn.h>
50 #endif
51 
52 #define FFMPEG_AMF_WRITER_ID L"ffmpeg_amf"
53 
54 
58 #if CONFIG_D3D11VA
60 #endif
61 #if CONFIG_DXVA2
63 #endif
74 };
75 
77 
78 static int amf_init_encoder(AVCodecContext *avctx)
79 {
81  const wchar_t *codec_id = NULL;
82  AMF_RESULT res;
84  AVHWDeviceContext *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
85  AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext *)hw_device_ctx->hwctx;
86  int alloc_size;
87  wchar_t name[512];
88 
89 
90  alloc_size = swprintf(name, amf_countof(name), L"%s%lld",PTS_PROP, next_encoder_index) + 1;
91  ctx->pts_property_name = av_memdup(name, alloc_size * sizeof(wchar_t));
92  if(!ctx->pts_property_name)
93  return AVERROR(ENOMEM);
94 
95  alloc_size = swprintf(name, amf_countof(name), L"%s%lld",AMF_AV_FRAME_REF, next_encoder_index) + 1;
96  ctx->av_frame_property_name = av_memdup(name, alloc_size * sizeof(wchar_t));
97  if(!ctx->av_frame_property_name)
98  return AVERROR(ENOMEM);
99 
101 
102  switch (avctx->codec->id) {
103  case AV_CODEC_ID_H264:
104  codec_id = AMFVideoEncoderVCE_AVC;
105  break;
106  case AV_CODEC_ID_HEVC:
107  codec_id = AMFVideoEncoder_HEVC;
108  break;
109  case AV_CODEC_ID_AV1 :
110  codec_id = AMFVideoEncoder_AV1;
111  break;
112  default:
113  break;
114  }
115  AMF_RETURN_IF_FALSE(ctx, codec_id != NULL, AVERROR(EINVAL), "Codec %d is not supported\n", avctx->codec->id);
116 
117  if (avctx->hw_frames_ctx)
118  pix_fmt = ((AVHWFramesContext*)avctx->hw_frames_ctx->data)->sw_format;
119  else
120  pix_fmt = avctx->pix_fmt;
121 
122  if (pix_fmt == AV_PIX_FMT_P010) {
123  AMF_RETURN_IF_FALSE(ctx, amf_device_ctx->version >= AMF_MAKE_FULL_VERSION(1, 4, 32, 0), AVERROR_UNKNOWN, "10-bit encoder is not supported by AMD GPU drivers versions lower than 23.30.\n");
124  }
125 
126  ctx->format = av_av_to_amf_format(pix_fmt);
127  AMF_RETURN_IF_FALSE(ctx, ctx->format != AMF_SURFACE_UNKNOWN, AVERROR(EINVAL),
128  "Format %s is not supported\n", av_get_pix_fmt_name(pix_fmt));
129 
130  res = amf_device_ctx->factory->pVtbl->CreateComponent(amf_device_ctx->factory, amf_device_ctx->context, codec_id, &ctx->encoder);
131  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_ENCODER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", codec_id, res);
132 
133  ctx->submitted_frame = 0;
134  ctx->encoded_frame = 0;
135  ctx->eof = 0;
136 
137  return 0;
138 }
139 
141 {
142  AMFEncoderContext *ctx = avctx->priv_data;
143 
144  if (ctx->encoder) {
145  ctx->encoder->pVtbl->Terminate(ctx->encoder);
146  ctx->encoder->pVtbl->Release(ctx->encoder);
147  ctx->encoder = NULL;
148  }
149 
150  av_buffer_unref(&ctx->device_ctx_ref);
151  av_fifo_freep2(&ctx->timestamp_list);
152 
153  if (ctx->output_list) {
154  // release remaining AMF output buffers
155  while(av_fifo_can_read(ctx->output_list)) {
156  AMFBuffer* buffer = NULL;
157  av_fifo_read(ctx->output_list, &buffer, 1);
158  if(buffer != NULL)
159  buffer->pVtbl->Release(buffer);
160  }
161  av_fifo_freep2(&ctx->output_list);
162  }
163  av_freep(&ctx->pts_property_name);
164  av_freep(&ctx->av_frame_property_name);
165 
166  return 0;
167 }
168 
169 static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame,
170  AMFSurface* surface)
171 {
172  AMFPlane *plane;
173  uint8_t *dst_data[4] = {0};
174  int dst_linesize[4] = {0};
175  int planes;
176  int i;
177 
178  planes = (int)surface->pVtbl->GetPlanesCount(surface);
179  av_assert0(planes < FF_ARRAY_ELEMS(dst_data));
180 
181  for (i = 0; i < planes; i++) {
182  plane = surface->pVtbl->GetPlaneAt(surface, i);
183  dst_data[i] = plane->pVtbl->GetNative(plane);
184  dst_linesize[i] = plane->pVtbl->GetHPitch(plane);
185  }
186  av_image_copy2(dst_data, dst_linesize,
187  frame->data, frame->linesize, frame->format,
188  avctx->width, avctx->height);
189 
190  return 0;
191 }
192 
/**
 * Copy one encoded AMF output buffer into an AVPacket and fill packet
 * metadata: key-frame flag, picture type, average QP side data, pts
 * (recovered from the per-instance surface property) and dts (shifted by
 * the computed B-frame delay).
 *
 * @return 0 on success, negative AVERROR on failure.
 */
static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
{
    AMFEncoderContext *ctx = avctx->priv_data;
    int ret;
    AMFVariantStruct var = {0};
    int64_t timestamp = AV_NOPTS_VALUE;
    int64_t size = buffer->pVtbl->GetSize(buffer);
    enum AVPictureType pict_type = 0;
    int average_qp = -1;

    if ((ret = ff_get_encode_buffer(avctx, pkt, size, 0)) < 0) {
        return ret;
    }
    // bitstream bytes live in the AMF buffer's native memory
    memcpy(pkt->data, buffer->pVtbl->GetNative(buffer), size);

    // Map the codec-specific output-frame-type property to AV_PKT_FLAG_KEY
    // and an AVPictureType, then read the codec-specific average-QP statistic.
    switch (avctx->codec->id) {
    case AV_CODEC_ID_H264:
        buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE, &var);
        pkt->flags |= AV_PKT_FLAG_KEY * (var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_IDR);
        pict_type = var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_IDR ? AV_PICTURE_TYPE_I :
                    var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_I ? AV_PICTURE_TYPE_I :
                    var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_P ? AV_PICTURE_TYPE_P :
                    var.int64Value == AMF_VIDEO_ENCODER_OUTPUT_DATA_TYPE_B ? AV_PICTURE_TYPE_B : 0;

        var.int64Value = -1; // sentinel: property may be absent
        if ((buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_STATISTIC_AVERAGE_QP, &var)) == AMF_OK) {
            average_qp = FFMAX((int)var.int64Value, -1);
        }
        break;
    case AV_CODEC_ID_HEVC:
        buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE, &var);
        pkt->flags |= AV_PKT_FLAG_KEY * (var.int64Value == AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_IDR);
        pict_type = var.int64Value == AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_IDR ? AV_PICTURE_TYPE_I :
                    var.int64Value == AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_I ? AV_PICTURE_TYPE_I :
                    var.int64Value == AMF_VIDEO_ENCODER_HEVC_OUTPUT_DATA_TYPE_P ? AV_PICTURE_TYPE_P : 0;

        var.int64Value = -1;
        if ((buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_HEVC_STATISTIC_AVERAGE_QP, &var)) == AMF_OK) {
            average_qp = FFMAX((int)var.int64Value, -1);
        }
        break;
    case AV_CODEC_ID_AV1:
        buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE, &var);
        pkt->flags |= AV_PKT_FLAG_KEY * (var.int64Value == AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE_KEY);
        pict_type = var.int64Value == AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE_KEY ? AV_PICTURE_TYPE_I :
                    var.int64Value == AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE_INTRA_ONLY ? AV_PICTURE_TYPE_I :
                    var.int64Value == AMF_VIDEO_ENCODER_AV1_OUTPUT_FRAME_TYPE_INTER ? AV_PICTURE_TYPE_P : 0;

        var.int64Value = -1;
        if ((buffer->pVtbl->GetProperty(buffer, AMF_VIDEO_ENCODER_AV1_STATISTIC_AVERAGE_Q_INDEX, &var)) == AMF_OK) {
            average_qp = FFMAX((int)var.int64Value, -1); // av1 qindex
            if (average_qp >= 0) {
                // map AV1 qindex (0..255) to a 0..63 quantizer value
                average_qp = (average_qp > 244) ? (average_qp <= 249 ? 62 : 63) : (average_qp + 3) >> 2; // av1 quantizer
            }
        }
        break;
    default:
        break;
    }

    if (average_qp >= 0) {
        ff_encode_add_stats_side_data(pkt, average_qp * FF_QP2LAMBDA, NULL, 0, pict_type);
    }

    // recover the original pts stored on the input surface at submission time
    buffer->pVtbl->GetProperty(buffer, ctx->pts_property_name, &var);

    pkt->pts = var.int64Value; // original pts

    AMF_RETURN_IF_FALSE(ctx, av_fifo_read(ctx->timestamp_list, &timestamp, 1) >= 0,
                        AVERROR_UNKNOWN, "timestamp_list is empty\n");

    // calc dts shift if max_b_frames > 0
    if ((ctx->max_b_frames > 0 || ((ctx->pa_adaptive_mini_gop == 1) ? true : false)) && ctx->dts_delay == 0) {
        int64_t timestamp_last = AV_NOPTS_VALUE;
        size_t can_read = av_fifo_can_read(ctx->timestamp_list);

        AMF_RETURN_IF_FALSE(ctx, can_read > 0, AVERROR_UNKNOWN,
                            "timestamp_list is empty while max_b_frames = %d\n", avctx->max_b_frames);
        av_fifo_peek(ctx->timestamp_list, &timestamp_last, 1, can_read - 1);
        /* NOTE(review): `timestamp_last < AV_NOPTS_VALUE` is always false
         * (AV_NOPTS_VALUE is INT64_MIN); only `timestamp < 0` can trigger
         * this branch. Possibly intended `timestamp_last < 0` — confirm. */
        if (timestamp < 0 || timestamp_last < AV_NOPTS_VALUE) {
            return AVERROR(ERANGE);
        }
        ctx->dts_delay = timestamp_last - timestamp;
    }
    pkt->dts = timestamp - ctx->dts_delay;
    return 0;
}
280 
281 // amfenc API implementation
283 {
284  int ret;
285  AMFEncoderContext *ctx = avctx->priv_data;
286  AVHWDeviceContext *hwdev_ctx = NULL;
287 
288  // hardcoded to current HW queue size - will auto-realloc if too small
289  ctx->timestamp_list = av_fifo_alloc2(avctx->max_b_frames + 16, sizeof(int64_t),
291  if (!ctx->timestamp_list) {
292  return AVERROR(ENOMEM);
293  }
294  ctx->output_list = av_fifo_alloc2(2, sizeof(AMFBuffer*), AV_FIFO_FLAG_AUTO_GROW);
295  if (!ctx->output_list)
296  return AVERROR(ENOMEM);
297 
298  ctx->dts_delay = 0;
299 
300  ctx->hwsurfaces_in_queue = 0;
301 
302  if (avctx->hw_device_ctx) {
303  hwdev_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
304  if (hwdev_ctx->type == AV_HWDEVICE_TYPE_AMF)
305  {
306  ctx->device_ctx_ref = av_buffer_ref(avctx->hw_device_ctx);
307  }
308  else {
310  AMF_RETURN_IF_FALSE(ctx, ret == 0, ret, "Failed to create derived AMF device context: %s\n", av_err2str(ret));
311  }
312  } else if (avctx->hw_frames_ctx) {
313  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
314  if (frames_ctx->device_ref ) {
315  if (frames_ctx->format == AV_PIX_FMT_AMF_SURFACE) {
316  ctx->device_ctx_ref = av_buffer_ref(frames_ctx->device_ref);
317  }
318  else {
319  ret = av_hwdevice_ctx_create_derived(&ctx->device_ctx_ref, AV_HWDEVICE_TYPE_AMF, frames_ctx->device_ref, 0);
320  AMF_RETURN_IF_FALSE(ctx, ret == 0, ret, "Failed to create derived AMF device context: %s\n", av_err2str(ret));
321  }
322  }
323  }
324  else {
325  ret = av_hwdevice_ctx_create(&ctx->device_ctx_ref, AV_HWDEVICE_TYPE_AMF, NULL, NULL, 0);
326  AMF_RETURN_IF_FALSE(ctx, ret == 0, ret, "Failed to create hardware device context (AMF) : %s\n", av_err2str(ret));
327  }
328 
329  if (ctx->pa_lookahead_buffer_depth >= ctx->hwsurfaces_in_queue_max) {
330  av_log(avctx, AV_LOG_WARNING,
331  "async_depth (%d) too small for lookahead (%d), increasing to (%d)\n",
332  ctx->hwsurfaces_in_queue_max,
333  ctx->pa_lookahead_buffer_depth,
334  ctx->pa_lookahead_buffer_depth + 1);
335  ctx->hwsurfaces_in_queue_max = ctx->pa_lookahead_buffer_depth + 1;
336  }
337 
338  if ((ret = amf_init_encoder(avctx)) == 0) {
339  return 0;
340  }
341 
342  ff_amf_encode_close(avctx);
343  return ret;
344 }
345 
346 static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
347 {
348  AMF_RESULT res;
349  AMFVariantStruct var;
350  res = AMFVariantInit(&var);
351  if (res == AMF_OK) {
352  AMFGuid guid_AMFInterface = IID_AMFInterface();
353  AMFInterface *amf_interface;
354  res = val->pVtbl->QueryInterface(val, &guid_AMFInterface, (void**)&amf_interface);
355 
356  if (res == AMF_OK) {
357  res = AMFVariantAssignInterface(&var, amf_interface);
358  amf_interface->pVtbl->Release(amf_interface);
359  }
360  if (res == AMF_OK) {
361  res = object->pVtbl->SetProperty(object, name, var);
362  }
363  AMFVariantClear(&var);
364  }
365  return res;
366 }
367 
368 static AMF_RESULT amf_store_attached_frame_ref(AMFEncoderContext *ctx, const AVFrame *frame, AMFSurface *surface)
369 {
370  AMF_RESULT res = AMF_FAIL;
371  int64_t data;
373  if (frame_ref) {
374  memcpy(&data, &frame_ref, sizeof(frame_ref)); // store pointer in 8 bytes
375  AMF_ASSIGN_PROPERTY_INT64(res, surface, ctx->av_frame_property_name, data);
376  }
377  return res;
378 }
379 
381 {
382  AMFVariantStruct var = {0};
383  AMF_RESULT res = buffer->pVtbl->GetProperty(buffer, ctx->av_frame_property_name, &var);
384  if(res == AMF_OK && var.int64Value){
386  memcpy(&frame_ref, &var.int64Value, sizeof(frame_ref));
388  }
389  return res;
390 }
391 
392 static int amf_submit_frame(AVCodecContext *avctx, AVFrame *frame, AMFSurface **surface_resubmit)
393 {
394  AMFEncoderContext *ctx = avctx->priv_data;
395  AVHWDeviceContext *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
396  AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext *)hw_device_ctx->hwctx;
397  AMFSurface *surface;
398  AMF_RESULT res;
399  int ret;
400  int hw_surface = 0;
401  int output_delay = FFMAX(ctx->max_b_frames, 0) + ((avctx->flags & AV_CODEC_FLAG_LOW_DELAY) ? 0 : 1);
402 
403 // prepare surface from frame
404  switch (frame->format) {
405 #if CONFIG_D3D11VA
406  case AV_PIX_FMT_D3D11:
407  {
408  static const GUID AMFTextureArrayIndexGUID = { 0x28115527, 0xe7c3, 0x4b66, { 0x99, 0xd3, 0x4f, 0x2a, 0xe6, 0xb4, 0x7f, 0xaf } };
409  ID3D11Texture2D *texture = (ID3D11Texture2D*)frame->data[0]; // actual texture
410  int index = (intptr_t)frame->data[1]; // index is a slice in texture array is - set to tell AMF which slice to use
411  av_assert0(frame->hw_frames_ctx && avctx->hw_frames_ctx &&
412  frame->hw_frames_ctx->data == avctx->hw_frames_ctx->data);
413  texture->lpVtbl->SetPrivateData(texture, &AMFTextureArrayIndexGUID, sizeof(index), &index);
414  res = amf_device_ctx->context->pVtbl->CreateSurfaceFromDX11Native(amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
415  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX11Native() failed with error %d\n", res);
416  hw_surface = 1;
417  }
418  break;
419 #endif
420 #if CONFIG_DXVA2
422  {
423  IDirect3DSurface9 *texture = (IDirect3DSurface9 *)frame->data[3]; // actual texture
424  res = amf_device_ctx->context->pVtbl->CreateSurfaceFromDX9Native(amf_device_ctx->context, texture, &surface, NULL); // wrap to AMF surface
425  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "CreateSurfaceFromDX9Native() failed with error %d\n", res);
426  hw_surface = 1;
427  }
428  break;
429 #endif
431  {
432  surface = (AMFSurface*)frame->data[0];
433  surface->pVtbl->Acquire(surface);
434  hw_surface = 1;
435  }
436  break;
437  default:
438  {
439  res = amf_device_ctx->context->pVtbl->AllocSurface(amf_device_ctx->context, AMF_MEMORY_HOST, ctx->format, avctx->width, avctx->height, &surface);
440  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR(ENOMEM), "AllocSurface() failed with error %d\n", res);
441  amf_copy_surface(avctx, frame, surface);
442  }
443  break;
444  }
445  if (hw_surface) {
447  ctx->hwsurfaces_in_queue++;
448  // input HW surfaces can be vertically aligned by 16; tell AMF the real size
449  surface->pVtbl->SetCrop(surface, 0, 0, frame->width, frame->height);
450  }
451  // HDR10 metadata
452  if (frame->color_trc == AVCOL_TRC_SMPTE2084) {
453  AMFBuffer * hdrmeta_buffer = NULL;
454  res = amf_device_ctx->context->pVtbl->AllocBuffer(amf_device_ctx->context, AMF_MEMORY_HOST, sizeof(AMFHDRMetadata), &hdrmeta_buffer);
455  if (res == AMF_OK) {
456  AMFHDRMetadata * hdrmeta = (AMFHDRMetadata*)hdrmeta_buffer->pVtbl->GetNative(hdrmeta_buffer);
457  if (av_amf_extract_hdr_metadata(frame, hdrmeta) == 0) {
458  switch (avctx->codec->id) {
459  case AV_CODEC_ID_H264:
460  AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_INPUT_HDR_METADATA, hdrmeta_buffer); break;
461  case AV_CODEC_ID_HEVC:
462  AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_HEVC_INPUT_HDR_METADATA, hdrmeta_buffer); break;
463  case AV_CODEC_ID_AV1:
464  AMF_ASSIGN_PROPERTY_INTERFACE(res, ctx->encoder, AMF_VIDEO_ENCODER_AV1_INPUT_HDR_METADATA, hdrmeta_buffer); break;
465  }
466  res = amf_set_property_buffer(surface, L"av_frame_hdrmeta", hdrmeta_buffer);
467  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SetProperty failed for \"av_frame_hdrmeta\" with error %d\n", res);
468  }
469  hdrmeta_buffer->pVtbl->Release(hdrmeta_buffer);
470  }
471  }
472  surface->pVtbl->SetPts(surface, frame->pts);
473 
474  AMF_ASSIGN_PROPERTY_INT64(res, surface, ctx->pts_property_name, frame->pts);
475 
476  switch (avctx->codec->id) {
477  case AV_CODEC_ID_H264:
478  AMF_ASSIGN_PROPERTY_BOOL(res, surface, AMF_VIDEO_ENCODER_STATISTICS_FEEDBACK, 1);
479  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_AUD, !!ctx->aud);
480  switch (frame->pict_type) {
481  case AV_PICTURE_TYPE_I:
482  if (ctx->forced_idr) {
483  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_SPS, 1);
484  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_INSERT_PPS, 1);
485  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_IDR);
486  } else {
487  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_I);
488  }
489  break;
490  case AV_PICTURE_TYPE_P:
491  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_P);
492  break;
493  case AV_PICTURE_TYPE_B:
494  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_PICTURE_TYPE_B);
495  break;
496  }
497  break;
498  case AV_CODEC_ID_HEVC:
499  AMF_ASSIGN_PROPERTY_BOOL(res, surface, AMF_VIDEO_ENCODER_HEVC_STATISTICS_FEEDBACK, 1);
500  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_AUD, !!ctx->aud);
501  switch (frame->pict_type) {
502  case AV_PICTURE_TYPE_I:
503  if (ctx->forced_idr) {
504  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_INSERT_HEADER, 1);
505  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_IDR);
506  } else {
507  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_I);
508  }
509  break;
510  case AV_PICTURE_TYPE_P:
511  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_HEVC_FORCE_PICTURE_TYPE, AMF_VIDEO_ENCODER_HEVC_PICTURE_TYPE_P);
512  break;
513  }
514  break;
515  case AV_CODEC_ID_AV1:
516  AMF_ASSIGN_PROPERTY_BOOL(res, surface, AMF_VIDEO_ENCODER_AV1_STATISTICS_FEEDBACK, 1);
517  if (frame->pict_type == AV_PICTURE_TYPE_I) {
518  if (ctx->forced_idr) {
519  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_INSERT_SEQUENCE_HEADER, 1);
520  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE_KEY);
521  } else {
522  AMF_ASSIGN_PROPERTY_INT64(res, surface, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE, AMF_VIDEO_ENCODER_AV1_FORCE_FRAME_TYPE_INTRA_ONLY);
523  }
524  }
525  break;
526  default:
527  break;
528  }
529  // submit surface
530  res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
531  if (res == AMF_INPUT_FULL) { // handle full queue
532  //store surface for later submission
533  *surface_resubmit = surface;
534  } else {
535  surface->pVtbl->Release(surface);
536  AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);
537  ctx->submitted_frame++;
538  ret = av_fifo_write(ctx->timestamp_list, &frame->pts, 1);
539  if (ret < 0)
540  return ret;
541  if(ctx->submitted_frame <= ctx->encoded_frame + output_delay)
542  return AVERROR(EAGAIN); // too soon to poll or wait
543  }
544  return 0;
545 }
546 
547 static int amf_submit_frame_locked(AVCodecContext *avctx, AVFrame *frame, AMFSurface **surface_resubmit)
548 {
549  int ret;
550  AMFEncoderContext *ctx = avctx->priv_data;
551  AVHWDeviceContext *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
552  AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext *)hw_device_ctx->hwctx;
553 
554  if (amf_device_ctx->lock)
555  amf_device_ctx->lock(amf_device_ctx->lock_ctx);
556  ret = amf_submit_frame(avctx, frame, surface_resubmit);
557  if (amf_device_ctx->unlock)
558  amf_device_ctx->unlock(amf_device_ctx->lock_ctx);
559 
560  return ret;
561 }
562 static AMF_RESULT amf_query_output(AVCodecContext *avctx, AMFBuffer **buffer)
563 {
564  AMFEncoderContext *ctx = avctx->priv_data;
565  AMFData *data = NULL;
566  AMF_RESULT ret = ctx->encoder->pVtbl->QueryOutput(ctx->encoder, &data);
567  *buffer = NULL;
568  if (data) {
569  AMFGuid guid = IID_AMFBuffer();
570  data->pVtbl->QueryInterface(data, &guid, (void**)buffer); // query for buffer interface
571  data->pVtbl->Release(data);
572  if (amf_release_attached_frame_ref(ctx, *buffer) == AMF_OK)
573  ctx->hwsurfaces_in_queue--;
574  ctx->encoded_frame++;
575  }
576  return ret;
577 }
578 
/*
 * Encoding one frame - common receive_packet for all AMF encoders.
 *
 * NOTE(review): the doc extraction dropped the signature line (amfenc.c:579).
 * Per the file's doc index it is:
 *   int ff_amf_receive_packet(AVCodecContext *avctx, AVPacket *avpkt)
 */
{
    AMFEncoderContext *ctx = avctx->priv_data;
    AMFSurface *surface = NULL; // set when SubmitInput hit AMF_INPUT_FULL and must be resubmitted below
    AMF_RESULT res;
    int ret;
    AMF_RESULT res_query;
    AMFBuffer* buffer = NULL;
    /* NOTE(review): a line was dropped here (amfenc.c:587) — the code below
     * uses an undeclared `frame`; presumably "AVFrame *frame = ..." — confirm
     * against upstream. */
    int block_and_wait;
    int64_t pts = 0;
    // pipeline depth: B-frame reordering plus one unless low-delay is requested
    int output_delay = FFMAX(ctx->max_b_frames, 0) + ((avctx->flags & AV_CODEC_FLAG_LOW_DELAY) ? 0 : 1);

    if (!ctx->encoder){
        /* NOTE(review): line amfenc.c:593 dropped here (likely an av_log) — confirm. */
        return AVERROR(EINVAL);
    }
    // check if some outputs are available
    av_fifo_read(ctx->output_list, &buffer, 1);
    if (buffer != NULL) { // return already retrieved output
        ret = amf_copy_buffer(avctx, avpkt, buffer);
        buffer->pVtbl->Release(buffer);
        return ret;
    }

    ret = ff_encode_get_frame(avctx, frame);
    if(ret < 0){
        if(ret != AVERROR_EOF){
            /* NOTE(review): line amfenc.c:607 dropped here — confirm. */
            if(ret == AVERROR(EAGAIN)){
                if(ctx->submitted_frame <= ctx->encoded_frame + output_delay) // too soon to poll
                    return ret;
            }
        }
    }
    if(ret != AVERROR(EAGAIN)){
        if (!frame->buf[0]) { // submit drain
            if (!ctx->eof) { // submit drain one time only
                if(!ctx->delayed_drain) {
                    res = ctx->encoder->pVtbl->Drain(ctx->encoder);
                    if (res == AMF_INPUT_FULL) {
                        ctx->delayed_drain = 1; // input queue is full: resubmit Drain() in receive loop
                    } else {
                        if (res == AMF_OK) {
                            ctx->eof = 1; // drain started
                        }
                        AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Drain() failed with error %d\n", res);
                    }
                }
            }
        } else { // submit frame
            ret = amf_submit_frame_locked(avctx, frame, &surface);
            if(ret < 0){
                /* NOTE(review): line amfenc.c:632 dropped here — confirm. */
                return ret;
            }
            pts = frame->pts; // remembered for the timestamp FIFO in case of deferred resubmission
        }
    }
    /* NOTE(review): line amfenc.c:638 dropped here — confirm. */

    // Poll for output; block while a drain is pending, the input queue is
    // saturated, or a surface is awaiting resubmission.
    do {
        block_and_wait = 0;
        // poll data
        res_query = amf_query_output(avctx, &buffer);
        if (buffer) {
            ret = amf_copy_buffer(avctx, avpkt, buffer);
            buffer->pVtbl->Release(buffer);

            AMF_RETURN_IF_FALSE(ctx, ret >= 0, ret, "amf_copy_buffer() failed with error %d\n", ret);

            if (ctx->delayed_drain) { // try to resubmit drain
                res = ctx->encoder->pVtbl->Drain(ctx->encoder);
                if (res != AMF_INPUT_FULL) {
                    ctx->delayed_drain = 0;
                    ctx->eof = 1; // drain started
                    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "Repeated Drain() failed with error %d\n", res);
                } else {
                    av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed drain submission got AMF_INPUT_FULL- should not happen\n");
                }
            }
        } else if (ctx->delayed_drain || (ctx->eof && res_query != AMF_EOF) || (ctx->hwsurfaces_in_queue >= ctx->hwsurfaces_in_queue_max) || surface) {
            block_and_wait = 1;
            // Only sleep if the driver doesn't support waiting in QueryOutput()
            // or if we already have output data so we will skip calling it.
            if (!ctx->query_timeout_supported || avpkt->data || avpkt->buf) {
                av_usleep(1000);
            }
        }
    } while (block_and_wait);

    if (res_query == AMF_EOF) {
        ret = AVERROR_EOF;
    } else if (buffer == NULL) {
        ret = AVERROR(EAGAIN);
    } else {
        if(surface) {
            // resubmit surface
            do {
                res = ctx->encoder->pVtbl->SubmitInput(ctx->encoder, (AMFData*)surface);
                if (res != AMF_INPUT_FULL)
                    break;

                if (!ctx->query_timeout_supported)
                    av_usleep(1000);

                // Need to free up space in the encoder queue.
                // The number of retrieved outputs is limited currently to 21
                amf_query_output(avctx, &buffer);
                if (buffer != NULL) {
                    // park extra outputs for the next receive_packet() call
                    ret = av_fifo_write(ctx->output_list, &buffer, 1);
                    if (ret < 0)
                        return ret;
                }
            } while(res == AMF_INPUT_FULL);

            surface->pVtbl->Release(surface);
            if (res == AMF_INPUT_FULL) {
                av_log(avctx, AV_LOG_WARNING, "Data acquired but delayed SubmitInput returned AMF_INPUT_FULL- should not happen\n");
            } else {
                AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_UNKNOWN, "SubmitInput() failed with error %d\n", res);

                ret = av_fifo_write(ctx->timestamp_list, &pts, 1);

                ctx->submitted_frame++;

                if (ret < 0)
                    return ret;
            }
        }
        ret = 0;
    }
    return ret;
}
713 
715 #if CONFIG_D3D11VA
716  HW_CONFIG_ENCODER_FRAMES(D3D11, D3D11VA),
717  HW_CONFIG_ENCODER_DEVICE(NONE, D3D11VA),
718 #endif
719 #if CONFIG_DXVA2
720  HW_CONFIG_ENCODER_FRAMES(DXVA2_VLD, DXVA2),
722 #endif
723  HW_CONFIG_ENCODER_FRAMES(AMF_SURFACE, AMF),
725  NULL,
726 };
amf_release_attached_frame_ref
static AMF_RESULT amf_release_attached_frame_ref(AMFEncoderContext *ctx, AMFBuffer *buffer)
Definition: amfenc.c:380
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:216
AVPixelFormat
AVPixelFormat
Pixel format.
Definition: pixfmt.h:71
name
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this just let it vf default minimum maximum flags name is the option name
Definition: writing_filters.txt:88
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:57
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:200
AVPictureType
AVPictureType
Definition: avutil.h:276
int64_t
long long int64_t
Definition: coverity.c:34
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:64
av_fifo_peek
int av_fifo_peek(const AVFifo *f, void *buf, size_t nb_elems, size_t offset)
Read data from a FIFO without modifying FIFO state.
Definition: fifo.c:255
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:427
pixdesc.h
AVPacket::data
uint8_t * data
Definition: packet.h:588
encode.h
data
const char data[16]
Definition: mxf.c:149
amf_set_property_buffer
static AMF_RESULT amf_set_property_buffer(AMFSurface *object, const wchar_t *name, AMFBuffer *val)
Definition: amfenc.c:346
AV_PIX_FMT_BGRA
@ AV_PIX_FMT_BGRA
packed BGRA 8:8:8:8, 32bpp, BGRABGRA...
Definition: pixfmt.h:102
FFMAX
#define FFMAX(a, b)
Definition: macros.h:47
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AMF_RETURN_IF_FALSE
#define AMF_RETURN_IF_FALSE(avctx, exp, ret_value,...)
Error handling helper.
Definition: amfenc.h:169
amf_copy_surface
static int amf_copy_surface(AVCodecContext *avctx, const AVFrame *frame, AMFSurface *surface)
Definition: amfenc.c:169
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
NONE
#define NONE
Definition: vf_drawvg.c:261
AV_PKT_FLAG_KEY
#define AV_PKT_FLAG_KEY
The packet contains a keyframe.
Definition: packet.h:643
AV_PIX_FMT_AMF_SURFACE
@ AV_PIX_FMT_AMF_SURFACE
HW acceleration through AMF.
Definition: pixfmt.h:477
av_memdup
void * av_memdup(const void *p, size_t size)
Duplicate a buffer with av_malloc().
Definition: mem.c:304
AMF_AV_FRAME_REF
#define AMF_AV_FRAME_REF
Definition: amfenc.c:39
AVCodecContext::codec
const struct AVCodec * codec
Definition: avcodec.h:448
av_fifo_write
int av_fifo_write(AVFifo *f, const void *buf, size_t nb_elems)
Write data into a FIFO.
Definition: fifo.c:188
ff_amf_encode_close
int av_cold ff_amf_encode_close(AVCodecContext *avctx)
Common encoder termination function.
Definition: amfenc.c:140
ff_amf_encode_init
int ff_amf_encode_init(AVCodecContext *avctx)
Common encoder initization function.
Definition: amfenc.c:282
AVCodecContext::flags
int flags
AV_CODEC_FLAG_*.
Definition: avcodec.h:496
val
static double val(void *priv, double ch)
Definition: aeval.c:77
AV_CODEC_FLAG_LOW_DELAY
#define AV_CODEC_FLAG_LOW_DELAY
Force low delay.
Definition: avcodec.h:314
pts
static int64_t pts
Definition: transcode_aac.c:644
ff_encode_add_stats_side_data
int ff_encode_add_stats_side_data(AVPacket *pkt, int quality, const int64_t error[], int error_count, enum AVPictureType pict_type)
Definition: encode.c:919
av_av_to_amf_format
enum AMF_SURFACE_FORMAT av_av_to_amf_format(enum AVPixelFormat fmt)
Definition: hwcontext_amf.c:133
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:63
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:52
avassert.h
FF_ARRAY_ELEMS
#define FF_ARRAY_ELEMS(a)
Definition: sinewin_tablegen.c:29
av_cold
#define av_cold
Definition: attributes.h:111
av_fifo_read
int av_fifo_read(AVFifo *f, void *buf, size_t nb_elems)
Read data from a FIFO.
Definition: fifo.c:240
AV_PIX_FMT_DXVA2_VLD
@ AV_PIX_FMT_DXVA2_VLD
HW decoding through DXVA2, Picture.data[3] contains a LPDIRECT3DSURFACE9 pointer.
Definition: pixfmt.h:134
AMFEncoderContext
AMF encoder context.
Definition: amfenc.h:40
pix_fmt
static enum AVPixelFormat pix_fmt
Definition: demux_decode.c:41
amf_submit_frame
static int amf_submit_frame(AVCodecContext *avctx, AVFrame *frame, AMFSurface **surface_resubmit)
Definition: amfenc.c:392
av_assert0
#define av_assert0(cond)
assert() equivalent, that is always enabled.
Definition: avassert.h:42
AV_HWDEVICE_TYPE_AMF
@ AV_HWDEVICE_TYPE_AMF
Definition: hwcontext.h:41
amf_init_encoder
static int amf_init_encoder(AVCodecContext *avctx)
Definition: amfenc.c:78
ctx
static AVFormatContext * ctx
Definition: movenc.c:49
amf_query_output
static AMF_RESULT amf_query_output(AVCodecContext *avctx, AMFBuffer **buffer)
Definition: amfenc.c:562
av_frame_clone
AVFrame * av_frame_clone(const AVFrame *src)
Create a new frame that references the same data as src.
Definition: frame.c:483
hwcontext_amf.h
codec_id
enum AVCodecID codec_id
Definition: vaapi_decode.c:410
AV_PIX_FMT_YUV420P
@ AV_PIX_FMT_YUV420P
planar YUV 4:2:0, 12bpp, (1 Cr & Cb sample per 2x2 Y samples)
Definition: pixfmt.h:73
av_usleep
int av_usleep(unsigned usec)
Sleep for a period of time.
Definition: time.c:84
AV_CODEC_ID_H264
@ AV_CODEC_ID_H264
Definition: codec_id.h:79
AV_PIX_FMT_RGBA
@ AV_PIX_FMT_RGBA
packed RGBA 8:8:8:8, 32bpp, RGBARGBA...
Definition: pixfmt.h:100
if
if(ret)
Definition: filter_design.txt:179
AVPacket::buf
AVBufferRef * buf
A reference to the reference-counted buffer where the packet data is stored.
Definition: packet.h:571
NULL
#define NULL
Definition: coverity.c:32
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AV_CODEC_ID_AV1
@ AV_CODEC_ID_AV1
Definition: codec_id.h:284
AVHWFramesContext::device_ref
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:129
planes
static const struct @577 planes[]
ff_amf_receive_packet
int ff_amf_receive_packet(AVCodecContext *avctx, AVPacket *avpkt)
Ecoding one frame - common function for all AMF encoders.
Definition: amfenc.c:579
AV_PICTURE_TYPE_I
@ AV_PICTURE_TYPE_I
Intra.
Definition: avutil.h:278
av_fifo_can_read
size_t av_fifo_can_read(const AVFifo *f)
Definition: fifo.c:87
amf_copy_buffer
static int amf_copy_buffer(AVCodecContext *avctx, AVPacket *pkt, AMFBuffer *buffer)
Definition: amfenc.c:193
AV_PIX_FMT_BGR0
@ AV_PIX_FMT_BGR0
packed BGR 8:8:8, 32bpp, BGRXBGRX... X=unused/undefined
Definition: pixfmt.h:265
time.h
PTS_PROP
#define PTS_PROP
Definition: amfenc.c:40
amf_submit_frame_locked
static int amf_submit_frame_locked(AVCodecContext *avctx, AVFrame *frame, AMFSurface **surface_resubmit)
Definition: amfenc.c:547
index
int index
Definition: gxfenc.c:90
AVCOL_TRC_SMPTE2084
@ AVCOL_TRC_SMPTE2084
SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems.
Definition: pixfmt.h:683
AV_PIX_FMT_X2BGR10
#define AV_PIX_FMT_X2BGR10
Definition: pixfmt.h:614
hwcontext_dxva2.h
HW_CONFIG_ENCODER_DEVICE
#define HW_CONFIG_ENCODER_DEVICE(format, device_type_)
Definition: hwconfig.h:95
i
#define i(width, name, range_min, range_max)
Definition: cbs_h264.c:63
av_err2str
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: error.h:122
av_amf_extract_hdr_metadata
int av_amf_extract_hdr_metadata(const AVFrame *frame, AMFHDRMetadata *hdrmeta)
Definition: hwcontext_amf.c:233
ff_amf_pix_fmts
enum AVPixelFormat ff_amf_pix_fmts[]
Supported formats.
Definition: amfenc.c:55
size
int size
Definition: twinvq_data.h:10344
AVAMFDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_amf.h:35
AV_NOPTS_VALUE
#define AV_NOPTS_VALUE
Undefined timestamp value.
Definition: avutil.h:247
AVCodecHWConfigInternal
Definition: hwconfig.h:25
AVPacket::dts
int64_t dts
Decompression timestamp in AVStream->time_base units; the time at which the packet is decompressed.
Definition: packet.h:587
AVPacket::flags
int flags
A combination of AV_PKT_FLAG values.
Definition: packet.h:594
AV_PIX_FMT_RGB0
@ AV_PIX_FMT_RGB0
packed RGB 8:8:8, 32bpp, RGBXRGBX... X=unused/undefined
Definition: pixfmt.h:263
AV_PIX_FMT_D3D11
@ AV_PIX_FMT_D3D11
Hardware surfaces for Direct3D11.
Definition: pixfmt.h:336
AVCodec::id
enum AVCodecID id
Definition: codec.h:186
AV_PIX_FMT_ARGB
@ AV_PIX_FMT_ARGB
packed ARGB 8:8:8:8, 32bpp, ARGBARGB...
Definition: pixfmt.h:99
HW_CONFIG_ENCODER_FRAMES
#define HW_CONFIG_ENCODER_FRAMES(format, device_type_)
Definition: hwconfig.h:98
av_hwdevice_ctx_create_derived
int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ref_ptr, enum AVHWDeviceType type, AVBufferRef *src_ref, int flags)
Create a new device of the specified type from an existing device.
Definition: hwcontext.c:718
AVPacket::pts
int64_t pts
Presentation timestamp in AVStream->time_base units; the time at which the decompressed packet will b...
Definition: packet.h:581
hw_device_ctx
static AVBufferRef * hw_device_ctx
Definition: hw_decode.c:45
ff_amfenc_hw_configs
const AVCodecHWConfigInternal *const ff_amfenc_hw_configs[]
Definition: amfenc.c:714
amf_store_attached_frame_ref
static AMF_RESULT amf_store_attached_frame_ref(AMFEncoderContext *ctx, const AVFrame *frame, AMFSurface *surface)
Definition: amfenc.c:368
AV_CODEC_ID_HEVC
@ AV_CODEC_ID_HEVC
Definition: codec_id.h:228
AVCodecContext::hw_device_ctx
AVBufferRef * hw_device_ctx
A reference to the AVHWDeviceContext describing the device which will be used by a hardware encoder/d...
Definition: avcodec.h:1487
AVCodecContext::height
int height
Definition: avcodec.h:600
AVCodecContext::pix_fmt
enum AVPixelFormat pix_fmt
Pixel format, see AV_PIX_FMT_xxx.
Definition: avcodec.h:639
AVCodecContext::hw_frames_ctx
AVBufferRef * hw_frames_ctx
A reference to the AVHWFramesContext describing the input (for encoding) or output (decoding) frames.
Definition: avcodec.h:1465
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:118
frame_ref
static int frame_ref(AVFrame *dst, const AVFrame *src)
Definition: swscale.c:1333
ret
ret
Definition: filter_design.txt:187
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:75
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:265
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:615
av_fifo_alloc2
AVFifo * av_fifo_alloc2(size_t nb_elems, size_t elem_size, unsigned int flags)
Allocate and initialize an AVFifo with a given element size.
Definition: fifo.c:47
AVCodecContext
main external API structure.
Definition: avcodec.h:439
AV_PICTURE_TYPE_B
@ AV_PICTURE_TYPE_B
Bi-dir predicted.
Definition: avutil.h:280
buffer
the frame and frame reference mechanism is intended to as much as expensive copies of that data while still allowing the filters to produce correct results The data is stored in buffers represented by AVFrame structures Several references can point to the same frame buffer
Definition: filter_design.txt:49
ff_get_encode_buffer
int ff_get_encode_buffer(AVCodecContext *avctx, AVPacket *avpkt, int64_t size, int flags)
Get a buffer for a packet.
Definition: encode.c:105
av_image_copy2
static void av_image_copy2(uint8_t *const dst_data[4], const int dst_linesizes[4], uint8_t *const src_data[4], const int src_linesizes[4], enum AVPixelFormat pix_fmt, int width, int height)
Wrapper around av_image_copy() to workaround the limitation that the conversion from uint8_t * const ...
Definition: imgutils.h:184
AV_PIX_FMT_NONE
@ AV_PIX_FMT_NONE
Definition: pixfmt.h:72
L
#define L(x)
Definition: vpx_arith.h:36
amfenc.h
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:602
AV_PICTURE_TYPE_P
@ AV_PICTURE_TYPE_P
Predicted.
Definition: avutil.h:279
AVERROR_ENCODER_NOT_FOUND
#define AVERROR_ENCODER_NOT_FOUND
Encoder not found.
Definition: error.h:56
mem.h
AVCodecContext::max_b_frames
int max_b_frames
maximum number of B-frames between non-B-frames Note: The output will be delayed by max_b_frames+1 re...
Definition: avcodec.h:777
ff_encode_get_frame
int ff_encode_get_frame(AVCodecContext *avctx, AVFrame *frame)
Called by encoders to get the next frame for encoding.
Definition: encode.c:204
AVPacket
This structure stores compressed data.
Definition: packet.h:565
AVCodecContext::priv_data
void * priv_data
Definition: avcodec.h:466
next_encoder_index
static int64_t next_encoder_index
Definition: amfenc.c:76
AV_PIX_FMT_RGBAF16
#define AV_PIX_FMT_RGBAF16
Definition: pixfmt.h:624
av_freep
#define av_freep(p)
Definition: tableprint_vlc.h:35
AVCodecContext::width
int width
picture width / height.
Definition: avcodec.h:600
imgutils.h
hwcontext.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
av_fifo_freep2
void av_fifo_freep2(AVFifo **f)
Free an AVFifo and reset pointer to NULL.
Definition: fifo.c:286
pkt
static AVPacket * pkt
Definition: demux_decode.c:55
FF_QP2LAMBDA
#define FF_QP2LAMBDA
factor to convert from H.263 QP to lambda
Definition: avutil.h:226
hwcontext_d3d11va.h
AV_FIFO_FLAG_AUTO_GROW
#define AV_FIFO_FLAG_AUTO_GROW
Automatically resize the FIFO on writes, so that the data fits.
Definition: fifo.h:63
w32dlfcn.h
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:3376