FFmpeg
vf_deinterlace_d3d12.c
Go to the documentation of this file.
1 /*
2  * D3D12VA deinterlacing filter
3  *
4  * Copyright (c) 2026 Advanced Micro Devices, Inc.
5  *
6  * This file is part of FFmpeg.
7  *
8  * FFmpeg is free software; you can redistribute it and/or
9  * modify it under the terms of the GNU Lesser General Public
10  * License as published by the Free Software Foundation; either
11  * version 2.1 of the License, or (at your option) any later version.
12  *
13  * FFmpeg is distributed in the hope that it will be useful,
14  * but WITHOUT ANY WARRANTY; without even the implied warranty of
15  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16  * Lesser General Public License for more details.
17  *
18  * You should have received a copy of the GNU Lesser General Public
19  * License along with FFmpeg; if not, write to the Free Software
20  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21  */
22 
#define COBJMACROS

#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_d3d12va.h"

#include "filters.h"
#include "video.h"

#define MAX_REFERENCES 8
35 
/**
 * Deinterlace mode enumeration
 * Maps to D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG values
 */
enum {
    DEINT_D3D12_MODE_DEFAULT = 0, ///< Use best available mode
    DEINT_D3D12_MODE_BOB     = 1, ///< Bob deinterlacing (simple field interpolation)
    DEINT_D3D12_MODE_CUSTOM  = 2, ///< Driver-defined advanced deinterlacing
};
45 
46 typedef struct DeinterlaceD3D12Context {
47  const AVClass *classCtx;
48 
49  /* Filter options */
50  int mode; // Deinterlace mode (default, bob, custom)
51  int field_rate; // Output field rate (1 = frame rate, 2 = field rate)
52  int auto_enable; // Only deinterlace interlaced frames
53 
54  /* D3D12 objects */
55  ID3D12Device *device;
56  ID3D12VideoDevice *video_device;
57  ID3D12VideoProcessor *video_processor;
58  ID3D12CommandQueue *command_queue;
59  ID3D12VideoProcessCommandList *command_list;
60  ID3D12CommandAllocator *command_allocator;
61 
62  /* Synchronization */
63  ID3D12Fence *fence;
64  UINT64 fence_value;
65  HANDLE fence_event;
66 
67  /* Buffer references */
70 
71  /* Dimensions and formats */
72  int width, height;
73  DXGI_FORMAT input_format;
74 
75  /* Color space and frame rate */
76  DXGI_COLOR_SPACE_TYPE input_colorspace;
78 
79  /* Video processor capabilities */
80  D3D12_FEATURE_DATA_VIDEO_PROCESS_SUPPORT process_support;
81  D3D12_VIDEO_PROCESS_DEINTERLACE_FLAGS supported_deint_flags;
82 
83  /* Frame queue for temporal references */
87 
88  /* State tracking */
89  int eof;
96 
97  /* Processor configured flag */
100 
102 {
103  DeinterlaceD3D12Context *s = ctx->priv;
104  s->fence_value = 1;
105  s->processor_configured = 0;
106  return 0;
107 }
108 
110 {
111  UINT64 fence_value;
112  HRESULT hr;
113 
114  /* Wait for all GPU operations to complete before releasing resources */
115  if (s->command_queue && s->fence && s->fence_event) {
116  fence_value = s->fence_value;
117  hr = ID3D12CommandQueue_Signal(s->command_queue, s->fence, fence_value);
118  if (SUCCEEDED(hr)) {
119  UINT64 completed = ID3D12Fence_GetCompletedValue(s->fence);
120  if (completed < fence_value) {
121  hr = ID3D12Fence_SetEventOnCompletion(s->fence, fence_value, s->fence_event);
122  if (SUCCEEDED(hr)) {
123  WaitForSingleObject(s->fence_event, INFINITE);
124  }
125  }
126  }
127  }
128 
129  if (s->fence_event) {
130  CloseHandle(s->fence_event);
131  s->fence_event = NULL;
132  }
133 
134  if (s->fence) {
135  ID3D12Fence_Release(s->fence);
136  s->fence = NULL;
137  }
138 
139  if (s->command_list) {
140  ID3D12VideoProcessCommandList_Release(s->command_list);
141  s->command_list = NULL;
142  }
143 
144  if (s->command_allocator) {
145  ID3D12CommandAllocator_Release(s->command_allocator);
146  s->command_allocator = NULL;
147  }
148 
149  if (s->video_processor) {
150  ID3D12VideoProcessor_Release(s->video_processor);
151  s->video_processor = NULL;
152  }
153 
154  if (s->video_device) {
155  ID3D12VideoDevice_Release(s->video_device);
156  s->video_device = NULL;
157  }
158 
159  if (s->command_queue) {
160  ID3D12CommandQueue_Release(s->command_queue);
161  s->command_queue = NULL;
162  }
163 }
164 
166 {
167  for (int i = 0; i < s->queue_count; i++) {
168  av_frame_free(&s->frame_queue[i]);
169  }
170  s->queue_count = 0;
171 }
172 
173 static DXGI_COLOR_SPACE_TYPE get_dxgi_colorspace(enum AVColorSpace colorspace,
175  int is_10bit)
176 {
177  /* Map FFmpeg color space to DXGI color space */
178  if (is_10bit) {
179  /* 10-bit formats (P010) */
180  if (colorspace == AVCOL_SPC_BT2020_NCL || colorspace == AVCOL_SPC_BT2020_CL) {
181  if (trc == AVCOL_TRC_SMPTE2084) {
182  return DXGI_COLOR_SPACE_YCBCR_STUDIO_G2084_LEFT_P2020; // HDR10
183  } else if (trc == AVCOL_TRC_ARIB_STD_B67) {
184  return DXGI_COLOR_SPACE_YCBCR_STUDIO_GHLG_TOPLEFT_P2020; // HLG
185  } else {
186  return DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P2020;
187  }
188  } else {
189  return DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709; // Rec.709 10-bit
190  }
191  } else {
192  /* 8-bit formats (NV12) */
193  if (colorspace == AVCOL_SPC_BT2020_NCL || colorspace == AVCOL_SPC_BT2020_CL) {
194  return DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P2020;
195  } else if (colorspace == AVCOL_SPC_BT470BG || colorspace == AVCOL_SPC_SMPTE170M) {
196  return DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P601;
197  } else {
198  return DXGI_COLOR_SPACE_YCBCR_STUDIO_G22_LEFT_P709; // Default to Rec.709
199  }
200  }
201 }
202 
204 {
205  AVRational framerate = {0, 0};
206 
207  if (in->duration > 0 && inlink->time_base.num > 0 && inlink->time_base.den > 0) {
208  av_reduce(&framerate.num, &framerate.den,
209  inlink->time_base.den, in->duration * inlink->time_base.num,
210  INT_MAX);
211  } else if (inlink->time_base.num > 0 && inlink->time_base.den > 0) {
212  framerate.num = inlink->time_base.den;
213  framerate.den = inlink->time_base.num;
214  } else {
215  framerate.num = 30;
216  framerate.den = 1;
217  av_log(ctx, AV_LOG_WARNING, "Input framerate not determinable, defaulting to 30fps\n");
218  }
219 
220  return framerate;
221 }
222 
223 static D3D12_VIDEO_PROCESS_DEINTERLACE_FLAGS get_deint_mode(DeinterlaceD3D12Context *s,
225 {
226  D3D12_VIDEO_PROCESS_DEINTERLACE_FLAGS mode_flag;
227 
228  switch (s->mode) {
230  mode_flag = D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_BOB;
231  break;
233  mode_flag = D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_CUSTOM;
234  break;
236  default:
237  /* Select best available mode */
238  if (s->supported_deint_flags & D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_CUSTOM) {
239  mode_flag = D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_CUSTOM;
240  av_log(ctx, AV_LOG_VERBOSE, "Using custom (driver-defined) deinterlacing\n");
241  } else if (s->supported_deint_flags & D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_BOB) {
242  mode_flag = D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_BOB;
243  av_log(ctx, AV_LOG_VERBOSE, "Using bob deinterlacing\n");
244  } else {
245  mode_flag = D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_BOB;
246  av_log(ctx, AV_LOG_WARNING, "No deinterlacing modes reported, trying bob\n");
247  }
248  break;
249  }
250 
251  /* Verify requested mode is supported */
252  if (mode_flag != D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_BOB &&
253  !(s->supported_deint_flags & mode_flag)) {
254  av_log(ctx, AV_LOG_WARNING, "Requested deinterlace mode not supported, falling back to bob\n");
255  mode_flag = D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_BOB;
256  }
257 
258  return mode_flag;
259 }
260 
263  AVFrame *in)
264 {
265  HRESULT hr;
266  AVHWDeviceContext *hwctx = (AVHWDeviceContext *)s->hw_device_ctx->data;
267  AVD3D12VADeviceContext *d3d12_hwctx = (AVD3D12VADeviceContext *)hwctx->hwctx;
268  D3D12_VIDEO_PROCESS_DEINTERLACE_FLAGS deint_mode;
269  D3D12_VIDEO_FIELD_TYPE field_type;
270 
271  s->device = d3d12_hwctx->device;
272 
273  av_log(ctx, AV_LOG_VERBOSE, "Configuring D3D12 deinterlace processor: %dx%d\n",
274  s->width, s->height);
275 
276  hr = ID3D12Device_QueryInterface(s->device, &IID_ID3D12VideoDevice, (void **)&s->video_device);
277  if (FAILED(hr)) {
278  av_log(ctx, AV_LOG_ERROR, "Failed to get D3D12 video device interface: HRESULT 0x%lX\n", hr);
279  return AVERROR_EXTERNAL;
280  }
281 
282  D3D12_COMMAND_QUEUE_DESC queue_desc = {
283  .Type = D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS,
284  .Priority = D3D12_COMMAND_QUEUE_PRIORITY_NORMAL,
285  .Flags = D3D12_COMMAND_QUEUE_FLAG_NONE,
286  .NodeMask = 0
287  };
288 
289  hr = ID3D12Device_CreateCommandQueue(s->device, &queue_desc, &IID_ID3D12CommandQueue, (void **)&s->command_queue);
290  if (FAILED(hr)) {
291  av_log(ctx, AV_LOG_ERROR, "Failed to create command queue: HRESULT 0x%lX\n", hr);
292  return AVERROR_EXTERNAL;
293  }
294 
295  /* Determine field type from input frame */
297  field_type = D3D12_VIDEO_FIELD_TYPE_INTERLACED_TOP_FIELD_FIRST;
298  av_log(ctx, AV_LOG_VERBOSE, "Input field order: Top Field First\n");
299  } else {
300  field_type = D3D12_VIDEO_FIELD_TYPE_INTERLACED_BOTTOM_FIELD_FIRST;
301  av_log(ctx, AV_LOG_VERBOSE, "Input field order: Bottom Field First\n");
302  }
303 
304  /* Check deinterlacing support */
305  s->process_support.NodeIndex = 0;
306  s->process_support.InputSample.Format.Format = s->input_format;
307  s->process_support.InputSample.Format.ColorSpace = s->input_colorspace;
308  s->process_support.InputSample.Width = s->width;
309  s->process_support.InputSample.Height = s->height;
310  s->process_support.InputFrameRate.Numerator = s->input_framerate.num;
311  s->process_support.InputFrameRate.Denominator = s->input_framerate.den;
312  s->process_support.InputFieldType = field_type;
313  s->process_support.InputStereoFormat = D3D12_VIDEO_FRAME_STEREO_FORMAT_NONE;
314 
315  s->process_support.OutputFormat.Format = s->input_format;
316  s->process_support.OutputFormat.ColorSpace = s->input_colorspace;
317  s->process_support.OutputFrameRate.Numerator = s->input_framerate.num * s->field_rate;
318  s->process_support.OutputFrameRate.Denominator = s->input_framerate.den;
319  s->process_support.OutputStereoFormat = D3D12_VIDEO_FRAME_STEREO_FORMAT_NONE;
320 
321  hr = ID3D12VideoDevice_CheckFeatureSupport(
322  s->video_device,
323  D3D12_FEATURE_VIDEO_PROCESS_SUPPORT,
324  &s->process_support,
325  sizeof(s->process_support)
326  );
327 
328  if (FAILED(hr)) {
329  av_log(ctx, AV_LOG_ERROR, "Video process feature check failed: HRESULT 0x%lX\n", hr);
330  return AVERROR_EXTERNAL;
331  }
332 
333  if (!(s->process_support.SupportFlags & D3D12_VIDEO_PROCESS_SUPPORT_FLAG_SUPPORTED)) {
334  av_log(ctx, AV_LOG_ERROR, "Video process configuration not supported by hardware\n");
335  return AVERROR(ENOSYS);
336  }
337 
338  /* Store supported deinterlace flags */
339  s->supported_deint_flags = s->process_support.DeinterlaceSupport;
340 
341  av_log(ctx, AV_LOG_VERBOSE, "Deinterlace support flags: 0x%X\n", s->supported_deint_flags);
342 
343  if (!(s->supported_deint_flags & (D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_BOB |
344  D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_CUSTOM))) {
345  av_log(ctx, AV_LOG_ERROR, "No deinterlacing modes supported by hardware\n");
346  return AVERROR(ENOSYS);
347  }
348 
349  deint_mode = get_deint_mode(s, ctx);
350 
351  /* Query reference frame requirements from hardware */
352 #if CONFIG_D3D12_VIDEO_PROCESS_REFERENCE_INFO
353  D3D12_FEATURE_DATA_VIDEO_PROCESS_REFERENCE_INFO ref_info = {
354  .NodeIndex = 0,
355  .DeinterlaceMode = deint_mode,
356  .Filters = D3D12_VIDEO_PROCESS_FILTER_FLAG_NONE,
357  .FeatureSupport = D3D12_VIDEO_PROCESS_FEATURE_FLAG_NONE,
358  .InputFrameRate = { s->input_framerate.num, s->input_framerate.den },
359  .OutputFrameRate = { s->input_framerate.num * s->field_rate, s->input_framerate.den },
360  .EnableAutoProcessing = FALSE,
361  };
362 
363  hr = ID3D12VideoDevice_CheckFeatureSupport(
364  s->video_device,
365  D3D12_FEATURE_VIDEO_PROCESS_REFERENCE_INFO,
366  &ref_info,
367  sizeof(ref_info)
368  );
369 
370  if (SUCCEEDED(hr)) {
371  s->num_past_frames = ref_info.PastFrames;
372  s->num_future_frames = ref_info.FutureFrames;
374  "Reference frames from hardware: past=%d, future=%d\n",
375  s->num_past_frames, s->num_future_frames);
376  } else {
378  "Failed to query reference info (HRESULT 0x%lX), using defaults\n", hr);
379  s->num_past_frames = (deint_mode == D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_CUSTOM) ? 1 : 0;
380  s->num_future_frames = 0;
381  }
382 #else
384  "Reference info query not available in SDK, using defaults\n");
385  s->num_past_frames = (deint_mode == D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG_CUSTOM) ? 1 : 0;
386  s->num_future_frames = 0;
387 #endif
388 
389  /* May need 1 extra slot for PTS calculation.*/
390  s->extra_delay_for_timestamps = (s->field_rate == 2 && s->num_future_frames == 0) ? 1 : 0;
391 
392  s->queue_depth = s->num_past_frames + s->num_future_frames + s->extra_delay_for_timestamps + 1;
393 
394  if (s->queue_depth > MAX_REFERENCES) {
395  av_log(ctx, AV_LOG_ERROR, "Required queue depth (%d) exceeds maximum (%d)\n",
396  s->queue_depth, MAX_REFERENCES);
397  return AVERROR(ENOSYS);
398  }
399 
400  s->current_frame_index = s->num_past_frames;
401 
402  av_log(ctx, AV_LOG_VERBOSE, "Queue depth: %d (past: %d, future: %d, extra: %d)\n",
403  s->queue_depth, s->num_past_frames, s->num_future_frames,
404  s->extra_delay_for_timestamps);
405 
406  D3D12_VIDEO_PROCESS_OUTPUT_STREAM_DESC processor_output_desc = {
407  .Format = s->input_format,
408  .ColorSpace = s->input_colorspace,
409  .AlphaFillMode = D3D12_VIDEO_PROCESS_ALPHA_FILL_MODE_OPAQUE,
410  .AlphaFillModeSourceStreamIndex = 0,
411  .BackgroundColor = { 0.0f, 0.0f, 0.0f, 1.0f },
412  .FrameRate = { s->input_framerate.num * s->field_rate, s->input_framerate.den },
413  .EnableStereo = FALSE,
414  };
415 
416  D3D12_VIDEO_PROCESS_INPUT_STREAM_DESC processor_input_desc = {
417  .Format = s->input_format,
418  .ColorSpace = s->input_colorspace,
419  .SourceAspectRatio = { s->width, s->height },
420  .DestinationAspectRatio = { s->width, s->height },
421  .FrameRate = { s->input_framerate.num, s->input_framerate.den },
422  .StereoFormat = D3D12_VIDEO_FRAME_STEREO_FORMAT_NONE,
423  .FieldType = field_type,
424  .DeinterlaceMode = deint_mode,
425  .EnableOrientation = FALSE,
426  .FilterFlags = D3D12_VIDEO_PROCESS_FILTER_FLAG_NONE,
427  .SourceSizeRange = {
428  .MaxWidth = s->width,
429  .MaxHeight = s->height,
430  .MinWidth = s->width,
431  .MinHeight = s->height
432  },
433  .DestinationSizeRange = {
434  .MaxWidth = s->width,
435  .MaxHeight = s->height,
436  .MinWidth = s->width,
437  .MinHeight = s->height
438  },
439  .EnableAlphaBlending = FALSE,
440  .LumaKey = { .Enable = FALSE, .Lower = 0.0f, .Upper = 1.0f },
441  .NumPastFrames = s->num_past_frames,
442  .NumFutureFrames = s->num_future_frames,
443  .EnableAutoProcessing = FALSE,
444  };
445 
446  hr = ID3D12VideoDevice_CreateVideoProcessor(
447  s->video_device,
448  0,
449  &processor_output_desc,
450  1,
451  &processor_input_desc,
452  &IID_ID3D12VideoProcessor,
453  (void **)&s->video_processor
454  );
455 
456  if (FAILED(hr)) {
457  av_log(ctx, AV_LOG_ERROR, "Failed to create video processor: HRESULT 0x%lX\n", hr);
458  return AVERROR_EXTERNAL;
459  }
460 
461  hr = ID3D12Device_CreateCommandAllocator(
462  s->device,
463  D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS,
464  &IID_ID3D12CommandAllocator,
465  (void **)&s->command_allocator
466  );
467 
468  if (FAILED(hr)) {
469  av_log(ctx, AV_LOG_ERROR, "Failed to create command allocator: HRESULT 0x%lX\n", hr);
470  return AVERROR_EXTERNAL;
471  }
472 
473  hr = ID3D12Device_CreateCommandList(
474  s->device,
475  0,
476  D3D12_COMMAND_LIST_TYPE_VIDEO_PROCESS,
477  s->command_allocator,
478  NULL,
479  &IID_ID3D12VideoProcessCommandList,
480  (void **)&s->command_list
481  );
482 
483  if (FAILED(hr)) {
484  av_log(ctx, AV_LOG_ERROR, "Failed to create command list: HRESULT 0x%lX\n", hr);
485  return AVERROR_EXTERNAL;
486  }
487 
488  ID3D12VideoProcessCommandList_Close(s->command_list);
489 
490  hr = ID3D12Device_CreateFence(s->device, 0, D3D12_FENCE_FLAG_NONE, &IID_ID3D12Fence, (void **)&s->fence);
491  if (FAILED(hr)) {
492  av_log(ctx, AV_LOG_ERROR, "Failed to create fence: HRESULT 0x%lX\n", hr);
493  return AVERROR_EXTERNAL;
494  }
495 
496  s->fence_value = 1;
497 
498  s->fence_event = CreateEvent(NULL, FALSE, FALSE, NULL);
499  if (!s->fence_event) {
500  av_log(ctx, AV_LOG_ERROR, "Failed to create fence event\n");
501  return AVERROR_EXTERNAL;
502  }
503 
504  s->processor_configured = 1;
505  av_log(ctx, AV_LOG_VERBOSE, "D3D12 deinterlace processor successfully configured\n");
506  return 0;
507 }
508 
509 static void add_resource_barrier(D3D12_RESOURCE_BARRIER *barriers, int *count,
510  ID3D12Resource *resource,
511  D3D12_RESOURCE_STATES before,
512  D3D12_RESOURCE_STATES after)
513 {
514  barriers[(*count)++] = (D3D12_RESOURCE_BARRIER) {
515  .Type = D3D12_RESOURCE_BARRIER_TYPE_TRANSITION,
516  .Flags = D3D12_RESOURCE_BARRIER_FLAG_NONE,
517  .Transition = {
518  .pResource = resource,
519  .Subresource = D3D12_RESOURCE_BARRIER_ALL_SUBRESOURCES,
520  .StateBefore = before,
521  .StateAfter = after
522  }
523  };
524 }
525 
527  AVFilterLink *outlink,
528  AVFrame *input_frame,
529  int field,
530  int queue_idx)
531 {
532  DeinterlaceD3D12Context *s = ctx->priv;
533  AVFrame *out = NULL;
534  int ret = 0;
535  int i;
536  HRESULT hr;
537 
538  AVD3D12VAFrame *in_d3d12_frame = (AVD3D12VAFrame *)input_frame->data[0];
539 
540  out = av_frame_alloc();
541  if (!out) {
542  av_log(ctx, AV_LOG_ERROR, "Failed to allocate output frame\n");
543  return AVERROR(ENOMEM);
544  }
545 
546  ret = av_hwframe_get_buffer(s->hw_frames_ctx_out, out, 0);
547  if (ret < 0) {
548  av_log(ctx, AV_LOG_ERROR, "Failed to get output frame from pool\n");
549  av_frame_free(&out);
550  return ret;
551  }
552 
553  AVD3D12VAFrame *out_d3d12_frame = (AVD3D12VAFrame *)out->data[0];
554 
555  ID3D12Resource *input_resource = in_d3d12_frame->texture;
556  ID3D12Resource *output_resource = out_d3d12_frame->texture;
557 
558  /* Build past/future reference frame arrays from queue */
559  ID3D12Resource *past_textures[MAX_REFERENCES];
560  UINT past_subresources[MAX_REFERENCES];
561  int actual_past = 0;
562 
563  ID3D12Resource *future_textures[MAX_REFERENCES];
564  UINT future_subresources[MAX_REFERENCES];
565  int actual_future = 0;
566 
567  if (queue_idx >= 0) {
568  /* Collect past reference textures from the queue, walking
569  * backwards from the current frame position. */
570  for (i = 0; i < s->num_past_frames && (queue_idx - 1 - i) >= 0; i++) {
571  AVFrame *past_frame = s->frame_queue[queue_idx - 1 - i];
572  if (past_frame) {
573  AVD3D12VAFrame *past_d3d12 = (AVD3D12VAFrame *)past_frame->data[0];
574  past_textures[actual_past] = past_d3d12->texture;
575  past_subresources[actual_past] = 0;
576  actual_past++;
577  }
578  }
579 
580  /* Collect future reference textures from the queue, walking
581  * forwards from the current frame position. */
582  for (i = 0; i < s->num_future_frames && (queue_idx + 1 + i) < s->queue_count; i++) {
583  AVFrame *future_frame = s->frame_queue[queue_idx + 1 + i];
584  if (future_frame) {
585  AVD3D12VAFrame *future_d3d12 = (AVD3D12VAFrame *)future_frame->data[0];
586  future_textures[actual_future] = future_d3d12->texture;
587  future_subresources[actual_future] = 0;
588  actual_future++;
589  }
590  }
591 
593  "Reference frames: past=%d/%d, future=%d/%d, queue_idx=%d\n",
594  actual_past, s->num_past_frames, actual_future, s->num_future_frames,
595  queue_idx);
596  }
597 
598  /* Wait for input frame's fence before accessing it */
599  if (in_d3d12_frame->sync_ctx.fence && in_d3d12_frame->sync_ctx.fence_value > 0) {
600  UINT64 completed = ID3D12Fence_GetCompletedValue(in_d3d12_frame->sync_ctx.fence);
601  if (completed < in_d3d12_frame->sync_ctx.fence_value) {
602  hr = ID3D12CommandQueue_Wait(s->command_queue, in_d3d12_frame->sync_ctx.fence,
603  in_d3d12_frame->sync_ctx.fence_value);
604  if (FAILED(hr)) {
605  av_log(ctx, AV_LOG_ERROR, "Failed to wait for input fence: HRESULT 0x%lX\n", hr);
607  goto fail;
608  }
609  }
610  }
611 
612  /* Wait for past and future reference frame fences before accessing them*/
613  for (i = 0; i < actual_past; i++) {
614  AVFrame *past_frame = s->frame_queue[queue_idx - 1 - i];
615  AVD3D12VAFrame *past_d3d12 = (AVD3D12VAFrame *)past_frame->data[0];
616  if (past_d3d12->sync_ctx.fence && past_d3d12->sync_ctx.fence_value > 0) {
617  hr = ID3D12CommandQueue_Wait(s->command_queue, past_d3d12->sync_ctx.fence,
618  past_d3d12->sync_ctx.fence_value);
619  if (FAILED(hr)) {
620  av_log(ctx, AV_LOG_ERROR, "Failed to wait for past frame fence: HRESULT 0x%lX\n", hr);
622  goto fail;
623  }
624  }
625  }
626 
627  for (i = 0; i < actual_future; i++) {
628  AVFrame *future_frame = s->frame_queue[queue_idx + 1 + i];
629  AVD3D12VAFrame *future_d3d12 = (AVD3D12VAFrame *)future_frame->data[0];
630  if (future_d3d12->sync_ctx.fence && future_d3d12->sync_ctx.fence_value > 0) {
631  hr = ID3D12CommandQueue_Wait(s->command_queue, future_d3d12->sync_ctx.fence,
632  future_d3d12->sync_ctx.fence_value);
633  if (FAILED(hr)) {
634  av_log(ctx, AV_LOG_ERROR, "Failed to wait for future frame fence: HRESULT 0x%lX\n", hr);
636  goto fail;
637  }
638  }
639  }
640 
641  hr = ID3D12CommandAllocator_Reset(s->command_allocator);
642  if (FAILED(hr)) {
643  av_log(ctx, AV_LOG_ERROR, "Failed to reset command allocator: HRESULT 0x%lX\n", hr);
645  goto fail;
646  }
647 
648  hr = ID3D12VideoProcessCommandList_Reset(s->command_list, s->command_allocator);
649  if (FAILED(hr)) {
650  av_log(ctx, AV_LOG_ERROR, "Failed to reset command list: HRESULT 0x%lX\n", hr);
652  goto fail;
653  }
654 
655  /* Resource barriers: input + output + past refs + future refs */
656  D3D12_RESOURCE_BARRIER barriers[2 + MAX_REFERENCES * 2];
657  int num_barriers = 0;
658 
659  add_resource_barrier(barriers, &num_barriers, input_resource,
660  D3D12_RESOURCE_STATE_COMMON, D3D12_RESOURCE_STATE_VIDEO_PROCESS_READ);
661  add_resource_barrier(barriers, &num_barriers, output_resource,
662  D3D12_RESOURCE_STATE_COMMON, D3D12_RESOURCE_STATE_VIDEO_PROCESS_WRITE);
663 
664  for (i = 0; i < actual_past; i++)
665  add_resource_barrier(barriers, &num_barriers, past_textures[i],
666  D3D12_RESOURCE_STATE_COMMON, D3D12_RESOURCE_STATE_VIDEO_PROCESS_READ);
667 
668  for (i = 0; i < actual_future; i++)
669  add_resource_barrier(barriers, &num_barriers, future_textures[i],
670  D3D12_RESOURCE_STATE_COMMON, D3D12_RESOURCE_STATE_VIDEO_PROCESS_READ);
671 
672  ID3D12VideoProcessCommandList_ResourceBarrier(s->command_list, num_barriers, barriers);
673 
674  /* Setup input stream arguments */
675  D3D12_VIDEO_PROCESS_INPUT_STREAM_ARGUMENTS input_args = {0};
676 
677  input_args.InputStream[0].pTexture2D = input_resource;
678 
679  /* Populate reference set with past/future frames */
680  input_args.InputStream[0].ReferenceSet.NumPastFrames = actual_past;
681  input_args.InputStream[0].ReferenceSet.ppPastFrames = actual_past > 0 ? past_textures : NULL;
682  input_args.InputStream[0].ReferenceSet.pPastSubresources = actual_past > 0 ? past_subresources : NULL;
683  input_args.InputStream[0].ReferenceSet.NumFutureFrames = actual_future;
684  input_args.InputStream[0].ReferenceSet.ppFutureFrames = actual_future > 0 ? future_textures : NULL;
685  input_args.InputStream[0].ReferenceSet.pFutureSubresources = actual_future > 0 ? future_subresources : NULL;
686 
687  input_args.Transform.SourceRectangle.right = s->width;
688  input_args.Transform.SourceRectangle.bottom = s->height;
689  input_args.Transform.DestinationRectangle.right = s->width;
690  input_args.Transform.DestinationRectangle.bottom = s->height;
691  input_args.Transform.Orientation = D3D12_VIDEO_PROCESS_ORIENTATION_DEFAULT;
692 
693  input_args.Flags = D3D12_VIDEO_PROCESS_INPUT_STREAM_FLAG_NONE;
694 
695  input_args.RateInfo.OutputIndex = field;
696  input_args.RateInfo.InputFrameOrField = 0;
697 
698  memset(input_args.FilterLevels, 0, sizeof(input_args.FilterLevels));
699 
700  input_args.AlphaBlending.Enable = FALSE;
701  input_args.AlphaBlending.Alpha = 1.0f;
702 
703  /* Setup output stream arguments */
704  D3D12_VIDEO_PROCESS_OUTPUT_STREAM_ARGUMENTS output_args = {0};
705 
706  output_args.OutputStream[0].pTexture2D = output_resource;
707  output_args.TargetRectangle.right = s->width;
708  output_args.TargetRectangle.bottom = s->height;
709 
710  ID3D12VideoProcessCommandList_ProcessFrames(
711  s->command_list,
712  s->video_processor,
713  &output_args,
714  1,
715  &input_args
716  );
717 
718  /* Reverse barriers */
719  for (i = 0; i < num_barriers; i++) {
720  FFSWAP(D3D12_RESOURCE_STATES, barriers[i].Transition.StateBefore, barriers[i].Transition.StateAfter);
721  }
722  ID3D12VideoProcessCommandList_ResourceBarrier(s->command_list, num_barriers, barriers);
723 
724  hr = ID3D12VideoProcessCommandList_Close(s->command_list);
725  if (FAILED(hr)) {
726  av_log(ctx, AV_LOG_ERROR, "Failed to close command list: HRESULT 0x%lX\n", hr);
728  goto fail;
729  }
730 
731  ID3D12CommandList *cmd_lists[] = { (ID3D12CommandList *)s->command_list };
732  ID3D12CommandQueue_ExecuteCommandLists(s->command_queue, 1, cmd_lists);
733 
734  hr = ID3D12CommandQueue_Signal(s->command_queue, s->fence, s->fence_value);
735  if (FAILED(hr)) {
736  av_log(ctx, AV_LOG_ERROR, "Failed to signal fence: HRESULT 0x%lX\n", hr);
738  goto fail;
739  }
740 
741  out_d3d12_frame->sync_ctx.fence = s->fence;
742  out_d3d12_frame->sync_ctx.fence_value = s->fence_value;
743  ID3D12Fence_AddRef(s->fence);
744 
745  s->fence_value++;
746 
747  ret = av_frame_copy_props(out, input_frame);
748  if (ret < 0) {
749  av_log(ctx, AV_LOG_ERROR, "Failed to copy frame properties\n");
750  goto fail;
751  }
752 
753  out->width = s->width;
754  out->height = s->height;
755  out->format = AV_PIX_FMT_D3D12;
756  out->flags &= ~AV_FRAME_FLAG_INTERLACED;
757 
758  /* Calculate output PTS for field rate output */
759  if (s->field_rate == 2 && queue_idx >= 0) {
760  AVFrame *next_frame = (queue_idx + 1 < s->queue_count) ?
761  s->frame_queue[queue_idx + 1] : NULL;
762 
763  if (field == 0) {
764  out->pts = 2 * input_frame->pts;
765  } else if (s->eof || !next_frame) {
766  out->pts = 3 * input_frame->pts - s->prev_pts;
767  } else {
768  out->pts = input_frame->pts + next_frame->pts;
769  }
770  }
771 
772  av_log(ctx, AV_LOG_DEBUG, "Deinterlace output: %dx%d, pts=%"PRId64", field=%d\n",
773  out->width, out->height, out->pts, field);
774 
775  return ff_filter_frame(outlink, out);
776 
777 fail:
778  av_frame_free(&out);
779  return ret;
780 }
781 
783 {
784  AVFilterContext *ctx = inlink->dst;
785  DeinterlaceD3D12Context *s = ctx->priv;
786  AVFilterLink *outlink = ctx->outputs[0];
787  int ret = 0;
788  int field;
789  AVFrame *input_frame;
790 
791  /* Phase 4: EOF flush - process remaining frames in queue.
792  * Two sub-cases:
793  * a) Short stream: queue never filled, process all buffered frames
794  * b) Normal stream: process frames after current_frame_index
795  * (future refs and extra delay frames not yet output) */
796  if (!in) {
797  if (s->eof && s->queue_count > 0) {
798  int flush_idx;
799 
800  if (!s->initial_fill_done) {
801  /* Short stream: queue never reached full depth.
802  * Process ALL buffered frames with whatever references are available. */
803  flush_idx = 0;
805  "EOF flush (short stream): processing all %d buffered frames\n",
806  s->queue_count);
807  } else {
808  /* Normal stream: process remaining frames after the last
809  * normally-processed one (future refs, extra delay). */
810  flush_idx = s->current_frame_index + 1;
812  "EOF flush: processing frames %d..%d in queue (count=%d)\n",
813  flush_idx, s->queue_count - 1, s->queue_count);
814  }
815 
816  while (flush_idx < s->queue_count) {
817  input_frame = s->frame_queue[flush_idx];
818  if (input_frame) {
819  for (field = 0; field < s->field_rate; field++) {
820  ret = deint_d3d12_process_frame(ctx, outlink, input_frame, field, flush_idx);
821  if (ret < 0)
822  return ret;
823  }
824  s->prev_pts = input_frame->pts;
825  }
826  flush_idx++;
827  }
828  return AVERROR_EOF;
829  }
830  return AVERROR_EOF;
831  }
832 
833  if (!in->hw_frames_ctx) {
834  av_log(ctx, AV_LOG_ERROR, "No hardware frames context in input frame\n");
835  av_frame_free(&in);
836  return AVERROR(EINVAL);
837  }
838 
839  av_log(ctx, AV_LOG_DEBUG, "Input frame: %dx%d, pts=%"PRId64", interlaced=%d\n",
840  in->width, in->height, in->pts,
841  !!(in->flags & AV_FRAME_FLAG_INTERLACED));
842 
843  /* Initialize processor on first frame */
844  if (!s->processor_configured) {
846  AVD3D12VAFramesContext *input_hwctx = (AVD3D12VAFramesContext *)frames_ctx->hwctx;
847 
848  s->width = frames_ctx->width;
849  s->height = frames_ctx->height;
850  s->input_format = input_hwctx->format;
851 
852  if (s->input_format == DXGI_FORMAT_UNKNOWN) {
853  switch (frames_ctx->sw_format) {
854  case AV_PIX_FMT_NV12:
855  s->input_format = DXGI_FORMAT_NV12;
856  break;
857  case AV_PIX_FMT_P010:
858  s->input_format = DXGI_FORMAT_P010;
859  break;
860  default:
861  av_log(ctx, AV_LOG_ERROR, "Unsupported input format: %s\n",
862  av_get_pix_fmt_name(frames_ctx->sw_format));
863  av_frame_free(&in);
864  return AVERROR(EINVAL);
865  }
866  }
867 
868  int is_10bit = (s->input_format == DXGI_FORMAT_P010);
869  s->input_colorspace = get_dxgi_colorspace(in->colorspace, in->color_trc, is_10bit);
870  s->input_framerate = get_input_framerate(ctx, inlink, in);
871 
873  if (ret < 0) {
874  av_log(ctx, AV_LOG_ERROR, "Failed to configure processor\n");
875  av_frame_free(&in);
876  return ret;
877  }
878  }
879 
880  /* Auto mode: pass through progressive frames by processing them as-is */
881  if (s->auto_enable && !(in->flags & AV_FRAME_FLAG_INTERLACED)) {
882  av_log(ctx, AV_LOG_DEBUG, "Progressive frame, processing as pass-through\n");
883  ret = deint_d3d12_process_frame(ctx, outlink, in, 0, -1);
884  av_frame_free(&in);
885  return ret;
886  }
887 
888  /* Queue management and frame processing.
889  *
890  * For bob mode, the hardware typically needs no reference frames
891  * (past=0, future=0), so queue_depth=1 and every input frame is
892  * processed immediately -- simple frame-in, frame-out.
893  *
894  * For custom (driver-defined) mode, the hardware uses temporal
895  * reference frames for higher-quality motion-adaptive deinterlacing.
896  * The queue possible holds past, current, and future reference frames:
897  * The queue is managed in four phases:
898  * 1. Filling: buffer frames until queue reaches queue_depth
899  * 2. Initial fill: queue just became full, process ALL buffered
900  * frames (0..current_frame_index) with degraded references
901  * for the earliest frames
902  * 3. Steady state: slide queue forward by one position per input,
903  * process the frame at current_frame_index with full references
904  * 4. EOF flush: process remaining frames after current_frame_index,
905  * or all buffered frames if the queue never filled (short stream)
906  *
907  * When queue_depth=1 (bob mode), phases 1, 2, and 4 are effectively
908  * skipped, and only the steady-state path executes.
909  */
910 
911  if (s->queue_count < s->queue_depth) {
912  /* Phase 1: Filling - buffer incoming frames until we have enough
913  * past and future references to begin processing. */
914  s->frame_queue[s->queue_count++] = in;
915  if (s->queue_count < s->queue_depth)
916  return 0;
917 
918  /* Phase 2: Initial fill complete - process all frames from the
919  * start of the queue through current_frame_index. The first
920  * frames will have fewer past references (graceful degradation),
921  * but the D3D12 video processor handles partial reference sets. */
922  for (int i = 0; i <= s->current_frame_index; i++) {
923  input_frame = s->frame_queue[i];
924  if (!input_frame)
925  continue;
926  for (field = 0; field < s->field_rate; field++) {
927  ret = deint_d3d12_process_frame(ctx, outlink, input_frame, field, i);
928  if (ret < 0)
929  return ret;
930  }
931  s->prev_pts = input_frame->pts;
932  }
933  s->initial_fill_done = 1;
934  return ret;
935  }
936 
937  /* Phase 3: Steady state - slide the queue forward by removing the
938  * oldest frame and appending the new one at the end. The frame at
939  * current_frame_index always has full past and future references. */
940  av_frame_free(&s->frame_queue[0]);
941  for (int i = 0; i + 1 < s->queue_count; i++)
942  s->frame_queue[i] = s->frame_queue[i + 1];
943  s->frame_queue[s->queue_count - 1] = in;
944 
945  input_frame = s->frame_queue[s->current_frame_index];
946  if (!input_frame)
947  return 0;
948 
949  for (field = 0; field < s->field_rate; field++) {
950  ret = deint_d3d12_process_frame(ctx, outlink, input_frame, field, s->current_frame_index);
951  if (ret < 0)
952  break;
953  }
954 
955  s->prev_pts = input_frame->pts;
956 
957  return ret;
958 }
959 
961 {
 /* Output-pad request_frame handler.
  *
  * Pulls one frame from the upstream filter. When upstream signals
  * EOF while frames are still buffered in the reference queue, this
  * marks the context EOF and flushes the queue by calling
  * deint_d3d12_filter_frame() with a NULL input frame.
  *
  * NOTE(review): the function signature (original line 960) was
  * elided by the documentation extraction; per the symbol index it is
  * static int deint_d3d12_request_frame(AVFilterLink *link). */
962  AVFilterContext *ctx = link->src;
963  DeinterlaceD3D12Context *s = ctx->priv;
964  int ret;
965 
 /* Once EOF has been seen and the queue flushed, keep reporting EOF. */
966  if (s->eof)
967  return AVERROR_EOF;
968 
969  ret = ff_request_frame(ctx->inputs[0]);
970  if (ret == AVERROR_EOF && s->queue_count > 0) {
971  s->eof = 1;
972  /* Flush remaining frames in queue (future frames, extra delay,
973  * or short stream where queue never fully filled) */
974  return deint_d3d12_filter_frame(ctx->inputs[0], NULL);
975  }
976 
977  return ret;
978 }
979 
981 {
 /* Output-link configuration callback.
  *
  * Resets any previously cached hardware contexts, propagates the
  * input dimensions to the output, scales time_base/frame_rate by the
  * configured field_rate (2 = one output frame per field), and builds
  * a new D3D12 output hw_frames_ctx mirroring the input's sw_format.
  *
  * Returns 0 on success or a negative AVERROR code.
  *
  * NOTE(review): the extraction elided the signature (line 980), the
  * declaration of `inl` (line 985, presumably
  * FilterLink *inl = ff_filter_link(inlink);) and lines 989-990 --
  * confirm against the original source. */
982  AVFilterContext *ctx = outlink->src;
983  DeinterlaceD3D12Context *s = ctx->priv;
984  AVFilterLink *inlink = ctx->inputs[0];
986  FilterLink *outl = ff_filter_link(outlink);
987  int ret;
988 
991 
 /* Drop any contexts left over from a previous configuration so that
  * re-configuration starts from a clean state. */
992  av_buffer_unref(&s->hw_frames_ctx_out);
993  av_buffer_unref(&s->hw_device_ctx);
994 
 /* Force the D3D12 video processor to be rebuilt on the next frame. */
995  s->processor_configured = 0;
996 
997  outlink->w = inlink->w;
998  outlink->h = inlink->h;
999  s->width = inlink->w;
1000  s->height = inlink->h;
1001 
1002  /* Adjust time base and frame rate for field rate output */
1003  outlink->time_base = av_mul_q(inlink->time_base, (AVRational){ 1, s->field_rate });
1004  outl->frame_rate = av_mul_q(inl->frame_rate, (AVRational){ s->field_rate, 1 });
1005 
 /* A hardware filter cannot operate without an input frames context. */
1006  if (!inl->hw_frames_ctx) {
1007  av_log(ctx, AV_LOG_ERROR, "No hw_frames_ctx available on input link\n");
1008  return AVERROR(EINVAL);
1009  }
1010 
 /* Keep a reference to the device so output frames share it. */
1011  AVHWFramesContext *in_frames_ctx = (AVHWFramesContext *)inl->hw_frames_ctx->data;
1012  s->hw_device_ctx = av_buffer_ref(in_frames_ctx->device_ref);
1013  if (!s->hw_device_ctx) {
1014  av_log(ctx, AV_LOG_ERROR, "Failed to reference device context\n");
1015  return AVERROR(ENOMEM);
1016  }
1017 
1018  s->hw_frames_ctx_out = av_hwframe_ctx_alloc(s->hw_device_ctx);
1019  if (!s->hw_frames_ctx_out)
1020  return AVERROR(ENOMEM);
1021 
1022  AVHWFramesContext *frames_ctx = (AVHWFramesContext *)s->hw_frames_ctx_out->data;
1023 
 /* Output pool mirrors the input's software format at the (unscaled)
  * input dimensions; 10 surfaces plus any caller-requested extras. */
1024  frames_ctx->format = AV_PIX_FMT_D3D12;
1025  frames_ctx->sw_format = in_frames_ctx->sw_format;
1026  frames_ctx->width = s->width;
1027  frames_ctx->height = s->height;
1028  frames_ctx->initial_pool_size = 10;
1029 
1030  if (ctx->extra_hw_frames > 0)
1031  frames_ctx->initial_pool_size += ctx->extra_hw_frames;
1032 
1033  AVD3D12VAFramesContext *frames_hwctx = frames_ctx->hwctx;
1034  AVD3D12VAFramesContext *in_frames_hwctx = in_frames_ctx->hwctx;
1035 
 /* Output textures must be render targets so the video processor can
  * write into them. */
1036  frames_hwctx->format = in_frames_hwctx->format;
1037  frames_hwctx->resource_flags = D3D12_RESOURCE_FLAG_ALLOW_RENDER_TARGET;
1038  frames_hwctx->heap_flags = D3D12_HEAP_FLAG_NONE;
1039 
1040  ret = av_hwframe_ctx_init(s->hw_frames_ctx_out);
1041  if (ret < 0) {
1042  av_buffer_unref(&s->hw_frames_ctx_out);
1043  return ret;
1044  }
1045 
1046  outl->hw_frames_ctx = av_buffer_ref(s->hw_frames_ctx_out);
1047  if (!outl->hw_frames_ctx)
1048  return AVERROR(ENOMEM);
1049 
1050  av_log(ctx, AV_LOG_VERBOSE, "D3D12 deinterlace config: %dx%d, field_rate=%d\n",
1051  outlink->w, outlink->h, s->field_rate);
1052 
1053  return 0;
1054 }
1055 
1057 {
 /* Filter teardown: release the cached hardware frame/device context
  * references acquired in config_output.
  *
  * NOTE(review): original lines 1060-1061 were elided by the
  * extraction -- presumably the queue/resource cleanup calls
  * (deint_d3d12_clear_queue / release_d3d12_resources, both defined
  * in this file). Confirm against the original source. */
1058  DeinterlaceD3D12Context *s = ctx->priv;
1059 
1062 
1063  av_buffer_unref(&s->hw_frames_ctx_out);
1064  av_buffer_unref(&s->hw_device_ctx);
1065 }
1066 
1068  {
 /* Single video input pad; incoming frames are handled by
  * deint_d3d12_filter_frame(). (Array declaration, original line
  * 1067, was elided by the extraction.) */
1069  .name = "default",
1070  .type = AVMEDIA_TYPE_VIDEO,
1071  .filter_frame = deint_d3d12_filter_frame,
1072  },
1073 };
1074 
1076  {
 /* Single video output pad; downstream pulls via
  * deint_d3d12_request_frame(), link properties are set up in
  * deint_d3d12_config_output(). (Array declaration, original line
  * 1075, was elided by the extraction.) */
1077  .name = "default",
1078  .type = AVMEDIA_TYPE_VIDEO,
1079  .request_frame = deint_d3d12_request_frame,
1080  .config_props = deint_d3d12_config_output,
1081  },
1082 };
1083 
1084 #define OFFSET(x) offsetof(DeinterlaceD3D12Context, x)
1085 #define FLAGS (AV_OPT_FLAG_FILTERING_PARAM | AV_OPT_FLAG_VIDEO_PARAM)
1086 
1088  { "mode", "Deinterlacing mode",
 /* NOTE(review): the OFFSET/type/default portion of the "mode" option
  * (original lines 1089-1090) was elided by the extraction; it
  * presumably maps to OFFSET(mode) with the DeinterlaceD3D12Mode
  * constants below -- confirm against the original source. */
1091  { "default", "Use best available deinterlacing mode",
1092  0, AV_OPT_TYPE_CONST, { .i64 = DEINT_D3D12_MODE_DEFAULT }, 0, 0, FLAGS, .unit = "mode" },
1093  { "bob", "Bob deinterlacing (simple field interpolation)",
1094  0, AV_OPT_TYPE_CONST, { .i64 = DEINT_D3D12_MODE_BOB }, 0, 0, FLAGS, .unit = "mode" },
1095  { "custom", "Driver-defined advanced deinterlacing",
1096  0, AV_OPT_TYPE_CONST, { .i64 = DEINT_D3D12_MODE_CUSTOM }, 0, 0, FLAGS, .unit = "mode" },
1097 
 /* rate=1 emits one frame per field pair; rate=2 doubles the output
  * rate, one frame per field (see field_rate usage in config_output). */
1098  { "rate", "Generate output at frame rate or field rate",
1099  OFFSET(field_rate), AV_OPT_TYPE_INT, { .i64 = 1 }, 1, 2, FLAGS, .unit = "rate" },
1100  { "frame", "Output at frame rate (one frame for each field-pair)",
1101  0, AV_OPT_TYPE_CONST, { .i64 = 1 }, 0, 0, FLAGS, .unit = "rate" },
1102  { "field", "Output at field rate (one frame for each field)",
1103  0, AV_OPT_TYPE_CONST, { .i64 = 2 }, 0, 0, FLAGS, .unit = "rate" },
1104 
 /* When enabled, progressive frames bypass the video processor. */
1105  { "auto", "Only deinterlace interlaced frames, pass through progressive",
1106  OFFSET(auto_enable), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, FLAGS },
1107 
1108  { NULL }
1109 };
1110 
1111 AVFILTER_DEFINE_CLASS(deinterlace_d3d12);
1112 
 /* Public filter definition for the D3D12VA deinterlacer.
  * NOTE(review): the extraction elided the FFFilter declaration line
  * (original 1113) and lines 1120-1122 -- per the symbol index these
  * are the FILTER_INPUTS/FILTER_OUTPUTS pad registrations and a
  * FILTER_SINGLE_PIXFMT entry; confirm against the original source. */
1114  .p.name = "deinterlace_d3d12",
1115  .p.description = NULL_IF_CONFIG_SMALL("Deinterlacing using Direct3D12 Video Processor"),
1116  .priv_size = sizeof(DeinterlaceD3D12Context),
1117  .p.priv_class = &deinterlace_d3d12_class,
1118  .init = deint_d3d12_init,
1119  .uninit = deint_d3d12_uninit,
1123  .p.flags = AVFILTER_FLAG_HWDEVICE,
1124  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
1125 };
DeinterlaceD3D12Context::supported_deint_flags
D3D12_VIDEO_PROCESS_DEINTERLACE_FLAGS supported_deint_flags
Definition: vf_deinterlace_d3d12.c:81
DeinterlaceD3D12Context::command_queue
ID3D12CommandQueue * command_queue
Definition: vf_deinterlace_d3d12.c:58
AVFrame::color_trc
enum AVColorTransferCharacteristic color_trc
Definition: frame.h:682
deint_d3d12_request_frame
static int deint_d3d12_request_frame(AVFilterLink *link)
Definition: vf_deinterlace_d3d12.c:960
AVHWDeviceContext::hwctx
void * hwctx
The format-specific data, allocated and freed by libavutil along with this context.
Definition: hwcontext.h:88
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:216
DEINT_D3D12_MODE_DEFAULT
@ DEINT_D3D12_MODE_DEFAULT
Definition: vf_deinterlace_d3d12.c:41
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
AVColorTransferCharacteristic
AVColorTransferCharacteristic
Color Transfer Characteristic.
Definition: pixfmt.h:666
out
static FILE * out
Definition: movenc.c:55
DeinterlaceD3D12Context::width
int width
Definition: vf_deinterlace_d3d12.c:72
DEINT_D3D12_MODE_BOB
@ DEINT_D3D12_MODE_BOB
Definition: vf_deinterlace_d3d12.c:42
DeinterlaceD3D12Context::prev_pts
int64_t prev_pts
Definition: vf_deinterlace_d3d12.c:90
DeinterlaceD3D12Context::device
ID3D12Device * device
Definition: vf_deinterlace_d3d12.c:55
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1067
AVFrame::duration
int64_t duration
Duration of the frame, in the same units as pts.
Definition: frame.h:775
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:57
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:200
deint_d3d12_filter_frame
static int deint_d3d12_filter_frame(AVFilterLink *inlink, AVFrame *in)
Definition: vf_deinterlace_d3d12.c:782
DeinterlaceD3D12Context::hw_frames_ctx_out
AVBufferRef * hw_frames_ctx_out
Definition: vf_deinterlace_d3d12.c:69
int64_t
long long int64_t
Definition: coverity.c:34
inlink
The exact code depends on how similar the blocks are and how related they are to the and needs to apply these operations to the correct inlink or outlink if there are several Macros are available to factor that when no extra processing is inlink
Definition: filter_design.txt:212
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:64
AVFrame::colorspace
enum AVColorSpace colorspace
YUV colorspace type.
Definition: frame.h:689
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:337
FILTER_INPUTS
#define FILTER_INPUTS(array)
Definition: filters.h:264
mode
Definition: swscale.c:56
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:427
DeinterlaceD3D12Context::command_list
ID3D12VideoProcessCommandList * command_list
Definition: vf_deinterlace_d3d12.c:59
pixdesc.h
AVFrame::pts
int64_t pts
Presentation timestamp in time_base units (time when frame should be shown to user).
Definition: frame.h:529
AVFrame::width
int width
Definition: frame.h:499
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:263
AVOption
AVOption.
Definition: opt.h:429
ff_request_frame
int ff_request_frame(AVFilterLink *link)
Request an input frame from the filter at the other end of the link.
Definition: avfilter.c:483
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:226
DeinterlaceD3D12Context::fence_event
HANDLE fence_event
Definition: vf_deinterlace_d3d12.c:65
AVFrame::flags
int flags
Frame flags, a combination of AV_FRAME_FLAGS.
Definition: frame.h:671
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:220
FLAGS
#define FLAGS
Definition: vf_deinterlace_d3d12.c:1085
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:220
get_dxgi_colorspace
static DXGI_COLOR_SPACE_TYPE get_dxgi_colorspace(enum AVColorSpace colorspace, enum AVColorTransferCharacteristic trc, int is_10bit)
Definition: vf_deinterlace_d3d12.c:173
video.h
DeinterlaceD3D12Context::input_format
DXGI_FORMAT input_format
Definition: vf_deinterlace_d3d12.c:73
DeinterlaceD3D12Context::extra_delay_for_timestamps
int extra_delay_for_timestamps
Definition: vf_deinterlace_d3d12.c:94
AVCOL_SPC_BT2020_CL
@ AVCOL_SPC_BT2020_CL
ITU-R BT2020 constant luminance system.
Definition: pixfmt.h:712
deint_d3d12_process_frame
static int deint_d3d12_process_frame(AVFilterContext *ctx, AVFilterLink *outlink, AVFrame *input_frame, int field, int queue_idx)
Definition: vf_deinterlace_d3d12.c:526
DeinterlaceD3D12Context::current_frame_index
int current_frame_index
Definition: vf_deinterlace_d3d12.c:93
AVFrame::data
uint8_t * data[AV_NUM_DATA_POINTERS]
pointer to the picture/channel planes.
Definition: frame.h:448
AV_FRAME_FLAG_TOP_FIELD_FIRST
#define AV_FRAME_FLAG_TOP_FIELD_FIRST
A flag to mark frames where the top field is displayed first if the content is interlaced.
Definition: frame.h:655
AVCOL_SPC_BT470BG
@ AVCOL_SPC_BT470BG
also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601
Definition: pixfmt.h:706
DeinterlaceD3D12Mode
DeinterlaceD3D12Mode
Deinterlace mode enumeration Maps to D3D12_VIDEO_PROCESS_DEINTERLACE_FLAG values.
Definition: vf_deinterlace_d3d12.c:40
fail
#define fail()
Definition: checkasm.h:219
get_input_framerate
static AVRational get_input_framerate(AVFilterContext *ctx, AVFilterLink *inlink, AVFrame *in)
Definition: vf_deinterlace_d3d12.c:203
av_reduce
int av_reduce(int *dst_num, int *dst_den, int64_t num, int64_t den, int64_t max)
Reduce a fraction.
Definition: rational.c:35
AVFilterPad
A filter pad used for either input or output.
Definition: filters.h:40
DeinterlaceD3D12Context::num_future_frames
int num_future_frames
Definition: vf_deinterlace_d3d12.c:92
DeinterlaceD3D12Context::video_processor
ID3D12VideoProcessor * video_processor
Definition: vf_deinterlace_d3d12.c:57
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:63
DeinterlaceD3D12Context::command_allocator
ID3D12CommandAllocator * command_allocator
Definition: vf_deinterlace_d3d12.c:60
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:52
DeinterlaceD3D12Context::mode
int mode
Definition: vf_deinterlace_d3d12.c:50
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:210
av_cold
#define av_cold
Definition: attributes.h:106
AVHWFramesContext::height
int height
Definition: hwcontext.h:220
FFFilter
Definition: filters.h:267
DeinterlaceD3D12Context::queue_count
int queue_count
Definition: vf_deinterlace_d3d12.c:85
s
#define s(width, name)
Definition: cbs_vp9.c:198
AVCOL_SPC_SMPTE170M
@ AVCOL_SPC_SMPTE170M
also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
Definition: pixfmt.h:707
AVD3D12VAFrame::sync_ctx
AVD3D12VASyncContext sync_ctx
The sync context for the texture.
Definition: hwcontext_d3d12va.h:159
filters.h
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:231
ctx
static AVFormatContext * ctx
Definition: movenc.c:49
DeinterlaceD3D12Context::classCtx
const AVClass * classCtx
Definition: vf_deinterlace_d3d12.c:47
get_deint_mode
static D3D12_VIDEO_PROCESS_DEINTERLACE_FLAGS get_deint_mode(DeinterlaceD3D12Context *s, AVFilterContext *ctx)
Definition: vf_deinterlace_d3d12.c:223
field
it s the only field you need to keep assuming you have a context There is some magic you don t need to care about around this field
Definition: writing_filters.txt:78
OFFSET
#define OFFSET(x)
Definition: vf_deinterlace_d3d12.c:1084
AVD3D12VASyncContext::fence
ID3D12Fence * fence
D3D12 fence object.
Definition: hwcontext_d3d12va.h:108
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: filters.h:265
DeinterlaceD3D12Context::processor_configured
int processor_configured
Definition: vf_deinterlace_d3d12.c:98
link
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
Definition: filter_design.txt:23
if
if(ret)
Definition: filter_design.txt:179
DeinterlaceD3D12Context::hw_device_ctx
AVBufferRef * hw_device_ctx
Definition: vf_deinterlace_d3d12.c:68
framerate
float framerate
Definition: av1_levels.c:29
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:76
NULL
#define NULL
Definition: coverity.c:32
ff_vf_deinterlace_d3d12
const FFFilter ff_vf_deinterlace_d3d12
Definition: vf_deinterlace_d3d12.c:1113
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:213
av_frame_copy_props
int av_frame_copy_props(AVFrame *dst, const AVFrame *src)
Copy only "metadata" fields from src to dst.
Definition: frame.c:599
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AVD3D12VAFramesContext::heap_flags
D3D12_HEAP_FLAGS heap_flags
Options for working with heaps allocation when creating resources.
Definition: hwcontext_d3d12va.h:193
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
AVHWFramesContext::device_ref
AVBufferRef * device_ref
A reference to the parent AVHWDeviceContext.
Definition: hwcontext.h:129
DeinterlaceD3D12Context::eof
int eof
Definition: vf_deinterlace_d3d12.c:89
DeinterlaceD3D12Context::field_rate
int field_rate
Definition: vf_deinterlace_d3d12.c:51
DeinterlaceD3D12Context::fence_value
UINT64 fence_value
Definition: vf_deinterlace_d3d12.c:64
AVD3D12VAFramesContext
This struct is allocated as AVHWFramesContext.hwctx.
Definition: hwcontext_d3d12va.h:172
AV_PIX_FMT_D3D12
@ AV_PIX_FMT_D3D12
Hardware surfaces for Direct3D 12.
Definition: pixfmt.h:440
DeinterlaceD3D12Context::fence
ID3D12Fence * fence
Definition: vf_deinterlace_d3d12.c:63
AVD3D12VAFrame::texture
ID3D12Resource * texture
The texture in which the frame is located.
Definition: hwcontext_d3d12va.h:144
hwcontext_d3d12va.h
AVD3D12VAFramesContext::resource_flags
D3D12_RESOURCE_FLAGS resource_flags
Options for working with resources.
Definition: hwcontext_d3d12va.h:185
ff_filter_link
static FilterLink * ff_filter_link(AVFilterLink *link)
Definition: filters.h:199
DeinterlaceD3D12Context::height
int height
Definition: vf_deinterlace_d3d12.c:72
FF_FILTER_FLAG_HWFRAME_AWARE
#define FF_FILTER_FLAG_HWFRAME_AWARE
The filter is aware of hardware frames, and any hardware frame context should not be automatically pr...
Definition: filters.h:208
DeinterlaceD3D12Context::auto_enable
int auto_enable
Definition: vf_deinterlace_d3d12.c:52
add_resource_barrier
static void add_resource_barrier(D3D12_RESOURCE_BARRIER *barriers, int *count, ID3D12Resource *resource, D3D12_RESOURCE_STATES before, D3D12_RESOURCE_STATES after)
Definition: vf_deinterlace_d3d12.c:509
AVCOL_TRC_SMPTE2084
@ AVCOL_TRC_SMPTE2084
SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems.
Definition: pixfmt.h:683
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:94
deint_d3d12_clear_queue
static void deint_d3d12_clear_queue(DeinterlaceD3D12Context *s)
Definition: vf_deinterlace_d3d12.c:165
i
#define i(width, name, range_min, range_max)
Definition: cbs_h264.c:63
AVFILTER_FLAG_HWDEVICE
#define AVFILTER_FLAG_HWDEVICE
The filter can create hardware frames using AVFilterContext.hw_device_ctx.
Definition: avfilter.h:188
DeinterlaceD3D12Context::video_device
ID3D12VideoDevice * video_device
Definition: vf_deinterlace_d3d12.c:56
AVERROR_EXTERNAL
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:59
AVD3D12VAFrame
D3D12VA frame descriptor for pool allocation.
Definition: hwcontext_d3d12va.h:138
deint_d3d12_config_output
static int deint_d3d12_config_output(AVFilterLink *outlink)
Definition: vf_deinterlace_d3d12.c:980
AVD3D12VADeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_d3d12va.h:43
DeinterlaceD3D12Context::num_past_frames
int num_past_frames
Definition: vf_deinterlace_d3d12.c:91
AVCOL_SPC_BT2020_NCL
@ AVCOL_SPC_BT2020_NCL
ITU-R BT2020 non-constant luminance system.
Definition: pixfmt.h:711
DeinterlaceD3D12Context::queue_depth
int queue_depth
Definition: vf_deinterlace_d3d12.c:84
AVColorSpace
AVColorSpace
YUV colorspace type.
Definition: pixfmt.h:700
MAX_REFERENCES
#define MAX_REFERENCES
Definition: vf_deinterlace_d3d12.c:34
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(deinterlace_d3d12)
AVFilterPad::name
const char * name
Pad name.
Definition: filters.h:46
DeinterlaceD3D12Context
Definition: vf_deinterlace_d3d12.c:46
AV_FRAME_FLAG_INTERLACED
#define AV_FRAME_FLAG_INTERLACED
A flag to mark frames whose content is interlaced.
Definition: frame.h:650
deint_d3d12_uninit
static av_cold void deint_d3d12_uninit(AVFilterContext *ctx)
Definition: vf_deinterlace_d3d12.c:1056
AVD3D12VAFramesContext::format
DXGI_FORMAT format
DXGI_FORMAT format.
Definition: hwcontext_d3d12va.h:177
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:118
ret
ret
Definition: filter_design.txt:187
AV_PIX_FMT_NV12
@ AV_PIX_FMT_NV12
planar YUV 4:2:0, 12bpp, 1 plane for Y and 1 plane for the UV components, which are interleaved (firs...
Definition: pixfmt.h:96
FFSWAP
#define FFSWAP(type, a, b)
Definition: macros.h:52
AVHWFramesContext::hwctx
void * hwctx
The format-specific data, allocated and freed automatically along with this context.
Definition: hwcontext.h:153
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:724
AVFrame::height
int height
Definition: frame.h:499
AVCOL_TRC_ARIB_STD_B67
@ AVCOL_TRC_ARIB_STD_B67
ARIB STD-B67, known as "Hybrid log-gamma".
Definition: pixfmt.h:687
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Underlying C type is int.
Definition: opt.h:259
Windows::Graphics::DirectX::Direct3D11::p
IDirect3DDxgiInterfaceAccess _COM_Outptr_ void ** p
Definition: vsrc_gfxcapture_winrt.hpp:53
av_mul_q
AVRational av_mul_q(AVRational b, AVRational c)
Multiply two rationals.
Definition: rational.c:80
AVFilterContext
An instance of a filter.
Definition: avfilter.h:274
AV_PIX_FMT_P010
#define AV_PIX_FMT_P010
Definition: pixfmt.h:602
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:190
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:200
FFFilter::p
AVFilter p
The public AVFilter.
Definition: filters.h:271
DeinterlaceD3D12Context::initial_fill_done
int initial_fill_done
Definition: vf_deinterlace_d3d12.c:95
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
DEINT_D3D12_MODE_CUSTOM
@ DEINT_D3D12_MODE_CUSTOM
Definition: vf_deinterlace_d3d12.c:43
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Underlying C type is int.
Definition: opt.h:327
deint_d3d12_inputs
static const AVFilterPad deint_d3d12_inputs[]
Definition: vf_deinterlace_d3d12.c:1067
WaitForSingleObject
#define WaitForSingleObject(a, b)
Definition: w32pthreads.h:64
hwcontext.h
DeinterlaceD3D12Context::input_colorspace
DXGI_COLOR_SPACE_TYPE input_colorspace
Definition: vf_deinterlace_d3d12.c:76
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
deint_d3d12_init
static av_cold int deint_d3d12_init(AVFilterContext *ctx)
Definition: vf_deinterlace_d3d12.c:101
AVD3D12VASyncContext::fence_value
uint64_t fence_value
The fence value used for sync.
Definition: hwcontext_d3d12va.h:119
DeinterlaceD3D12Context::frame_queue
AVFrame * frame_queue[MAX_REFERENCES]
Definition: vf_deinterlace_d3d12.c:86
av_hwframe_get_buffer
int av_hwframe_get_buffer(AVBufferRef *hwframe_ref, AVFrame *frame, int flags)
Allocate a new frame attached to the given AVHWFramesContext.
Definition: hwcontext.c:506
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Special option type for declaring named constants.
Definition: opt.h:299
FILTER_SINGLE_PIXFMT
#define FILTER_SINGLE_PIXFMT(pix_fmt_)
Definition: filters.h:254
deint_d3d12_outputs
static const AVFilterPad deint_d3d12_outputs[]
Definition: vf_deinterlace_d3d12.c:1075
DeinterlaceD3D12Context::process_support
D3D12_FEATURE_DATA_VIDEO_PROCESS_SUPPORT process_support
Definition: vf_deinterlace_d3d12.c:80
av_get_pix_fmt_name
const char * av_get_pix_fmt_name(enum AVPixelFormat pix_fmt)
Return the short name for a pixel format, NULL in case pix_fmt is unknown.
Definition: pixdesc.c:3376
deinterlace_d3d12_options
static const AVOption deinterlace_d3d12_options[]
Definition: vf_deinterlace_d3d12.c:1087
deint_d3d12_configure_processor
static int deint_d3d12_configure_processor(DeinterlaceD3D12Context *s, AVFilterContext *ctx, AVFrame *in)
Definition: vf_deinterlace_d3d12.c:261
release_d3d12_resources
static void release_d3d12_resources(DeinterlaceD3D12Context *s)
Definition: vf_deinterlace_d3d12.c:109
DeinterlaceD3D12Context::input_framerate
AVRational input_framerate
Definition: vf_deinterlace_d3d12.c:77