framesync.h
/*
 * Copyright (c) 2013 Nicolas George
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with FFmpeg; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#ifndef AVFILTER_FRAMESYNC_H
#define AVFILTER_FRAMESYNC_H

#include "bufferqueue.h"

enum EOFAction {
    EOF_ACTION_REPEAT,
    EOF_ACTION_ENDALL,
    EOF_ACTION_PASS
};

/*
 * TODO
 * Export convenient options.
 */

/**
 * This API is intended as a helper for filters that have several video
 * inputs and need to combine them somehow. If the inputs have different or
 * variable frame rates, getting the input frames to match requires rather
 * complex logic and a few user-tunable options.
 *
 * In this API, the moment when a set of synchronized input frames is ready
 * to be processed is called a frame event. Frame events can be generated in
 * response to input frames on any or all inputs, and the handling of
 * situations where some streams extend beyond the beginning or the end of
 * others can be configured.
 *
 * The basic working of this API is the following: set the on_event
 * callback, then call ff_framesync_activate() from the filter's activate
 * callback.
 */
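
/*
 * A minimal wiring sketch of the basic working described above; FooContext,
 * its "fs" field and the callback names are placeholders for a filter's own
 * private context, not identifiers defined by this header:
 *
 *     typedef struct FooContext {
 *         const AVClass *class;
 *         FFFrameSync fs;
 *     } FooContext;
 *
 *     static int activate(AVFilterContext *ctx)
 *     {
 *         FooContext *s = ctx->priv;
 *         // Drives the input queues and calls s->fs.on_event whenever a
 *         // synchronized set of input frames is ready.
 *         return ff_framesync_activate(&s->fs);
 *     }
 *
 *     static av_cold void uninit(AVFilterContext *ctx)
 *     {
 *         FooContext *s = ctx->priv;
 *         ff_framesync_uninit(&s->fs);
 *     }
 *
 * The on_event callback itself and the per-input configuration are sketched
 * after the FFFrameSyncIn and FFFrameSync structures below.
 */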

/**
 * Stream extrapolation mode
 *
 * Describe how the frames of a stream are extrapolated before the first one
 * and after EOF to keep sync with possibly longer other streams.
 */
enum FFFrameSyncExtMode {

    /**
     * Completely stop all streams with this one.
     */
    EXT_STOP,

    /**
     * Ignore this stream and continue processing the other ones.
     */
    EXT_NULL,

    /**
     * Extend the frame to infinity.
     */
    EXT_INFINITY,
};

/**
 * Input stream structure
 */
typedef struct FFFrameSyncIn {

    /**
     * Extrapolation mode for timestamps before the first frame
     */
    enum FFFrameSyncExtMode before;

    /**
     * Extrapolation mode for timestamps after the last frame
     */
    enum FFFrameSyncExtMode after;

    /**
     * Time base for the incoming frames
     */
    AVRational time_base;

    /**
     * Current frame, may be NULL before the first one or after EOF
     */
    AVFrame *frame;

    /**
     * Next frame, for internal use
     */
    AVFrame *frame_next;

    /**
     * PTS of the current frame
     */
    int64_t pts;

    /**
     * PTS of the next frame, for internal use
     */
    int64_t pts_next;

    /**
     * Boolean flagging the next frame, for internal use
     */
    uint8_t have_next;

    /**
     * State: before first, in stream or after EOF, for internal use
     */
    uint8_t state;

    /**
     * Synchronization level: frames on input at the highest sync level will
     * generate output frame events.
     *
     * For example, if inputs #0 and #1 have sync level 2 and input #2 has
     * sync level 1, then a frame on either input #0 or #1 will generate a
     * frame event, but not a frame on input #2 until both inputs #0 and #1
     * have reached EOF.
     *
     * If sync is 0, no frame event will be generated.
     * See the configuration sketch after this structure.
     */
    unsigned sync;

} FFFrameSyncIn;
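
/*
 * A configuration sketch for an N-input filter, showing how the
 * extrapolation modes and sync levels above are typically filled in.
 * FooContext, process_frame and config_output are placeholder names;
 * the pattern assumes the filter's private context embeds "FFFrameSync fs":
 *
 *     static int config_output(AVFilterLink *outlink)
 *     {
 *         AVFilterContext *ctx = outlink->src;
 *         FooContext *s = ctx->priv;
 *         FFFrameSyncIn *in;
 *         int ret;
 *
 *         if ((ret = ff_framesync_init(&s->fs, ctx, ctx->nb_inputs)) < 0)
 *             return ret;
 *
 *         in = s->fs.in;
 *         for (unsigned i = 0; i < ctx->nb_inputs; i++) {
 *             in[i].time_base = ctx->inputs[i]->time_base;
 *             in[i].sync      = 1;            // every input is a sync source
 *             in[i].before    = EXT_STOP;     // no events until all inputs started
 *             in[i].after     = EXT_INFINITY; // keep extending finished inputs
 *         }
 *
 *         s->fs.opaque   = s;             // made available to the callback
 *         s->fs.on_event = process_frame; // placeholder event handler
 *
 *         if ((ret = ff_framesync_configure(&s->fs)) < 0)
 *             return ret;
 *
 *         outlink->time_base = s->fs.time_base;
 *         return 0;
 *     }
 */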

/**
 * Frame sync structure.
 */
typedef struct FFFrameSync {
    const AVClass *class;

    /**
     * Parent filter context.
     */
    AVFilterContext *parent;

    /**
     * Number of input streams
     */
    unsigned nb_in;

    /**
     * Time base for the output events
     */
    AVRational time_base;

    /**
     * Timestamp of the current event
     */
    int64_t pts;

    /**
     * Callback called when a frame event is ready
     */
    int (*on_event)(struct FFFrameSync *fs);

    /**
     * Opaque pointer, not used by the API
     */
    void *opaque;

    /**
     * Index of the input that requires a request
     */
    unsigned in_request;

    /**
     * Synchronization level: only inputs with the same sync level are sync
     * sources.
     */
    unsigned sync_level;

    /**
     * Flag indicating that a frame event is ready
     */
    uint8_t frame_ready;

    /**
     * Flag indicating that output has reached EOF.
     */
    uint8_t eof;

    /**
     * Pointer to array of inputs.
     */
    FFFrameSyncIn *in;

    int opt_repeatlast;
    int opt_shortest;
    int opt_eof_action;

} FFFrameSync;
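
/*
 * A sketch of an on_event callback using the fields above; FooContext and
 * process_frame are the same placeholders as in the earlier sketches:
 *
 *     static int process_frame(FFFrameSync *fs)
 *     {
 *         AVFilterContext *ctx  = fs->parent;   // filter owning this framesync
 *         AVFilterLink *outlink = ctx->outputs[0];
 *         FooContext *s         = fs->opaque;   // pointer stored at setup time
 *         AVFrame *out;
 *         int ret;
 *
 *         // Take ownership of the current frame on input 0 and pass it on.
 *         if ((ret = ff_framesync_get_frame(fs, 0, &out, 1)) < 0)
 *             return ret;
 *         if (!out)      // possible before the first frame or after EOF,
 *             return 0;  // depending on the before/after extrapolation modes
 *
 *         // fs->pts is expressed in fs->time_base; rescale it for the output.
 *         out->pts = av_rescale_q(fs->pts, fs->time_base, outlink->time_base);
 *         return ff_filter_frame(outlink, out);
 *     }
 */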

/**
 * Get the class for the framesync object.
 */
const AVClass *framesync_get_class(void);

/**
 * Pre-initialize a frame sync structure.
 *
 * It sets the class pointer and inits the options to their default values.
 * The entire structure is expected to be already set to 0.
 * This step is optional, but necessary to use the options.
 */
void ff_framesync_preinit(FFFrameSync *fs);

/**
 * Initialize a frame sync structure.
 *
 * The entire structure is expected to be already set to 0 or preinited.
 *
 * @param fs     frame sync structure to initialize
 * @param parent parent AVFilterContext object
 * @param nb_in  number of inputs
 * @return >= 0 for success or a negative error code
 */
int ff_framesync_init(FFFrameSync *fs, AVFilterContext *parent, unsigned nb_in);

/**
 * Configure a frame sync structure.
 *
 * Must be called after all options are set but before all use.
 *
 * @return >= 0 for success or a negative error code
 */
int ff_framesync_configure(FFFrameSync *fs);

/**
 * Free all memory currently allocated.
 */
void ff_framesync_uninit(FFFrameSync *fs);

/**
 * Get the current frame in an input.
 *
 * @param fs     frame sync structure
 * @param in     index of the input
 * @param rframe used to return the current frame (or NULL)
 * @param get    if not zero, the calling code needs to get ownership of
 *               the returned frame; the current frame will either be
 *               duplicated or removed from the framesync structure
 */
int ff_framesync_get_frame(FFFrameSync *fs, unsigned in, AVFrame **rframe,
                           unsigned get);
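
/*
 * Inside an on_event callback of a two-input filter, the current frames can
 * be borrowed without taking ownership (get = 0); a fragment sketch:
 *
 *     AVFrame *first, *second;
 *     int ret;
 *
 *     if ((ret = ff_framesync_get_frame(fs, 0, &first,  0)) < 0 ||
 *         (ret = ff_framesync_get_frame(fs, 1, &second, 0)) < 0)
 *         return ret;
 *     // Either pointer may be NULL depending on the extrapolation modes;
 *     // the frames stay owned by fs and must not be freed by the caller.
 *     // Pass get = 1 instead when a frame will be modified or forwarded.
 */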

/**
 * Examine the frames in the filter's input and try to produce output.
 *
 * This function can be the complete implementation of the activate
 * method of a filter using framesync.
 */
int ff_framesync_activate(FFFrameSync *fs);

/**
 * Initialize a frame sync structure for dualinput.
 *
 * Compared to generic framesync, dualinput assumes the first input is the
 * main one and the filtering is performed on it. The first input will be
 * the only one with sync set and generic timeline support will just pass it
 * unchanged when disabled.
 *
 * Equivalent to ff_framesync_init(fs, parent, 2) then setting the time
 * base, sync and ext modes on the inputs.
 */
int ff_framesync_init_dualinput(FFFrameSync *fs, AVFilterContext *parent);

/**
 * Get the frames of the current frame event for a dualinput filter.
 *
 * @param f0 used to return the main frame
 * @param f1 used to return the second frame, or NULL if disabled
 * @return >= 0 for success or AVERROR code
 * @note The frame returned in f0 belongs to the caller (get = 1 in
 * ff_framesync_get_frame()) while the frame returned in f1 is still owned
 * by the framesync structure.
 */
int ff_framesync_dualinput_get(FFFrameSync *fs, AVFrame **f0, AVFrame **f1);

/**
 * Same as ff_framesync_dualinput_get(), but make sure that f0 is writable.
 */
int ff_framesync_dualinput_get_writable(FFFrameSync *fs, AVFrame **f0, AVFrame **f1);
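
/*
 * A dualinput sketch in the style of overlay-like filters; do_blend,
 * config_output and FooContext are placeholder names:
 *
 *     static int do_blend(FFFrameSync *fs)
 *     {
 *         AVFilterContext *ctx = fs->parent;
 *         AVFrame *mainpic, *second;
 *         int ret;
 *
 *         ret = ff_framesync_dualinput_get_writable(fs, &mainpic, &second);
 *         if (ret < 0)
 *             return ret;
 *         if (second) {
 *             // blend the second input onto mainpic here
 *         }
 *         return ff_filter_frame(ctx->outputs[0], mainpic);
 *     }
 *
 *     static int config_output(AVFilterLink *outlink)
 *     {
 *         AVFilterContext *ctx = outlink->src;
 *         FooContext *s = ctx->priv;
 *         int ret;
 *
 *         if ((ret = ff_framesync_init_dualinput(&s->fs, ctx)) < 0)
 *             return ret;
 *         outlink->time_base = ctx->inputs[0]->time_base;
 *         s->fs.on_event = do_blend;
 *         return ff_framesync_configure(&s->fs);
 *     }
 */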

#define FRAMESYNC_DEFINE_CLASS(name, context, field) \
static int name##_framesync_preinit(AVFilterContext *ctx) { \
    context *s = ctx->priv; \
    ff_framesync_preinit(&s->field); \
    return 0; \
} \
static const AVClass *name##_child_class_next(const AVClass *prev) { \
    return prev ? NULL : framesync_get_class(); \
} \
static void *name##_child_next(void *obj, void *prev) { \
    context *s = obj; \
    s->fs.class = framesync_get_class(); /* FIXME */ \
    return prev ? NULL : &s->field; \
} \
static const AVClass name##_class = { \
    .class_name       = #name, \
    .item_name        = av_default_item_name, \
    .option           = name##_options, \
    .version          = LIBAVUTIL_VERSION_INT, \
    .category         = AV_CLASS_CATEGORY_FILTER, \
    .child_class_next = name##_child_class_next, \
    .child_next       = name##_child_next, \
}
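
/*
 * Typical use of the macro, assuming a hypothetical "foo" filter whose
 * FooContext embeds an FFFrameSync field named "fs" and which defines an
 * AVOption array named foo_options:
 *
 *     FRAMESYNC_DEFINE_CLASS(foo, FooContext, fs);
 *
 *     AVFilter ff_vf_foo = {
 *         .name       = "foo",
 *         .priv_size  = sizeof(FooContext),
 *         .priv_class = &foo_class,            // generated by the macro
 *         .preinit    = foo_framesync_preinit, // generated by the macro
 *         .activate   = activate,
 *         // inputs, outputs, init, uninit, ...
 *     };
 */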

#endif /* AVFILTER_FRAMESYNC_H */