FFmpeg
vsrc_amf.c
Go to the documentation of this file.
1 /*
2  * This file is part of FFmpeg.
3  *
4  * FFmpeg is free software; you can redistribute it and/or
5  * modify it under the terms of the GNU Lesser General Public
6  * License as published by the Free Software Foundation; either
7  * version 2.1 of the License, or (at your option) any later version.
8  *
9  * FFmpeg is distributed in the hope that it will be useful,
10  * but WITHOUT ANY WARRANTY; without even the implied warranty of
11  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12  * Lesser General Public License for more details.
13  *
14  * You should have received a copy of the GNU Lesser General Public
15  * License along with FFmpeg; if not, write to the Free Software
16  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17  */
18 
#include "config.h"

#include "libavutil/pixdesc.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/time.h"
#include "libavutil/avstring.h"
#include "libavutil/avassert.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_amf.h"
#include "libavutil/hwcontext_amf_internal.h"
#include "compat/w32dlfcn.h"
#include "avfilter.h"
#include "filters.h"
#include "video.h"
34 
35 #include <AMF/core/Factory.h>
36 #include <AMF/core/Surface.h>
37 #include <AMF/components/ColorSpace.h>
38 #include <AMF/components/DisplayCapture.h>
39 
40 typedef struct AMFGrabContext {
42 
45  amf_bool duplicate_output;
47 
49 
50  AMFComponent *capture;
51  amf_bool eof;
52  AMF_SURFACE_FORMAT format;
53  void *winmmdll;
54  amf_uint32 timerPrecision;
56 
// Helpers for the option table below: field offset within AMFGrabContext,
// and the flag set common to all options.
#define OFFSET(x) offsetof(AMFGrabContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
59 
// User-visible options of the amf_capture source.
static const AVOption amf_capture_options[] = {
    { "monitor_index", "Index of display monitor to capture", OFFSET(monitor_index), AV_OPT_TYPE_INT, {.i64 = 0}, 0, 8, FLAGS },
    { "framerate", "Capture framerate", OFFSET(framerate), AV_OPT_TYPE_VIDEO_RATE, {.str = "60"}, 0, INT_MAX, FLAGS },
    { "duplicate_output", "Use display output duplication for screen capture", OFFSET(duplicate_output), AV_OPT_TYPE_BOOL, {.i64 = 1}, 0, 1, FLAGS },

    // Synchronization mode, with named constants in the "mode" unit.
    { "capture_mode", "Capture synchronization mode", OFFSET(capture_mode), AV_OPT_TYPE_INT, {.i64 = AMF_DISPLAYCAPTURE_MODE_KEEP_FRAMERATE}, 0, 2, FLAGS, "mode" },
    { "keep_framerate", "Capture component maintains the frame rate", 0, AV_OPT_TYPE_CONST, {.i64 = AMF_DISPLAYCAPTURE_MODE_KEEP_FRAMERATE}, 0, 0, FLAGS, "mode" },
    { "wait_for_present", "Capture component waits for flip (present) event", 0, AV_OPT_TYPE_CONST, {.i64 = AMF_DISPLAYCAPTURE_MODE_WAIT_FOR_PRESENT}, 0, 0, FLAGS, "mode" },
    { "get_current", "Returns current visible surface immediately", 0, AV_OPT_TYPE_CONST, {.i64 = AMF_DISPLAYCAPTURE_MODE_GET_CURRENT_SURFACE}, 0, 0, FLAGS, "mode" },
    { NULL }
};
71 
72 AVFILTER_DEFINE_CLASS(amf_capture);
73 
74 // need to increase precision for capture timing accuracy
75 #if defined (_WIN32)
76 
77 #include <timeapi.h>
78 
79 typedef WINMMAPI MMRESULT (WINAPI *timeBeginPeriod_fn)( UINT uPeriod);
80 typedef WINMMAPI MMRESULT (WINAPI *timeEndPeriod_fn)(UINT uPeriod);
81 
82 static void amf_increase_timer_precision(AMFGrabContext *ctx)
83 {
84  ctx->winmmdll = dlopen("Winmm.dll", 0);
85  if(ctx->winmmdll){
86  timeBeginPeriod_fn fn = (timeBeginPeriod_fn)dlsym(ctx->winmmdll, "timeBeginPeriod");
87  if(fn){
88  ctx->timerPrecision = 1;
89  while (fn(ctx->timerPrecision) == TIMERR_NOCANDO)
90  {
91  ++ctx->timerPrecision;
92  }
93  }
94  }
95 }
96 static void amf_restore_timer_precision(AMFGrabContext *ctx)
97 {
98  if(ctx->winmmdll){
99  timeEndPeriod_fn fn = (timeEndPeriod_fn)dlsym(ctx->winmmdll, "timeEndPeriod");
100  if(fn)
101  fn(ctx->timerPrecision);
102  dlclose(ctx->winmmdll);
103  ctx->winmmdll = 0;
104  }
105 }
106 #endif
107 
108 static void amf_release_surface(void *opaque, uint8_t *data)
109 {
110  if(!!data){
111  AMFInterface *surface = (AMFInterface*)(data);
112  if (surface && surface->pVtbl)
113  surface->pVtbl->Release(surface);
114  }
115 }
116 
117 static av_cold void amf_uninit(AVFilterContext *avctx)
118 {
119  AMFGrabContext *ctx = avctx->priv;
120 
121  if (ctx->capture) {
122  ctx->capture->pVtbl->Drain(ctx->capture);
123  ctx->capture->pVtbl->Terminate(ctx->capture);
124  ctx->capture->pVtbl->Release(ctx->capture);
125  ctx->capture = NULL;
126  }
127 
128  av_buffer_unref(&ctx->device_ctx_ref);
129 #if defined (_WIN32)
130  amf_restore_timer_precision(ctx);
131 #endif
132 }
133 
134 static av_cold int amf_init(AVFilterContext *avctx)
135 {
136  AMFGrabContext *ctx = avctx->priv;
137 #if defined (_WIN32)
138  amf_increase_timer_precision(ctx);
139 #endif
140  ctx->eof = 0;
141  av_log(avctx, AV_LOG_VERBOSE, "Initializing AMF screen capture\n");
142 
143  return 0;
144 }
145 
// Create and configure the AMF DisplayCapture component for the output link:
// set monitor index, duplication mode, framerate and capture mode, initialize
// the component, then query the actual capture resolution/format and set the
// link's dimensions, time base and frame rate accordingly.
// Returns 0 on success or a negative AVERROR code.
static int amf_init_vsrc(AVFilterLink *outlink)
{
    FilterLink *link = ff_filter_link(outlink);
    AVFilterContext *avctx = outlink->src;
    AMFGrabContext *ctx = avctx->priv;
    AVHWDeviceContext *hw_device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
    AVAMFDeviceContext *amf_device_ctx = (AVAMFDeviceContext*)hw_device_ctx->hwctx;
    AMF_RESULT res;
    AMFRate framerate;
    AMFVariantStruct var = {0};
    AMFSize resolution = {0};

    res = amf_device_ctx->factory->pVtbl->CreateComponent(amf_device_ctx->factory,
                                                          amf_device_ctx->context,
                                                          AMFDisplayCapture,
                                                          &ctx->capture);
    AMF_RETURN_IF_FALSE(ctx, res == AMF_OK, AVERROR_FILTER_NOT_FOUND, "CreateComponent(%ls) failed with error %d\n", AMFDisplayCapture, res);

    // Select which physical monitor the component captures.
    AMF_ASSIGN_PROPERTY_INT64(res, ctx->capture, AMF_DISPLAYCAPTURE_MONITOR_INDEX, ctx->monitor_index);
    if (res != AMF_OK) {
        av_log(avctx, AV_LOG_ERROR, "Failed to set monitor index: %d\n", res);
        return AVERROR_EXTERNAL;
    }

    // Fall back to 30 fps if the configured rate is not strictly positive.
    if (ctx->framerate.num > 0 && ctx->framerate.den > 0)
        framerate = AMFConstructRate(ctx->framerate.num, ctx->framerate.den);
    else
        framerate = AMFConstructRate(30, 1);

    AMF_ASSIGN_PROPERTY_BOOL(res, ctx->capture, AMF_DISPLAYCAPTURE_DUPLICATEOUTPUT, ctx->duplicate_output);
    if (res != AMF_OK) {
        av_log(avctx, AV_LOG_ERROR, "Failed to set AMF_DISPLAYCAPTURE_DUPLICATEOUTPUT: %d\n", res);
        return AVERROR_EXTERNAL;
    }

    AMF_ASSIGN_PROPERTY_RATE(res, ctx->capture, AMF_DISPLAYCAPTURE_FRAMERATE, framerate);
    if (res != AMF_OK) {
        av_log(avctx, AV_LOG_ERROR, "Failed to set framerate: %d\n", res);
        return AVERROR_EXTERNAL;
    }

    // Capture mode failure is non-fatal: the component keeps its default mode.
    AMF_ASSIGN_PROPERTY_INT64(res, ctx->capture, AMF_DISPLAYCAPTURE_MODE, ctx->capture_mode);
    if (res != AMF_OK) {
        av_log(avctx, AV_LOG_WARNING, "Failed to set capture mode: %d\n", res);
    }

    // Format/size are determined by the display, hence UNKNOWN/0x0 here.
    res = ctx->capture->pVtbl->Init(ctx->capture, AMF_SURFACE_UNKNOWN, 0, 0);
    if (res != AMF_OK) {
        av_log(avctx, AV_LOG_ERROR, "Failed to initialize capture component: %d\n", res);
        return AVERROR_EXTERNAL;
    }

    // The actual capture resolution is mandatory; without it the link cannot
    // be configured.
    res = ctx->capture->pVtbl->GetProperty(ctx->capture, AMF_DISPLAYCAPTURE_RESOLUTION, &var);
    if (res == AMF_OK && var.type == AMF_VARIANT_SIZE) {
        resolution = var.sizeValue;
        outlink->w = resolution.width;
        outlink->h = resolution.height;

        av_log(avctx, AV_LOG_INFO, "Capture resolution: %dx%d\n",
               outlink->w, outlink->h);
    } else {
        av_log(avctx, AV_LOG_ERROR, "Failed to get capture resolution from AMF\n");
        AMFVariantClear(&var);
        return AVERROR_EXTERNAL;
    }

    // Format is optional: assume BGRA when the component does not report one.
    res = ctx->capture->pVtbl->GetProperty(ctx->capture, AMF_DISPLAYCAPTURE_FORMAT, &var);
    if (res == AMF_OK && var.type == AMF_VARIANT_INT64) {
        ctx->format = (AMF_SURFACE_FORMAT)var.int64Value;
        av_log(avctx, AV_LOG_INFO, "Capture format: %d\n", ctx->format);
    } else {
        ctx->format = AMF_SURFACE_BGRA;
        av_log(avctx, AV_LOG_WARNING, "Failed to get format, assuming BGRA\n");
    }

    // Time base is the inverse of the frame rate.
    outlink->time_base = (AVRational){framerate.den, framerate.num};
    link->frame_rate = (AVRational){framerate.num, framerate.den};
    AMFVariantClear(&var);
    return 0;
}
227 
228 static int amf_config_props(AVFilterLink *outlink)
229 {
230  FilterLink *link = ff_filter_link(outlink);
231  AVFilterContext *avctx = outlink->src;
232  AMFGrabContext *ctx = avctx->priv;
233  AVHWDeviceContext *device_ctx;
234  int ret;
235  int pool_size = 1;
236 
237  av_buffer_unref(&ctx->device_ctx_ref);
238 
239  if (avctx->hw_device_ctx) {
240  device_ctx = (AVHWDeviceContext*)avctx->hw_device_ctx->data;
241  if (device_ctx->type == AV_HWDEVICE_TYPE_AMF)
242  {
243  ctx->device_ctx_ref = av_buffer_ref(avctx->hw_device_ctx);
244  } else {
246  AMF_GOTO_FAIL_IF_FALSE(avctx, ret == 0, ret, "Failed to create derived AMF device context: %s\n", av_err2str(ret));
247  }
248  } else {
249  ret = av_hwdevice_ctx_create(&ctx->device_ctx_ref, AV_HWDEVICE_TYPE_AMF, NULL, NULL, 0);
250  AMF_GOTO_FAIL_IF_FALSE(avctx, ret == 0, ret, "Failed to create hardware device context (AMF) : %s\n", av_err2str(ret));
251  }
252  if ((ret = amf_init_vsrc(outlink)) == 0) {
253  AVHWDeviceContext *device_ctx = (AVHWDeviceContext*)ctx->device_ctx_ref->data;
254  if (device_ctx->type == AV_HWDEVICE_TYPE_AMF) {
255  AVHWFramesContext *frames_ctx;
256  link->hw_frames_ctx = av_hwframe_ctx_alloc(ctx->device_ctx_ref);
257  AMF_GOTO_FAIL_IF_FALSE(avctx, !!link->hw_frames_ctx, AVERROR(ENOMEM), "av_hwframe_ctx_alloc failed\n");
258 
259  frames_ctx = (AVHWFramesContext*)link->hw_frames_ctx->data;
260  frames_ctx->format = AV_PIX_FMT_AMF_SURFACE;
261  frames_ctx->sw_format = av_amf_to_av_format(ctx->format);
262  frames_ctx->initial_pool_size = pool_size;
263  if (avctx->extra_hw_frames > 0)
264  frames_ctx->initial_pool_size += avctx->extra_hw_frames;
265 
266  frames_ctx->width = outlink->w;
267  frames_ctx->height = outlink->h;
268 
270  if (ret < 0) {
271  av_log(avctx, AV_LOG_ERROR, "Failed to initialize hardware frames context: %s\n",
272  av_err2str(ret));
273 
274  return ret;
275  }
276 
277  if (!link->hw_frames_ctx)
278  return AVERROR(ENOMEM);
279  }
280  return 0;
281  }
282 fail:
283  amf_uninit(avctx);
284  return ret;
285 }
286 
287 static int amf_capture_frame(AVFilterLink *outlink)
288 {
289  AVFilterContext *avctx = outlink->src;
290  AMFGrabContext *ctx = avctx->priv;
291  AMFSurface *surface = NULL;
292  AVFrame *frame = NULL;
293  AMF_RESULT res;
294  AMFData *data_out = NULL;
295  FilterLink *fl = ff_filter_link(outlink);
296  int format_amf;
297  int i;
298  int ret;
299  AMFPlane *plane;
300 
301  if (ctx->eof)
302  return AVERROR_EOF;
303 
304  res = ctx->capture->pVtbl->QueryOutput(ctx->capture, &data_out);
305 
306  if (res == AMF_REPEAT) {
307  av_log(0, AV_LOG_DEBUG, "AMF capture returned res = AMF_REPEAT\n");
308  return AVERROR(EAGAIN);
309  }
310 
311  if (res == AMF_EOF) {
312  ctx->eof = 1;
313  av_log(avctx, AV_LOG_DEBUG, "Capture reached EOF\n");
314  return AVERROR_EOF;
315  }
316 
317  if (res != AMF_OK || !data_out) {
318  if (res != AMF_OK)
319  av_log(avctx, AV_LOG_WARNING, "QueryOutput failed: %d\n", res);
320 
321  return AVERROR(EAGAIN);
322  }
323 
324  AMFGuid guid = IID_AMFSurface();
325  ret = data_out->pVtbl->QueryInterface(data_out, &guid, (void**)&surface);
326  data_out->pVtbl->Release(data_out);
327  if (ret != AMF_OK || !surface) {
328  av_log(avctx, AV_LOG_ERROR, "QueryInterface(IID_AMFSurface) failed: %d\n", ret);
329  return AVERROR(EAGAIN);
330  }
331 
332  frame = av_frame_alloc();
333  if (!frame) {
334  surface->pVtbl->Release(surface);
335  return AVERROR(ENOMEM);
336  }
337  frame->format = outlink->format;
338  frame->width = outlink->w;
339  frame->height = outlink->h;
340  frame->sample_aspect_ratio = (AVRational){1, 1};
341 
342  amf_pts pts = surface->pVtbl->GetPts(surface);
343  frame->pts = av_rescale_q(pts, AMF_TIME_BASE_Q, outlink->time_base);
344 
345  if (fl->hw_frames_ctx) {
346  frame->format = AV_PIX_FMT_AMF_SURFACE;
347  frame->data[0] = (uint8_t*)surface;
348  frame->buf[0] = av_buffer_create((uint8_t*)surface, sizeof(surface),
350  frame->hw_frames_ctx = av_buffer_ref(fl->hw_frames_ctx);
351  if (!frame->buf[0]) {
353  surface->pVtbl->Release(surface);
354  return AVERROR(ENOMEM);
355  }
356  } else {
357  ret = surface->pVtbl->Convert(surface, AMF_MEMORY_HOST);
358  AMF_RETURN_IF_FALSE(avctx, ret == AMF_OK, AVERROR_UNKNOWN, "Convert(amf::AMF_MEMORY_HOST) failed with error %d\n", ret);
359 
360  for (i = 0; i < surface->pVtbl->GetPlanesCount(surface); i++) {
361  plane = surface->pVtbl->GetPlaneAt(surface, i);
362  frame->data[i] = plane->pVtbl->GetNative(plane);
363  frame->linesize[i] = plane->pVtbl->GetHPitch(plane);
364  }
365 
366  frame->buf[0] = av_buffer_create((uint8_t *)surface, sizeof(surface),
367  amf_release_surface, (void*)avctx,
369  AMF_RETURN_IF_FALSE(avctx, !!frame->buf[0], AVERROR(ENOMEM), "av_buffer_create for amf surface failed.");
370 
371  format_amf = surface->pVtbl->GetFormat(surface);
372  frame->format = av_amf_to_av_format(format_amf);
373  }
374 
375  return ff_filter_frame(outlink, frame);
376 }
377 
// Single video output pad; frames are produced on request_frame.
static const AVFilterPad amf_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .request_frame = amf_capture_frame,
        .config_props  = amf_config_props,
    },
};
386 
388  .p.name = "vsrc_amf",
389  .p.description = NULL_IF_CONFIG_SMALL("AMD AMF screen capture"),
390  .p.priv_class = &amf_capture_class,
391  .p.inputs = NULL,
392  .p.flags = AVFILTER_FLAG_HWDEVICE,
393  .priv_size = sizeof(AMFGrabContext),
394  .init = amf_init,
395  .uninit = amf_uninit,
398  .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
399 };
AV_LOG_WARNING
#define AV_LOG_WARNING
Something somehow does not look correct.
Definition: log.h:216
AVERROR
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a all references to both lists are replaced with a reference to the intersection And when a single format is eventually chosen for a link amongst the remaining all references to the list are updated That means that if a filter requires that its input and output have the same format amongst a supported all it has to do is use a reference to the same list of formats query_formats can leave some formats unset and return AVERROR(EAGAIN) to cause the negotiation mechanism toagain later. That can be used by filters with complex requirements to use the format negotiated on one link to set the formats supported on another. Frame references ownership and permissions
opt.h
ff_filter_frame
int ff_filter_frame(AVFilterLink *link, AVFrame *frame)
Send a frame of data to the next filter.
Definition: avfilter.c:1067
AVERROR_EOF
#define AVERROR_EOF
End of file.
Definition: error.h:57
AVBufferRef::data
uint8_t * data
The data buffer.
Definition: buffer.h:90
AV_OPT_TYPE_VIDEO_RATE
@ AV_OPT_TYPE_VIDEO_RATE
Underlying C type is AVRational.
Definition: opt.h:315
FLAGS
#define FLAGS
Definition: vsrc_amf.c:58
AMFGrabContext::timerPrecision
amf_uint32 timerPrecision
Definition: vsrc_amf.c:54
AVHWFramesContext::format
enum AVPixelFormat format
The pixel format identifying the underlying HW surface type.
Definition: hwcontext.h:200
av_frame_free
void av_frame_free(AVFrame **frame)
Free the frame and any dynamically allocated objects in it, e.g.
Definition: frame.c:64
av_hwframe_ctx_init
int av_hwframe_ctx_init(AVBufferRef *ref)
Finalize the context before use.
Definition: hwcontext.c:337
AVFrame
This structure describes decoded (raw) audio or video data.
Definition: frame.h:427
pixdesc.h
AMFGrabContext::framerate
AVRational framerate
Definition: vsrc_amf.c:44
av_hwframe_ctx_alloc
AVBufferRef * av_hwframe_ctx_alloc(AVBufferRef *device_ref_in)
Allocate an AVHWFramesContext tied to a given device context.
Definition: hwcontext.c:263
amf_config_props
static int amf_config_props(AVFilterLink *outlink)
Definition: vsrc_amf.c:228
AVOption
AVOption.
Definition: opt.h:429
data
const char data[16]
Definition: mxf.c:149
AV_LOG_VERBOSE
#define AV_LOG_VERBOSE
Detailed information.
Definition: log.h:226
AVFilterContext::hw_device_ctx
AVBufferRef * hw_device_ctx
For filters which will create hardware frames, sets the device the filter should create them in.
Definition: avfilter.h:356
av_buffer_ref
AVBufferRef * av_buffer_ref(const AVBufferRef *buf)
Create a new reference to an AVBuffer.
Definition: buffer.c:103
AMF_RETURN_IF_FALSE
#define AMF_RETURN_IF_FALSE(avctx, exp, ret_value,...)
Error handling helper.
Definition: amfenc.h:169
AVFilter::name
const char * name
Filter name.
Definition: avfilter.h:220
AVERROR_UNKNOWN
#define AVERROR_UNKNOWN
Unknown error, typically from an external library.
Definition: error.h:73
AVHWFramesContext::width
int width
The allocated dimensions of the frames in this pool.
Definition: hwcontext.h:220
video.h
AV_PIX_FMT_AMF_SURFACE
@ AV_PIX_FMT_AMF_SURFACE
HW acceleration through AMF.
Definition: pixfmt.h:477
AMFGrabContext::capture
AMFComponent * capture
Definition: vsrc_amf.c:50
AVFilterContext::priv
void * priv
private data for use by the filter
Definition: avfilter.h:289
fail
#define fail()
Definition: checkasm.h:217
resolution
The official guide to swscale for confused that consecutive non overlapping rectangles of slice_bottom special converter These generally are unscaled converters of common like for each output line the vertical scaler pulls lines from a ring buffer When the ring buffer does not contain the wanted then it is pulled from the input slice through the input converter and horizontal scaler The result is also stored in the ring buffer to serve future vertical scaler requests When no more output can be generated because lines from a future slice would be then all remaining lines in the current slice are horizontally scaled and put in the ring buffer[This is done for luma and chroma, each with possibly different numbers of lines per picture.] Input to YUV Converter When the input to the main path is not planar bits per component YUV or bit it is converted to planar bit YUV Two sets of converters exist for this the other leaves the full chroma resolution
Definition: swscale.txt:54
AMFGrabContext::winmmdll
void * winmmdll
Definition: vsrc_amf.c:53
av_amf_to_av_format
enum AVPixelFormat av_amf_to_av_format(enum AMF_SURFACE_FORMAT fmt)
Definition: hwcontext_amf.c:142
AVFilterContext::extra_hw_frames
int extra_hw_frames
Sets the number of extra hardware frames which the filter will allocate on its output links for use i...
Definition: avfilter.h:380
pts
static int64_t pts
Definition: transcode_aac.c:644
amf_capture_frame
static int amf_capture_frame(AVFilterLink *outlink)
Definition: vsrc_amf.c:287
amf_outputs
static const AVFilterPad amf_outputs[]
Definition: vsrc_amf.c:378
AMFGrabContext
Definition: vsrc_amf.c:40
AVFilterPad
A filter pad used for either input or output.
Definition: filters.h:39
AVHWDeviceContext
This struct aggregates all the (hardware/vendor-specific) "high-level" state, i.e.
Definition: hwcontext.h:63
av_frame_alloc
AVFrame * av_frame_alloc(void)
Allocate an AVFrame and set its fields to default values.
Definition: frame.c:52
fn
Definition: ops_tmpl_float.c:122
avassert.h
AV_LOG_ERROR
#define AV_LOG_ERROR
Something went wrong and cannot losslessly be recovered.
Definition: log.h:210
av_cold
#define av_cold
Definition: attributes.h:106
AVHWFramesContext::height
int height
Definition: hwcontext.h:220
FFFilter
Definition: filters.h:266
AV_BUFFER_FLAG_READONLY
#define AV_BUFFER_FLAG_READONLY
Always treat the buffer as read-only, even when it has only one reference.
Definition: buffer.h:114
AMFGrabContext::device_ctx_ref
AVBufferRef * device_ctx_ref
Definition: vsrc_amf.c:48
amf_init_vsrc
static int amf_init_vsrc(AVFilterLink *outlink)
Definition: vsrc_amf.c:146
filters.h
AV_HWDEVICE_TYPE_AMF
@ AV_HWDEVICE_TYPE_AMF
Definition: hwcontext.h:41
AV_LOG_DEBUG
#define AV_LOG_DEBUG
Stuff which is only useful for libav* developers.
Definition: log.h:231
ctx
static AVFormatContext * ctx
Definition: movenc.c:49
AMF_GOTO_FAIL_IF_FALSE
#define AMF_GOTO_FAIL_IF_FALSE(avctx, exp, ret_value,...)
Definition: hwcontext_amf_internal.h:34
av_rescale_q
int64_t av_rescale_q(int64_t a, AVRational bq, AVRational cq)
Rescale a 64-bit integer by 2 rational numbers.
Definition: mathematics.c:142
hwcontext_amf.h
FILTER_OUTPUTS
#define FILTER_OUTPUTS(array)
Definition: filters.h:264
AMFGrabContext::capture_mode
int capture_mode
Definition: vsrc_amf.c:46
link
Filter the word “frame” indicates either a video frame or a group of audio as stored in an AVFrame structure Format for each input and each output the list of supported formats For video that means pixel format For audio that means channel sample they are references to shared objects When the negotiation mechanism computes the intersection of the formats supported at each end of a link
Definition: filter_design.txt:23
AMF_TIME_BASE_Q
#define AMF_TIME_BASE_Q
Definition: hwcontext_amf_internal.h:41
AMFGrabContext::duplicate_output
amf_bool duplicate_output
Definition: vsrc_amf.c:45
if
if(ret)
Definition: filter_design.txt:179
AMFGrabContext::monitor_index
int monitor_index
Definition: vsrc_amf.c:43
framerate
float framerate
Definition: av1_levels.c:29
AVClass
Describe the class of an AVClass context structure.
Definition: log.h:76
AMFGrabContext::format
AMF_SURFACE_FORMAT format
Definition: vsrc_amf.c:52
NULL
#define NULL
Definition: coverity.c:32
AVHWFramesContext::sw_format
enum AVPixelFormat sw_format
The pixel format identifying the actual data layout of the hardware frames.
Definition: hwcontext.h:213
av_buffer_unref
void av_buffer_unref(AVBufferRef **buf)
Free a given reference and automatically free the buffer if there are no more references to it.
Definition: buffer.c:139
AVRational
Rational number (pair of numerator and denominator).
Definition: rational.h:58
time.h
OFFSET
#define OFFSET(x)
Definition: vsrc_amf.c:57
ff_filter_link
static FilterLink * ff_filter_link(AVFilterLink *link)
Definition: filters.h:198
av_buffer_create
AVBufferRef * av_buffer_create(uint8_t *data, size_t size, void(*free)(void *opaque, uint8_t *data), void *opaque, int flags)
Create an AVBuffer from an existing array.
Definition: buffer.c:55
FF_FILTER_FLAG_HWFRAME_AWARE
#define FF_FILTER_FLAG_HWFRAME_AWARE
The filter is aware of hardware frames, and any hardware frame context should not be automatically pr...
Definition: filters.h:207
init
int(* init)(AVBSFContext *ctx)
Definition: dts2pts.c:550
NULL_IF_CONFIG_SMALL
#define NULL_IF_CONFIG_SMALL(x)
Return NULL if CONFIG_SMALL is true, otherwise the argument without modification.
Definition: internal.h:94
AVFILTER_DEFINE_CLASS
AVFILTER_DEFINE_CLASS(amf_capture)
av_err2str
#define av_err2str(errnum)
Convenience macro, the return value should be used only directly in function arguments but never stan...
Definition: error.h:122
AVFILTER_FLAG_HWDEVICE
#define AVFILTER_FLAG_HWDEVICE
The filter can create hardware frames using AVFilterContext.hw_device_ctx.
Definition: avfilter.h:188
AVAMFDeviceContext
This struct is allocated as AVHWDeviceContext.hwctx.
Definition: hwcontext_amf.h:33
fn
#define fn(a)
Definition: aap_template.c:37
AVERROR_EXTERNAL
#define AVERROR_EXTERNAL
Generic error in an external library.
Definition: error.h:59
AV_LOG_INFO
#define AV_LOG_INFO
Standard information.
Definition: log.h:221
amf_uninit
static av_cold void amf_uninit(AVFilterContext *avctx)
Definition: vsrc_amf.c:117
uninit
static void uninit(AVBSFContext *ctx)
Definition: pcm_rechunk.c:68
av_hwdevice_ctx_create_derived
int av_hwdevice_ctx_create_derived(AVBufferRef **dst_ref_ptr, enum AVHWDeviceType type, AVBufferRef *src_ref, int flags)
Create a new device of the specified type from an existing device.
Definition: hwcontext.c:718
i
#define i(width, name, range_min, range_max)
Definition: cbs_h2645.c:256
hw_device_ctx
static AVBufferRef * hw_device_ctx
Definition: hw_decode.c:45
AVFilterPad::name
const char * name
Pad name.
Definition: filters.h:45
AVHWFramesContext
This struct describes a set or pool of "hardware" frames (i.e.
Definition: hwcontext.h:118
ret
ret
Definition: filter_design.txt:187
AVHWDeviceContext::type
enum AVHWDeviceType type
This field identifies the underlying API used for hardware access.
Definition: hwcontext.h:75
frame
these buffered frames must be flushed immediately if a new input produces new the filter must not call request_frame to get more It must just process the frame or queue it The task of requesting more frames is left to the filter s request_frame method or the application If a filter has several the filter must be ready for frames arriving randomly on any input any filter with several inputs will most likely require some kind of queuing mechanism It is perfectly acceptable to have a limited queue and to drop frames when the inputs are too unbalanced request_frame For filters that do not use the this method is called when a frame is wanted on an output For a it should directly call filter_frame on the corresponding output For a if there are queued frames already one of these frames should be pushed If the filter should request a frame on one of its repeatedly until at least one frame has been pushed Return or at least make progress towards producing a frame
Definition: filter_design.txt:265
av_hwdevice_ctx_create
int av_hwdevice_ctx_create(AVBufferRef **pdevice_ref, enum AVHWDeviceType type, const char *device, AVDictionary *opts, int flags)
Open a device of the specified type and create an AVHWDeviceContext for it.
Definition: hwcontext.c:615
AVFrame::hw_frames_ctx
AVBufferRef * hw_frames_ctx
For hwaccel-format frames, this should be a reference to the AVHWFramesContext describing the frame.
Definition: frame.h:724
amf_init
static av_cold int amf_init(AVFilterContext *avctx)
Definition: vsrc_amf.c:134
hwcontext_amf_internal.h
AMFGrabContext::avclass
AVClass * avclass
Definition: vsrc_amf.c:41
AV_OPT_TYPE_INT
@ AV_OPT_TYPE_INT
Underlying C type is int.
Definition: opt.h:259
avfilter.h
AVERROR_FILTER_NOT_FOUND
#define AVERROR_FILTER_NOT_FOUND
Filter not found.
Definition: error.h:60
AVFilterContext
An instance of a filter.
Definition: avfilter.h:274
AVHWFramesContext::initial_pool_size
int initial_pool_size
Initial size of the frame pool.
Definition: hwcontext.h:190
AVMEDIA_TYPE_VIDEO
@ AVMEDIA_TYPE_VIDEO
Definition: avutil.h:200
FFFilter::p
AVFilter p
The public AVFilter.
Definition: filters.h:270
mem.h
AVBufferRef
A reference to a data buffer.
Definition: buffer.h:82
AV_OPT_TYPE_BOOL
@ AV_OPT_TYPE_BOOL
Underlying C type is int.
Definition: opt.h:327
hwcontext.h
av_log
#define av_log(a,...)
Definition: tableprint_vlc.h:27
amf_release_surface
static void amf_release_surface(void *opaque, uint8_t *data)
Definition: vsrc_amf.c:108
ff_vsrc_amf_capture
const FFFilter ff_vsrc_amf_capture
Definition: vsrc_amf.c:387
amf_capture_options
static const AVOption amf_capture_options[]
Definition: vsrc_amf.c:60
avstring.h
AV_OPT_TYPE_CONST
@ AV_OPT_TYPE_CONST
Special option type for declaring named constants.
Definition: opt.h:299
FILTER_SINGLE_PIXFMT
#define FILTER_SINGLE_PIXFMT(pix_fmt_)
Definition: filters.h:253
w32dlfcn.h
AMFGrabContext::eof
amf_bool eof
Definition: vsrc_amf.c:51