FFmpeg  4.3.6
qsvdec.c
1 /*
2  * Intel MediaSDK QSV codec-independent code
3  *
4  * copyright (c) 2013 Luca Barbato
5  * copyright (c) 2015 Anton Khirnov <anton@khirnov.net>
6  *
7  * This file is part of FFmpeg.
8  *
9  * FFmpeg is free software; you can redistribute it and/or
10  * modify it under the terms of the GNU Lesser General Public
11  * License as published by the Free Software Foundation; either
12  * version 2.1 of the License, or (at your option) any later version.
13  *
14  * FFmpeg is distributed in the hope that it will be useful,
15  * but WITHOUT ANY WARRANTY; without even the implied warranty of
16  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17  * Lesser General Public License for more details.
18  *
19  * You should have received a copy of the GNU Lesser General Public
20  * License along with FFmpeg; if not, write to the Free Software
21  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
22  */
23 
24 #include <string.h>
25 #include <sys/types.h>
26 
27 #include <mfx/mfxvideo.h>
28 
29 #include "libavutil/common.h"
30 #include "libavutil/hwcontext.h"
31 #include "libavutil/hwcontext_qsv.h"
32 #include "libavutil/mem.h"
33 #include "libavutil/log.h"
34 #include "libavutil/pixdesc.h"
35 #include "libavutil/pixfmt.h"
36 #include "libavutil/time.h"
37 #include "libavutil/imgutils.h"
38 
39 #include "avcodec.h"
40 #include "internal.h"
41 #include "decode.h"
42 #include "qsv.h"
43 #include "qsv_internal.h"
44 #include "qsvdec.h"
45 
46 const AVCodecHWConfigInternal *ff_qsv_hw_configs[] = {
47  &(const AVCodecHWConfigInternal) {
48  .public = {
49  .pix_fmt = AV_PIX_FMT_QSV,
50  .methods = AV_CODEC_HW_CONFIG_METHOD_HW_FRAMES_CTX |
51  AV_CODEC_HW_CONFIG_METHOD_AD_HOC,
52  .device_type = AV_HWDEVICE_TYPE_QSV,
53  },
54  .hwaccel = NULL,
55  },
56  NULL
57 };
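/* Editor's note (not part of the original file): this table advertises that the
 * QSV decoders can be used either with an AVHWFramesContext negotiated through
 * the hw_frames_ctx method or through the legacy "ad-hoc" AVQSVContext path;
 * the .hwaccel backend pointer is deliberately NULL. */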
58 
59 static int ff_qsv_get_continuous_buffer(AVCodecContext *avctx, AVFrame *frame, AVBufferPool *pool)
60 {
61  int ret = 0;
62 
63  ff_decode_frame_props(avctx, frame);
64 
65  frame->width = avctx->width;
66  frame->height = avctx->height;
67 
68  switch (avctx->pix_fmt) {
69  case AV_PIX_FMT_NV12:
70  frame->linesize[0] = FFALIGN(avctx->width, 128);
71  break;
72  case AV_PIX_FMT_P010:
73  frame->linesize[0] = 2 * FFALIGN(avctx->width, 128);
74  break;
75  default:
76  av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format.\n");
77  return AVERROR(EINVAL);
78  }
79 
80  frame->linesize[1] = frame->linesize[0];
81  frame->buf[0] = av_buffer_pool_get(pool);
82  if (!frame->buf[0])
83  return AVERROR(ENOMEM);
84 
85  frame->data[0] = frame->buf[0]->data;
86  frame->data[1] = frame->data[0] +
87  frame->linesize[0] * FFALIGN(avctx->height, 64);
88 
89  ret = ff_attach_decode_data(frame);
90  if (ret < 0)
91  return ret;
92 
93  return 0;
94 }
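/* Editor's note (not part of the original file): a worked example of the buffer
 * geometry above, assuming a 1920x1080 NV12 stream and the same 128/64
 * alignment used when the pool is created in qsv_decode_init():
 *     linesize[0] = FFALIGN(1920, 128)        = 1920 bytes
 *     linesize[1] = linesize[0]               = 1920 bytes (interleaved UV)
 *     data[1]     = data[0] + 1920 * FFALIGN(1080, 64)
 *                 = data[0] + 1920 * 1088     (start of the UV plane)
 * For P010 the luma pitch doubles because each sample is 2 bytes wide. */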
95 
96 static int qsv_init_session(AVCodecContext *avctx, QSVContext *q, mfxSession session,
97  AVBufferRef *hw_frames_ref, AVBufferRef *hw_device_ref)
98 {
99  int ret;
100 
101  if (q->gpu_copy == MFX_GPUCOPY_ON &&
102  !(q->iopattern & MFX_IOPATTERN_OUT_SYSTEM_MEMORY)) {
103  av_log(avctx, AV_LOG_WARNING, "GPU-accelerated memory copy "
104  "only works in system memory mode.\n");
105  q->gpu_copy = MFX_GPUCOPY_OFF;
106  }
107  if (session) {
108  q->session = session;
109  } else if (hw_frames_ref) {
110  if (q->internal_qs.session) {
111  MFXClose(q->internal_qs.session);
112  q->internal_qs.session = NULL;
113  }
114  av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
115 
116  q->frames_ctx.hw_frames_ctx = av_buffer_ref(hw_frames_ref);
117  if (!q->frames_ctx.hw_frames_ctx)
118  return AVERROR(ENOMEM);
119 
120  ret = ff_qsv_init_session_frames(avctx, &q->internal_qs.session,
121  &q->frames_ctx, q->load_plugins,
122  q->iopattern == MFX_IOPATTERN_OUT_OPAQUE_MEMORY,
123  q->gpu_copy);
124  if (ret < 0) {
125  av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
126  return ret;
127  }
128 
129  q->session = q->internal_qs.session;
130  } else if (hw_device_ref) {
131  if (q->internal_qs.session) {
132  MFXClose(q->internal_qs.session);
133  q->internal_qs.session = NULL;
134  }
135 
136  ret = ff_qsv_init_session_device(avctx, &q->internal_qs.session,
137  hw_device_ref, q->load_plugins, q->gpu_copy);
138  if (ret < 0)
139  return ret;
140 
141  q->session = q->internal_qs.session;
142  } else {
143  if (!q->internal_qs.session) {
144  ret = ff_qsv_init_internal_session(avctx, &q->internal_qs,
145  q->load_plugins, q->gpu_copy);
146  if (ret < 0)
147  return ret;
148  }
149 
150  q->session = q->internal_qs.session;
151  }
152 
153  /* make sure the decoder is uninitialized */
154  MFXVideoDECODE_Close(q->session);
155 
156  return 0;
157 }
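/* Editor's note (not part of the original file): the selection order above is
 * 1) an mfxSession supplied by the caller via AVQSVContext,
 * 2) a session created from avctx->hw_frames_ctx,
 * 3) a session created from avctx->hw_device_ctx,
 * 4) an internally allocated session as the fallback.
 * Whichever was chosen, MFXVideoDECODE_Close() is then called so that a later
 * MFXVideoDECODE_Init() always starts from an uninitialized decoder. */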
158 
159 static inline unsigned int qsv_fifo_item_size(void)
160 {
161  return sizeof(mfxSyncPoint*) + sizeof(QSVFrame*);
162 }
163 
164 static inline unsigned int qsv_fifo_size(const AVFifoBuffer* fifo)
165 {
166  return av_fifo_size(fifo) / qsv_fifo_item_size();
167 }
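/* Editor's note (not part of the original file): each element that qsv_decode()
 * writes into async_fifo is a (QSVFrame *, mfxSyncPoint *) pair, so the helper
 * above converts av_fifo_size()'s raw byte count into the number of decode
 * operations still in flight. */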
168 
169 static int qsv_decode_preinit(AVCodecContext *avctx, QSVContext *q, enum AVPixelFormat pix_fmt, mfxVideoParam *param)
170 {
171  mfxSession session = NULL;
172  int iopattern = 0;
173  int ret;
174  enum AVPixelFormat pix_fmts[3] = {
175  AV_PIX_FMT_QSV, /* opaque format in case of video memory output */
176  pix_fmt, /* system memory format obtained from bitstream parser */
177  AV_PIX_FMT_NONE };
178 
179  ret = ff_get_format(avctx, pix_fmts);
180  if (ret < 0) {
181  q->orig_pix_fmt = avctx->pix_fmt = AV_PIX_FMT_NONE;
182  return ret;
183  }
184 
185  if (!q->async_fifo) {
186  q->async_fifo = av_fifo_alloc(q->async_depth * qsv_fifo_item_size());
187  if (!q->async_fifo)
188  return AVERROR(ENOMEM);
189  }
190 
191  if (avctx->pix_fmt == AV_PIX_FMT_QSV && avctx->hwaccel_context) {
192  AVQSVContext *user_ctx = avctx->hwaccel_context;
193  session = user_ctx->session;
194  iopattern = user_ctx->iopattern;
195  q->ext_buffers = user_ctx->ext_buffers;
196  q->nb_ext_buffers = user_ctx->nb_ext_buffers;
197  }
198 
199  if (avctx->hw_frames_ctx) {
200  AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
201  AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;
202 
203  if (!iopattern) {
204  if (frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
205  iopattern = MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
206  else if (frames_hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
207  iopattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
208  }
209  }
210 
211  if (!iopattern)
212  iopattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
213  q->iopattern = iopattern;
214 
215  ff_qsv_print_iopattern(avctx, q->iopattern, "Decoder");
216 
217  ret = qsv_init_session(avctx, q, session, avctx->hw_frames_ctx, avctx->hw_device_ctx);
218  if (ret < 0) {
219  av_log(avctx, AV_LOG_ERROR, "Error initializing an MFX session\n");
220  return ret;
221  }
222 
223  param->IOPattern = q->iopattern;
224  param->AsyncDepth = q->async_depth;
225  param->ExtParam = q->ext_buffers;
226  param->NumExtParam = q->nb_ext_buffers;
227 
228  return 0;
229  }
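/* Editor's note (not part of the original file): a minimal caller-side sketch of
 * the legacy AVQSVContext path checked above, using the public API from
 * libavcodec/qsv.h. The variable names are illustrative; applications normally
 * install the context from their get_format callback once AV_PIX_FMT_QSV has
 * been offered.
 *
 *     AVQSVContext *qsv = av_qsv_alloc_context();
 *     qsv->session   = app_session;                    // mfxSession owned by the app (hypothetical)
 *     qsv->iopattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
 *     avctx->hwaccel_context = qsv;                    // picked up by qsv_decode_preinit()
 */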
230 
231 static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q, mfxVideoParam *param)
232 {
233  int ret;
234 
235  avctx->width = param->mfx.FrameInfo.CropW;
236  avctx->height = param->mfx.FrameInfo.CropH;
237  avctx->coded_width = param->mfx.FrameInfo.Width;
238  avctx->coded_height = param->mfx.FrameInfo.Height;
239  avctx->level = param->mfx.CodecLevel;
240  avctx->profile = param->mfx.CodecProfile;
241  avctx->field_order = ff_qsv_map_picstruct(param->mfx.FrameInfo.PicStruct);
242  avctx->pix_fmt = ff_qsv_map_fourcc(param->mfx.FrameInfo.FourCC);
243 
244  ret = MFXVideoDECODE_Init(q->session, param);
245  if (ret < 0)
246  return ff_qsv_print_error(avctx, ret,
247  "Error initializing the MFX video decoder");
248 
249  q->frame_info = param->mfx.FrameInfo;
250 
251  if (!avctx->hw_frames_ctx)
252  q->pool = av_buffer_pool_init(av_image_get_buffer_size(avctx->pix_fmt,
253  FFALIGN(avctx->width, 128), FFALIGN(avctx->height, 64), 1), av_buffer_allocz);
254  return 0;
255 }
256 
257 static int qsv_decode_header(AVCodecContext *avctx, QSVContext *q, AVPacket *avpkt, enum AVPixelFormat pix_fmt, mfxVideoParam *param)
258 {
259  int ret;
260 
261  mfxBitstream bs = { 0 };
262 
263  if (avpkt->size) {
264  bs.Data = avpkt->data;
265  bs.DataLength = avpkt->size;
266  bs.MaxLength = bs.DataLength;
267  bs.TimeStamp = avpkt->pts;
268  if (avctx->field_order == AV_FIELD_PROGRESSIVE)
269  bs.DataFlag |= MFX_BITSTREAM_COMPLETE_FRAME;
270  } else
271  return AVERROR_INVALIDDATA;
272 
273 
274  if(!q->session) {
275  ret = qsv_decode_preinit(avctx, q, pix_fmt, param);
276  if (ret < 0)
277  return ret;
278  }
279 
280  ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
281  if (ret < 0)
282  return ret;
283 
284  param->mfx.CodecId = ret;
285  ret = MFXVideoDECODE_DecodeHeader(q->session, &bs, param);
286  if (MFX_ERR_MORE_DATA == ret) {
287  return AVERROR(EAGAIN);
288  }
289  if (ret < 0)
290  return ff_qsv_print_error(avctx, ret,
291  "Error decoding stream header");
292 
293  return 0;
294 }
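/* Editor's note (not part of the original file): MFX_ERR_MORE_DATA from
 * MFXVideoDECODE_DecodeHeader() is translated to AVERROR(EAGAIN) above, meaning
 * the packet did not contain enough bitstream to parse the sequence header and
 * the caller should simply feed the next packet. */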
295 
296 static int alloc_frame(AVCodecContext *avctx, QSVContext *q, QSVFrame *frame)
297 {
298  int ret;
299 
300  if (q->pool)
301  ret = ff_qsv_get_continuous_buffer(avctx, frame->frame, q->pool);
302  else
303  ret = ff_get_buffer(avctx, frame->frame, AV_GET_BUFFER_FLAG_REF);
304 
305  if (ret < 0)
306  return ret;
307 
308  if (frame->frame->format == AV_PIX_FMT_QSV) {
309  frame->surface = *(mfxFrameSurface1*)frame->frame->data[3];
310  } else {
311  frame->surface.Info = q->frame_info;
312 
313  frame->surface.Data.PitchLow = frame->frame->linesize[0];
314  frame->surface.Data.Y = frame->frame->data[0];
315  frame->surface.Data.UV = frame->frame->data[1];
316  }
317 
318  if (q->frames_ctx.mids) {
319  ret = ff_qsv_find_surface_idx(&q->frames_ctx, frame);
320  if (ret < 0)
321  return ret;
322 
323  frame->surface.Data.MemId = &q->frames_ctx.mids[ret];
324  }
325  frame->surface.Data.ExtParam = &frame->ext_param;
326  frame->surface.Data.NumExtParam = 1;
327  frame->ext_param = (mfxExtBuffer*)&frame->dec_info;
328  frame->dec_info.Header.BufferId = MFX_EXTBUFF_DECODED_FRAME_INFO;
329  frame->dec_info.Header.BufferSz = sizeof(frame->dec_info);
330 
331  frame->used = 1;
332 
333  return 0;
334 }
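/* Editor's note (not part of the original file): for AV_PIX_FMT_QSV output the
 * mfxFrameSurface1 is taken directly from frame->data[3]; for system-memory
 * output it is filled in by hand. When a mid (memory ID) array exists,
 * Data.MemId is pointed at the matching entry so the SDK's frame allocator can
 * map the surface back to the underlying hardware frame. */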
335 
336 static void qsv_clear_unused_frames(QSVContext *q)
337 {
338  QSVFrame *cur = q->work_frames;
339  while (cur) {
340  if (cur->used && !cur->surface.Data.Locked && !cur->queued) {
341  cur->used = 0;
342  av_frame_unref(cur->frame);
343  }
344  cur = cur->next;
345  }
346 }
347 
348 static int get_surface(AVCodecContext *avctx, QSVContext *q, mfxFrameSurface1 **surf)
349 {
350  QSVFrame *frame, **last;
351  int ret;
352 
353  qsv_clear_unused_frames(q);
354 
355  frame = q->work_frames;
356  last = &q->work_frames;
357  while (frame) {
358  if (!frame->used) {
359  ret = alloc_frame(avctx, q, frame);
360  if (ret < 0)
361  return ret;
362  *surf = &frame->surface;
363  return 0;
364  }
365 
366  last = &frame->next;
367  frame = frame->next;
368  }
369 
370  frame = av_mallocz(sizeof(*frame));
371  if (!frame)
372  return AVERROR(ENOMEM);
373  frame->frame = av_frame_alloc();
374  if (!frame->frame) {
375  av_freep(&frame);
376  return AVERROR(ENOMEM);
377  }
378  *last = frame;
379 
380  ret = alloc_frame(avctx, q, frame);
381  if (ret < 0)
382  return ret;
383 
384  *surf = &frame->surface;
385 
386  return 0;
387 }
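/* Editor's note (not part of the original file): get_surface() first tries to
 * reuse an entry of the work_frames list that the SDK no longer holds locked
 * (qsv_clear_unused_frames() resets such entries); only if every entry is busy
 * does it append a freshly allocated QSVFrame to the list. */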
388 
389 static QSVFrame *find_frame(QSVContext *q, mfxFrameSurface1 *surf)
390 {
391  QSVFrame *cur = q->work_frames;
392  while (cur) {
393  if (surf == &cur->surface)
394  return cur;
395  cur = cur->next;
396  }
397  return NULL;
398 }
399 
400 static int qsv_decode(AVCodecContext *avctx, QSVContext *q,
401  AVFrame *frame, int *got_frame,
402  AVPacket *avpkt)
403 {
404  QSVFrame *out_frame;
405  mfxFrameSurface1 *insurf;
406  mfxFrameSurface1 *outsurf;
407  mfxSyncPoint *sync;
408  mfxBitstream bs = { { { 0 } } };
409  int ret;
410 
411  if (avpkt->size) {
412  bs.Data = avpkt->data;
413  bs.DataLength = avpkt->size;
414  bs.MaxLength = bs.DataLength;
415  bs.TimeStamp = avpkt->pts;
416  if (avctx->field_order == AV_FIELD_PROGRESSIVE)
417  bs.DataFlag |= MFX_BITSTREAM_COMPLETE_FRAME;
418  }
419 
420  sync = av_mallocz(sizeof(*sync));
421  if (!sync) {
422  av_freep(&sync);
423  return AVERROR(ENOMEM);
424  }
425 
426  do {
427  ret = get_surface(avctx, q, &insurf);
428  if (ret < 0) {
429  av_freep(&sync);
430  return ret;
431  }
432 
433  ret = MFXVideoDECODE_DecodeFrameAsync(q->session, avpkt->size ? &bs : NULL,
434  insurf, &outsurf, sync);
435  if (ret == MFX_WRN_DEVICE_BUSY)
436  av_usleep(500);
437 
438  } while (ret == MFX_WRN_DEVICE_BUSY || ret == MFX_ERR_MORE_SURFACE);
439 
440  if (ret != MFX_ERR_NONE &&
441  ret != MFX_ERR_MORE_DATA &&
442  ret != MFX_WRN_VIDEO_PARAM_CHANGED &&
443  ret != MFX_ERR_MORE_SURFACE) {
444  av_freep(&sync);
445  return ff_qsv_print_error(avctx, ret,
446  "Error during QSV decoding.");
447  }
448 
449  /* make sure we do not enter an infinite loop if the SDK
450  * did not consume any data and did not return anything */
451  if (!*sync && !bs.DataOffset) {
452  bs.DataOffset = avpkt->size;
453  ++q->zero_consume_run;
454  if (q->zero_consume_run > 1)
455  ff_qsv_print_warning(avctx, ret, "A decode call did not consume any data");
456  } else if (!*sync && bs.DataOffset) {
457  ++q->buffered_count;
458  } else {
459  q->zero_consume_run = 0;
460  }
461 
462  if (*sync) {
463  QSVFrame *out_frame = find_frame(q, outsurf);
464 
465  if (!out_frame) {
466  av_log(avctx, AV_LOG_ERROR,
467  "The returned surface does not correspond to any frame\n");
468  av_freep(&sync);
469  return AVERROR_BUG;
470  }
471 
472  out_frame->queued = 1;
473  av_fifo_generic_write(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
474  av_fifo_generic_write(q->async_fifo, &sync, sizeof(sync), NULL);
475  } else {
476  av_freep(&sync);
477  }
478 
479  if ((qsv_fifo_size(q->async_fifo) >= q->async_depth) ||
480  (!avpkt->size && av_fifo_size(q->async_fifo))) {
481  AVFrame *src_frame;
482 
483  av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
484  av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);
485  out_frame->queued = 0;
486 
487  if (avctx->pix_fmt != AV_PIX_FMT_QSV) {
488  do {
489  ret = MFXVideoCORE_SyncOperation(q->session, *sync, 1000);
490  } while (ret == MFX_WRN_IN_EXECUTION);
491  }
492 
493  av_freep(&sync);
494 
495  src_frame = out_frame->frame;
496 
497  ret = av_frame_ref(frame, src_frame);
498  if (ret < 0)
499  return ret;
500 
501  outsurf = &out_frame->surface;
502 
503 #if FF_API_PKT_PTS
504 FF_DISABLE_DEPRECATION_WARNINGS
505  frame->pkt_pts = outsurf->Data.TimeStamp;
506 FF_ENABLE_DEPRECATION_WARNINGS
507 #endif
508  frame->pts = outsurf->Data.TimeStamp;
509 
510  frame->repeat_pict =
511  outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_TRIPLING ? 4 :
512  outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_DOUBLING ? 2 :
513  outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_REPEATED ? 1 : 0;
514  frame->top_field_first =
515  outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_TFF;
516  frame->interlaced_frame =
517  !(outsurf->Info.PicStruct & MFX_PICSTRUCT_PROGRESSIVE);
518  frame->pict_type = ff_qsv_map_pictype(out_frame->dec_info.FrameType);
519  // Treating only IDR frames as key frames is correct for H.264 only; for HEVC, IRAP frames are key frames.
520  if (avctx->codec_id == AV_CODEC_ID_H264)
521  frame->key_frame = !!(out_frame->dec_info.FrameType & MFX_FRAMETYPE_IDR);
522 
523  /* update the surface properties */
524  if (avctx->pix_fmt == AV_PIX_FMT_QSV)
525  ((mfxFrameSurface1*)frame->data[3])->Info = outsurf->Info;
526 
527  *got_frame = 1;
528  }
529 
530  return bs.DataOffset;
531 }
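/* Editor's note (not part of the original file): qsv_decode() is asynchronous:
 * every successful DecodeFrameAsync() call queues a (QSVFrame *, mfxSyncPoint *)
 * pair in async_fifo, and a decoded frame is only handed back once async_depth
 * operations are pending or the decoder is being drained with an empty packet
 * (system-memory frames are additionally waited on with MFXVideoCORE_SyncOperation()).
 * The return value is bs.DataOffset, i.e. how many input bytes the SDK consumed. */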
532 
533 int ff_qsv_decode_close(QSVContext *q)
534 {
535  QSVFrame *cur = q->work_frames;
536 
537  if (q->session)
538  MFXVideoDECODE_Close(q->session);
539 
540  while (q->async_fifo && av_fifo_size(q->async_fifo)) {
541  QSVFrame *out_frame;
542  mfxSyncPoint *sync;
543 
544  av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
545  av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);
546 
547  av_freep(&sync);
548  }
549 
550  while (cur) {
551  q->work_frames = cur->next;
552  av_frame_free(&cur->frame);
553  av_freep(&cur);
554  cur = q->work_frames;
555  }
556 
557  av_fifo_free(q->async_fifo);
558  q->async_fifo = NULL;
559 
560  ff_qsv_close_internal_session(&q->internal_qs);
561 
562  av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
563  av_buffer_unref(&q->frames_ctx.mids_buf);
564  av_buffer_pool_uninit(&q->pool);
565 
566  return 0;
567 }
568 
569 int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
570  AVFrame *frame, int *got_frame, AVPacket *pkt)
571 {
572  int ret;
573  mfxVideoParam param = { 0 };
574  enum AVPixelFormat pix_fmt = AV_PIX_FMT_NV12;
575 
576  if (!pkt->size)
577  return qsv_decode(avctx, q, frame, got_frame, pkt);
578 
579  /* TODO: flush delayed frames on reinit */
580 
581  // sw_pix_fmt and coded_width/height should be set for ff_get_format(),
582  // so assume sw_pix_fmt is NV12 and coded_width/height is 1280x720;
583  // the assumption may not be correct, but it is updated once the header has been decoded.
584  if (q->orig_pix_fmt != AV_PIX_FMT_NONE)
585  pix_fmt = q->orig_pix_fmt;
586  if (!avctx->coded_width)
587  avctx->coded_width = 1280;
588  if (!avctx->coded_height)
589  avctx->coded_height = 720;
590 
591  ret = qsv_decode_header(avctx, q, pkt, pix_fmt, &param);
592 
593  if (ret >= 0 && (q->orig_pix_fmt != ff_qsv_map_fourcc(param.mfx.FrameInfo.FourCC) ||
594  avctx->coded_width != param.mfx.FrameInfo.Width ||
595  avctx->coded_height != param.mfx.FrameInfo.Height)) {
596  AVPacket zero_pkt = {0};
597 
598  if (q->buffered_count) {
599  q->reinit_flag = 1;
600  /* decode zero-size pkt to flush the buffered pkt before reinit */
601  q->buffered_count--;
602  return qsv_decode(avctx, q, frame, got_frame, &zero_pkt);
603  }
604  q->reinit_flag = 0;
605 
606  q->orig_pix_fmt = avctx->pix_fmt = pix_fmt = ff_qsv_map_fourcc(param.mfx.FrameInfo.FourCC);
607 
608  avctx->coded_width = param.mfx.FrameInfo.Width;
609  avctx->coded_height = param.mfx.FrameInfo.Height;
610 
611  ret = qsv_decode_preinit(avctx, q, pix_fmt, &param);
612  if (ret < 0)
613  goto reinit_fail;
614  q->initialized = 0;
615  }
616 
617  if (!q->initialized) {
618  ret = qsv_decode_init(avctx, q, &param);
619  if (ret < 0)
620  goto reinit_fail;
621  q->initialized = 1;
622  }
623 
624  return qsv_decode(avctx, q, frame, got_frame, pkt);
625 
626 reinit_fail:
627  q->orig_pix_fmt = avctx->pix_fmt = AV_PIX_FMT_NONE;
628  return ret;
629 }
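/* Editor's note (not part of the original file): a rough sketch of how the
 * per-codec wrappers drive ff_qsv_process_data(), feeding one buffered packet
 * and advancing by the number of bytes reported as consumed. Variable names
 * are illustrative, not taken from this file.
 *
 *     while (buf_pkt.size > 0 && !*got_frame) {
 *         int used = ff_qsv_process_data(avctx, &s->qsv, frame, got_frame, &buf_pkt);
 *         if (used < 0)
 *             return used;
 *         buf_pkt.data += used;
 *         buf_pkt.size -= used;
 *     }
 */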
630 
631 void ff_qsv_decode_flush(AVCodecContext *avctx, QSVContext *q)
632 {
633  q->orig_pix_fmt = AV_PIX_FMT_NONE;
634  q->initialized = 0;
635 }