hwcontext_qsv.c 40 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278
  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. #include <stdint.h>
  19. #include <string.h>
  20. #include <mfx/mfxvideo.h>
  21. #include "config.h"
  22. #if HAVE_PTHREADS
  23. #include <pthread.h>
  24. #endif
  25. #if CONFIG_VAAPI
  26. #include "hwcontext_vaapi.h"
  27. #endif
  28. #if CONFIG_DXVA2
  29. #include "hwcontext_dxva2.h"
  30. #endif
  31. #include "buffer.h"
  32. #include "common.h"
  33. #include "hwcontext.h"
  34. #include "hwcontext_internal.h"
  35. #include "hwcontext_qsv.h"
  36. #include "mem.h"
  37. #include "pixfmt.h"
  38. #include "pixdesc.h"
  39. #include "time.h"
/**
 * Per-device private data: holds a reference to the child (native) device
 * context when the QSV device was created on top of one.
 */
typedef struct QSVDevicePriv {
    AVBufferRef *child_device_ctx;
} QSVDevicePriv;
/**
 * Internal per-device state derived from the MFX session in
 * qsv_device_init(): the native device handle (if retrievable) and the
 * session's implementation/version, reused when spawning internal sessions.
 */
typedef struct QSVDeviceContext {
    mfxHDL handle;                          // native handle from MFXVideoCORE_GetHandle(), or NULL
    mfxHandleType handle_type;              // type of 'handle' (VA display / D3D9 device manager)
    mfxVersion ver;                         // from MFXQueryVersion()
    mfxIMPL impl;                           // from MFXQueryIMPL()

    enum AVHWDeviceType child_device_type;  // hw device type matching handle_type
    enum AVPixelFormat child_pix_fmt;       // hw pixel format of that child device type
} QSVDeviceContext;
/**
 * Internal per-frames-context state: lazily created upload/download VPP
 * sessions, the internal surface pool, and the tables consumed either by
 * the frame allocator (non-opaque) or the opaque-alloc ext buffer.
 */
typedef struct QSVFramesContext {
    // internal VPP sessions for surface download/upload, created on first use
    mfxSession session_download;
    int session_download_init;      // nonzero once download-session init was attempted
    mfxSession session_upload;
    int session_upload_init;        // nonzero once upload-session init was attempted
#if HAVE_PTHREADS
    // serialize the lazy session creation between concurrent transfers
    pthread_mutex_t session_lock;
    pthread_cond_t session_cond;
#endif

    // child (VAAPI/DXVA2) frames context backing a non-opaque internal pool
    AVBufferRef *child_frames_ref;
    mfxFrameSurface1 *surfaces_internal;
    int nb_surfaces_used;

    // used in the frame allocator for non-opaque surfaces
    mfxMemId *mem_ids;
    // used in the opaque alloc request for opaque surfaces
    mfxFrameSurface1 **surface_ptrs;

    mfxExtOpaqueSurfaceAlloc opaque_alloc;
    mfxExtBuffer *ext_buffers[1];
} QSVFramesContext;
/* Known MFX handle types with the matching child device type and hw pixel
 * format; terminated by a zeroed sentinel entry (see qsv_device_init()). */
static const struct {
    mfxHandleType handle_type;
    enum AVHWDeviceType device_type;
    enum AVPixelFormat pix_fmt;
} supported_handle_types[] = {
#if CONFIG_VAAPI
    { MFX_HANDLE_VA_DISPLAY,          AV_HWDEVICE_TYPE_VAAPI, AV_PIX_FMT_VAAPI },
#endif
#if CONFIG_DXVA2
    { MFX_HANDLE_D3D9_DEVICE_MANAGER, AV_HWDEVICE_TYPE_DXVA2, AV_PIX_FMT_DXVA2_VLD },
#endif
    { 0 },
};
/* Software pixel formats supported by this hwcontext and their MFX FourCC
 * equivalents; iterated (not sentinel-terminated) via FF_ARRAY_ELEMS. */
static const struct {
    enum AVPixelFormat pix_fmt;
    uint32_t fourcc;
} supported_pixel_formats[] = {
    { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
    { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4 },
    { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
    { AV_PIX_FMT_PAL8, MFX_FOURCC_P8   },
};
  92. static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
  93. {
  94. int i;
  95. for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
  96. if (supported_pixel_formats[i].pix_fmt == pix_fmt)
  97. return supported_pixel_formats[i].fourcc;
  98. }
  99. return 0;
  100. }
/**
 * Device init: probe the user-supplied MFX session for a native device
 * handle (trying each known handle type) and record the session's
 * implementation and API version for later internal-session creation.
 */
static int qsv_device_init(AVHWDeviceContext *ctx)
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;
    QSVDeviceContext *s = ctx->internal->priv;

    mfxStatus err;
    int i;

    // take the first handle type the session can provide
    for (i = 0; supported_handle_types[i].handle_type; i++) {
        err = MFXVideoCORE_GetHandle(hwctx->session, supported_handle_types[i].handle_type,
                                     &s->handle);
        if (err == MFX_ERR_NONE) {
            s->handle_type       = supported_handle_types[i].handle_type;
            s->child_device_type = supported_handle_types[i].device_type;
            s->child_pix_fmt     = supported_handle_types[i].pix_fmt;
            break;
        }
    }
    if (!s->handle) {
        // not fatal: only internal pools / mapping require a native handle
        av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
               "from the session\n");
    }

    err = MFXQueryIMPL(hwctx->session, &s->impl);
    if (err == MFX_ERR_NONE)
        err = MFXQueryVersion(hwctx->session, &s->ver);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}
/**
 * Frames-context teardown: close the internal transfer sessions, destroy
 * the synchronization primitives, and free the allocator tables, internal
 * surface array and the child frames reference.
 */
static void qsv_frames_uninit(AVHWFramesContext *ctx)
{
    QSVFramesContext *s = ctx->internal->priv;

    if (s->session_download) {
        MFXVideoVPP_Close(s->session_download);
        MFXClose(s->session_download);
    }
    s->session_download = NULL;
    s->session_download_init = 0;

    if (s->session_upload) {
        MFXVideoVPP_Close(s->session_upload);
        MFXClose(s->session_upload);
    }
    s->session_upload = NULL;
    s->session_upload_init = 0;

#if HAVE_PTHREADS
    pthread_mutex_destroy(&s->session_lock);
    pthread_cond_destroy(&s->session_cond);
#endif

    av_freep(&s->mem_ids);
    av_freep(&s->surface_ptrs);
    av_freep(&s->surfaces_internal);
    av_buffer_unref(&s->child_frames_ref);
}
/* No-op release callback: pool buffers point into surfaces_internal, which
 * is owned and freed by the frames context, not by individual buffers. */
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
{
}
  157. static AVBufferRef *qsv_pool_alloc(void *opaque, int size)
  158. {
  159. AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
  160. QSVFramesContext *s = ctx->internal->priv;
  161. AVQSVFramesContext *hwctx = ctx->hwctx;
  162. if (s->nb_surfaces_used < hwctx->nb_surfaces) {
  163. s->nb_surfaces_used++;
  164. return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
  165. sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
  166. }
  167. return NULL;
  168. }
/**
 * Create and initialize the child (VAAPI or DXVA2) frames context backing
 * a non-opaque internal surface pool, then store each child surface handle
 * in the MemId of the corresponding internal mfx surface and set the pool's
 * frame_type accordingly.
 *
 * Requires the native device handle retrieved in qsv_device_init().
 */
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
{
    AVQSVFramesContext *hwctx = ctx->hwctx;
    QSVFramesContext *s = ctx->internal->priv;
    QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;

    AVBufferRef *child_device_ref = NULL;
    AVBufferRef *child_frames_ref = NULL;

    AVHWDeviceContext *child_device_ctx;
    AVHWFramesContext *child_frames_ctx;

    int i, ret = 0;

    if (!device_priv->handle) {
        av_log(ctx, AV_LOG_ERROR,
               "Cannot create a non-opaque internal surface pool without "
               "a hardware handle\n");
        return AVERROR(EINVAL);
    }

    child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
    if (!child_device_ref)
        return AVERROR(ENOMEM);
    child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;

#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        // reuse the VA display obtained from the MFX session
        child_device_hwctx->display = (VADisplay)device_priv->handle;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        // reuse the D3D9 device manager obtained from the MFX session
        child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
    }
#endif

    ret = av_hwdevice_ctx_init(child_device_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
        goto fail;
    }

    child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
    if (!child_frames_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;

    child_frames_ctx->format            = device_priv->child_pix_fmt;
    child_frames_ctx->sw_format         = ctx->sw_format;
    child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
    // libmfx requires 16-aligned surface dimensions
    child_frames_ctx->width             = FFALIGN(ctx->width, 16);
    child_frames_ctx->height            = FFALIGN(ctx->height, 16);

#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        // choose the DXVA2 surface type matching the requested mfx frame type
        if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
            child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        else
            child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
    }
#endif

    ret = av_hwframe_ctx_init(child_frames_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
        goto fail;
    }

#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        // MemId points at the VASurfaceID inside the child's array
        for (i = 0; i < ctx->initial_pool_size; i++)
            s->surfaces_internal[i].Data.MemId = child_frames_hwctx->surface_ids + i;
        hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        // MemId is the IDirect3DSurface9 pointer itself
        for (i = 0; i < ctx->initial_pool_size; i++)
            s->surfaces_internal[i].Data.MemId = (mfxMemId)child_frames_hwctx->surfaces[i];
        if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
        else
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif

    s->child_frames_ref = child_frames_ref;
    child_frames_ref    = NULL;

fail:
    // the frames context keeps its own device reference; ours is dropped
    // unconditionally, and child_frames_ref is NULL here on success
    av_buffer_unref(&child_device_ref);
    av_buffer_unref(&child_frames_ref);
    return ret;
}
/**
 * Fill one surface's mfxFrameInfo from the frames-context parameters
 * (sw_format and dimensions).
 *
 * @return 0 on success, AVERROR(EINVAL) for unsupported pixel formats.
 */
static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
{
    const AVPixFmtDescriptor *desc;
    uint32_t fourcc;

    desc = av_pix_fmt_desc_get(ctx->sw_format);
    if (!desc)
        return AVERROR(EINVAL);

    fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
    if (!fourcc)
        return AVERROR(EINVAL);

    surf->Info.BitDepthLuma   = desc->comp[0].depth;
    surf->Info.BitDepthChroma = desc->comp[0].depth;
    surf->Info.Shift          = desc->comp[0].depth > 8;  // >8-bit samples are MSB-aligned

    // derive the chroma format from the descriptor's subsampling factors
    if (desc->log2_chroma_w && desc->log2_chroma_h)
        surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
    else if (desc->log2_chroma_w)
        surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
    else
        surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;

    surf->Info.FourCC         = fourcc;
    surf->Info.Width          = FFALIGN(ctx->width, 16);   // allocated (16-aligned) size
    surf->Info.CropW          = ctx->width;                // visible size
    surf->Info.Height         = FFALIGN(ctx->height, 16);
    surf->Info.CropH          = ctx->height;
    // arbitrary frame rate; only VPP init cares, not the pixel data
    surf->Info.FrameRateExtN  = 25;
    surf->Info.FrameRateExtD  = 1;
    surf->Info.PicStruct      = MFX_PICSTRUCT_PROGRESSIVE;

    return 0;
}
/**
 * Create the fixed-size internal surface pool. For non-opaque frame types
 * this also creates the child (VAAPI/DXVA2) frames context that provides
 * the actual video memory.
 *
 * NOTE(review): 'fourcc' is currently unused here; the FourCC is re-derived
 * per surface inside qsv_init_surface().
 */
static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;

    int i, ret = 0;

    // QSV cannot grow the pool dynamically, so a fixed size is mandatory
    if (ctx->initial_pool_size <= 0) {
        av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
        return AVERROR(EINVAL);
    }

    s->surfaces_internal = av_mallocz_array(ctx->initial_pool_size,
                                            sizeof(*s->surfaces_internal));
    if (!s->surfaces_internal)
        return AVERROR(ENOMEM);

    for (i = 0; i < ctx->initial_pool_size; i++) {
        ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
        if (ret < 0)
            return ret;
    }

    // opaque surfaces are managed by libmfx itself; no child pool needed
    if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
        ret = qsv_init_child_ctx(ctx);
        if (ret < 0)
            return ret;
    }

    ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
                                                        ctx, qsv_pool_alloc, NULL);
    if (!ctx->internal->pool_internal)
        return AVERROR(ENOMEM);

    frames_hwctx->surfaces    = s->surfaces_internal;
    frames_hwctx->nb_surfaces = ctx->initial_pool_size;

    return 0;
}
/**
 * mfxFrameAllocator.Alloc callback: serve VPP allocation requests from the
 * pre-built mem_ids table. Only external VPP-in/out requests whose surface
 * properties match the existing pool are accepted.
 */
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                             mfxFrameAllocResponse *resp)
{
    AVHWFramesContext    *ctx = pthis;
    QSVFramesContext       *s = ctx->internal->priv;
    AVQSVFramesContext *hwctx = ctx->hwctx;
    mfxFrameInfo *i  = &req->Info;
    mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;

    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
        !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
        return MFX_ERR_UNSUPPORTED;
    if (i->Width > i1->Width || i->Height > i1->Height ||
        i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
        av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
               "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
               i->Width, i->Height, i->FourCC, i->ChromaFormat,
               i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
        return MFX_ERR_UNSUPPORTED;
    }

    resp->mids           = s->mem_ids;
    resp->NumFrameActual = hwctx->nb_surfaces;

    return MFX_ERR_NONE;
}
/* Nothing to free: the mids table is owned by the frames context. */
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    return MFX_ERR_NONE;
}
/* CPU mapping of video-memory surfaces is not supported by this allocator. */
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
/* Counterpart of frame_lock(); likewise unsupported. */
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
/* The MemId already is the native surface handle; hand it back directly. */
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}
  358. static int qsv_init_internal_session(AVHWFramesContext *ctx,
  359. mfxSession *session, int upload)
  360. {
  361. QSVFramesContext *s = ctx->internal->priv;
  362. AVQSVFramesContext *frames_hwctx = ctx->hwctx;
  363. QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
  364. int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);
  365. mfxFrameAllocator frame_allocator = {
  366. .pthis = ctx,
  367. .Alloc = frame_alloc,
  368. .Lock = frame_lock,
  369. .Unlock = frame_unlock,
  370. .GetHDL = frame_get_hdl,
  371. .Free = frame_free,
  372. };
  373. mfxVideoParam par;
  374. mfxStatus err;
  375. err = MFXInit(device_priv->impl, &device_priv->ver, session);
  376. if (err != MFX_ERR_NONE) {
  377. av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
  378. return AVERROR_UNKNOWN;
  379. }
  380. if (device_priv->handle) {
  381. err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
  382. device_priv->handle);
  383. if (err != MFX_ERR_NONE)
  384. return AVERROR_UNKNOWN;
  385. }
  386. if (!opaque) {
  387. err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
  388. if (err != MFX_ERR_NONE)
  389. return AVERROR_UNKNOWN;
  390. }
  391. memset(&par, 0, sizeof(par));
  392. if (opaque) {
  393. par.ExtParam = s->ext_buffers;
  394. par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
  395. par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
  396. MFX_IOPATTERN_IN_OPAQUE_MEMORY;
  397. } else {
  398. par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
  399. MFX_IOPATTERN_IN_VIDEO_MEMORY;
  400. }
  401. par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
  402. MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
  403. par.AsyncDepth = 1;
  404. par.vpp.In = frames_hwctx->surfaces[0].Info;
  405. /* Apparently VPP requires the frame rate to be set to some value, otherwise
  406. * init will fail (probably for the framerate conversion filter). Since we
  407. * are only doing data upload/download here, we just invent an arbitrary
  408. * value */
  409. par.vpp.In.FrameRateExtN = 25;
  410. par.vpp.In.FrameRateExtD = 1;
  411. par.vpp.Out = par.vpp.In;
  412. err = MFXVideoVPP_Init(*session, &par);
  413. if (err != MFX_ERR_NONE) {
  414. av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session."
  415. "Surface upload/download will not be possible\n");
  416. MFXClose(*session);
  417. *session = NULL;
  418. }
  419. return 0;
  420. }
/**
 * Frames-context init: validate the software format, create the internal
 * pool when the caller supplied none, then build either the opaque-alloc
 * surface list or the allocator MemId table, and set up the lazy-session
 * synchronization state.
 */
static int qsv_frames_init(AVHWFramesContext *ctx)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;

    int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    uint32_t fourcc;
    int i, ret;

    fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
    if (!fourcc) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        ret = qsv_init_pool(ctx, fourcc);
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
            return ret;
        }
    }

    if (opaque) {
        // surface pointer list consumed by the opaque-alloc ext buffer
        s->surface_ptrs = av_mallocz_array(frames_hwctx->nb_surfaces,
                                           sizeof(*s->surface_ptrs));
        if (!s->surface_ptrs)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs[i] = frames_hwctx->surfaces + i;

        s->opaque_alloc.In.Surfaces   = s->surface_ptrs;
        s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
        s->opaque_alloc.In.Type       = frames_hwctx->frame_type;

        s->opaque_alloc.Out = s->opaque_alloc.In;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);

        s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
    } else {
        // MemId table handed out by the frame_alloc() callback
        s->mem_ids = av_mallocz_array(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
        if (!s->mem_ids)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
    }

    // transfer sessions are created lazily on first use
    s->session_download = NULL;
    s->session_upload   = NULL;

    s->session_download_init = 0;
    s->session_upload_init   = 0;

#if HAVE_PTHREADS
    pthread_mutex_init(&s->session_lock, NULL);
    pthread_cond_init(&s->session_cond, NULL);
#endif

    return 0;
}
  471. static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
  472. {
  473. frame->buf[0] = av_buffer_pool_get(ctx->pool);
  474. if (!frame->buf[0])
  475. return AVERROR(ENOMEM);
  476. frame->data[3] = frame->buf[0]->data;
  477. frame->format = AV_PIX_FMT_QSV;
  478. frame->width = ctx->width;
  479. frame->height = ctx->height;
  480. return 0;
  481. }
  482. static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
  483. enum AVHWFrameTransferDirection dir,
  484. enum AVPixelFormat **formats)
  485. {
  486. enum AVPixelFormat *fmts;
  487. fmts = av_malloc_array(2, sizeof(*fmts));
  488. if (!fmts)
  489. return AVERROR(ENOMEM);
  490. fmts[0] = ctx->sw_format;
  491. fmts[1] = AV_PIX_FMT_NONE;
  492. *formats = fmts;
  493. return 0;
  494. }
/**
 * Derive a child (VAAPI or DXVA2) frames context from a QSV one by
 * extracting the native surface handles stored in each mfx surface's MemId.
 */
static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx,
                                  AVHWFramesContext *src_ctx, int flags)
{
    AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
    int i;

    switch (dst_ctx->device_ctx->type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        {
            AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
            dst_hwctx->surface_ids = av_mallocz_array(src_hwctx->nb_surfaces,
                                                      sizeof(*dst_hwctx->surface_ids));
            if (!dst_hwctx->surface_ids)
                return AVERROR(ENOMEM);
            // MemId points at a VASurfaceID; copy the id values
            for (i = 0; i < src_hwctx->nb_surfaces; i++)
                dst_hwctx->surface_ids[i] =
                    *(VASurfaceID*)src_hwctx->surfaces[i].Data.MemId;
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
        }
        break;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        {
            AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
            dst_hwctx->surfaces = av_mallocz_array(src_hwctx->nb_surfaces,
                                                   sizeof(*dst_hwctx->surfaces));
            if (!dst_hwctx->surfaces)
                return AVERROR(ENOMEM);
            // MemId is the IDirect3DSurface9 pointer itself
            for (i = 0; i < src_hwctx->nb_surfaces; i++)
                dst_hwctx->surfaces[i] =
                    (IDirect3DSurface9*)src_hwctx->surfaces[i].Data.MemId;
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
            // infer the DXVA2 surface type from the mfx frame type
            if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
                dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
            else
                dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        }
        break;
#endif
    default:
        return AVERROR(ENOSYS);
    }

    return 0;
}
  540. static int qsv_map_from(AVHWFramesContext *ctx,
  541. AVFrame *dst, const AVFrame *src, int flags)
  542. {
  543. QSVFramesContext *s = ctx->internal->priv;
  544. mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
  545. AVHWFramesContext *child_frames_ctx;
  546. const AVPixFmtDescriptor *desc;
  547. uint8_t *child_data;
  548. AVFrame *dummy;
  549. int ret = 0;
  550. if (!s->child_frames_ref)
  551. return AVERROR(ENOSYS);
  552. child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
  553. switch (child_frames_ctx->device_ctx->type) {
  554. #if CONFIG_VAAPI
  555. case AV_HWDEVICE_TYPE_VAAPI:
  556. child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)surf->Data.MemId;
  557. break;
  558. #endif
  559. #if CONFIG_DXVA2
  560. case AV_HWDEVICE_TYPE_DXVA2:
  561. child_data = surf->Data.MemId;
  562. break;
  563. #endif
  564. default:
  565. return AVERROR(ENOSYS);
  566. }
  567. if (dst->format == child_frames_ctx->format) {
  568. ret = ff_hwframe_map_create(s->child_frames_ref,
  569. dst, src, NULL, NULL);
  570. if (ret < 0)
  571. return ret;
  572. dst->width = src->width;
  573. dst->height = src->height;
  574. dst->data[3] = child_data;
  575. return 0;
  576. }
  577. desc = av_pix_fmt_desc_get(dst->format);
  578. if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
  579. // This only supports mapping to software.
  580. return AVERROR(ENOSYS);
  581. }
  582. dummy = av_frame_alloc();
  583. if (!dummy)
  584. return AVERROR(ENOMEM);
  585. dummy->buf[0] = av_buffer_ref(src->buf[0]);
  586. dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
  587. if (!dummy->buf[0] || !dummy->hw_frames_ctx)
  588. goto fail;
  589. dummy->format = child_frames_ctx->format;
  590. dummy->width = src->width;
  591. dummy->height = src->height;
  592. dummy->data[3] = child_data;
  593. ret = av_hwframe_map(dst, dummy, flags);
  594. fail:
  595. av_frame_free(&dummy);
  596. return ret;
  597. }
/**
 * Transfer data via the child frames context by wrapping the mfx surface's
 * native handle in a temporary child-format frame. The dummy frame only
 * borrows buf[0] and hw_frames_ctx, so those fields must be cleared before
 * av_frame_free() to avoid releasing the caller's references.
 */
static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,
                                   const AVFrame *src)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
    int download = !!src->hw_frames_ctx;   // hw-backed src means download
    mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);

    AVFrame *dummy;
    int ret;

    dummy = av_frame_alloc();
    if (!dummy)
        return AVERROR(ENOMEM);

    dummy->format        = child_frames_ctx->format;
    dummy->width         = src->width;
    dummy->height        = src->height;
    dummy->buf[0]        = download ? src->buf[0] : dst->buf[0];   // borrowed, not owned
    dummy->data[3]       = surf->Data.MemId;
    dummy->hw_frames_ctx = s->child_frames_ref;                    // borrowed, not owned

    ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
                     av_hwframe_transfer_data(dummy, src, 0);

    // detach the borrowed fields so av_frame_free() does not release them
    dummy->buf[0]        = NULL;
    dummy->data[3]       = NULL;
    dummy->hw_frames_ctx = NULL;

    av_frame_free(&dummy);

    return ret;
}
  624. static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
  625. {
  626. switch (frame->format) {
  627. case AV_PIX_FMT_NV12:
  628. case AV_PIX_FMT_P010:
  629. surface->Data.Y = frame->data[0];
  630. surface->Data.UV = frame->data[1];
  631. break;
  632. case AV_PIX_FMT_YUV420P:
  633. surface->Data.Y = frame->data[0];
  634. surface->Data.U = frame->data[1];
  635. surface->Data.V = frame->data[2];
  636. break;
  637. case AV_PIX_FMT_BGRA:
  638. surface->Data.B = frame->data[0];
  639. surface->Data.G = frame->data[0] + 1;
  640. surface->Data.R = frame->data[0] + 2;
  641. surface->Data.A = frame->data[0] + 3;
  642. break;
  643. default:
  644. return MFX_ERR_UNSUPPORTED;
  645. }
  646. surface->Data.Pitch = frame->linesize[0];
  647. surface->Data.TimeStamp = frame->pts;
  648. return 0;
  649. }
  650. static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
  651. const AVFrame *src)
  652. {
  653. QSVFramesContext *s = ctx->internal->priv;
  654. mfxFrameSurface1 out = {{ 0 }};
  655. mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
  656. mfxSyncPoint sync = NULL;
  657. mfxStatus err;
  658. int ret = 0;
  659. while (!s->session_download_init && !s->session_download && !ret) {
  660. #if HAVE_PTHREADS
  661. if (pthread_mutex_trylock(&s->session_lock) == 0) {
  662. #endif
  663. if (!s->session_download_init) {
  664. ret = qsv_init_internal_session(ctx, &s->session_download, 0);
  665. if (s->session_download)
  666. s->session_download_init = 1;
  667. }
  668. #if HAVE_PTHREADS
  669. pthread_mutex_unlock(&s->session_lock);
  670. pthread_cond_signal(&s->session_cond);
  671. } else {
  672. pthread_mutex_lock(&s->session_lock);
  673. while (!s->session_download_init && !s->session_download) {
  674. pthread_cond_wait(&s->session_cond, &s->session_lock);
  675. }
  676. pthread_mutex_unlock(&s->session_lock);
  677. }
  678. #endif
  679. }
  680. if (ret < 0)
  681. return ret;
  682. if (!s->session_download) {
  683. if (s->child_frames_ref)
  684. return qsv_transfer_data_child(ctx, dst, src);
  685. av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
  686. return AVERROR(ENOSYS);
  687. }
  688. out.Info = in->Info;
  689. map_frame_to_surface(dst, &out);
  690. do {
  691. err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
  692. if (err == MFX_WRN_DEVICE_BUSY)
  693. av_usleep(1);
  694. } while (err == MFX_WRN_DEVICE_BUSY);
  695. if (err < 0 || !sync) {
  696. av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
  697. return AVERROR_UNKNOWN;
  698. }
  699. do {
  700. err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
  701. } while (err == MFX_WRN_IN_EXECUTION);
  702. if (err < 0) {
  703. av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
  704. return AVERROR_UNKNOWN;
  705. }
  706. return 0;
  707. }
  708. static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
  709. const AVFrame *src)
  710. {
  711. QSVFramesContext *s = ctx->internal->priv;
  712. mfxFrameSurface1 in = {{ 0 }};
  713. mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
  714. mfxSyncPoint sync = NULL;
  715. mfxStatus err;
  716. int ret = 0;
  717. /* make a copy if the input is not padded as libmfx requires */
  718. AVFrame tmp_frame;
  719. const AVFrame *src_frame;
  720. int realigned = 0;
  721. while (!s->session_upload_init && !s->session_upload && !ret) {
  722. #if HAVE_PTHREADS
  723. if (pthread_mutex_trylock(&s->session_lock) == 0) {
  724. #endif
  725. if (!s->session_upload_init) {
  726. ret = qsv_init_internal_session(ctx, &s->session_upload, 1);
  727. if (s->session_upload)
  728. s->session_upload_init = 1;
  729. }
  730. #if HAVE_PTHREADS
  731. pthread_mutex_unlock(&s->session_lock);
  732. pthread_cond_signal(&s->session_cond);
  733. } else {
  734. pthread_mutex_lock(&s->session_lock);
  735. while (!s->session_upload_init && !s->session_upload) {
  736. pthread_cond_wait(&s->session_cond, &s->session_lock);
  737. }
  738. pthread_mutex_unlock(&s->session_lock);
  739. }
  740. #endif
  741. }
  742. if (ret < 0)
  743. return ret;
  744. if (src->height & 15 || src->linesize[0] & 15) {
  745. realigned = 1;
  746. memset(&tmp_frame, 0, sizeof(tmp_frame));
  747. tmp_frame.format = src->format;
  748. tmp_frame.width = FFALIGN(src->width, 16);
  749. tmp_frame.height = FFALIGN(src->height, 16);
  750. ret = av_frame_get_buffer(&tmp_frame, 32);
  751. if (ret < 0)
  752. return ret;
  753. ret = av_frame_copy(&tmp_frame, src);
  754. if (ret < 0) {
  755. av_frame_unref(&tmp_frame);
  756. return ret;
  757. }
  758. }
  759. src_frame = realigned ? &tmp_frame : src;
  760. if (!s->session_upload) {
  761. if (s->child_frames_ref)
  762. return qsv_transfer_data_child(ctx, dst, src_frame);
  763. av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
  764. return AVERROR(ENOSYS);
  765. }
  766. in.Info = out->Info;
  767. map_frame_to_surface(src_frame, &in);
  768. do {
  769. err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
  770. if (err == MFX_WRN_DEVICE_BUSY)
  771. av_usleep(1);
  772. } while (err == MFX_WRN_DEVICE_BUSY);
  773. if (err < 0 || !sync) {
  774. av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
  775. return AVERROR_UNKNOWN;
  776. }
  777. do {
  778. err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
  779. } while (err == MFX_WRN_IN_EXECUTION);
  780. if (err < 0) {
  781. av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
  782. return AVERROR_UNKNOWN;
  783. }
  784. if (realigned)
  785. av_frame_unref(&tmp_frame);
  786. return 0;
  787. }
  788. static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
  789. AVHWFramesContext *src_ctx, int flags)
  790. {
  791. QSVFramesContext *s = dst_ctx->internal->priv;
  792. AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
  793. int i;
  794. switch (src_ctx->device_ctx->type) {
  795. #if CONFIG_VAAPI
  796. case AV_HWDEVICE_TYPE_VAAPI:
  797. {
  798. AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
  799. s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
  800. sizeof(*s->surfaces_internal));
  801. if (!s->surfaces_internal)
  802. return AVERROR(ENOMEM);
  803. for (i = 0; i < src_hwctx->nb_surfaces; i++) {
  804. qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
  805. s->surfaces_internal[i].Data.MemId = src_hwctx->surface_ids + i;
  806. }
  807. dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
  808. dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
  809. }
  810. break;
  811. #endif
  812. #if CONFIG_DXVA2
  813. case AV_HWDEVICE_TYPE_DXVA2:
  814. {
  815. AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
  816. s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
  817. sizeof(*s->surfaces_internal));
  818. if (!s->surfaces_internal)
  819. return AVERROR(ENOMEM);
  820. for (i = 0; i < src_hwctx->nb_surfaces; i++) {
  821. qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
  822. s->surfaces_internal[i].Data.MemId = (mfxMemId)src_hwctx->surfaces[i];
  823. }
  824. dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
  825. if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
  826. dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
  827. else
  828. dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
  829. }
  830. break;
  831. #endif
  832. default:
  833. return AVERROR(ENOSYS);
  834. }
  835. dst_hwctx->surfaces = s->surfaces_internal;
  836. return 0;
  837. }
/**
 * Map a child-device (VAAPI/DXVA2) frame into a QSV frames context.
 *
 * Scans the QSV context's surface array for the entry whose MemId refers
 * to the same underlying surface as src->data[3]; on a match, creates a
 * hwframe mapping and points dst->data[3] at the matching mfxFrameSurface1.
 *
 * @return 0 on success, AVERROR(EINVAL) if src's surface is not part of
 *         this frames context, or the error from ff_hwframe_map_create().
 */
static int qsv_map_to(AVHWFramesContext *dst_ctx,
                      AVFrame *dst, const AVFrame *src, int flags)
{
    AVQSVFramesContext *hwctx = dst_ctx->hwctx;
    int i, err;

    for (i = 0; i < hwctx->nb_surfaces; i++) {
#if CONFIG_VAAPI
        /* VAAPI: MemId is a pointer to a VASurfaceID; src->data[3] carries
         * the surface ID itself as an integer. */
        if (*(VASurfaceID*)hwctx->surfaces[i].Data.MemId ==
            (VASurfaceID)(uintptr_t)src->data[3])
            break;
#endif
#if CONFIG_DXVA2
        /* DXVA2: MemId holds the IDirect3DSurface9 pointer directly.
         * NOTE(review): when both CONFIG_VAAPI and CONFIG_DXVA2 are enabled,
         * both checks run on every surface regardless of which child device
         * actually backs it — the first check dereferences MemId as a
         * VASurfaceID* even for DXVA2 surfaces. Presumably builds enable
         * only one backend per platform; verify before changing. */
        if ((IDirect3DSurface9*)hwctx->surfaces[i].Data.MemId ==
            (IDirect3DSurface9*)(uintptr_t)src->data[3])
            break;
#endif
    }
    if (i >= hwctx->nb_surfaces) {
        av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
               "is not in the mapped frames context.\n");
        return AVERROR(EINVAL);
    }

    /* no unmap callback needed: the mapping borrows the existing surface */
    err = ff_hwframe_map_create(dst->hw_frames_ctx,
                                dst, src, NULL, NULL);
    if (err)
        return err;

    dst->width   = src->width;
    dst->height  = src->height;
    dst->data[3] = (uint8_t*)&hwctx->surfaces[i];

    return 0;
}
  869. static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
  870. const void *hwconfig,
  871. AVHWFramesConstraints *constraints)
  872. {
  873. int i;
  874. constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
  875. sizeof(*constraints->valid_sw_formats));
  876. if (!constraints->valid_sw_formats)
  877. return AVERROR(ENOMEM);
  878. for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
  879. constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
  880. constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;
  881. constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
  882. if (!constraints->valid_hw_formats)
  883. return AVERROR(ENOMEM);
  884. constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
  885. constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
  886. return 0;
  887. }
  888. static void qsv_device_free(AVHWDeviceContext *ctx)
  889. {
  890. AVQSVDeviceContext *hwctx = ctx->hwctx;
  891. QSVDevicePriv *priv = ctx->user_opaque;
  892. if (hwctx->session)
  893. MFXClose(hwctx->session);
  894. av_buffer_unref(&priv->child_device_ctx);
  895. av_freep(&priv);
  896. }
  897. static mfxIMPL choose_implementation(const char *device)
  898. {
  899. static const struct {
  900. const char *name;
  901. mfxIMPL impl;
  902. } impl_map[] = {
  903. { "auto", MFX_IMPL_AUTO },
  904. { "sw", MFX_IMPL_SOFTWARE },
  905. { "hw", MFX_IMPL_HARDWARE },
  906. { "auto_any", MFX_IMPL_AUTO_ANY },
  907. { "hw_any", MFX_IMPL_HARDWARE_ANY },
  908. { "hw2", MFX_IMPL_HARDWARE2 },
  909. { "hw3", MFX_IMPL_HARDWARE3 },
  910. { "hw4", MFX_IMPL_HARDWARE4 },
  911. };
  912. mfxIMPL impl = MFX_IMPL_AUTO_ANY;
  913. int i;
  914. if (device) {
  915. for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
  916. if (!strcmp(device, impl_map[i].name)) {
  917. impl = impl_map[i].impl;
  918. break;
  919. }
  920. if (i == FF_ARRAY_ELEMS(impl_map))
  921. impl = strtol(device, NULL, 0);
  922. }
  923. return impl;
  924. }
/**
 * Create an MFX session on top of an existing child device (VAAPI display
 * or DXVA2 device manager) and attach the child's native handle to it.
 *
 * @param implementation   requested MFX_IMPL_* value
 * @param child_device_ctx VAAPI or DXVA2 device to derive from
 * @return 0 on success, AVERROR(ENOSYS) for unsupported child types,
 *         AVERROR_UNKNOWN on libmfx failures.
 */
static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
                                        mfxIMPL implementation,
                                        AVHWDeviceContext *child_device_ctx,
                                        int flags)
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;
    /* minimum API version requested from MFXInit */
    mfxVersion    ver = { { 3, 1 } };
    mfxHDL        handle;
    mfxHandleType handle_type;
    mfxStatus     err;
    int ret;

    /* pick the native handle matching the child device type */
    switch (child_device_ctx->type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        {
            AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
            handle_type = MFX_HANDLE_VA_DISPLAY;
            handle = (mfxHDL)child_device_hwctx->display;
        }
        break;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        {
            AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
            handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
            handle = (mfxHDL)child_device_hwctx->devmgr;
        }
        break;
#endif
    default:
        ret = AVERROR(ENOSYS);
        goto fail;
    }

    err = MFXInit(implementation, &ver, &hwctx->session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
               "%d.\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    /* query the actual runtime version the implementation supports ... */
    err = MFXQueryVersion(hwctx->session, &ver);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: %d.\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    av_log(ctx, AV_LOG_VERBOSE,
           "Initialize MFX session: API version is %d.%d, implementation version is %d.%d\n",
           MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);

    /* ... then close and re-init with that exact version, so the session
     * is created against the runtime's real capabilities.
     * NOTE(review): if this second MFXInit fails, the fail path calls
     * MFXClose on hwctx->session again — whether MFXInit leaves the
     * session pointer NULL on failure is not visible here; confirm
     * against the libmfx documentation. */
    MFXClose(hwctx->session);

    err = MFXInit(implementation, &ver, &hwctx->session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Error initializing an MFX session: %d.\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    /* hand the child device's native handle to the session */
    err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
               "%d\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    /* NOTE(review): ret temporarily holds an mfxStatus here, not an
     * AVERROR; it is only compared against MFX_ERR_NONE for logging and
     * is overwritten by the unconditional `return 0` below, so callers
     * never see it — but the reuse is fragile. */
    ret = MFXQueryVersion(hwctx->session,&ver);
    if (ret == MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_VERBOSE, "MFX compile/runtime API: %d.%d/%d.%d\n",
               MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);
    }
    return 0;

fail:
    if (hwctx->session)
        MFXClose(hwctx->session);
    return ret;
}
  1001. static int qsv_device_derive(AVHWDeviceContext *ctx,
  1002. AVHWDeviceContext *child_device_ctx, int flags)
  1003. {
  1004. return qsv_device_derive_from_child(ctx, MFX_IMPL_HARDWARE_ANY,
  1005. child_device_ctx, flags);
  1006. }
  1007. static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
  1008. AVDictionary *opts, int flags)
  1009. {
  1010. QSVDevicePriv *priv;
  1011. enum AVHWDeviceType child_device_type;
  1012. AVHWDeviceContext *child_device;
  1013. AVDictionary *child_device_opts;
  1014. AVDictionaryEntry *e;
  1015. mfxIMPL impl;
  1016. int ret;
  1017. priv = av_mallocz(sizeof(*priv));
  1018. if (!priv)
  1019. return AVERROR(ENOMEM);
  1020. ctx->user_opaque = priv;
  1021. ctx->free = qsv_device_free;
  1022. e = av_dict_get(opts, "child_device", NULL, 0);
  1023. child_device_opts = NULL;
  1024. if (CONFIG_VAAPI) {
  1025. child_device_type = AV_HWDEVICE_TYPE_VAAPI;
  1026. // libmfx does not actually implement VAAPI properly, rather it
  1027. // depends on the specific behaviour of a matching iHD driver when
  1028. // used on recent Intel hardware. Set options to the VAAPI device
  1029. // creation so that we should pick a usable setup by default if
  1030. // possible, even when multiple devices and drivers are available.
  1031. av_dict_set(&child_device_opts, "kernel_driver", "i915", 0);
  1032. av_dict_set(&child_device_opts, "driver", "iHD", 0);
  1033. } else if (CONFIG_DXVA2)
  1034. child_device_type = AV_HWDEVICE_TYPE_DXVA2;
  1035. else {
  1036. av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
  1037. return AVERROR(ENOSYS);
  1038. }
  1039. ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
  1040. e ? e->value : NULL, child_device_opts, 0);
  1041. if (ret < 0)
  1042. return ret;
  1043. child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
  1044. impl = choose_implementation(device);
  1045. return qsv_device_derive_from_child(ctx, impl, child_device, 0);
  1046. }
/* HWContextType descriptor registering the QSV hwcontext implementation:
 * sizes of the public/private device and frames structs, plus every
 * callback defined in this file (device/frames lifecycle, buffer
 * allocation, data transfer and mapping to/from child devices). */
const HWContextType ff_hwcontext_type_qsv = {
    .type                   = AV_HWDEVICE_TYPE_QSV,
    .name                   = "QSV",

    .device_hwctx_size      = sizeof(AVQSVDeviceContext),
    .device_priv_size       = sizeof(QSVDeviceContext),
    .frames_hwctx_size      = sizeof(AVQSVFramesContext),
    .frames_priv_size       = sizeof(QSVFramesContext),

    .device_create          = qsv_device_create,
    .device_derive          = qsv_device_derive,
    .device_init            = qsv_device_init,
    .frames_get_constraints = qsv_frames_get_constraints,
    .frames_init            = qsv_frames_init,
    .frames_uninit          = qsv_frames_uninit,
    .frames_get_buffer      = qsv_get_buffer,
    .transfer_get_formats   = qsv_transfer_get_formats,
    .transfer_data_to       = qsv_transfer_data_to,
    .transfer_data_from     = qsv_transfer_data_from,
    .map_to                 = qsv_map_to,
    .map_from               = qsv_map_from,
    .frames_derive_to       = qsv_frames_derive_to,
    .frames_derive_from     = qsv_frames_derive_from,

    /* AV_PIX_FMT_NONE-terminated list of supported hw pixel formats */
    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
};