2
0

hwcontext_qsv.c 39 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212121312141215121612171218121912201221122212231224122512261227122812291230123112321233123412351236123712381239124012411242124312441245124612471248124912501251125212531254125512561257125812591260126112621263126412651266126712681269
  1. /*
  2. * This file is part of FFmpeg.
  3. *
  4. * FFmpeg is free software; you can redistribute it and/or
  5. * modify it under the terms of the GNU Lesser General Public
  6. * License as published by the Free Software Foundation; either
  7. * version 2.1 of the License, or (at your option) any later version.
  8. *
  9. * FFmpeg is distributed in the hope that it will be useful,
  10. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  11. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  12. * Lesser General Public License for more details.
  13. *
  14. * You should have received a copy of the GNU Lesser General Public
  15. * License along with FFmpeg; if not, write to the Free Software
  16. * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
  17. */
  18. #include <stdint.h>
  19. #include <string.h>
  20. #include <mfx/mfxvideo.h>
  21. #include "config.h"
  22. #if HAVE_PTHREADS
  23. #include <pthread.h>
  24. #endif
  25. #if CONFIG_VAAPI
  26. #include "hwcontext_vaapi.h"
  27. #endif
  28. #if CONFIG_DXVA2
  29. #include "hwcontext_dxva2.h"
  30. #endif
  31. #include "buffer.h"
  32. #include "common.h"
  33. #include "hwcontext.h"
  34. #include "hwcontext_internal.h"
  35. #include "hwcontext_qsv.h"
  36. #include "mem.h"
  37. #include "pixfmt.h"
  38. #include "pixdesc.h"
  39. #include "time.h"
/* Private data of a QSV device: owns a reference to the child
 * (VAAPI/DXVA2) hardware device the mfx session was built on. */
typedef struct QSVDevicePriv {
    AVBufferRef *child_device_ctx;
} QSVDevicePriv;
/* Per-device state queried from the mfx session at init time
 * (see qsv_device_init()). */
typedef struct QSVDeviceContext {
    mfxHDL        handle;      // native device handle exported by the session; NULL if none found
    mfxHandleType handle_type; // type of 'handle' (VA display / D3D9 device manager)
    mfxVersion    ver;         // libmfx API version of the session
    mfxIMPL       impl;        // libmfx implementation of the session

    enum AVHWDeviceType child_device_type; // hwdevice type matching handle_type
    enum AVPixelFormat  child_pix_fmt;     // hardware pixel format of the child device
} QSVDeviceContext;
/* Private state of a QSV frames context. */
typedef struct QSVFramesContext {
    /* Lazily-created internal VPP sessions used for copying frames between
     * system and video memory; the *_init flags record that creation has
     * been attempted (the session pointer may still be NULL on failure). */
    mfxSession session_download;
    int session_download_init;
    mfxSession session_upload;
    int session_upload_init;
#if HAVE_PTHREADS
    /* serialize the lazy session creation across threads */
    pthread_mutex_t session_lock;
    pthread_cond_t session_cond;
#endif

    /* child (VAAPI/DXVA2) frames context backing non-opaque surfaces */
    AVBufferRef *child_frames_ref;
    mfxFrameSurface1 *surfaces_internal;
    int nb_surfaces_used;

    // used in the frame allocator for non-opaque surfaces
    mfxMemId *mem_ids;
    // used in the opaque alloc request for opaque surfaces
    mfxFrameSurface1 **surface_ptrs;

    mfxExtOpaqueSurfaceAlloc opaque_alloc;
    mfxExtBuffer *ext_buffers[1];
} QSVFramesContext;
/* Native handle types we know how to extract from an mfx session, together
 * with the matching child hwdevice type and hardware pixel format.
 * Zero-terminated (handle_type == 0 ends the list). */
static const struct {
    mfxHandleType handle_type;
    enum AVHWDeviceType device_type;
    enum AVPixelFormat  pix_fmt;
} supported_handle_types[] = {
#if CONFIG_VAAPI
    { MFX_HANDLE_VA_DISPLAY,          AV_HWDEVICE_TYPE_VAAPI, AV_PIX_FMT_VAAPI },
#endif
#if CONFIG_DXVA2
    { MFX_HANDLE_D3D9_DEVICE_MANAGER, AV_HWDEVICE_TYPE_DXVA2, AV_PIX_FMT_DXVA2_VLD },
#endif
    { 0 },
};
/* Software pixel formats supported for QSV surfaces and the mfx FourCC
 * each one maps to. */
static const struct {
    enum AVPixelFormat pix_fmt;
    uint32_t           fourcc;
} supported_pixel_formats[] = {
    { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
    { AV_PIX_FMT_BGRA, MFX_FOURCC_RGB4 },
    { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
    { AV_PIX_FMT_PAL8, MFX_FOURCC_P8   },
};
  92. static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
  93. {
  94. int i;
  95. for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
  96. if (supported_pixel_formats[i].pix_fmt == pix_fmt)
  97. return supported_pixel_formats[i].fourcc;
  98. }
  99. return 0;
  100. }
  101. static int qsv_device_init(AVHWDeviceContext *ctx)
  102. {
  103. AVQSVDeviceContext *hwctx = ctx->hwctx;
  104. QSVDeviceContext *s = ctx->internal->priv;
  105. mfxStatus err;
  106. int i;
  107. for (i = 0; supported_handle_types[i].handle_type; i++) {
  108. err = MFXVideoCORE_GetHandle(hwctx->session, supported_handle_types[i].handle_type,
  109. &s->handle);
  110. if (err == MFX_ERR_NONE) {
  111. s->handle_type = supported_handle_types[i].handle_type;
  112. s->child_device_type = supported_handle_types[i].device_type;
  113. s->child_pix_fmt = supported_handle_types[i].pix_fmt;
  114. break;
  115. }
  116. }
  117. if (!s->handle) {
  118. av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
  119. "from the session\n");
  120. }
  121. err = MFXQueryIMPL(hwctx->session, &s->impl);
  122. if (err == MFX_ERR_NONE)
  123. err = MFXQueryVersion(hwctx->session, &s->ver);
  124. if (err != MFX_ERR_NONE) {
  125. av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
  126. return AVERROR_UNKNOWN;
  127. }
  128. return 0;
  129. }
  130. static void qsv_frames_uninit(AVHWFramesContext *ctx)
  131. {
  132. QSVFramesContext *s = ctx->internal->priv;
  133. if (s->session_download) {
  134. MFXVideoVPP_Close(s->session_download);
  135. MFXClose(s->session_download);
  136. }
  137. s->session_download = NULL;
  138. s->session_download_init = 0;
  139. if (s->session_upload) {
  140. MFXVideoVPP_Close(s->session_upload);
  141. MFXClose(s->session_upload);
  142. }
  143. s->session_upload = NULL;
  144. s->session_upload_init = 0;
  145. #if HAVE_PTHREADS
  146. pthread_mutex_destroy(&s->session_lock);
  147. pthread_cond_destroy(&s->session_cond);
  148. #endif
  149. av_freep(&s->mem_ids);
  150. av_freep(&s->surface_ptrs);
  151. av_freep(&s->surfaces_internal);
  152. av_buffer_unref(&s->child_frames_ref);
  153. }
/* Buffer release callback for pool-backed surfaces: the mfxFrameSurface1
 * array is owned by the frames context (surfaces_internal), so there is
 * nothing to free per buffer. */
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
{
}
  157. static AVBufferRef *qsv_pool_alloc(void *opaque, int size)
  158. {
  159. AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
  160. QSVFramesContext *s = ctx->internal->priv;
  161. AVQSVFramesContext *hwctx = ctx->hwctx;
  162. if (s->nb_surfaces_used < hwctx->nb_surfaces) {
  163. s->nb_surfaces_used++;
  164. return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
  165. sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
  166. }
  167. return NULL;
  168. }
/**
 * Create the child (VAAPI or DXVA2) device and frames context that provide
 * the real video memory backing a non-opaque QSV surface pool, and point
 * each internal mfxFrameSurface1's MemId at the corresponding child surface.
 * Requires a native device handle to have been found in qsv_device_init().
 */
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
{
    AVQSVFramesContext *hwctx = ctx->hwctx;
    QSVFramesContext *s = ctx->internal->priv;
    QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;

    AVBufferRef *child_device_ref = NULL;
    AVBufferRef *child_frames_ref = NULL;

    AVHWDeviceContext *child_device_ctx;
    AVHWFramesContext *child_frames_ctx;

    int i, ret = 0;

    if (!device_priv->handle) {
        av_log(ctx, AV_LOG_ERROR,
               "Cannot create a non-opaque internal surface pool without "
               "a hardware handle\n");
        return AVERROR(EINVAL);
    }

    child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
    if (!child_device_ref)
        return AVERROR(ENOMEM);
    child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;

    /* Wrap the native handle exported by the session in the child device. */
#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        child_device_hwctx->display = (VADisplay)device_priv->handle;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
    }
#endif

    ret = av_hwdevice_ctx_init(child_device_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
        goto fail;
    }

    child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
    if (!child_frames_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;

    child_frames_ctx->format            = device_priv->child_pix_fmt;
    child_frames_ctx->sw_format         = ctx->sw_format;
    child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
    /* libmfx surfaces use 16-aligned dimensions */
    child_frames_ctx->width             = FFALIGN(ctx->width, 16);
    child_frames_ctx->height            = FFALIGN(ctx->height, 16);

    /* DXVA2 needs the surface type chosen before the pool is created. */
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
            child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        else
            child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
    }
#endif

    ret = av_hwframe_ctx_init(child_frames_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
        goto fail;
    }

    /* Wire each internal surface's MemId to the child surface it wraps and
     * record the resulting mfx memory type. */
#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++)
            s->surfaces_internal[i].Data.MemId = child_frames_hwctx->surface_ids + i;
        hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++)
            s->surfaces_internal[i].Data.MemId = (mfxMemId)child_frames_hwctx->surfaces[i];
        if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
        else
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif

    /* Transfer ownership of the frames ref; the device ref is always
     * dropped (the frames context keeps its own reference internally). */
    s->child_frames_ref = child_frames_ref;
    child_frames_ref = NULL;

fail:
    av_buffer_unref(&child_device_ref);
    av_buffer_unref(&child_frames_ref);
    return ret;
}
  257. static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
  258. {
  259. const AVPixFmtDescriptor *desc;
  260. uint32_t fourcc;
  261. desc = av_pix_fmt_desc_get(ctx->sw_format);
  262. if (!desc)
  263. return AVERROR(EINVAL);
  264. fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
  265. if (!fourcc)
  266. return AVERROR(EINVAL);
  267. surf->Info.BitDepthLuma = desc->comp[0].depth;
  268. surf->Info.BitDepthChroma = desc->comp[0].depth;
  269. surf->Info.Shift = desc->comp[0].depth > 8;
  270. if (desc->log2_chroma_w && desc->log2_chroma_h)
  271. surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
  272. else if (desc->log2_chroma_w)
  273. surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
  274. else
  275. surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
  276. surf->Info.FourCC = fourcc;
  277. surf->Info.Width = FFALIGN(ctx->width, 16);
  278. surf->Info.CropW = ctx->width;
  279. surf->Info.Height = FFALIGN(ctx->height, 16);
  280. surf->Info.CropH = ctx->height;
  281. surf->Info.FrameRateExtN = 25;
  282. surf->Info.FrameRateExtD = 1;
  283. surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
  284. return 0;
  285. }
/* Create the internal fixed-size surface pool: allocate and describe the
 * mfxFrameSurface1 array, back it with child video memory when the frames
 * are not opaque, and install the pool allocator. */
static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
{
    QSVFramesContext              *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;

    int i, ret = 0;

    /* QSV surfaces are preallocated; a dynamically growing pool cannot work */
    if (ctx->initial_pool_size <= 0) {
        av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
        return AVERROR(EINVAL);
    }

    s->surfaces_internal = av_mallocz_array(ctx->initial_pool_size,
                                            sizeof(*s->surfaces_internal));
    if (!s->surfaces_internal)
        return AVERROR(ENOMEM);

    for (i = 0; i < ctx->initial_pool_size; i++) {
        ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
        if (ret < 0)
            return ret;
    }

    /* non-opaque surfaces need real video memory from a child device;
     * partial allocations above are freed by qsv_frames_uninit() */
    if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
        ret = qsv_init_child_ctx(ctx);
        if (ret < 0)
            return ret;
    }

    ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
                                                        ctx, qsv_pool_alloc, NULL);
    if (!ctx->internal->pool_internal)
        return AVERROR(ENOMEM);

    frames_hwctx->surfaces    = s->surfaces_internal;
    frames_hwctx->nb_surfaces = ctx->initial_pool_size;

    return 0;
}
  317. static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
  318. mfxFrameAllocResponse *resp)
  319. {
  320. AVHWFramesContext *ctx = pthis;
  321. QSVFramesContext *s = ctx->internal->priv;
  322. AVQSVFramesContext *hwctx = ctx->hwctx;
  323. mfxFrameInfo *i = &req->Info;
  324. mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;
  325. if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
  326. !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
  327. !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
  328. return MFX_ERR_UNSUPPORTED;
  329. if (i->Width != i1->Width || i->Height != i1->Height ||
  330. i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
  331. av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
  332. "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
  333. i->Width, i->Height, i->FourCC, i->ChromaFormat,
  334. i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
  335. return MFX_ERR_UNSUPPORTED;
  336. }
  337. resp->mids = s->mem_ids;
  338. resp->NumFrameActual = hwctx->nb_surfaces;
  339. return MFX_ERR_NONE;
  340. }
/* mfxFrameAllocator.Free callback: the surfaces belong to the frames
 * context, so there is nothing to release here. */
static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    return MFX_ERR_NONE;
}
/* mfxFrameAllocator.Lock callback: mapping video memory into system memory
 * is not supported through this allocator. */
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
/* mfxFrameAllocator.Unlock callback: nothing is ever locked (see
 * frame_lock()), so unlocking is equally unsupported. */
static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}
/* mfxFrameAllocator.GetHDL callback: the MemId stored for each surface is
 * already the native handle, so return it directly. */
static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}
/**
 * Create an internal mfx session with a VPP component, used for copying
 * frames between system and video (or opaque) memory.
 *
 * @param session on success filled with the new session; set to NULL when
 *                the VPP component cannot be opened — that disables
 *                upload/download but is deliberately not a fatal error
 * @param upload  nonzero for upload (system -> video memory), zero for
 *                download (video -> system memory)
 * @return 0 on success (including "no VPP" above), a negative AVERROR code
 *         on real session setup failures
 */
static int qsv_init_internal_session(AVHWFramesContext *ctx,
                                     mfxSession *session, int upload)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;
    QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;

    int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    mfxFrameAllocator frame_allocator = {
        .pthis  = ctx,
        .Alloc  = frame_alloc,
        .Lock   = frame_lock,
        .Unlock = frame_unlock,
        .GetHDL = frame_get_hdl,
        .Free   = frame_free,
    };

    mfxVideoParam par;
    mfxStatus err;

    /* use the same implementation and API version as the parent session */
    err = MFXInit(device_priv->impl, &device_priv->ver, session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
        return AVERROR_UNKNOWN;
    }

    if (device_priv->handle) {
        err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
                                     device_priv->handle);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    /* non-opaque surfaces are served through our own frame allocator */
    if (!opaque) {
        err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    memset(&par, 0, sizeof(par));

    if (opaque) {
        /* opaque surfaces are described by the ext buffer set up in
         * qsv_frames_init() */
        par.ExtParam    = s->ext_buffers;
        par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
        par.IOPattern   = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
                                   MFX_IOPATTERN_IN_OPAQUE_MEMORY;
    } else {
        par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
                                 MFX_IOPATTERN_IN_VIDEO_MEMORY;
    }

    /* the other end of the copy is always system memory */
    par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
                              MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    par.AsyncDepth = 1;

    par.vpp.In = frames_hwctx->surfaces[0].Info;

    /* Apparently VPP requires the frame rate to be set to some value, otherwise
     * init will fail (probably for the framerate conversion filter). Since we
     * are only doing data upload/download here, we just invent an arbitrary
     * value */
    par.vpp.In.FrameRateExtN = 25;
    par.vpp.In.FrameRateExtD = 1;
    par.vpp.Out = par.vpp.In;

    err = MFXVideoVPP_Init(*session, &par);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session."
               "Surface upload/download will not be possible\n");
        MFXClose(*session);
        *session = NULL;
    }

    return 0;
}
/* Initialize the frames context: create the internal pool if the caller did
 * not supply one, then prepare either the opaque-surface ext buffer or the
 * MemId array for the frame allocator. */
static int qsv_frames_init(AVHWFramesContext *ctx)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;

    int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    uint32_t fourcc;
    int i, ret;

    fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
    if (!fourcc) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        ret = qsv_init_pool(ctx, fourcc);
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
            return ret;
        }
    }

    if (opaque) {
        /* opaque surfaces are handed to libmfx as an array of surface
         * pointers inside an mfxExtOpaqueSurfaceAlloc ext buffer */
        s->surface_ptrs = av_mallocz_array(frames_hwctx->nb_surfaces,
                                           sizeof(*s->surface_ptrs));
        if (!s->surface_ptrs)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs[i] = frames_hwctx->surfaces + i;

        s->opaque_alloc.In.Surfaces   = s->surface_ptrs;
        s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
        s->opaque_alloc.In.Type       = frames_hwctx->frame_type;

        /* upload/download VPP uses the same pool on both sides */
        s->opaque_alloc.Out = s->opaque_alloc.In;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);

        s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
    } else {
        /* non-opaque surfaces are resolved by MemId in the frame allocator */
        s->mem_ids = av_mallocz_array(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
        if (!s->mem_ids)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
    }

    /* the copy sessions are created lazily on first transfer */
    s->session_download = NULL;
    s->session_upload   = NULL;

    s->session_download_init = 0;
    s->session_upload_init   = 0;

#if HAVE_PTHREADS
    pthread_mutex_init(&s->session_lock, NULL);
    pthread_cond_init(&s->session_cond, NULL);
#endif

    return 0;
}
  471. static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
  472. {
  473. frame->buf[0] = av_buffer_pool_get(ctx->pool);
  474. if (!frame->buf[0])
  475. return AVERROR(ENOMEM);
  476. frame->data[3] = frame->buf[0]->data;
  477. frame->format = AV_PIX_FMT_QSV;
  478. frame->width = ctx->width;
  479. frame->height = ctx->height;
  480. return 0;
  481. }
  482. static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
  483. enum AVHWFrameTransferDirection dir,
  484. enum AVPixelFormat **formats)
  485. {
  486. enum AVPixelFormat *fmts;
  487. fmts = av_malloc_array(2, sizeof(*fmts));
  488. if (!fmts)
  489. return AVERROR(ENOMEM);
  490. fmts[0] = ctx->sw_format;
  491. fmts[1] = AV_PIX_FMT_NONE;
  492. *formats = fmts;
  493. return 0;
  494. }
/* Fill a child-type (VAAPI/DXVA2) frames context with the surfaces wrapped
 * by this QSV frames context, for deriving a child mapping. */
static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx,
                                  AVHWFramesContext *src_ctx, int flags)
{
    AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
    int i;

    switch (dst_ctx->device_ctx->type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        {
            AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
            dst_hwctx->surface_ids = av_mallocz_array(src_hwctx->nb_surfaces,
                                                      sizeof(*dst_hwctx->surface_ids));
            if (!dst_hwctx->surface_ids)
                return AVERROR(ENOMEM);
            /* MemId of a VAAPI-backed QSV surface points at its VASurfaceID */
            for (i = 0; i < src_hwctx->nb_surfaces; i++)
                dst_hwctx->surface_ids[i] =
                    *(VASurfaceID*)src_hwctx->surfaces[i].Data.MemId;
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
        }
        break;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        {
            AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
            dst_hwctx->surfaces = av_mallocz_array(src_hwctx->nb_surfaces,
                                                   sizeof(*dst_hwctx->surfaces));
            if (!dst_hwctx->surfaces)
                return AVERROR(ENOMEM);
            /* MemId of a DXVA2-backed QSV surface is the IDirect3DSurface9 */
            for (i = 0; i < src_hwctx->nb_surfaces; i++)
                dst_hwctx->surfaces[i] =
                    (IDirect3DSurface9*)src_hwctx->surfaces[i].Data.MemId;
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
            if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
                dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
            else
                dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        }
        break;
#endif
    default:
        return AVERROR(ENOSYS);
    }

    return 0;
}
  540. static int qsv_map_from(AVHWFramesContext *ctx,
  541. AVFrame *dst, const AVFrame *src, int flags)
  542. {
  543. QSVFramesContext *s = ctx->internal->priv;
  544. mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
  545. AVHWFramesContext *child_frames_ctx;
  546. const AVPixFmtDescriptor *desc;
  547. uint8_t *child_data;
  548. AVFrame *dummy;
  549. int ret = 0;
  550. if (!s->child_frames_ref)
  551. return AVERROR(ENOSYS);
  552. child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
  553. switch (child_frames_ctx->device_ctx->type) {
  554. #if CONFIG_VAAPI
  555. case AV_HWDEVICE_TYPE_VAAPI:
  556. child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)surf->Data.MemId;
  557. break;
  558. #endif
  559. #if CONFIG_DXVA2
  560. case AV_HWDEVICE_TYPE_DXVA2:
  561. child_data = surf->Data.MemId;
  562. break;
  563. #endif
  564. default:
  565. return AVERROR(ENOSYS);
  566. }
  567. if (dst->format == child_frames_ctx->format) {
  568. ret = ff_hwframe_map_create(s->child_frames_ref,
  569. dst, src, NULL, NULL);
  570. if (ret < 0)
  571. return ret;
  572. dst->width = src->width;
  573. dst->height = src->height;
  574. dst->data[3] = child_data;
  575. return 0;
  576. }
  577. desc = av_pix_fmt_desc_get(dst->format);
  578. if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
  579. // This only supports mapping to software.
  580. return AVERROR(ENOSYS);
  581. }
  582. dummy = av_frame_alloc();
  583. if (!dummy)
  584. return AVERROR(ENOMEM);
  585. dummy->buf[0] = av_buffer_ref(src->buf[0]);
  586. dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
  587. if (!dummy->buf[0] || !dummy->hw_frames_ctx)
  588. goto fail;
  589. dummy->format = child_frames_ctx->format;
  590. dummy->width = src->width;
  591. dummy->height = src->height;
  592. dummy->data[3] = child_data;
  593. ret = av_hwframe_map(dst, dummy, flags);
  594. fail:
  595. av_frame_free(&dummy);
  596. return ret;
  597. }
/**
 * Copy a frame to/from system memory through the child frames context, by
 * wrapping the child surface handle (stored in Data.MemId) in a temporary
 * frame. The temporary frame only borrows the buffer reference and the
 * child frames ref, so both are detached again before it is freed.
 */
static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,
                                   const AVFrame *src)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
    /* direction: a hw_frames_ctx on src means hw -> system (download) */
    int download = !!src->hw_frames_ctx;
    mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);

    AVFrame *dummy;
    int ret;

    dummy = av_frame_alloc();
    if (!dummy)
        return AVERROR(ENOMEM);

    dummy->format        = child_frames_ctx->format;
    dummy->width         = src->width;
    dummy->height        = src->height;
    /* borrowed references, not owned by dummy */
    dummy->buf[0]        = download ? src->buf[0] : dst->buf[0];
    dummy->data[3]       = surf->Data.MemId;
    dummy->hw_frames_ctx = s->child_frames_ref;

    ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
                     av_hwframe_transfer_data(dummy, src, 0);

    /* detach the borrowed references so av_frame_free() does not drop them */
    dummy->buf[0]        = NULL;
    dummy->data[3]       = NULL;
    dummy->hw_frames_ctx = NULL;

    av_frame_free(&dummy);

    return ret;
}
/* Point an mfxFrameSurface1's data pointers at the planes of a system-memory
 * frame, for use as the system side of a VPP copy.
 * Returns 0 on success, MFX_ERR_UNSUPPORTED (negative) for other formats. */
static int map_frame_to_surface(const AVFrame *frame, mfxFrameSurface1 *surface)
{
    switch (frame->format) {
    case AV_PIX_FMT_NV12:
    case AV_PIX_FMT_P010:
        surface->Data.Y  = frame->data[0];
        surface->Data.UV = frame->data[1];
        break;
    case AV_PIX_FMT_YUV420P:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[1];
        surface->Data.V = frame->data[2];
        break;
    case AV_PIX_FMT_BGRA:
        /* packed BGRA: all four channel pointers point into plane 0 */
        surface->Data.B = frame->data[0];
        surface->Data.G = frame->data[0] + 1;
        surface->Data.R = frame->data[0] + 2;
        surface->Data.A = frame->data[0] + 3;
        break;
    default:
        return MFX_ERR_UNSUPPORTED;
    }
    surface->Data.Pitch     = frame->linesize[0];
    surface->Data.TimeStamp = frame->pts;

    return 0;
}
  650. static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
  651. const AVFrame *src)
  652. {
  653. QSVFramesContext *s = ctx->internal->priv;
  654. mfxFrameSurface1 out = {{ 0 }};
  655. mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];
  656. mfxSyncPoint sync = NULL;
  657. mfxStatus err;
  658. int ret = 0;
  659. while (!s->session_download_init && !s->session_download && !ret) {
  660. #if HAVE_PTHREADS
  661. if (pthread_mutex_trylock(&s->session_lock) == 0) {
  662. #endif
  663. if (!s->session_download_init) {
  664. ret = qsv_init_internal_session(ctx, &s->session_download, 0);
  665. if (s->session_download)
  666. s->session_download_init = 1;
  667. }
  668. #if HAVE_PTHREADS
  669. pthread_mutex_unlock(&s->session_lock);
  670. pthread_cond_signal(&s->session_cond);
  671. } else {
  672. pthread_mutex_lock(&s->session_lock);
  673. while (!s->session_download_init && !s->session_download) {
  674. pthread_cond_wait(&s->session_cond, &s->session_lock);
  675. }
  676. pthread_mutex_unlock(&s->session_lock);
  677. }
  678. #endif
  679. }
  680. if (ret < 0)
  681. return ret;
  682. if (!s->session_download) {
  683. if (s->child_frames_ref)
  684. return qsv_transfer_data_child(ctx, dst, src);
  685. av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
  686. return AVERROR(ENOSYS);
  687. }
  688. out.Info = in->Info;
  689. map_frame_to_surface(dst, &out);
  690. do {
  691. err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
  692. if (err == MFX_WRN_DEVICE_BUSY)
  693. av_usleep(1);
  694. } while (err == MFX_WRN_DEVICE_BUSY);
  695. if (err < 0 || !sync) {
  696. av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
  697. return AVERROR_UNKNOWN;
  698. }
  699. do {
  700. err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
  701. } while (err == MFX_WRN_IN_EXECUTION);
  702. if (err < 0) {
  703. av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
  704. return AVERROR_UNKNOWN;
  705. }
  706. return 0;
  707. }
  708. static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
  709. const AVFrame *src)
  710. {
  711. QSVFramesContext *s = ctx->internal->priv;
  712. mfxFrameSurface1 in = {{ 0 }};
  713. mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];
  714. mfxSyncPoint sync = NULL;
  715. mfxStatus err;
  716. int ret = 0;
  717. /* make a copy if the input is not padded as libmfx requires */
  718. AVFrame tmp_frame, *src_frame;
  719. int realigned = 0;
  720. while (!s->session_upload_init && !s->session_upload && !ret) {
  721. #if HAVE_PTHREADS
  722. if (pthread_mutex_trylock(&s->session_lock) == 0) {
  723. #endif
  724. if (!s->session_upload_init) {
  725. ret = qsv_init_internal_session(ctx, &s->session_upload, 1);
  726. if (s->session_upload)
  727. s->session_upload_init = 1;
  728. }
  729. #if HAVE_PTHREADS
  730. pthread_mutex_unlock(&s->session_lock);
  731. pthread_cond_signal(&s->session_cond);
  732. } else {
  733. pthread_mutex_lock(&s->session_lock);
  734. while (!s->session_upload_init && !s->session_upload) {
  735. pthread_cond_wait(&s->session_cond, &s->session_lock);
  736. }
  737. pthread_mutex_unlock(&s->session_lock);
  738. }
  739. #endif
  740. }
  741. if (ret < 0)
  742. return ret;
  743. if (src->height & 16 || src->linesize[0] & 16) {
  744. realigned = 1;
  745. memset(&tmp_frame, 0, sizeof(tmp_frame));
  746. tmp_frame.format = src->format;
  747. tmp_frame.width = FFALIGN(src->width, 16);
  748. tmp_frame.height = FFALIGN(src->height, 16);
  749. ret = av_frame_get_buffer(&tmp_frame, 32);
  750. if (ret < 0)
  751. return ret;
  752. ret = av_frame_copy(&tmp_frame, src);
  753. if (ret < 0) {
  754. av_frame_unref(&tmp_frame);
  755. return ret;
  756. }
  757. }
  758. src_frame = realigned ? &tmp_frame : src;
  759. if (!s->session_upload) {
  760. if (s->child_frames_ref)
  761. return qsv_transfer_data_child(ctx, dst, src_frame);
  762. av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
  763. return AVERROR(ENOSYS);
  764. }
  765. in.Info = out->Info;
  766. map_frame_to_surface(src_frame, &in);
  767. do {
  768. err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
  769. if (err == MFX_WRN_DEVICE_BUSY)
  770. av_usleep(1);
  771. } while (err == MFX_WRN_DEVICE_BUSY);
  772. if (err < 0 || !sync) {
  773. av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
  774. return AVERROR_UNKNOWN;
  775. }
  776. do {
  777. err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
  778. } while (err == MFX_WRN_IN_EXECUTION);
  779. if (err < 0) {
  780. av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
  781. return AVERROR_UNKNOWN;
  782. }
  783. if (realigned)
  784. av_frame_unref(&tmp_frame);
  785. return 0;
  786. }
  787. static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
  788. AVHWFramesContext *src_ctx, int flags)
  789. {
  790. QSVFramesContext *s = dst_ctx->internal->priv;
  791. AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
  792. int i;
  793. switch (src_ctx->device_ctx->type) {
  794. #if CONFIG_VAAPI
  795. case AV_HWDEVICE_TYPE_VAAPI:
  796. {
  797. AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
  798. s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
  799. sizeof(*s->surfaces_internal));
  800. if (!s->surfaces_internal)
  801. return AVERROR(ENOMEM);
  802. for (i = 0; i < src_hwctx->nb_surfaces; i++) {
  803. qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
  804. s->surfaces_internal[i].Data.MemId = src_hwctx->surface_ids + i;
  805. }
  806. dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
  807. dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
  808. }
  809. break;
  810. #endif
  811. #if CONFIG_DXVA2
  812. case AV_HWDEVICE_TYPE_DXVA2:
  813. {
  814. AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
  815. s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
  816. sizeof(*s->surfaces_internal));
  817. if (!s->surfaces_internal)
  818. return AVERROR(ENOMEM);
  819. for (i = 0; i < src_hwctx->nb_surfaces; i++) {
  820. qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
  821. s->surfaces_internal[i].Data.MemId = (mfxMemId)src_hwctx->surfaces[i];
  822. }
  823. dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
  824. if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
  825. dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
  826. else
  827. dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
  828. }
  829. break;
  830. #endif
  831. default:
  832. return AVERROR(ENOSYS);
  833. }
  834. dst_hwctx->surfaces = s->surfaces_internal;
  835. return 0;
  836. }
  837. static int qsv_map_to(AVHWFramesContext *dst_ctx,
  838. AVFrame *dst, const AVFrame *src, int flags)
  839. {
  840. AVQSVFramesContext *hwctx = dst_ctx->hwctx;
  841. int i, err;
  842. for (i = 0; i < hwctx->nb_surfaces; i++) {
  843. #if CONFIG_VAAPI
  844. if (*(VASurfaceID*)hwctx->surfaces[i].Data.MemId ==
  845. (VASurfaceID)(uintptr_t)src->data[3])
  846. break;
  847. #endif
  848. #if CONFIG_DXVA2
  849. if ((IDirect3DSurface9*)hwctx->surfaces[i].Data.MemId ==
  850. (IDirect3DSurface9*)(uintptr_t)src->data[3])
  851. break;
  852. #endif
  853. }
  854. if (i >= hwctx->nb_surfaces) {
  855. av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
  856. "is not in the mapped frames context.\n");
  857. return AVERROR(EINVAL);
  858. }
  859. err = ff_hwframe_map_create(dst->hw_frames_ctx,
  860. dst, src, NULL, NULL);
  861. if (err)
  862. return err;
  863. dst->width = src->width;
  864. dst->height = src->height;
  865. dst->data[3] = (uint8_t*)&hwctx->surfaces[i];
  866. return 0;
  867. }
  868. static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
  869. const void *hwconfig,
  870. AVHWFramesConstraints *constraints)
  871. {
  872. int i;
  873. constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
  874. sizeof(*constraints->valid_sw_formats));
  875. if (!constraints->valid_sw_formats)
  876. return AVERROR(ENOMEM);
  877. for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
  878. constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
  879. constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;
  880. constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
  881. if (!constraints->valid_hw_formats)
  882. return AVERROR(ENOMEM);
  883. constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
  884. constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;
  885. return 0;
  886. }
/**
 * Free callback for a QSV device context.
 *
 * The MFX session must be closed before the child device it was bound to via
 * MFXVideoCORE_SetHandle() is released, so the order below is significant.
 */
static void qsv_device_free(AVHWDeviceContext *ctx)
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;
    QSVDevicePriv *priv = ctx->user_opaque;

    /* Close the session first: it still references the child device handle. */
    if (hwctx->session)
        MFXClose(hwctx->session);

    av_buffer_unref(&priv->child_device_ctx);
    av_freep(&priv);
}
  896. static mfxIMPL choose_implementation(const char *device)
  897. {
  898. static const struct {
  899. const char *name;
  900. mfxIMPL impl;
  901. } impl_map[] = {
  902. { "auto", MFX_IMPL_AUTO },
  903. { "sw", MFX_IMPL_SOFTWARE },
  904. { "hw", MFX_IMPL_HARDWARE },
  905. { "auto_any", MFX_IMPL_AUTO_ANY },
  906. { "hw_any", MFX_IMPL_HARDWARE_ANY },
  907. { "hw2", MFX_IMPL_HARDWARE2 },
  908. { "hw3", MFX_IMPL_HARDWARE3 },
  909. { "hw4", MFX_IMPL_HARDWARE4 },
  910. };
  911. mfxIMPL impl = MFX_IMPL_AUTO_ANY;
  912. int i;
  913. if (device) {
  914. for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
  915. if (!strcmp(device, impl_map[i].name)) {
  916. impl = impl_map[i].impl;
  917. break;
  918. }
  919. if (i == FF_ARRAY_ELEMS(impl_map))
  920. impl = strtol(device, NULL, 0);
  921. }
  922. return impl;
  923. }
  924. static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
  925. mfxIMPL implementation,
  926. AVHWDeviceContext *child_device_ctx,
  927. int flags)
  928. {
  929. AVQSVDeviceContext *hwctx = ctx->hwctx;
  930. mfxVersion ver = { { 3, 1 } };
  931. mfxHDL handle;
  932. mfxHandleType handle_type;
  933. mfxStatus err;
  934. int ret;
  935. switch (child_device_ctx->type) {
  936. #if CONFIG_VAAPI
  937. case AV_HWDEVICE_TYPE_VAAPI:
  938. {
  939. AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
  940. handle_type = MFX_HANDLE_VA_DISPLAY;
  941. handle = (mfxHDL)child_device_hwctx->display;
  942. }
  943. break;
  944. #endif
  945. #if CONFIG_DXVA2
  946. case AV_HWDEVICE_TYPE_DXVA2:
  947. {
  948. AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
  949. handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
  950. handle = (mfxHDL)child_device_hwctx->devmgr;
  951. }
  952. break;
  953. #endif
  954. default:
  955. ret = AVERROR(ENOSYS);
  956. goto fail;
  957. }
  958. err = MFXInit(implementation, &ver, &hwctx->session);
  959. if (err != MFX_ERR_NONE) {
  960. av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
  961. "%d.\n", err);
  962. ret = AVERROR_UNKNOWN;
  963. goto fail;
  964. }
  965. err = MFXQueryVersion(hwctx->session, &ver);
  966. if (err != MFX_ERR_NONE) {
  967. av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: %d.\n", err);
  968. ret = AVERROR_UNKNOWN;
  969. goto fail;
  970. }
  971. av_log(ctx, AV_LOG_VERBOSE,
  972. "Initialize MFX session: API version is %d.%d, implementation version is %d.%d\n",
  973. MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);
  974. MFXClose(hwctx->session);
  975. err = MFXInit(implementation, &ver, &hwctx->session);
  976. if (err != MFX_ERR_NONE) {
  977. av_log(ctx, AV_LOG_ERROR,
  978. "Error initializing an MFX session: %d.\n", err);
  979. ret = AVERROR_UNKNOWN;
  980. goto fail;
  981. }
  982. err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
  983. if (err != MFX_ERR_NONE) {
  984. av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
  985. "%d\n", err);
  986. ret = AVERROR_UNKNOWN;
  987. goto fail;
  988. }
  989. ret = MFXQueryVersion(hwctx->session,&ver);
  990. if (ret == MFX_ERR_NONE) {
  991. av_log(ctx, AV_LOG_VERBOSE, "MFX compile/runtime API: %d.%d/%d.%d\n",
  992. MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);
  993. }
  994. return 0;
  995. fail:
  996. if (hwctx->session)
  997. MFXClose(hwctx->session);
  998. return ret;
  999. }
/**
 * device_derive callback: build a QSV device on top of an already-open child
 * device, always selecting a hardware implementation (MFX_IMPL_HARDWARE_ANY).
 */
static int qsv_device_derive(AVHWDeviceContext *ctx,
                             AVHWDeviceContext *child_device_ctx, int flags)
{
    return qsv_device_derive_from_child(ctx, MFX_IMPL_HARDWARE_ANY,
                                        child_device_ctx, flags);
}
  1006. static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
  1007. AVDictionary *opts, int flags)
  1008. {
  1009. QSVDevicePriv *priv;
  1010. enum AVHWDeviceType child_device_type;
  1011. AVHWDeviceContext *child_device;
  1012. AVDictionaryEntry *e;
  1013. mfxIMPL impl;
  1014. int ret;
  1015. priv = av_mallocz(sizeof(*priv));
  1016. if (!priv)
  1017. return AVERROR(ENOMEM);
  1018. ctx->user_opaque = priv;
  1019. ctx->free = qsv_device_free;
  1020. e = av_dict_get(opts, "child_device", NULL, 0);
  1021. if (CONFIG_VAAPI)
  1022. child_device_type = AV_HWDEVICE_TYPE_VAAPI;
  1023. else if (CONFIG_DXVA2)
  1024. child_device_type = AV_HWDEVICE_TYPE_DXVA2;
  1025. else {
  1026. av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
  1027. return AVERROR(ENOSYS);
  1028. }
  1029. ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
  1030. e ? e->value : NULL, NULL, 0);
  1031. if (ret < 0)
  1032. return ret;
  1033. child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;
  1034. impl = choose_implementation(device);
  1035. return qsv_device_derive_from_child(ctx, impl, child_device, 0);
  1036. }
/* Registration table for the QSV hwcontext implementation: wires the
 * callbacks defined in this file into the generic AVHWDeviceContext /
 * AVHWFramesContext machinery. */
const HWContextType ff_hwcontext_type_qsv = {
    .type                   = AV_HWDEVICE_TYPE_QSV,
    .name                   = "QSV",

    /* Sizes of the public hwctx structs and the file-private state. */
    .device_hwctx_size      = sizeof(AVQSVDeviceContext),
    .device_priv_size       = sizeof(QSVDeviceContext),
    .frames_hwctx_size      = sizeof(AVQSVFramesContext),
    .frames_priv_size       = sizeof(QSVFramesContext),

    /* Device lifecycle. */
    .device_create          = qsv_device_create,
    .device_derive          = qsv_device_derive,
    .device_init            = qsv_device_init,

    /* Frames pool lifecycle and data transfer. */
    .frames_get_constraints = qsv_frames_get_constraints,
    .frames_init            = qsv_frames_init,
    .frames_uninit          = qsv_frames_uninit,
    .frames_get_buffer      = qsv_get_buffer,
    .transfer_get_formats   = qsv_transfer_get_formats,
    .transfer_data_to       = qsv_transfer_data_to,
    .transfer_data_from     = qsv_transfer_data_from,

    /* Mapping and derivation to/from child (VAAPI/DXVA2) contexts. */
    .map_to                 = qsv_map_to,
    .map_from               = qsv_map_from,
    .frames_derive_to       = qsv_frames_derive_to,
    .frames_derive_from     = qsv_frames_derive_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
};