2
0

convert_test.cc 182 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
727782779278027812782278327842785278627872788278927902791279227932794279527962797279827992800280128022803280428052806280728082809281028112812281328142815281628172818281928202821282228232824282528262827282828292830283128322833283428352836283728382839284028412842284328442845284628472848284928502851285228532854285528562857285828592860286128622863286428652866286728682869287028712872287328742875287628772878287928802881288228832884288528862887288828892890289128922893289428952896289728982899290029012902290329042905290629072908290929102911291229132914291529162917291829192920292129222923292429252926292729282929293029312932293329342935293629372938293929402941294229432944294529462947294829492950295129522953295429552956295729582959296029612962296329642965296629672968296929702971297229732974297529762977297829792980298129822983298429852986298729882989299029912992299329942995299629972998299930003001300230033004300530063007300830093010301130123013301430153016301730183019302030213022302330243025302630273028302930303031303230333034303530363037303830393040304130423043304430453046304730483049305030513052305330543055305630573058305930603061306230633064306530663067306830693070307130723073307430753076307730783079308030813082308330843085308630873088308930903091309230933094309530963097309830993100310131023103310431053106310731083109311031113112311331143115311631173118311931203121312231233124312531263127312831293130313131323133313431353136313731383139314031413142314331443145314631473148314931503151315231533154315531563157315831593160316131623163316431653166316731683169317031713172317331743175317631773178317931803181318231833184318531863187318831893190319131923193319431953196319731983199320032013202320332043205320632073208320932103211321232133214321532163217321832193220322132223223322432253226322732283229323032313232323332343235323632373238323932403241324232433244324532463247324832493250325132523253325432553256325732583259326032613262326332643265
  1. /*
  2. * Copyright 2011 The LibYuv Project Authors. All rights reserved.
  3. *
  4. * Use of this source code is governed by a BSD-style license
  5. * that can be found in the LICENSE file in the root of the source
  6. * tree. An additional intellectual property rights grant can be found
  7. * in the file PATENTS. All contributing project authors may
  8. * be found in the AUTHORS file in the root of the source tree.
  9. */
  10. #include <assert.h>
  11. #include <stdlib.h>
  12. #include <time.h>
  13. #include "libyuv/basic_types.h"
  14. #include "libyuv/compare.h"
  15. #include "libyuv/convert.h"
  16. #include "libyuv/convert_argb.h"
  17. #include "libyuv/convert_from.h"
  18. #include "libyuv/convert_from_argb.h"
  19. #include "libyuv/cpu_id.h"
  20. #ifdef HAVE_JPEG
  21. #include "libyuv/mjpeg_decoder.h"
  22. #endif
  23. #include "../unit_test/unit_test.h"
  24. #include "libyuv/planar_functions.h"
  25. #include "libyuv/rotate.h"
  26. #include "libyuv/video_common.h"
  27. #ifdef ENABLE_ROW_TESTS
  28. #include "libyuv/row.h" /* For ARGBToAR30Row_AVX2 */
  29. #endif
#if defined(__arm__) || defined(__aarch64__)
// The ARM version subsamples by summing 4 pixels then multiplying by a matrix
// with 4x smaller coefficients, which are rounded to the nearest integer; this
// tolerance absorbs the resulting off-by-a-few differences vs the C reference.
#define ARM_YUV_ERROR 4
#else
#define ARM_YUV_ERROR 0
#endif
// Some functions fail on big endian. Enable these tests on all CPUs except
// PowerPC (assumed big endian here).
#if !defined(__powerpc__)
#define LITTLE_ENDIAN_TEST 1
#endif
namespace libyuv {

// Alias to copy pixels as-is: converting AR30->AR30 or ABGR->ABGR is a plain
// memcpy-style copy, so reuse ARGBCopy for those table entries.
#define AR30ToAR30 ARGBCopy
#define ABGRToABGR ARGBCopy

// Round-up integer division, used to size subsampled (chroma) planes:
// SUBSAMPLE(v, a) == ceil(v / a) for positive a.
#define SUBSAMPLE(v, a) ((((v) + (a)-1)) / (a))
// Planar test
//
// Generates one TEST_F that exercises a planar-to-planar conversion,
// SRC_FMT_PLANAR##To##FMT_PLANAR (e.g. I420ToI422).
//   SRC_T/DST_T, SRC_BPC/DST_BPC: sample type and bytes per component (1 or 2).
//   *_SUBSAMP_X/Y: chroma subsampling factors (1 or 2).
//   W1280: test width (values <= 0 are clamped to 1).
//   N: test-name suffix; NEG: '+' or '-' (a negative height requests vertical
//   inversion); OFF: byte offset applied to source pointers to test unaligned
//   input.
// The conversion runs once with SIMD disabled (C reference, dst_*_c) and
// benchmark_iterations_ times with SIMD enabled (dst_*_opt); the two outputs
// must match byte-for-byte.
#define TESTPLANARTOPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X,          \
                       SRC_SUBSAMP_Y, FMT_PLANAR, DST_T, DST_BPC,              \
                       DST_SUBSAMP_X, DST_SUBSAMP_Y, W1280, N, NEG, OFF)       \
  TEST_F(LibYUVConvertTest, SRC_FMT_PLANAR##To##FMT_PLANAR##N) {               \
    static_assert(SRC_BPC == 1 || SRC_BPC == 2, "SRC BPC unsupported");        \
    static_assert(DST_BPC == 1 || DST_BPC == 2, "DST BPC unsupported");        \
    static_assert(SRC_SUBSAMP_X == 1 || SRC_SUBSAMP_X == 2,                    \
                  "DST SRC_SUBSAMP_X unsupported");                            \
    static_assert(SRC_SUBSAMP_Y == 1 || SRC_SUBSAMP_Y == 2,                    \
                  "DST SRC_SUBSAMP_Y unsupported");                            \
    static_assert(DST_SUBSAMP_X == 1 || DST_SUBSAMP_X == 2,                    \
                  "DST DST_SUBSAMP_X unsupported");                            \
    static_assert(DST_SUBSAMP_Y == 1 || DST_SUBSAMP_Y == 2,                    \
                  "DST DST_SUBSAMP_Y unsupported");                            \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                            \
    const int kHeight = benchmark_height_;                                     \
    const int kSrcHalfWidth = SUBSAMPLE(kWidth, SRC_SUBSAMP_X);                \
    const int kSrcHalfHeight = SUBSAMPLE(kHeight, SRC_SUBSAMP_Y);              \
    const int kDstHalfWidth = SUBSAMPLE(kWidth, DST_SUBSAMP_X);                \
    const int kDstHalfHeight = SUBSAMPLE(kHeight, DST_SUBSAMP_Y);              \
    align_buffer_page_end(src_y, kWidth* kHeight* SRC_BPC + OFF);              \
    align_buffer_page_end(src_u,                                               \
                          kSrcHalfWidth* kSrcHalfHeight* SRC_BPC + OFF);       \
    align_buffer_page_end(src_v,                                               \
                          kSrcHalfWidth* kSrcHalfHeight* SRC_BPC + OFF);       \
    align_buffer_page_end(dst_y_c, kWidth* kHeight* DST_BPC);                  \
    align_buffer_page_end(dst_u_c, kDstHalfWidth* kDstHalfHeight* DST_BPC);    \
    align_buffer_page_end(dst_v_c, kDstHalfWidth* kDstHalfHeight* DST_BPC);    \
    align_buffer_page_end(dst_y_opt, kWidth* kHeight* DST_BPC);                \
    align_buffer_page_end(dst_u_opt, kDstHalfWidth* kDstHalfHeight* DST_BPC);  \
    align_buffer_page_end(dst_v_opt, kDstHalfWidth* kDstHalfHeight* DST_BPC);  \
    MemRandomize(src_y + OFF, kWidth * kHeight * SRC_BPC);                     \
    MemRandomize(src_u + OFF, kSrcHalfWidth * kSrcHalfHeight * SRC_BPC);       \
    MemRandomize(src_v + OFF, kSrcHalfWidth * kSrcHalfHeight * SRC_BPC);       \
    /* Distinct fill values so untouched output bytes are detectable. */       \
    memset(dst_y_c, 1, kWidth* kHeight* DST_BPC);                              \
    memset(dst_u_c, 2, kDstHalfWidth* kDstHalfHeight* DST_BPC);                \
    memset(dst_v_c, 3, kDstHalfWidth* kDstHalfHeight* DST_BPC);                \
    memset(dst_y_opt, 101, kWidth* kHeight* DST_BPC);                          \
    memset(dst_u_opt, 102, kDstHalfWidth* kDstHalfHeight* DST_BPC);            \
    memset(dst_v_opt, 103, kDstHalfWidth* kDstHalfHeight* DST_BPC);            \
    /* C reference pass with SIMD paths masked off. */                         \
    MaskCpuFlags(disable_cpu_flags_);                                          \
    SRC_FMT_PLANAR##To##FMT_PLANAR(                                            \
        reinterpret_cast<SRC_T*>(src_y + OFF), kWidth,                         \
        reinterpret_cast<SRC_T*>(src_u + OFF), kSrcHalfWidth,                  \
        reinterpret_cast<SRC_T*>(src_v + OFF), kSrcHalfWidth,                  \
        reinterpret_cast<DST_T*>(dst_y_c), kWidth,                             \
        reinterpret_cast<DST_T*>(dst_u_c), kDstHalfWidth,                      \
        reinterpret_cast<DST_T*>(dst_v_c), kDstHalfWidth, kWidth,              \
        NEG kHeight);                                                          \
    /* Optimized pass, repeated for benchmarking. */                           \
    MaskCpuFlags(benchmark_cpu_info_);                                         \
    for (int i = 0; i < benchmark_iterations_; ++i) {                          \
      SRC_FMT_PLANAR##To##FMT_PLANAR(                                          \
          reinterpret_cast<SRC_T*>(src_y + OFF), kWidth,                       \
          reinterpret_cast<SRC_T*>(src_u + OFF), kSrcHalfWidth,                \
          reinterpret_cast<SRC_T*>(src_v + OFF), kSrcHalfWidth,                \
          reinterpret_cast<DST_T*>(dst_y_opt), kWidth,                         \
          reinterpret_cast<DST_T*>(dst_u_opt), kDstHalfWidth,                  \
          reinterpret_cast<DST_T*>(dst_v_opt), kDstHalfWidth, kWidth,          \
          NEG kHeight);                                                        \
    }                                                                          \
    /* C and optimized outputs must agree exactly, byte by byte. */            \
    for (int i = 0; i < kHeight * kWidth * DST_BPC; ++i) {                     \
      EXPECT_EQ(dst_y_c[i], dst_y_opt[i]);                                     \
    }                                                                          \
    for (int i = 0; i < kDstHalfWidth * kDstHalfHeight * DST_BPC; ++i) {       \
      EXPECT_EQ(dst_u_c[i], dst_u_opt[i]);                                     \
      EXPECT_EQ(dst_v_c[i], dst_v_opt[i]);                                     \
    }                                                                          \
    free_aligned_buffer_page_end(dst_y_c);                                     \
    free_aligned_buffer_page_end(dst_u_c);                                     \
    free_aligned_buffer_page_end(dst_v_c);                                     \
    free_aligned_buffer_page_end(dst_y_opt);                                   \
    free_aligned_buffer_page_end(dst_u_opt);                                   \
    free_aligned_buffer_page_end(dst_v_opt);                                   \
    free_aligned_buffer_page_end(src_y);                                       \
    free_aligned_buffer_page_end(src_u);                                       \
    free_aligned_buffer_page_end(src_v);                                       \
  }
// Expands TESTPLANARTOPI into the standard four test variants:
//   _Any       - width not a multiple of the SIMD step (benchmark_width_ - 4).
//   _Unaligned - source pointers offset by 1 byte.
//   _Invert    - negative height (vertically flipped conversion).
//   _Opt       - aligned, full-width benchmark case.
#define TESTPLANARTOP(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X,           \
                      SRC_SUBSAMP_Y, FMT_PLANAR, DST_T, DST_BPC,               \
                      DST_SUBSAMP_X, DST_SUBSAMP_Y)                            \
  TESTPLANARTOPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
                 FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, DST_SUBSAMP_Y,     \
                 benchmark_width_ - 4, _Any, +, 0)                             \
  TESTPLANARTOPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
                 FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, DST_SUBSAMP_Y,     \
                 benchmark_width_, _Unaligned, +, 1)                           \
  TESTPLANARTOPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
                 FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, DST_SUBSAMP_Y,     \
                 benchmark_width_, _Invert, -, 0)                              \
  TESTPLANARTOPI(SRC_FMT_PLANAR, SRC_T, SRC_BPC, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
                 FMT_PLANAR, DST_T, DST_BPC, DST_SUBSAMP_X, DST_SUBSAMP_Y,     \
                 benchmark_width_, _Opt, +, 0)

// 8-bit planar-to-planar conversions (and the I420 mirror variant).
TESTPLANARTOP(I420, uint8_t, 1, 2, 2, I420, uint8_t, 1, 2, 2)
TESTPLANARTOP(I422, uint8_t, 1, 2, 1, I420, uint8_t, 1, 2, 2)
TESTPLANARTOP(I444, uint8_t, 1, 1, 1, I420, uint8_t, 1, 2, 2)
TESTPLANARTOP(I420, uint8_t, 1, 2, 2, I422, uint8_t, 1, 2, 1)
TESTPLANARTOP(I420, uint8_t, 1, 2, 2, I444, uint8_t, 1, 1, 1)
TESTPLANARTOP(I420, uint8_t, 1, 2, 2, I420Mirror, uint8_t, 1, 2, 2)
TESTPLANARTOP(I422, uint8_t, 1, 2, 1, I422, uint8_t, 1, 2, 1)
TESTPLANARTOP(I444, uint8_t, 1, 1, 1, I444, uint8_t, 1, 1, 1)
// 10-bit (I010/H010) conversions, including 8<->10 bit depth changes.
TESTPLANARTOP(I010, uint16_t, 2, 2, 2, I010, uint16_t, 2, 2, 2)
TESTPLANARTOP(I010, uint16_t, 2, 2, 2, I420, uint8_t, 1, 2, 2)
TESTPLANARTOP(I420, uint8_t, 1, 2, 2, I010, uint16_t, 2, 2, 2)
TESTPLANARTOP(H010, uint16_t, 2, 2, 2, H010, uint16_t, 2, 2, 2)
TESTPLANARTOP(H010, uint16_t, 2, 2, 2, H420, uint8_t, 1, 2, 2)
TESTPLANARTOP(H420, uint8_t, 1, 2, 2, H010, uint16_t, 2, 2, 2)
// Test Android 420 to I420
//
// Android YUV_420_888 exposes Y plus U/V planes with a per-sample
// PIXEL_STRIDE: 1 = fully planar (I420-like), 2 = interleaved (NV12/NV21-like,
// U and V overlap within one src_uv buffer at OFF_U/OFF_V). PN names the
// pixel layout in the test name. W1280/N/NEG/OFF behave as in TESTPLANARTOPI.
// Y output must match the C reference exactly; U/V are allowed a small
// tolerance (<= 3) for the optimized paths.
#define TESTAPLANARTOPI(SRC_FMT_PLANAR, PIXEL_STRIDE, SRC_SUBSAMP_X,           \
                        SRC_SUBSAMP_Y, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y,       \
                        W1280, N, NEG, OFF, PN, OFF_U, OFF_V)                  \
  TEST_F(LibYUVConvertTest, SRC_FMT_PLANAR##To##FMT_PLANAR##_##PN##N) {        \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                            \
    const int kHeight = benchmark_height_;                                     \
    const int kSizeUV =                                                        \
        SUBSAMPLE(kWidth, SRC_SUBSAMP_X) * SUBSAMPLE(kHeight, SRC_SUBSAMP_Y);  \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF);                       \
    /* Interleaved layouts need room for both chroma channels. */              \
    align_buffer_page_end(src_uv,                                              \
                          kSizeUV*((PIXEL_STRIDE == 3) ? 3 : 2) + OFF);        \
    align_buffer_page_end(dst_y_c, kWidth* kHeight);                           \
    align_buffer_page_end(dst_u_c, SUBSAMPLE(kWidth, SUBSAMP_X) *              \
                                       SUBSAMPLE(kHeight, SUBSAMP_Y));         \
    align_buffer_page_end(dst_v_c, SUBSAMPLE(kWidth, SUBSAMP_X) *              \
                                       SUBSAMPLE(kHeight, SUBSAMP_Y));         \
    align_buffer_page_end(dst_y_opt, kWidth* kHeight);                         \
    align_buffer_page_end(dst_u_opt, SUBSAMPLE(kWidth, SUBSAMP_X) *            \
                                         SUBSAMPLE(kHeight, SUBSAMP_Y));       \
    align_buffer_page_end(dst_v_opt, SUBSAMPLE(kWidth, SUBSAMP_X) *            \
                                         SUBSAMPLE(kHeight, SUBSAMP_Y));       \
    /* U/V views into src_uv: separate planes when PIXEL_STRIDE == 1,          \
       interleaved at OFF_U/OFF_V otherwise. */                                \
    uint8_t* src_u = src_uv + OFF_U;                                           \
    uint8_t* src_v = src_uv + (PIXEL_STRIDE == 1 ? kSizeUV : OFF_V);           \
    int src_stride_uv = SUBSAMPLE(kWidth, SUBSAMP_X) * PIXEL_STRIDE;           \
    for (int i = 0; i < kHeight; ++i)                                          \
      for (int j = 0; j < kWidth; ++j)                                         \
        src_y[i * kWidth + j + OFF] = (fastrand() & 0xff);                     \
    for (int i = 0; i < SUBSAMPLE(kHeight, SRC_SUBSAMP_Y); ++i) {              \
      for (int j = 0; j < SUBSAMPLE(kWidth, SRC_SUBSAMP_X); ++j) {             \
        src_u[(i * src_stride_uv) + j * PIXEL_STRIDE + OFF] =                  \
            (fastrand() & 0xff);                                               \
        src_v[(i * src_stride_uv) + j * PIXEL_STRIDE + OFF] =                  \
            (fastrand() & 0xff);                                               \
      }                                                                        \
    }                                                                          \
    /* Distinct fill values so untouched output bytes are detectable. */       \
    memset(dst_y_c, 1, kWidth* kHeight);                                       \
    memset(dst_u_c, 2,                                                         \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    memset(dst_v_c, 3,                                                         \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    memset(dst_y_opt, 101, kWidth* kHeight);                                   \
    memset(dst_u_opt, 102,                                                     \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    memset(dst_v_opt, 103,                                                     \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    /* C reference pass with SIMD paths masked off. */                         \
    MaskCpuFlags(disable_cpu_flags_);                                          \
    SRC_FMT_PLANAR##To##FMT_PLANAR(                                            \
        src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X),    \
        src_v + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X), PIXEL_STRIDE, dst_y_c,  \
        kWidth, dst_u_c, SUBSAMPLE(kWidth, SUBSAMP_X), dst_v_c,                \
        SUBSAMPLE(kWidth, SUBSAMP_X), kWidth, NEG kHeight);                    \
    /* Optimized pass, repeated for benchmarking. */                           \
    MaskCpuFlags(benchmark_cpu_info_);                                         \
    for (int i = 0; i < benchmark_iterations_; ++i) {                          \
      SRC_FMT_PLANAR##To##FMT_PLANAR(                                          \
          src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X),  \
          src_v + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X), PIXEL_STRIDE,         \
          dst_y_opt, kWidth, dst_u_opt, SUBSAMPLE(kWidth, SUBSAMP_X),          \
          dst_v_opt, SUBSAMPLE(kWidth, SUBSAMP_X), kWidth, NEG kHeight);       \
    }                                                                          \
    /* Y plane: C and optimized must match exactly. */                         \
    int max_diff = 0;                                                          \
    for (int i = 0; i < kHeight; ++i) {                                        \
      for (int j = 0; j < kWidth; ++j) {                                       \
        int abs_diff = abs(static_cast<int>(dst_y_c[i * kWidth + j]) -         \
                           static_cast<int>(dst_y_opt[i * kWidth + j]));       \
        if (abs_diff > max_diff) {                                             \
          max_diff = abs_diff;                                                 \
        }                                                                      \
      }                                                                        \
    }                                                                          \
    EXPECT_EQ(0, max_diff);                                                    \
    /* U plane: small tolerance allowed for optimized paths. */                \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) {                  \
      for (int j = 0; j < SUBSAMPLE(kWidth, SUBSAMP_X); ++j) {                 \
        int abs_diff = abs(                                                    \
            static_cast<int>(dst_u_c[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j]) -  \
            static_cast<int>(                                                  \
                dst_u_opt[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j]));             \
        if (abs_diff > max_diff) {                                             \
          max_diff = abs_diff;                                                 \
        }                                                                      \
      }                                                                        \
    }                                                                          \
    EXPECT_LE(max_diff, 3);                                                    \
    /* V plane: same tolerance as U. */                                        \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) {                  \
      for (int j = 0; j < SUBSAMPLE(kWidth, SUBSAMP_X); ++j) {                 \
        int abs_diff = abs(                                                    \
            static_cast<int>(dst_v_c[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j]) -  \
            static_cast<int>(                                                  \
                dst_v_opt[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j]));             \
        if (abs_diff > max_diff) {                                             \
          max_diff = abs_diff;                                                 \
        }                                                                      \
      }                                                                        \
    }                                                                          \
    EXPECT_LE(max_diff, 3);                                                    \
    free_aligned_buffer_page_end(dst_y_c);                                     \
    free_aligned_buffer_page_end(dst_u_c);                                     \
    free_aligned_buffer_page_end(dst_v_c);                                     \
    free_aligned_buffer_page_end(dst_y_opt);                                   \
    free_aligned_buffer_page_end(dst_u_opt);                                   \
    free_aligned_buffer_page_end(dst_v_opt);                                   \
    free_aligned_buffer_page_end(src_y);                                       \
    free_aligned_buffer_page_end(src_uv);                                      \
  }
// Expands TESTAPLANARTOPI into the standard four variants
// (_Any / _Unaligned / _Invert / _Opt, as in TESTPLANARTOP).
#define TESTAPLANARTOP(SRC_FMT_PLANAR, PN, PIXEL_STRIDE, OFF_U, OFF_V,         \
                       SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR, SUBSAMP_X,    \
                       SUBSAMP_Y)                                              \
  TESTAPLANARTOPI(SRC_FMT_PLANAR, PIXEL_STRIDE, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,  \
                  FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, benchmark_width_ - 4,      \
                  _Any, +, 0, PN, OFF_U, OFF_V)                                \
  TESTAPLANARTOPI(SRC_FMT_PLANAR, PIXEL_STRIDE, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,  \
                  FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, benchmark_width_,          \
                  _Unaligned, +, 1, PN, OFF_U, OFF_V)                          \
  TESTAPLANARTOPI(SRC_FMT_PLANAR, PIXEL_STRIDE, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,  \
                  FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Invert, \
                  -, 0, PN, OFF_U, OFF_V)                                      \
  TESTAPLANARTOPI(SRC_FMT_PLANAR, PIXEL_STRIDE, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,  \
                  FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Opt, +, \
                  0, PN, OFF_U, OFF_V)

// Android420 in its three chroma layouts: planar (stride 1), and the two
// interleaved orders (NV12: U first; NV21: V first).
TESTAPLANARTOP(Android420, I420, 1, 0, 0, 2, 2, I420, 2, 2)
TESTAPLANARTOP(Android420, NV12, 2, 0, 1, 2, 2, I420, 2, 2)
TESTAPLANARTOP(Android420, NV21, 2, 1, 0, 2, 2, I420, 2, 2)
  275. // wrapper to keep API the same
  276. int I400ToNV21(const uint8_t* src_y,
  277. int src_stride_y,
  278. const uint8_t* /* src_u */,
  279. int /* src_stride_u */,
  280. const uint8_t* /* src_v */,
  281. int /* src_stride_v */,
  282. uint8_t* dst_y,
  283. int dst_stride_y,
  284. uint8_t* dst_vu,
  285. int dst_stride_vu,
  286. int width,
  287. int height) {
  288. return I400ToNV21(src_y, src_stride_y, dst_y, dst_stride_y, dst_vu,
  289. dst_stride_vu, width, height);
  290. }
// Generates one TEST_F converting a tri-planar source (Y + separate U and V
// planes) to a biplanar destination (Y + interleaved UV), e.g. I420ToNV12.
// W1280/N/NEG/OFF behave as in TESTPLANARTOPI. Both the Y plane and the
// interleaved UV plane may differ from the C reference by at most 1.
#define TESTPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,          \
                        FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, W1280, N, NEG, OFF)  \
  TEST_F(LibYUVConvertTest, SRC_FMT_PLANAR##To##FMT_PLANAR##N) {               \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                            \
    const int kHeight = benchmark_height_;                                     \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF);                       \
    align_buffer_page_end(src_u, SUBSAMPLE(kWidth, SRC_SUBSAMP_X) *            \
                                         SUBSAMPLE(kHeight, SRC_SUBSAMP_Y) +   \
                                     OFF);                                     \
    align_buffer_page_end(src_v, SUBSAMPLE(kWidth, SRC_SUBSAMP_X) *            \
                                         SUBSAMPLE(kHeight, SRC_SUBSAMP_Y) +   \
                                     OFF);                                     \
    align_buffer_page_end(dst_y_c, kWidth* kHeight);                           \
    /* Destination UV is interleaved, hence the factor of 2. */                \
    align_buffer_page_end(dst_uv_c, SUBSAMPLE(kWidth, SUBSAMP_X) * 2 *         \
                                        SUBSAMPLE(kHeight, SUBSAMP_Y));        \
    align_buffer_page_end(dst_y_opt, kWidth* kHeight);                         \
    align_buffer_page_end(dst_uv_opt, SUBSAMPLE(kWidth, SUBSAMP_X) * 2 *       \
                                          SUBSAMPLE(kHeight, SUBSAMP_Y));      \
    for (int i = 0; i < kHeight; ++i)                                          \
      for (int j = 0; j < kWidth; ++j)                                         \
        src_y[i * kWidth + j + OFF] = (fastrand() & 0xff);                     \
    for (int i = 0; i < SUBSAMPLE(kHeight, SRC_SUBSAMP_Y); ++i) {              \
      for (int j = 0; j < SUBSAMPLE(kWidth, SRC_SUBSAMP_X); ++j) {             \
        src_u[(i * SUBSAMPLE(kWidth, SRC_SUBSAMP_X)) + j + OFF] =              \
            (fastrand() & 0xff);                                               \
        src_v[(i * SUBSAMPLE(kWidth, SRC_SUBSAMP_X)) + j + OFF] =              \
            (fastrand() & 0xff);                                               \
      }                                                                        \
    }                                                                          \
    /* Distinct fill values so untouched output bytes are detectable. */       \
    memset(dst_y_c, 1, kWidth* kHeight);                                       \
    memset(dst_uv_c, 2,                                                        \
           SUBSAMPLE(kWidth, SUBSAMP_X) * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y));  \
    memset(dst_y_opt, 101, kWidth* kHeight);                                   \
    memset(dst_uv_opt, 102,                                                    \
           SUBSAMPLE(kWidth, SUBSAMP_X) * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y));  \
    /* C reference pass with SIMD paths masked off. */                         \
    MaskCpuFlags(disable_cpu_flags_);                                          \
    SRC_FMT_PLANAR##To##FMT_PLANAR(                                            \
        src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X),    \
        src_v + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X), dst_y_c, kWidth,        \
        dst_uv_c, SUBSAMPLE(kWidth, SUBSAMP_X) * 2, kWidth, NEG kHeight);      \
    /* Optimized pass, repeated for benchmarking. */                           \
    MaskCpuFlags(benchmark_cpu_info_);                                         \
    for (int i = 0; i < benchmark_iterations_; ++i) {                          \
      SRC_FMT_PLANAR##To##FMT_PLANAR(                                          \
          src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X),  \
          src_v + OFF, SUBSAMPLE(kWidth, SRC_SUBSAMP_X), dst_y_opt, kWidth,    \
          dst_uv_opt, SUBSAMPLE(kWidth, SUBSAMP_X) * 2, kWidth, NEG kHeight);  \
    }                                                                          \
    /* Y plane: allow at most 1 of difference vs the C reference. */           \
    int max_diff = 0;                                                          \
    for (int i = 0; i < kHeight; ++i) {                                        \
      for (int j = 0; j < kWidth; ++j) {                                       \
        int abs_diff = abs(static_cast<int>(dst_y_c[i * kWidth + j]) -         \
                           static_cast<int>(dst_y_opt[i * kWidth + j]));       \
        if (abs_diff > max_diff) {                                             \
          max_diff = abs_diff;                                                 \
        }                                                                      \
      }                                                                        \
    }                                                                          \
    EXPECT_LE(max_diff, 1);                                                    \
    /* Interleaved UV plane: same tolerance of 1. */                           \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) {                  \
      for (int j = 0; j < SUBSAMPLE(kWidth, SUBSAMP_X) * 2; ++j) {             \
        int abs_diff =                                                         \
            abs(static_cast<int>(                                              \
                    dst_uv_c[i * SUBSAMPLE(kWidth, SUBSAMP_X) * 2 + j]) -      \
                static_cast<int>(                                              \
                    dst_uv_opt[i * SUBSAMPLE(kWidth, SUBSAMP_X) * 2 + j]));    \
        if (abs_diff > max_diff) {                                             \
          max_diff = abs_diff;                                                 \
        }                                                                      \
      }                                                                        \
    }                                                                          \
    EXPECT_LE(max_diff, 1);                                                    \
    free_aligned_buffer_page_end(dst_y_c);                                     \
    free_aligned_buffer_page_end(dst_uv_c);                                    \
    free_aligned_buffer_page_end(dst_y_opt);                                   \
    free_aligned_buffer_page_end(dst_uv_opt);                                  \
    free_aligned_buffer_page_end(src_y);                                       \
    free_aligned_buffer_page_end(src_u);                                       \
    free_aligned_buffer_page_end(src_v);                                       \
  }
// Expands TESTPLANARTOBPI into the standard four variants
// (_Any / _Unaligned / _Invert / _Opt, as in TESTPLANARTOP).
#define TESTPLANARTOBP(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y,           \
                       FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y)                       \
  TESTPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,    \
                  SUBSAMP_X, SUBSAMP_Y, benchmark_width_ - 4, _Any, +, 0)      \
  TESTPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,    \
                  SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Unaligned, +, 1)    \
  TESTPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,    \
                  SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Invert, -, 0)       \
  TESTPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR,    \
                  SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Opt, +, 0)

// Planar-to-biplanar conversions; I400ToNV21 uses the wrapper defined above.
TESTPLANARTOBP(I420, 2, 2, NV12, 2, 2)
TESTPLANARTOBP(I420, 2, 2, NV21, 2, 2)
TESTPLANARTOBP(I422, 2, 1, NV21, 2, 2)
TESTPLANARTOBP(I444, 1, 1, NV21, 2, 2)
TESTPLANARTOBP(I400, 2, 2, NV21, 2, 2)
// Generates one TEST_F converting a biplanar (Y + interleaved UV) format to
// another biplanar format, e.g. NV21 -> NV12.  The test fills the source
// planes with random bytes, runs the conversion once with all optimized CPU
// paths masked off (C reference) and benchmark_iterations_ times with the
// benchmark CPU flags, then requires the per-byte difference between the two
// outputs to be <= 1 for both the Y and the UV plane.
//   W1280    - requested width (clamped to >= 1).
//   N        - test-name suffix (e.g. _Opt).
//   NEG      - '+' or '-'; '-' negates the height to test vertical flip.
//   OFF      - byte offset added to source pointers (unalignment test).
//   DOY      - when 0, NULL is passed for the destination Y plane so only
//              the UV path is exercised (and the Y comparison is skipped).
#define TESTBIPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
                          FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, W1280, N, NEG, \
                          OFF, DOY) \
  TEST_F(LibYUVConvertTest, SRC_FMT_PLANAR##To##FMT_PLANAR##N) { \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
    const int kHeight = benchmark_height_; \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF); \
    align_buffer_page_end(src_uv, 2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X) * \
                                          SUBSAMPLE(kHeight, SRC_SUBSAMP_Y) + \
                                      OFF); \
    align_buffer_page_end(dst_y_c, kWidth* kHeight); \
    align_buffer_page_end(dst_uv_c, 2 * SUBSAMPLE(kWidth, SUBSAMP_X) * \
                                        SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    align_buffer_page_end(dst_y_opt, kWidth* kHeight); \
    align_buffer_page_end(dst_uv_opt, 2 * SUBSAMPLE(kWidth, SUBSAMP_X) * \
                                          SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    for (int i = 0; i < kHeight; ++i) \
      for (int j = 0; j < kWidth; ++j) \
        src_y[i * kWidth + j + OFF] = (fastrand() & 0xff); \
    for (int i = 0; i < SUBSAMPLE(kHeight, SRC_SUBSAMP_Y); ++i) { \
      for (int j = 0; j < 2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X); ++j) { \
        src_uv[(i * 2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X)) + j + OFF] = \
            (fastrand() & 0xff); \
      } \
    } \
    memset(dst_y_c, 1, kWidth* kHeight); \
    memset(dst_uv_c, 2, \
           2 * SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    memset(dst_y_opt, 101, kWidth* kHeight); \
    memset(dst_uv_opt, 102, \
           2 * SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    MaskCpuFlags(disable_cpu_flags_); \
    SRC_FMT_PLANAR##To##FMT_PLANAR( \
        src_y + OFF, kWidth, src_uv + OFF, \
        2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X), DOY ? dst_y_c : NULL, kWidth, \
        dst_uv_c, 2 * SUBSAMPLE(kWidth, SUBSAMP_X), kWidth, NEG kHeight); \
    MaskCpuFlags(benchmark_cpu_info_); \
    for (int i = 0; i < benchmark_iterations_; ++i) { \
      SRC_FMT_PLANAR##To##FMT_PLANAR( \
          src_y + OFF, kWidth, src_uv + OFF, \
          2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X), DOY ? dst_y_opt : NULL, \
          kWidth, dst_uv_opt, 2 * SUBSAMPLE(kWidth, SUBSAMP_X), kWidth, \
          NEG kHeight); \
    } \
    int max_diff = 0; \
    if (DOY) { \
      for (int i = 0; i < kHeight; ++i) { \
        for (int j = 0; j < kWidth; ++j) { \
          int abs_diff = abs(static_cast<int>(dst_y_c[i * kWidth + j]) - \
                             static_cast<int>(dst_y_opt[i * kWidth + j])); \
          if (abs_diff > max_diff) { \
            max_diff = abs_diff; \
          } \
        } \
      } \
      EXPECT_LE(max_diff, 1); \
    } \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) { \
      for (int j = 0; j < 2 * SUBSAMPLE(kWidth, SUBSAMP_X); ++j) { \
        int abs_diff = \
            abs(static_cast<int>( \
                    dst_uv_c[i * 2 * SUBSAMPLE(kWidth, SUBSAMP_X) + j]) - \
                static_cast<int>( \
                    dst_uv_opt[i * 2 * SUBSAMPLE(kWidth, SUBSAMP_X) + j])); \
        if (abs_diff > max_diff) { \
          max_diff = abs_diff; \
        } \
      } \
    } \
    EXPECT_LE(max_diff, 1); \
    free_aligned_buffer_page_end(dst_y_c); \
    free_aligned_buffer_page_end(dst_uv_c); \
    free_aligned_buffer_page_end(dst_y_opt); \
    free_aligned_buffer_page_end(dst_uv_opt); \
    free_aligned_buffer_page_end(src_y); \
    free_aligned_buffer_page_end(src_uv); \
  }

// Instantiates the five standard variants of the biplanar->biplanar test:
// _Any, _Unaligned, _Invert, _Opt (see TESTPLANARTOBP above) plus _NullY,
// which passes DOY=0 to skip writing/checking the destination Y plane.
#define TESTBIPLANARTOBP(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
                         FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y) \
  TESTBIPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR, \
                    SUBSAMP_X, SUBSAMP_Y, benchmark_width_ - 4, _Any, +, 0, 1) \
  TESTBIPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR, \
                    SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Unaligned, +, 1, \
                    1) \
  TESTBIPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR, \
                    SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Invert, -, 0, 1) \
  TESTBIPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR, \
                    SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Opt, +, 0, 1) \
  TESTBIPLANARTOBPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR, \
                    SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _NullY, +, 0, 0)

TESTBIPLANARTOBP(NV21, 2, 2, NV12, 2, 2)
// Generates one TEST_F converting a biplanar (Y + interleaved UV) format to
// a 3-plane format, e.g. NV12 -> I420.  Same structure as TESTBIPLANARTOBPI:
// random input, C-reference run vs optimized run, per-byte difference <= 1
// on the Y, U and V planes.  Parameters W1280/N/NEG/OFF/DOY have the same
// meaning as in TESTBIPLANARTOBPI above.
#define TESTBIPLANARTOPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
                         FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, W1280, N, NEG, OFF, \
                         DOY) \
  TEST_F(LibYUVConvertTest, SRC_FMT_PLANAR##To##FMT_PLANAR##N) { \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
    const int kHeight = benchmark_height_; \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF); \
    align_buffer_page_end(src_uv, 2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X) * \
                                          SUBSAMPLE(kHeight, SRC_SUBSAMP_Y) + \
                                      OFF); \
    align_buffer_page_end(dst_y_c, kWidth* kHeight); \
    align_buffer_page_end(dst_u_c, SUBSAMPLE(kWidth, SUBSAMP_X) * \
                                       SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    align_buffer_page_end(dst_v_c, SUBSAMPLE(kWidth, SUBSAMP_X) * \
                                       SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    align_buffer_page_end(dst_y_opt, kWidth* kHeight); \
    align_buffer_page_end(dst_u_opt, SUBSAMPLE(kWidth, SUBSAMP_X) * \
                                         SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    align_buffer_page_end(dst_v_opt, SUBSAMPLE(kWidth, SUBSAMP_X) * \
                                         SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    for (int i = 0; i < kHeight; ++i) \
      for (int j = 0; j < kWidth; ++j) \
        src_y[i * kWidth + j + OFF] = (fastrand() & 0xff); \
    for (int i = 0; i < SUBSAMPLE(kHeight, SRC_SUBSAMP_Y); ++i) { \
      for (int j = 0; j < 2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X); ++j) { \
        src_uv[(i * 2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X)) + j + OFF] = \
            (fastrand() & 0xff); \
      } \
    } \
    memset(dst_y_c, 1, kWidth* kHeight); \
    memset(dst_u_c, 2, \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    memset(dst_v_c, 3, \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    memset(dst_y_opt, 101, kWidth* kHeight); \
    memset(dst_u_opt, 102, \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    memset(dst_v_opt, 103, \
           SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    MaskCpuFlags(disable_cpu_flags_); \
    SRC_FMT_PLANAR##To##FMT_PLANAR( \
        src_y + OFF, kWidth, src_uv + OFF, \
        2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X), DOY ? dst_y_c : NULL, kWidth, \
        dst_u_c, SUBSAMPLE(kWidth, SUBSAMP_X), dst_v_c, \
        SUBSAMPLE(kWidth, SUBSAMP_X), kWidth, NEG kHeight); \
    MaskCpuFlags(benchmark_cpu_info_); \
    for (int i = 0; i < benchmark_iterations_; ++i) { \
      SRC_FMT_PLANAR##To##FMT_PLANAR( \
          src_y + OFF, kWidth, src_uv + OFF, \
          2 * SUBSAMPLE(kWidth, SRC_SUBSAMP_X), DOY ? dst_y_opt : NULL, \
          kWidth, dst_u_opt, SUBSAMPLE(kWidth, SUBSAMP_X), dst_v_opt, \
          SUBSAMPLE(kWidth, SUBSAMP_X), kWidth, NEG kHeight); \
    } \
    int max_diff = 0; \
    if (DOY) { \
      for (int i = 0; i < kHeight; ++i) { \
        for (int j = 0; j < kWidth; ++j) { \
          int abs_diff = abs(static_cast<int>(dst_y_c[i * kWidth + j]) - \
                             static_cast<int>(dst_y_opt[i * kWidth + j])); \
          if (abs_diff > max_diff) { \
            max_diff = abs_diff; \
          } \
        } \
      } \
      EXPECT_LE(max_diff, 1); \
    } \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) { \
      for (int j = 0; j < SUBSAMPLE(kWidth, SUBSAMP_X); ++j) { \
        int abs_diff = abs( \
            static_cast<int>(dst_u_c[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j]) - \
            static_cast<int>( \
                dst_u_opt[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j])); \
        if (abs_diff > max_diff) { \
          max_diff = abs_diff; \
        } \
      } \
    } \
    EXPECT_LE(max_diff, 1); \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) { \
      for (int j = 0; j < SUBSAMPLE(kWidth, SUBSAMP_X); ++j) { \
        int abs_diff = abs( \
            static_cast<int>(dst_v_c[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j]) - \
            static_cast<int>( \
                dst_v_opt[i * SUBSAMPLE(kWidth, SUBSAMP_X) + j])); \
        if (abs_diff > max_diff) { \
          max_diff = abs_diff; \
        } \
      } \
    } \
    EXPECT_LE(max_diff, 1); \
    free_aligned_buffer_page_end(dst_y_c); \
    free_aligned_buffer_page_end(dst_u_c); \
    free_aligned_buffer_page_end(dst_v_c); \
    free_aligned_buffer_page_end(dst_y_opt); \
    free_aligned_buffer_page_end(dst_u_opt); \
    free_aligned_buffer_page_end(dst_v_opt); \
    free_aligned_buffer_page_end(src_y); \
    free_aligned_buffer_page_end(src_uv); \
  }

// Instantiates the _Any, _Unaligned, _Invert, _Opt and _NullY variants of
// the biplanar -> 3-plane test (see TESTBIPLANARTOBP for variant meanings).
#define TESTBIPLANARTOP(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, \
                        FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y) \
  TESTBIPLANARTOPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR, \
                   SUBSAMP_X, SUBSAMP_Y, benchmark_width_ - 4, _Any, +, 0, 1) \
  TESTBIPLANARTOPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR, \
                   SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Unaligned, +, 1, \
                   1) \
  TESTBIPLANARTOPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR, \
                   SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Invert, -, 0, 1) \
  TESTBIPLANARTOPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR, \
                   SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _Opt, +, 0, 1) \
  TESTBIPLANARTOPI(SRC_FMT_PLANAR, SRC_SUBSAMP_X, SRC_SUBSAMP_Y, FMT_PLANAR, \
                   SUBSAMP_X, SUBSAMP_Y, benchmark_width_, _NullY, +, 0, 0)

TESTBIPLANARTOP(NV12, 2, 2, I420, 2, 2)
TESTBIPLANARTOP(NV21, 2, 2, I420, 2, 2)
  590. #define ALIGNINT(V, ALIGN) (((V) + (ALIGN)-1) / (ALIGN) * (ALIGN))
// Generates one TEST_F converting a 3-plane YUV format to a packed pixel
// format FMT_B (e.g. I420 -> ARGB).  Prints rough C vs optimized timings and
// requires the two outputs to be byte-identical (EXPECT_EQ).
//   BPP_B  - bytes per pixel of FMT_B.
//   ALIGN  - destination stride is rounded up to a multiple of this.
//   YALIGN - height is rounded up to a multiple of this.
// NOTE(review): the comparison loop walks kWidth * BPP_B * kHeight bytes
// linearly, which matches the written region only when
// kStrideB == kWidth * BPP_B; otherwise some row-padding bytes (memset to 1
// vs 101) would be compared.  All instantiations below appear to keep
// stride == width * bpp for even widths -- TODO confirm for odd widths with
// ALIGN > BPP_B (e.g. YUY2/UYVY).
#define TESTPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                       YALIGN, W1280, N, NEG, OFF) \
  TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##N) { \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
    const int kHeight = ALIGNINT(benchmark_height_, YALIGN); \
    const int kStrideB = ALIGNINT(kWidth * BPP_B, ALIGN); \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
    const int kSizeUV = kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y); \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF); \
    align_buffer_page_end(src_u, kSizeUV + OFF); \
    align_buffer_page_end(src_v, kSizeUV + OFF); \
    align_buffer_page_end(dst_argb_c, kStrideB* kHeight + OFF); \
    align_buffer_page_end(dst_argb_opt, kStrideB* kHeight + OFF); \
    for (int i = 0; i < kWidth * kHeight; ++i) { \
      src_y[i + OFF] = (fastrand() & 0xff); \
    } \
    for (int i = 0; i < kSizeUV; ++i) { \
      src_u[i + OFF] = (fastrand() & 0xff); \
      src_v[i + OFF] = (fastrand() & 0xff); \
    } \
    memset(dst_argb_c + OFF, 1, kStrideB * kHeight); \
    memset(dst_argb_opt + OFF, 101, kStrideB * kHeight); \
    MaskCpuFlags(disable_cpu_flags_); \
    double time0 = get_time(); \
    FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_u + OFF, kStrideUV, \
                          src_v + OFF, kStrideUV, dst_argb_c + OFF, kStrideB, \
                          kWidth, NEG kHeight); \
    double time1 = get_time(); \
    MaskCpuFlags(benchmark_cpu_info_); \
    for (int i = 0; i < benchmark_iterations_; ++i) { \
      FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_u + OFF, kStrideUV, \
                            src_v + OFF, kStrideUV, dst_argb_opt + OFF, \
                            kStrideB, kWidth, NEG kHeight); \
    } \
    double time2 = get_time(); \
    printf(" %8d us C - %8d us OPT\n", \
           static_cast<int>((time1 - time0) * 1e6), \
           static_cast<int>((time2 - time1) * 1e6 / benchmark_iterations_)); \
    for (int i = 0; i < kWidth * BPP_B * kHeight; ++i) { \
      EXPECT_EQ(dst_argb_c[i + OFF], dst_argb_opt[i + OFF]); \
    } \
    free_aligned_buffer_page_end(src_y); \
    free_aligned_buffer_page_end(src_u); \
    free_aligned_buffer_page_end(src_v); \
    free_aligned_buffer_page_end(dst_argb_c); \
    free_aligned_buffer_page_end(dst_argb_opt); \
  }

// Instantiates the four standard variants (_Any, _Unaligned, _Invert, _Opt)
// of the planar -> packed test above.
#define TESTPLANARTOB(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                      YALIGN) \
  TESTPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                 YALIGN, benchmark_width_ - 4, _Any, +, 0) \
  TESTPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                 YALIGN, benchmark_width_, _Unaligned, +, 1) \
  TESTPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                 YALIGN, benchmark_width_, _Invert, -, 0) \
  TESTPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                 YALIGN, benchmark_width_, _Opt, +, 0)

// Planar YUV -> packed pixel conversions under test.  J/H prefixes select
// alternate colorspace variants of the same layout.
TESTPLANARTOB(I420, 2, 2, ARGB, 4, 4, 1)
TESTPLANARTOB(J420, 2, 2, ARGB, 4, 4, 1)
TESTPLANARTOB(J420, 2, 2, ABGR, 4, 4, 1)
TESTPLANARTOB(H420, 2, 2, ARGB, 4, 4, 1)
TESTPLANARTOB(H420, 2, 2, ABGR, 4, 4, 1)
TESTPLANARTOB(I420, 2, 2, BGRA, 4, 4, 1)
TESTPLANARTOB(I420, 2, 2, ABGR, 4, 4, 1)
TESTPLANARTOB(I420, 2, 2, RGBA, 4, 4, 1)
TESTPLANARTOB(I420, 2, 2, RAW, 3, 3, 1)
TESTPLANARTOB(I420, 2, 2, RGB24, 3, 3, 1)
TESTPLANARTOB(H420, 2, 2, RAW, 3, 3, 1)
TESTPLANARTOB(H420, 2, 2, RGB24, 3, 3, 1)
// 16-bit and 30-bit packed formats are only tested on little-endian builds.
#ifdef LITTLE_ENDIAN_TEST
TESTPLANARTOB(I420, 2, 2, RGB565, 2, 2, 1)
TESTPLANARTOB(J420, 2, 2, RGB565, 2, 2, 1)
TESTPLANARTOB(H420, 2, 2, RGB565, 2, 2, 1)
TESTPLANARTOB(I420, 2, 2, ARGB1555, 2, 2, 1)
TESTPLANARTOB(I420, 2, 2, ARGB4444, 2, 2, 1)
TESTPLANARTOB(I422, 2, 1, RGB565, 2, 2, 1)
#endif
TESTPLANARTOB(I422, 2, 1, ARGB, 4, 4, 1)
TESTPLANARTOB(J422, 2, 1, ARGB, 4, 4, 1)
TESTPLANARTOB(J422, 2, 1, ABGR, 4, 4, 1)
TESTPLANARTOB(H422, 2, 1, ARGB, 4, 4, 1)
TESTPLANARTOB(H422, 2, 1, ABGR, 4, 4, 1)
TESTPLANARTOB(I422, 2, 1, BGRA, 4, 4, 1)
TESTPLANARTOB(I422, 2, 1, ABGR, 4, 4, 1)
TESTPLANARTOB(I422, 2, 1, RGBA, 4, 4, 1)
TESTPLANARTOB(I444, 1, 1, ARGB, 4, 4, 1)
TESTPLANARTOB(J444, 1, 1, ARGB, 4, 4, 1)
TESTPLANARTOB(I444, 1, 1, ABGR, 4, 4, 1)
TESTPLANARTOB(I420, 2, 2, YUY2, 2, 4, 1)
TESTPLANARTOB(I420, 2, 2, UYVY, 2, 4, 1)
TESTPLANARTOB(I422, 2, 1, YUY2, 2, 4, 1)
TESTPLANARTOB(I422, 2, 1, UYVY, 2, 4, 1)
TESTPLANARTOB(I420, 2, 2, I400, 1, 1, 1)
TESTPLANARTOB(J420, 2, 2, J400, 1, 1, 1)
#ifdef LITTLE_ENDIAN_TEST
TESTPLANARTOB(I420, 2, 2, AR30, 4, 4, 1)
TESTPLANARTOB(H420, 2, 2, AR30, 4, 4, 1)
#endif
// Generates one TEST_F converting a 4-plane YUV-with-alpha format (Y, U, V
// plus a full-resolution A plane) to packed FMT_B.  C-reference output and
// optimized output must agree within DIFF per byte.
//   ATTEN - extra trailing argument to the conversion function; the _Premult
//           variant passes 1 (presumably enabling alpha attenuation --
//           TODO confirm against the I420AlphaToARGB signature).
#define TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                        YALIGN, W1280, DIFF, N, NEG, OFF, ATTEN) \
  TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##N) { \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
    const int kHeight = ALIGNINT(benchmark_height_, YALIGN); \
    const int kStrideB = ALIGNINT(kWidth * BPP_B, ALIGN); \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
    const int kSizeUV = kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y); \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF); \
    align_buffer_page_end(src_u, kSizeUV + OFF); \
    align_buffer_page_end(src_v, kSizeUV + OFF); \
    align_buffer_page_end(src_a, kWidth* kHeight + OFF); \
    align_buffer_page_end(dst_argb_c, kStrideB* kHeight + OFF); \
    align_buffer_page_end(dst_argb_opt, kStrideB* kHeight + OFF); \
    for (int i = 0; i < kWidth * kHeight; ++i) { \
      src_y[i + OFF] = (fastrand() & 0xff); \
      src_a[i + OFF] = (fastrand() & 0xff); \
    } \
    for (int i = 0; i < kSizeUV; ++i) { \
      src_u[i + OFF] = (fastrand() & 0xff); \
      src_v[i + OFF] = (fastrand() & 0xff); \
    } \
    memset(dst_argb_c + OFF, 1, kStrideB * kHeight); \
    memset(dst_argb_opt + OFF, 101, kStrideB * kHeight); \
    MaskCpuFlags(disable_cpu_flags_); \
    FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_u + OFF, kStrideUV, \
                          src_v + OFF, kStrideUV, src_a + OFF, kWidth, \
                          dst_argb_c + OFF, kStrideB, kWidth, NEG kHeight, \
                          ATTEN); \
    MaskCpuFlags(benchmark_cpu_info_); \
    for (int i = 0; i < benchmark_iterations_; ++i) { \
      FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_u + OFF, kStrideUV, \
                            src_v + OFF, kStrideUV, src_a + OFF, kWidth, \
                            dst_argb_opt + OFF, kStrideB, kWidth, NEG kHeight, \
                            ATTEN); \
    } \
    int max_diff = 0; \
    for (int i = 0; i < kWidth * BPP_B * kHeight; ++i) { \
      int abs_diff = abs(static_cast<int>(dst_argb_c[i + OFF]) - \
                         static_cast<int>(dst_argb_opt[i + OFF])); \
      if (abs_diff > max_diff) { \
        max_diff = abs_diff; \
      } \
    } \
    EXPECT_LE(max_diff, DIFF); \
    free_aligned_buffer_page_end(src_y); \
    free_aligned_buffer_page_end(src_u); \
    free_aligned_buffer_page_end(src_v); \
    free_aligned_buffer_page_end(src_a); \
    free_aligned_buffer_page_end(dst_argb_c); \
    free_aligned_buffer_page_end(dst_argb_opt); \
  }

// Instantiates _Any, _Unaligned, _Invert, _Opt plus _Premult (ATTEN=1) for
// the alpha-planar -> packed test above.
#define TESTQPLANARTOB(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                       YALIGN, DIFF) \
  TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                  YALIGN, benchmark_width_ - 4, DIFF, _Any, +, 0, 0) \
  TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                  YALIGN, benchmark_width_, DIFF, _Unaligned, +, 1, 0) \
  TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                  YALIGN, benchmark_width_, DIFF, _Invert, -, 0, 0) \
  TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                  YALIGN, benchmark_width_, DIFF, _Opt, +, 0, 0) \
  TESTQPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                  YALIGN, benchmark_width_, DIFF, _Premult, +, 0, 1)

TESTQPLANARTOB(I420Alpha, 2, 2, ARGB, 4, 4, 1, 2)
TESTQPLANARTOB(I420Alpha, 2, 2, ABGR, 4, 4, 1, 2)
// Generates one TEST_F converting a biplanar format (e.g. NV12) to packed
// FMT_B.  Because FMT_B may be a lossy 16-bit format (RGB565), both outputs
// are re-expanded to 32-bit ARGB via FMT_C##ToARGB before comparison, and
// the per-byte difference of the expanded images must be <= DIFF.
#define TESTBIPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, \
                         BPP_B, W1280, DIFF, N, NEG, OFF) \
  TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##N) { \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
    const int kHeight = benchmark_height_; \
    const int kStrideB = kWidth * BPP_B; \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF); \
    align_buffer_page_end(src_uv, \
                          kStrideUV* SUBSAMPLE(kHeight, SUBSAMP_Y) * 2 + OFF); \
    align_buffer_page_end(dst_argb_c, kStrideB* kHeight); \
    align_buffer_page_end(dst_argb_opt, kStrideB* kHeight); \
    for (int i = 0; i < kHeight; ++i) \
      for (int j = 0; j < kWidth; ++j) \
        src_y[i * kWidth + j + OFF] = (fastrand() & 0xff); \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) { \
      for (int j = 0; j < kStrideUV * 2; ++j) { \
        src_uv[i * kStrideUV * 2 + j + OFF] = (fastrand() & 0xff); \
      } \
    } \
    memset(dst_argb_c, 1, kStrideB* kHeight); \
    memset(dst_argb_opt, 101, kStrideB* kHeight); \
    MaskCpuFlags(disable_cpu_flags_); \
    FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_uv + OFF, kStrideUV * 2, \
                          dst_argb_c, kWidth * BPP_B, kWidth, NEG kHeight); \
    MaskCpuFlags(benchmark_cpu_info_); \
    for (int i = 0; i < benchmark_iterations_; ++i) { \
      FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_uv + OFF, kStrideUV * 2, \
                            dst_argb_opt, kWidth * BPP_B, kWidth, \
                            NEG kHeight); \
    } \
    /* Convert to ARGB so 565 is expanded to bytes that can be compared. */ \
    align_buffer_page_end(dst_argb32_c, kWidth * 4 * kHeight); \
    align_buffer_page_end(dst_argb32_opt, kWidth * 4 * kHeight); \
    memset(dst_argb32_c, 2, kWidth * 4 * kHeight); \
    memset(dst_argb32_opt, 102, kWidth * 4 * kHeight); \
    FMT_C##ToARGB(dst_argb_c, kStrideB, dst_argb32_c, kWidth * 4, kWidth, \
                  kHeight); \
    FMT_C##ToARGB(dst_argb_opt, kStrideB, dst_argb32_opt, kWidth * 4, kWidth, \
                  kHeight); \
    int max_diff = 0; \
    for (int i = 0; i < kHeight; ++i) { \
      for (int j = 0; j < kWidth * 4; ++j) { \
        int abs_diff = \
            abs(static_cast<int>(dst_argb32_c[i * kWidth * 4 + j]) - \
                static_cast<int>(dst_argb32_opt[i * kWidth * 4 + j])); \
        if (abs_diff > max_diff) { \
          max_diff = abs_diff; \
        } \
      } \
    } \
    EXPECT_LE(max_diff, DIFF); \
    free_aligned_buffer_page_end(src_y); \
    free_aligned_buffer_page_end(src_uv); \
    free_aligned_buffer_page_end(dst_argb_c); \
    free_aligned_buffer_page_end(dst_argb_opt); \
    free_aligned_buffer_page_end(dst_argb32_c); \
    free_aligned_buffer_page_end(dst_argb32_opt); \
  }

// Instantiates the four standard variants (_Any, _Unaligned, _Invert, _Opt)
// of the biplanar -> packed test above.
#define TESTBIPLANARTOB(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, BPP_B, \
                        DIFF) \
  TESTBIPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, BPP_B, \
                   benchmark_width_ - 4, DIFF, _Any, +, 0) \
  TESTBIPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, BPP_B, \
                   benchmark_width_, DIFF, _Unaligned, +, 1) \
  TESTBIPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, BPP_B, \
                   benchmark_width_, DIFF, _Invert, -, 0) \
  TESTBIPLANARTOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, FMT_C, BPP_B, \
                   benchmark_width_, DIFF, _Opt, +, 0)

TESTBIPLANARTOB(NV12, 2, 2, ARGB, ARGB, 4, 2)
TESTBIPLANARTOB(NV21, 2, 2, ARGB, ARGB, 4, 2)
TESTBIPLANARTOB(NV12, 2, 2, ABGR, ABGR, 4, 2)
TESTBIPLANARTOB(NV21, 2, 2, ABGR, ABGR, 4, 2)
TESTBIPLANARTOB(NV12, 2, 2, RGB24, RGB24, 3, 2)
TESTBIPLANARTOB(NV21, 2, 2, RGB24, RGB24, 3, 2)
TESTBIPLANARTOB(NV12, 2, 2, RAW, RAW, 3, 2)
TESTBIPLANARTOB(NV21, 2, 2, RAW, RAW, 3, 2)
#ifdef LITTLE_ENDIAN_TEST
TESTBIPLANARTOB(NV12, 2, 2, RGB565, RGB565, 2, 9)
#endif
TESTBIPLANARTOB(NV21, 2, 2, YUV24, RAW, 3, 2)
// TESTATOPLANARI generates one TEST_F converting a packed pixel format
// FMT_A to a 3-plane YUV format, comparing the C-reference run against the
// optimized run with EXPECT_NEAR(..., DIFF) on every Y/U/V byte.  Two
// implementations are provided: with DO_THREE_PLANES defined, U and V get
// separate page-end-aligned allocations; otherwise a single UV allocation is
// shared, with U and V interleaved row-by-row via stride tricks.
#ifdef DO_THREE_PLANES
// Do 3 separate allocations for the Y, U and V planes.  Conventional layout,
// but slower to allocate and check.
#define TESTATOPLANARI(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
                       W1280, DIFF, N, NEG, OFF) \
  TEST_F(LibYUVConvertTest, FMT_A##To##FMT_PLANAR##N) { \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
    const int kHeight = ALIGNINT(benchmark_height_, YALIGN); \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
    const int kStride = (kStrideUV * SUBSAMP_X * 8 * BPP_A + 7) / 8; \
    align_buffer_page_end(src_argb, kStride* kHeight + OFF); \
    align_buffer_page_end(dst_y_c, kWidth* kHeight); \
    align_buffer_page_end(dst_u_c, kStrideUV* SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    align_buffer_page_end(dst_v_c, kStrideUV* SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    align_buffer_page_end(dst_y_opt, kWidth* kHeight); \
    align_buffer_page_end(dst_u_opt, \
                          kStrideUV* SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    align_buffer_page_end(dst_v_opt, \
                          kStrideUV* SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    memset(dst_y_c, 1, kWidth* kHeight); \
    memset(dst_u_c, 2, kStrideUV* SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    memset(dst_v_c, 3, kStrideUV* SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    memset(dst_y_opt, 101, kWidth* kHeight); \
    memset(dst_u_opt, 102, kStrideUV* SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    memset(dst_v_opt, 103, kStrideUV* SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    for (int i = 0; i < kHeight; ++i) \
      for (int j = 0; j < kStride; ++j) \
        src_argb[(i * kStride) + j + OFF] = (fastrand() & 0xff); \
    MaskCpuFlags(disable_cpu_flags_); \
    FMT_A##To##FMT_PLANAR(src_argb + OFF, kStride, dst_y_c, kWidth, dst_u_c, \
                          kStrideUV, dst_v_c, kStrideUV, kWidth, NEG kHeight); \
    MaskCpuFlags(benchmark_cpu_info_); \
    for (int i = 0; i < benchmark_iterations_; ++i) { \
      FMT_A##To##FMT_PLANAR(src_argb + OFF, kStride, dst_y_opt, kWidth, \
                            dst_u_opt, kStrideUV, dst_v_opt, kStrideUV, \
                            kWidth, NEG kHeight); \
    } \
    for (int i = 0; i < kHeight; ++i) { \
      for (int j = 0; j < kWidth; ++j) { \
        EXPECT_NEAR(static_cast<int>(dst_y_c[i * kWidth + j]), \
                    static_cast<int>(dst_y_opt[i * kWidth + j]), DIFF); \
      } \
    } \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) { \
      for (int j = 0; j < kStrideUV; ++j) { \
        EXPECT_NEAR(static_cast<int>(dst_u_c[i * kStrideUV + j]), \
                    static_cast<int>(dst_u_opt[i * kStrideUV + j]), DIFF); \
      } \
    } \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) { \
      for (int j = 0; j < kStrideUV; ++j) { \
        EXPECT_NEAR(static_cast<int>(dst_v_c[i * kStrideUV + j]), \
                    static_cast<int>(dst_v_opt[i * kStrideUV + j]), DIFF); \
      } \
    } \
    free_aligned_buffer_page_end(dst_y_c); \
    free_aligned_buffer_page_end(dst_u_c); \
    free_aligned_buffer_page_end(dst_v_c); \
    free_aligned_buffer_page_end(dst_y_opt); \
    free_aligned_buffer_page_end(dst_u_opt); \
    free_aligned_buffer_page_end(dst_v_opt); \
    free_aligned_buffer_page_end(src_argb); \
  }
#else
// Single UV allocation: U rows and V rows alternate within dst_uv_* (V plane
// starts at dst_uv_* + kStrideUV, both planes use stride kStrideUV * 2).
#define TESTATOPLANARI(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
                       W1280, DIFF, N, NEG, OFF) \
  TEST_F(LibYUVConvertTest, FMT_A##To##FMT_PLANAR##N) { \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
    const int kHeight = ALIGNINT(benchmark_height_, YALIGN); \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
    const int kStride = (kStrideUV * SUBSAMP_X * 8 * BPP_A + 7) / 8; \
    align_buffer_page_end(src_argb, kStride* kHeight + OFF); \
    align_buffer_page_end(dst_y_c, kWidth* kHeight); \
    align_buffer_page_end(dst_uv_c, \
                          kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    align_buffer_page_end(dst_y_opt, kWidth* kHeight); \
    align_buffer_page_end(dst_uv_opt, \
                          kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    memset(dst_y_c, 1, kWidth* kHeight); \
    memset(dst_uv_c, 2, kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    memset(dst_y_opt, 101, kWidth* kHeight); \
    memset(dst_uv_opt, 102, kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    for (int i = 0; i < kHeight; ++i) \
      for (int j = 0; j < kStride; ++j) \
        src_argb[(i * kStride) + j + OFF] = (fastrand() & 0xff); \
    MaskCpuFlags(disable_cpu_flags_); \
    FMT_A##To##FMT_PLANAR(src_argb + OFF, kStride, dst_y_c, kWidth, dst_uv_c, \
                          kStrideUV * 2, dst_uv_c + kStrideUV, kStrideUV * 2, \
                          kWidth, NEG kHeight); \
    MaskCpuFlags(benchmark_cpu_info_); \
    for (int i = 0; i < benchmark_iterations_; ++i) { \
      FMT_A##To##FMT_PLANAR(src_argb + OFF, kStride, dst_y_opt, kWidth, \
                            dst_uv_opt, kStrideUV * 2, dst_uv_opt + kStrideUV, \
                            kStrideUV * 2, kWidth, NEG kHeight); \
    } \
    for (int i = 0; i < kHeight; ++i) { \
      for (int j = 0; j < kWidth; ++j) { \
        EXPECT_NEAR(static_cast<int>(dst_y_c[i * kWidth + j]), \
                    static_cast<int>(dst_y_opt[i * kWidth + j]), DIFF); \
      } \
    } \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y) * 2; ++i) { \
      for (int j = 0; j < kStrideUV; ++j) { \
        EXPECT_NEAR(static_cast<int>(dst_uv_c[i * kStrideUV + j]), \
                    static_cast<int>(dst_uv_opt[i * kStrideUV + j]), DIFF); \
      } \
    } \
    free_aligned_buffer_page_end(dst_y_c); \
    free_aligned_buffer_page_end(dst_uv_c); \
    free_aligned_buffer_page_end(dst_y_opt); \
    free_aligned_buffer_page_end(dst_uv_opt); \
    free_aligned_buffer_page_end(src_argb); \
  }
#endif
// Instantiates the four standard variants (_Any, _Unaligned, _Invert, _Opt)
// of the packed -> planar test.  DIFF is the per-byte tolerance passed to
// EXPECT_NEAR (larger for lossy sources such as ARGB1555/ARGB4444/RGB565).
#define TESTATOPLANAR(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
                      DIFF) \
  TESTATOPLANARI(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
                 benchmark_width_ - 4, DIFF, _Any, +, 0) \
  TESTATOPLANARI(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
                 benchmark_width_, DIFF, _Unaligned, +, 1) \
  TESTATOPLANARI(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
                 benchmark_width_, DIFF, _Invert, -, 0) \
  TESTATOPLANARI(FMT_A, BPP_A, YALIGN, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
                 benchmark_width_, DIFF, _Opt, +, 0)

// Packed pixel -> planar YUV conversions under test.  ARM_YUV_ERROR widens
// the tolerance for conversions whose ARM paths round differently.
TESTATOPLANAR(ABGR, 4, 1, I420, 2, 2, 4)
TESTATOPLANAR(ARGB, 4, 1, I420, 2, 2, 4)
TESTATOPLANAR(ARGB, 4, 1, I422, 2, 1, 2)
TESTATOPLANAR(ARGB, 4, 1, I444, 1, 1, 2)
TESTATOPLANAR(ARGB, 4, 1, J420, 2, 2, ARM_YUV_ERROR)
TESTATOPLANAR(ARGB, 4, 1, J422, 2, 1, ARM_YUV_ERROR)
#ifdef LITTLE_ENDIAN_TEST
TESTATOPLANAR(ARGB1555, 2, 1, I420, 2, 2, 15)
TESTATOPLANAR(ARGB4444, 2, 1, I420, 2, 2, 17)
#endif
TESTATOPLANAR(BGRA, 4, 1, I420, 2, 2, 4)
TESTATOPLANAR(I400, 1, 1, I420, 2, 2, 2)
TESTATOPLANAR(J400, 1, 1, J420, 2, 2, 2)
TESTATOPLANAR(RAW, 3, 1, I420, 2, 2, 4)
TESTATOPLANAR(RGB24, 3, 1, I420, 2, 2, 4)
TESTATOPLANAR(RGB24, 3, 1, J420, 2, 2, ARM_YUV_ERROR)
#ifdef LITTLE_ENDIAN_TEST
TESTATOPLANAR(RGB565, 2, 1, I420, 2, 2, 5)
#endif
TESTATOPLANAR(RGBA, 4, 1, I420, 2, 2, 4)
TESTATOPLANAR(UYVY, 2, 1, I420, 2, 2, 2)
TESTATOPLANAR(UYVY, 2, 1, I422, 2, 1, 2)
TESTATOPLANAR(YUY2, 2, 1, I420, 2, 2, 2)
TESTATOPLANAR(YUY2, 2, 1, I422, 2, 1, 2)
// Generates one TEST_F converting a packed pixel format FMT_A to a biplanar
// (Y + interleaved UV) format, e.g. ARGB -> NV12.  SUB_A is the horizontal
// subsampling of FMT_A itself (2 for macro-pixel formats such as YUY2/UYVY).
// The C-reference and optimized outputs must agree within 4 per byte on both
// the Y plane and the interleaved UV plane.
#define TESTATOBIPLANARI(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, \
                         SUBSAMP_Y, W1280, N, NEG, OFF) \
  TEST_F(LibYUVConvertTest, FMT_A##To##FMT_PLANAR##N) { \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1; \
    const int kHeight = benchmark_height_; \
    const int kStride = SUBSAMPLE(kWidth, SUB_A) * BPP_A; \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X); \
    align_buffer_page_end(src_argb, kStride* kHeight + OFF); \
    align_buffer_page_end(dst_y_c, kWidth* kHeight); \
    align_buffer_page_end(dst_uv_c, \
                          kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    align_buffer_page_end(dst_y_opt, kWidth* kHeight); \
    align_buffer_page_end(dst_uv_opt, \
                          kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    for (int i = 0; i < kHeight; ++i) \
      for (int j = 0; j < kStride; ++j) \
        src_argb[(i * kStride) + j + OFF] = (fastrand() & 0xff); \
    memset(dst_y_c, 1, kWidth* kHeight); \
    memset(dst_uv_c, 2, kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    memset(dst_y_opt, 101, kWidth* kHeight); \
    memset(dst_uv_opt, 102, kStrideUV * 2 * SUBSAMPLE(kHeight, SUBSAMP_Y)); \
    MaskCpuFlags(disable_cpu_flags_); \
    FMT_A##To##FMT_PLANAR(src_argb + OFF, kStride, dst_y_c, kWidth, dst_uv_c, \
                          kStrideUV * 2, kWidth, NEG kHeight); \
    MaskCpuFlags(benchmark_cpu_info_); \
    for (int i = 0; i < benchmark_iterations_; ++i) { \
      FMT_A##To##FMT_PLANAR(src_argb + OFF, kStride, dst_y_opt, kWidth, \
                            dst_uv_opt, kStrideUV * 2, kWidth, NEG kHeight); \
    } \
    int max_diff = 0; \
    for (int i = 0; i < kHeight; ++i) { \
      for (int j = 0; j < kWidth; ++j) { \
        int abs_diff = abs(static_cast<int>(dst_y_c[i * kWidth + j]) - \
                           static_cast<int>(dst_y_opt[i * kWidth + j])); \
        if (abs_diff > max_diff) { \
          max_diff = abs_diff; \
        } \
      } \
    } \
    EXPECT_LE(max_diff, 4); \
    for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y); ++i) { \
      for (int j = 0; j < kStrideUV * 2; ++j) { \
        int abs_diff = \
            abs(static_cast<int>(dst_uv_c[i * kStrideUV * 2 + j]) - \
                static_cast<int>(dst_uv_opt[i * kStrideUV * 2 + j])); \
        if (abs_diff > max_diff) { \
          max_diff = abs_diff; \
        } \
      } \
    } \
    EXPECT_LE(max_diff, 4); \
    free_aligned_buffer_page_end(dst_y_c); \
    free_aligned_buffer_page_end(dst_uv_c); \
    free_aligned_buffer_page_end(dst_y_opt); \
    free_aligned_buffer_page_end(dst_uv_opt); \
    free_aligned_buffer_page_end(src_argb); \
  }

// Instantiates the four standard variants (_Any, _Unaligned, _Invert, _Opt)
// of the packed -> biplanar test above.
#define TESTATOBIPLANAR(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y) \
  TESTATOBIPLANARI(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
                   benchmark_width_ - 4, _Any, +, 0) \
  TESTATOBIPLANARI(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
                   benchmark_width_, _Unaligned, +, 1) \
  TESTATOBIPLANARI(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
                   benchmark_width_, _Invert, -, 0) \
  TESTATOBIPLANARI(FMT_A, SUB_A, BPP_A, FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, \
                   benchmark_width_, _Opt, +, 0)

TESTATOBIPLANAR(ARGB, 1, 4, NV12, 2, 2)
TESTATOBIPLANAR(ARGB, 1, 4, NV21, 2, 2)
TESTATOBIPLANAR(ABGR, 1, 4, NV12, 2, 2)
TESTATOBIPLANAR(ABGR, 1, 4, NV21, 2, 2)
TESTATOBIPLANAR(YUY2, 2, 4, NV12, 2, 2)
TESTATOBIPLANAR(UYVY, 2, 4, NV12, 2, 2)
TESTATOBIPLANAR(AYUV, 1, 4, NV12, 2, 2)
TESTATOBIPLANAR(AYUV, 1, 4, NV21, 2, 2)
// Defines one test FMT_A##To##FMT_B##N.  The conversion is run once with
// SIMD CPU features masked off (the reference C path, into dst_argb_c) and
// benchmark_iterations_ times with them enabled (into dst_argb_opt); the
// largest per-byte absolute difference between the two outputs must not
// exceed DIFF.
//   BPP_x    - bytes per pixel of each format.
//   STRIDE_x - row-stride rounding in bytes.
//   HEIGHT_x - height rounding in rows.
//   W1280    - test width (clamped to at least 1).
//   N        - suffix appended to the test name.
//   NEG      - unary + or -; minus passes a negative height (inverted image).
//   OFF      - byte offset into the source buffer to test unaligned reads.
#define TESTATOBI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
                  HEIGHT_B, W1280, DIFF, N, NEG, OFF)                       \
  TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##N) {                          \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                         \
    const int kHeight = benchmark_height_;                                  \
    const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A;    \
    const int kHeightB = (kHeight + HEIGHT_B - 1) / HEIGHT_B * HEIGHT_B;    \
    const int kStrideA =                                                    \
        (kWidth * BPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A;              \
    const int kStrideB =                                                    \
        (kWidth * BPP_B + STRIDE_B - 1) / STRIDE_B * STRIDE_B;              \
    align_buffer_page_end(src_argb, kStrideA* kHeightA + OFF);              \
    align_buffer_page_end(dst_argb_c, kStrideB* kHeightB);                  \
    align_buffer_page_end(dst_argb_opt, kStrideB* kHeightB);                \
    for (int i = 0; i < kStrideA * kHeightA; ++i) {                         \
      src_argb[i + OFF] = (fastrand() & 0xff);                              \
    }                                                                       \
    /* Different fill values so untouched bytes are caught by the diff. */  \
    memset(dst_argb_c, 1, kStrideB* kHeightB);                              \
    memset(dst_argb_opt, 101, kStrideB* kHeightB);                          \
    MaskCpuFlags(disable_cpu_flags_);                                       \
    FMT_A##To##FMT_B(src_argb + OFF, kStrideA, dst_argb_c, kStrideB,        \
                     kWidth, NEG kHeight);                                  \
    MaskCpuFlags(benchmark_cpu_info_);                                      \
    for (int i = 0; i < benchmark_iterations_; ++i) {                       \
      FMT_A##To##FMT_B(src_argb + OFF, kStrideA, dst_argb_opt, kStrideB,    \
                       kWidth, NEG kHeight);                                \
    }                                                                       \
    int max_diff = 0;                                                       \
    for (int i = 0; i < kStrideB * kHeightB; ++i) {                         \
      int abs_diff = abs(static_cast<int>(dst_argb_c[i]) -                  \
                         static_cast<int>(dst_argb_opt[i]));                \
      if (abs_diff > max_diff) {                                            \
        max_diff = abs_diff;                                                \
      }                                                                     \
    }                                                                       \
    EXPECT_LE(max_diff, DIFF);                                              \
    free_aligned_buffer_page_end(src_argb);                                 \
    free_aligned_buffer_page_end(dst_argb_c);                               \
    free_aligned_buffer_page_end(dst_argb_opt);                             \
  }
// Defines FMT_A##To##FMT_B##_Random: benchmark_iterations_ trials, each on
// a randomly sized image (width 1..64, height 1..32) filled with random
// bytes.  The C path and the optimized path are each run once and every
// output byte compared with EXPECT_NEAR tolerance DIFF.
#define TESTATOBRANDOM(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B,     \
                       STRIDE_B, HEIGHT_B, DIFF)                           \
  TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##_Random) {                   \
    for (int times = 0; times < benchmark_iterations_; ++times) {          \
      const int kWidth = (fastrand() & 63) + 1;                            \
      const int kHeight = (fastrand() & 31) + 1;                           \
      const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A; \
      const int kHeightB = (kHeight + HEIGHT_B - 1) / HEIGHT_B * HEIGHT_B; \
      const int kStrideA =                                                 \
          (kWidth * BPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A;           \
      const int kStrideB =                                                 \
          (kWidth * BPP_B + STRIDE_B - 1) / STRIDE_B * STRIDE_B;           \
      align_buffer_page_end(src_argb, kStrideA* kHeightA);                 \
      align_buffer_page_end(dst_argb_c, kStrideB* kHeightB);               \
      align_buffer_page_end(dst_argb_opt, kStrideB* kHeightB);             \
      for (int i = 0; i < kStrideA * kHeightA; ++i) {                      \
        src_argb[i] = (fastrand() & 0xff);                                 \
      }                                                                    \
      memset(dst_argb_c, 123, kStrideB* kHeightB);                         \
      memset(dst_argb_opt, 123, kStrideB* kHeightB);                       \
      MaskCpuFlags(disable_cpu_flags_);                                    \
      FMT_A##To##FMT_B(src_argb, kStrideA, dst_argb_c, kStrideB, kWidth,   \
                       kHeight);                                           \
      MaskCpuFlags(benchmark_cpu_info_);                                   \
      FMT_A##To##FMT_B(src_argb, kStrideA, dst_argb_opt, kStrideB, kWidth, \
                       kHeight);                                           \
      for (int i = 0; i < kStrideB * kHeightB; ++i) {                      \
        EXPECT_NEAR(dst_argb_c[i], dst_argb_opt[i], DIFF);                 \
      }                                                                    \
      free_aligned_buffer_page_end(src_argb);                              \
      free_aligned_buffer_page_end(dst_argb_c);                            \
      free_aligned_buffer_page_end(dst_argb_opt);                          \
    }                                                                      \
  }
// Bundles the four deterministic variants (_Any, _Unaligned, _Invert, _Opt;
// see TESTATOBI) plus the random-size variant (TESTATOBRANDOM) for one
// FMT_A -> FMT_B conversion.
#define TESTATOB(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,  \
                 HEIGHT_B, DIFF)                                            \
  TESTATOBI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
            HEIGHT_B, benchmark_width_ - 4, DIFF, _Any, +, 0)              \
  TESTATOBI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
            HEIGHT_B, benchmark_width_, DIFF, _Unaligned, +, 1)            \
  TESTATOBI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
            HEIGHT_B, benchmark_width_, DIFF, _Invert, -, 0)               \
  TESTATOBI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
            HEIGHT_B, benchmark_width_, DIFF, _Opt, +, 0)                  \
  TESTATOBRANDOM(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
                 HEIGHT_B, DIFF)

// Packed-to-packed conversions.  A non-zero final argument is the allowed
// per-byte tolerance between the C and SIMD implementations.
// TODO(fbarchard): make ARM version of C code that matches NEON.
TESTATOB(AB30, 4, 4, 1, ABGR, 4, 4, 1, 0)
TESTATOB(AB30, 4, 4, 1, ARGB, 4, 4, 1, 0)
#ifdef LITTLE_ENDIAN_TEST
TESTATOB(ABGR, 4, 4, 1, AR30, 4, 4, 1, 0)
#endif
TESTATOB(ABGR, 4, 4, 1, ARGB, 4, 4, 1, 0)
#ifdef LITTLE_ENDIAN_TEST
TESTATOB(AR30, 4, 4, 1, AB30, 4, 4, 1, 0)
#endif
TESTATOB(AR30, 4, 4, 1, ABGR, 4, 4, 1, 0)
#ifdef LITTLE_ENDIAN_TEST
TESTATOB(AR30, 4, 4, 1, AR30, 4, 4, 1, 0)
TESTATOB(AR30, 4, 4, 1, ARGB, 4, 4, 1, 0)
#endif
TESTATOB(ARGB, 4, 4, 1, ABGR, 4, 4, 1, 0)
#ifdef LITTLE_ENDIAN_TEST
TESTATOB(ARGB, 4, 4, 1, AR30, 4, 4, 1, 0)
#endif
TESTATOB(ARGB, 4, 4, 1, ARGB, 4, 4, 1, 0)
TESTATOB(ARGB, 4, 4, 1, ARGB1555, 2, 2, 1, 0)
TESTATOB(ARGB, 4, 4, 1, ARGB4444, 2, 2, 1, 0)
TESTATOB(ARGB, 4, 4, 1, ARGBMirror, 4, 4, 1, 0)
TESTATOB(ARGB, 4, 4, 1, BGRA, 4, 4, 1, 0)
TESTATOB(ARGB, 4, 4, 1, I400, 1, 1, 1, 2)
TESTATOB(ARGB, 4, 4, 1, J400, 1, 1, 1, 2)
TESTATOB(ARGB, 4, 4, 1, RAW, 3, 3, 1, 0)
TESTATOB(ARGB, 4, 4, 1, RGB24, 3, 3, 1, 0)
#ifdef LITTLE_ENDIAN_TEST
TESTATOB(ARGB, 4, 4, 1, RGB565, 2, 2, 1, 0)
#endif
TESTATOB(ARGB, 4, 4, 1, RGBA, 4, 4, 1, 0)
TESTATOB(ARGB, 4, 4, 1, UYVY, 2, 4, 1, 4)
TESTATOB(ARGB, 4, 4, 1, YUY2, 2, 4, 1, 4)
TESTATOB(ARGB1555, 2, 2, 1, ARGB, 4, 4, 1, 0)
TESTATOB(ARGB4444, 2, 2, 1, ARGB, 4, 4, 1, 0)
TESTATOB(BGRA, 4, 4, 1, ARGB, 4, 4, 1, 0)
TESTATOB(I400, 1, 1, 1, ARGB, 4, 4, 1, 0)
TESTATOB(I400, 1, 1, 1, I400, 1, 1, 1, 0)
TESTATOB(I400, 1, 1, 1, I400Mirror, 1, 1, 1, 0)
TESTATOB(J400, 1, 1, 1, ARGB, 4, 4, 1, 0)
TESTATOB(J400, 1, 1, 1, J400, 1, 1, 1, 0)
TESTATOB(RAW, 3, 3, 1, ARGB, 4, 4, 1, 0)
TESTATOB(RAW, 3, 3, 1, RGB24, 3, 3, 1, 0)
TESTATOB(RGB24, 3, 3, 1, ARGB, 4, 4, 1, 0)
TESTATOB(RGB24, 3, 3, 1, J400, 1, 1, 1, 0)
#ifdef LITTLE_ENDIAN_TEST
TESTATOB(RGB565, 2, 2, 1, ARGB, 4, 4, 1, 0)
#endif
TESTATOB(RGBA, 4, 4, 1, ARGB, 4, 4, 1, 0)
TESTATOB(UYVY, 2, 4, 1, ARGB, 4, 4, 1, ARM_YUV_ERROR)
TESTATOB(YUY2, 2, 4, 1, ARGB, 4, 4, 1, ARM_YUV_ERROR)
TESTATOB(YUY2, 2, 4, 1, Y, 1, 1, 1, 0)
// Like TESTATOBI but exercises the FMT_A##To##FMT_B##Dither entry point,
// which takes an extra dither-table argument (NULL is passed here).
// Parameters have the same meaning as in TESTATOBI above.
#define TESTATOBDI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
                   HEIGHT_B, W1280, DIFF, N, NEG, OFF)                       \
  TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##Dither##N) {                   \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                          \
    const int kHeight = benchmark_height_;                                   \
    const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A;     \
    const int kHeightB = (kHeight + HEIGHT_B - 1) / HEIGHT_B * HEIGHT_B;     \
    const int kStrideA =                                                     \
        (kWidth * BPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A;               \
    const int kStrideB =                                                     \
        (kWidth * BPP_B + STRIDE_B - 1) / STRIDE_B * STRIDE_B;               \
    align_buffer_page_end(src_argb, kStrideA* kHeightA + OFF);               \
    align_buffer_page_end(dst_argb_c, kStrideB* kHeightB);                   \
    align_buffer_page_end(dst_argb_opt, kStrideB* kHeightB);                 \
    for (int i = 0; i < kStrideA * kHeightA; ++i) {                          \
      src_argb[i + OFF] = (fastrand() & 0xff);                               \
    }                                                                        \
    /* Different fill values so untouched bytes are caught by the diff. */   \
    memset(dst_argb_c, 1, kStrideB* kHeightB);                               \
    memset(dst_argb_opt, 101, kStrideB* kHeightB);                           \
    MaskCpuFlags(disable_cpu_flags_);                                        \
    FMT_A##To##FMT_B##Dither(src_argb + OFF, kStrideA, dst_argb_c, kStrideB, \
                             NULL, kWidth, NEG kHeight);                     \
    MaskCpuFlags(benchmark_cpu_info_);                                       \
    for (int i = 0; i < benchmark_iterations_; ++i) {                        \
      FMT_A##To##FMT_B##Dither(src_argb + OFF, kStrideA, dst_argb_opt,       \
                               kStrideB, NULL, kWidth, NEG kHeight);         \
    }                                                                        \
    int max_diff = 0;                                                        \
    for (int i = 0; i < kStrideB * kHeightB; ++i) {                          \
      int abs_diff = abs(static_cast<int>(dst_argb_c[i]) -                   \
                         static_cast<int>(dst_argb_opt[i]));                 \
      if (abs_diff > max_diff) {                                             \
        max_diff = abs_diff;                                                 \
      }                                                                      \
    }                                                                        \
    EXPECT_LE(max_diff, DIFF);                                               \
    free_aligned_buffer_page_end(src_argb);                                  \
    free_aligned_buffer_page_end(dst_argb_c);                                \
    free_aligned_buffer_page_end(dst_argb_opt);                              \
  }
// Random-size variant of the Dither test: benchmark_iterations_ trials on
// random 1..64 x 1..32 images with random content, comparing the C path
// against the optimized path; the max per-byte difference must be <= DIFF.
#define TESTATOBDRANDOM(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B,      \
                        STRIDE_B, HEIGHT_B, DIFF)                            \
  TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##Dither_Random) {               \
    for (int times = 0; times < benchmark_iterations_; ++times) {            \
      const int kWidth = (fastrand() & 63) + 1;                              \
      const int kHeight = (fastrand() & 31) + 1;                             \
      const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A;   \
      const int kHeightB = (kHeight + HEIGHT_B - 1) / HEIGHT_B * HEIGHT_B;   \
      const int kStrideA =                                                   \
          (kWidth * BPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A;             \
      const int kStrideB =                                                   \
          (kWidth * BPP_B + STRIDE_B - 1) / STRIDE_B * STRIDE_B;             \
      align_buffer_page_end(src_argb, kStrideA* kHeightA);                   \
      align_buffer_page_end(dst_argb_c, kStrideB* kHeightB);                 \
      align_buffer_page_end(dst_argb_opt, kStrideB* kHeightB);               \
      for (int i = 0; i < kStrideA * kHeightA; ++i) {                        \
        src_argb[i] = (fastrand() & 0xff);                                   \
      }                                                                      \
      memset(dst_argb_c, 123, kStrideB* kHeightB);                           \
      memset(dst_argb_opt, 123, kStrideB* kHeightB);                         \
      MaskCpuFlags(disable_cpu_flags_);                                      \
      FMT_A##To##FMT_B##Dither(src_argb, kStrideA, dst_argb_c, kStrideB,     \
                               NULL, kWidth, kHeight);                       \
      MaskCpuFlags(benchmark_cpu_info_);                                     \
      FMT_A##To##FMT_B##Dither(src_argb, kStrideA, dst_argb_opt, kStrideB,   \
                               NULL, kWidth, kHeight);                       \
      int max_diff = 0;                                                      \
      for (int i = 0; i < kStrideB * kHeightB; ++i) {                        \
        int abs_diff = abs(static_cast<int>(dst_argb_c[i]) -                 \
                           static_cast<int>(dst_argb_opt[i]));               \
        if (abs_diff > max_diff) {                                           \
          max_diff = abs_diff;                                               \
        }                                                                    \
      }                                                                      \
      EXPECT_LE(max_diff, DIFF);                                             \
      free_aligned_buffer_page_end(src_argb);                                \
      free_aligned_buffer_page_end(dst_argb_c);                              \
      free_aligned_buffer_page_end(dst_argb_opt);                            \
    }                                                                        \
  }
// Bundles the four deterministic Dither variants plus the random-size
// Dither variant, mirroring TESTATOB for the Dither entry points.
#define TESTATOBD(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,  \
                  HEIGHT_B, DIFF)                                            \
  TESTATOBDI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
             HEIGHT_B, benchmark_width_ - 4, DIFF, _Any, +, 0)              \
  TESTATOBDI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
             HEIGHT_B, benchmark_width_, DIFF, _Unaligned, +, 1)            \
  TESTATOBDI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
             HEIGHT_B, benchmark_width_, DIFF, _Invert, -, 0)               \
  TESTATOBDI(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B,      \
             HEIGHT_B, benchmark_width_, DIFF, _Opt, +, 0)                  \
  TESTATOBDRANDOM(FMT_A, BPP_A, STRIDE_A, HEIGHT_A, FMT_B, BPP_B, STRIDE_B, \
                  HEIGHT_B, DIFF)

// The only Dither conversion currently exercised.
#ifdef LITTLE_ENDIAN_TEST
TESTATOBD(ARGB, 4, 4, 1, RGB565, 2, 2, 1, 0)
#endif
// Verifies that FMT_ATOB is its own inverse: applying the conversion twice
// must reproduce the original pixels exactly, for both the C path and the
// optimized path, and both paths must agree byte-for-byte.
// Note: the "_Symetric" misspelling is preserved deliberately - it is part
// of the public test name.
#define TESTSYMI(FMT_ATOB, BPP_A, STRIDE_A, HEIGHT_A, W1280, N, NEG, OFF)     \
  TEST_F(LibYUVConvertTest, FMT_ATOB##_Symetric##N) {                         \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                           \
    const int kHeight = benchmark_height_;                                    \
    const int kHeightA = (kHeight + HEIGHT_A - 1) / HEIGHT_A * HEIGHT_A;      \
    const int kStrideA =                                                      \
        (kWidth * BPP_A + STRIDE_A - 1) / STRIDE_A * STRIDE_A;                \
    align_buffer_page_end(src_argb, kStrideA* kHeightA + OFF);                \
    align_buffer_page_end(dst_argb_c, kStrideA* kHeightA);                    \
    align_buffer_page_end(dst_argb_opt, kStrideA* kHeightA);                  \
    for (int i = 0; i < kStrideA * kHeightA; ++i) {                           \
      src_argb[i + OFF] = (fastrand() & 0xff);                                \
    }                                                                         \
    memset(dst_argb_c, 1, kStrideA* kHeightA);                                \
    memset(dst_argb_opt, 101, kStrideA* kHeightA);                            \
    /* First application: src -> dst, C path then optimized path. */          \
    MaskCpuFlags(disable_cpu_flags_);                                         \
    FMT_ATOB(src_argb + OFF, kStrideA, dst_argb_c, kStrideA, kWidth,          \
             NEG kHeight);                                                    \
    MaskCpuFlags(benchmark_cpu_info_);                                        \
    for (int i = 0; i < benchmark_iterations_; ++i) {                         \
      FMT_ATOB(src_argb + OFF, kStrideA, dst_argb_opt, kStrideA, kWidth,      \
               NEG kHeight);                                                  \
    }                                                                         \
    /* Second application, in place: must undo the first. */                  \
    MaskCpuFlags(disable_cpu_flags_);                                         \
    FMT_ATOB(dst_argb_c, kStrideA, dst_argb_c, kStrideA, kWidth, NEG kHeight); \
    MaskCpuFlags(benchmark_cpu_info_);                                        \
    FMT_ATOB(dst_argb_opt, kStrideA, dst_argb_opt, kStrideA, kWidth,          \
             NEG kHeight);                                                    \
    for (int i = 0; i < kStrideA * kHeightA; ++i) {                           \
      EXPECT_EQ(src_argb[i + OFF], dst_argb_opt[i]);                          \
      EXPECT_EQ(dst_argb_c[i], dst_argb_opt[i]);                              \
    }                                                                         \
    free_aligned_buffer_page_end(src_argb);                                   \
    free_aligned_buffer_page_end(dst_argb_c);                                 \
    free_aligned_buffer_page_end(dst_argb_opt);                               \
  }
// Instantiates the _Any, _Unaligned and _Opt symmetry variants.  Note that
// unlike TESTATOB, no _Invert variant is generated here.
#define TESTSYM(FMT_ATOB, BPP_A, STRIDE_A, HEIGHT_A)                          \
  TESTSYMI(FMT_ATOB, BPP_A, STRIDE_A, HEIGHT_A, benchmark_width_ - 4, _Any, +, \
           0)                                                                 \
  TESTSYMI(FMT_ATOB, BPP_A, STRIDE_A, HEIGHT_A, benchmark_width_, _Unaligned, \
           +, 1)                                                              \
  TESTSYMI(FMT_ATOB, BPP_A, STRIDE_A, HEIGHT_A, benchmark_width_, _Opt, +, 0)

// Channel-reorder conversions expected to be their own inverse.
TESTSYM(ARGBToARGB, 4, 4, 1)
TESTSYM(ARGBToBGRA, 4, 4, 1)
TESTSYM(ARGBToABGR, 4, 4, 1)
TESTSYM(BGRAToARGB, 4, 4, 1)
TESTSYM(ABGRToARGB, 4, 4, 1)
  1338. TEST_F(LibYUVConvertTest, Test565) {
  1339. SIMD_ALIGNED(uint8_t orig_pixels[256][4]);
  1340. SIMD_ALIGNED(uint8_t pixels565[256][2]);
  1341. for (int i = 0; i < 256; ++i) {
  1342. for (int j = 0; j < 4; ++j) {
  1343. orig_pixels[i][j] = i;
  1344. }
  1345. }
  1346. ARGBToRGB565(&orig_pixels[0][0], 0, &pixels565[0][0], 0, 256, 1);
  1347. uint32_t checksum = HashDjb2(&pixels565[0][0], sizeof(pixels565), 5381);
  1348. EXPECT_EQ(610919429u, checksum);
  1349. }
  1350. #ifdef HAVE_JPEG
  1351. TEST_F(LibYUVConvertTest, ValidateJpeg) {
  1352. const int kOff = 10;
  1353. const int kMinJpeg = 64;
  1354. const int kImageSize = benchmark_width_ * benchmark_height_ >= kMinJpeg
  1355. ? benchmark_width_ * benchmark_height_
  1356. : kMinJpeg;
  1357. const int kSize = kImageSize + kOff;
  1358. align_buffer_page_end(orig_pixels, kSize);
  1359. // No SOI or EOI. Expect fail.
  1360. memset(orig_pixels, 0, kSize);
  1361. EXPECT_FALSE(ValidateJpeg(orig_pixels, kSize));
  1362. // Test special value that matches marker start.
  1363. memset(orig_pixels, 0xff, kSize);
  1364. EXPECT_FALSE(ValidateJpeg(orig_pixels, kSize));
  1365. // EOI, SOI. Expect pass.
  1366. orig_pixels[0] = 0xff;
  1367. orig_pixels[1] = 0xd8; // SOI.
  1368. orig_pixels[2] = 0xff;
  1369. orig_pixels[kSize - kOff + 0] = 0xff;
  1370. orig_pixels[kSize - kOff + 1] = 0xd9; // EOI.
  1371. for (int times = 0; times < benchmark_iterations_; ++times) {
  1372. EXPECT_TRUE(ValidateJpeg(orig_pixels, kSize));
  1373. }
  1374. free_aligned_buffer_page_end(orig_pixels);
  1375. }
  1376. TEST_F(LibYUVConvertTest, ValidateJpegLarge) {
  1377. const int kOff = 10;
  1378. const int kMinJpeg = 64;
  1379. const int kImageSize = benchmark_width_ * benchmark_height_ >= kMinJpeg
  1380. ? benchmark_width_ * benchmark_height_
  1381. : kMinJpeg;
  1382. const int kSize = kImageSize + kOff;
  1383. const int kMultiple = 10;
  1384. const int kBufSize = kImageSize * kMultiple + kOff;
  1385. align_buffer_page_end(orig_pixels, kBufSize);
  1386. // No SOI or EOI. Expect fail.
  1387. memset(orig_pixels, 0, kBufSize);
  1388. EXPECT_FALSE(ValidateJpeg(orig_pixels, kBufSize));
  1389. // EOI, SOI. Expect pass.
  1390. orig_pixels[0] = 0xff;
  1391. orig_pixels[1] = 0xd8; // SOI.
  1392. orig_pixels[2] = 0xff;
  1393. orig_pixels[kSize - kOff + 0] = 0xff;
  1394. orig_pixels[kSize - kOff + 1] = 0xd9; // EOI.
  1395. for (int times = 0; times < benchmark_iterations_; ++times) {
  1396. EXPECT_TRUE(ValidateJpeg(orig_pixels, kBufSize));
  1397. }
  1398. free_aligned_buffer_page_end(orig_pixels);
  1399. }
  1400. TEST_F(LibYUVConvertTest, InvalidateJpeg) {
  1401. const int kOff = 10;
  1402. const int kMinJpeg = 64;
  1403. const int kImageSize = benchmark_width_ * benchmark_height_ >= kMinJpeg
  1404. ? benchmark_width_ * benchmark_height_
  1405. : kMinJpeg;
  1406. const int kSize = kImageSize + kOff;
  1407. align_buffer_page_end(orig_pixels, kSize);
  1408. // NULL pointer. Expect fail.
  1409. EXPECT_FALSE(ValidateJpeg(NULL, kSize));
  1410. // Negative size. Expect fail.
  1411. EXPECT_FALSE(ValidateJpeg(orig_pixels, -1));
  1412. // Too large size. Expect fail.
  1413. EXPECT_FALSE(ValidateJpeg(orig_pixels, 0xfb000000ull));
  1414. // No SOI or EOI. Expect fail.
  1415. memset(orig_pixels, 0, kSize);
  1416. EXPECT_FALSE(ValidateJpeg(orig_pixels, kSize));
  1417. // SOI but no EOI. Expect fail.
  1418. orig_pixels[0] = 0xff;
  1419. orig_pixels[1] = 0xd8; // SOI.
  1420. orig_pixels[2] = 0xff;
  1421. for (int times = 0; times < benchmark_iterations_; ++times) {
  1422. EXPECT_FALSE(ValidateJpeg(orig_pixels, kSize));
  1423. }
  1424. // EOI but no SOI. Expect fail.
  1425. orig_pixels[0] = 0;
  1426. orig_pixels[1] = 0;
  1427. orig_pixels[kSize - kOff + 0] = 0xff;
  1428. orig_pixels[kSize - kOff + 1] = 0xd9; // EOI.
  1429. EXPECT_FALSE(ValidateJpeg(orig_pixels, kSize));
  1430. free_aligned_buffer_page_end(orig_pixels);
  1431. }
  1432. TEST_F(LibYUVConvertTest, FuzzJpeg) {
  1433. // SOI but no EOI. Expect fail.
  1434. for (int times = 0; times < benchmark_iterations_; ++times) {
  1435. const int kSize = fastrand() % 5000 + 3;
  1436. align_buffer_page_end(orig_pixels, kSize);
  1437. MemRandomize(orig_pixels, kSize);
  1438. // Add SOI so frame will be scanned.
  1439. orig_pixels[0] = 0xff;
  1440. orig_pixels[1] = 0xd8; // SOI.
  1441. orig_pixels[2] = 0xff;
  1442. orig_pixels[kSize - 1] = 0xff;
  1443. ValidateJpeg(orig_pixels,
  1444. kSize); // Failure normally expected.
  1445. free_aligned_buffer_page_end(orig_pixels);
  1446. }
  1447. }
// Test data created in GIMP. In export jpeg, disable
// thumbnails etc, choose a subsampling, and use low quality
// (50) to keep size small. Generated with xxd -i test.jpg
// test 0 is J400
// SOF2 header bytes (0xff 0xc2 ...) show a progressive, 32x16,
// single-component (grayscale) image.
static const uint8_t kTest0Jpg[] = {
    0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 0x4a, 0x46, 0x49, 0x46, 0x00, 0x01,
    0x01, 0x01, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0xff, 0xdb, 0x00, 0x43,
    0x00, 0x10, 0x0b, 0x0c, 0x0e, 0x0c, 0x0a, 0x10, 0x0e, 0x0d, 0x0e, 0x12,
    0x11, 0x10, 0x13, 0x18, 0x28, 0x1a, 0x18, 0x16, 0x16, 0x18, 0x31, 0x23,
    0x25, 0x1d, 0x28, 0x3a, 0x33, 0x3d, 0x3c, 0x39, 0x33, 0x38, 0x37, 0x40,
    0x48, 0x5c, 0x4e, 0x40, 0x44, 0x57, 0x45, 0x37, 0x38, 0x50, 0x6d, 0x51,
    0x57, 0x5f, 0x62, 0x67, 0x68, 0x67, 0x3e, 0x4d, 0x71, 0x79, 0x70, 0x64,
    0x78, 0x5c, 0x65, 0x67, 0x63, 0xff, 0xc2, 0x00, 0x0b, 0x08, 0x00, 0x10,
    0x00, 0x20, 0x01, 0x01, 0x11, 0x00, 0xff, 0xc4, 0x00, 0x17, 0x00, 0x01,
    0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x03, 0x04, 0x01, 0x02, 0xff, 0xda, 0x00, 0x08, 0x01,
    0x01, 0x00, 0x00, 0x00, 0x01, 0x43, 0x7e, 0xa7, 0x97, 0x57, 0xff, 0xc4,
    0x00, 0x1b, 0x10, 0x00, 0x03, 0x00, 0x02, 0x03, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x11, 0x00, 0x03,
    0x10, 0x12, 0x13, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x05,
    0x02, 0x3b, 0xc0, 0x6f, 0x66, 0x76, 0x56, 0x23, 0x87, 0x99, 0x0d, 0x26,
    0x62, 0xf6, 0xbf, 0xff, 0xc4, 0x00, 0x1e, 0x10, 0x00, 0x02, 0x01, 0x03,
    0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x11, 0x21, 0x02, 0x12, 0x32, 0x10, 0x31, 0x71, 0x81, 0xa1, 0xff,
    0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x06, 0x3f, 0x02, 0x4b, 0xb3, 0x28,
    0x32, 0xd2, 0xed, 0xf9, 0x1d, 0x3e, 0x13, 0x51, 0x73, 0x83, 0xff, 0xc4,
    0x00, 0x1c, 0x10, 0x01, 0x01, 0x01, 0x00, 0x02, 0x03, 0x01, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x11, 0x00, 0x21, 0x51,
    0x31, 0x61, 0x81, 0xf0, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01,
    0x3f, 0x21, 0x65, 0x6e, 0x31, 0x86, 0x28, 0xf9, 0x30, 0xdc, 0x27, 0xdb,
    0xa9, 0x01, 0xf3, 0xde, 0x02, 0xa0, 0xed, 0x1e, 0x34, 0x68, 0x23, 0xf9,
    0xc6, 0x48, 0x5d, 0x7a, 0x35, 0x02, 0xf5, 0x6f, 0xff, 0xda, 0x00, 0x08,
    0x01, 0x01, 0x00, 0x00, 0x00, 0x10, 0x35, 0xff, 0xc4, 0x00, 0x1f, 0x10,
    0x01, 0x00, 0x02, 0x01, 0x04, 0x03, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x11, 0x31, 0x41, 0x61, 0x71, 0x91,
    0x21, 0x81, 0xd1, 0xb1, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01,
    0x3f, 0x10, 0x0b, 0x30, 0xe9, 0x58, 0xbe, 0x1a, 0xfd, 0x88, 0xab, 0x8b,
    0x34, 0x74, 0x80, 0x4b, 0xb5, 0xd5, 0xab, 0xcd, 0x46, 0x96, 0x2e, 0xec,
    0xbd, 0xaa, 0x78, 0x47, 0x5c, 0x47, 0xa7, 0x30, 0x49, 0xad, 0x88, 0x7c,
    0x40, 0x74, 0x30, 0xff, 0x00, 0x23, 0x1d, 0x03, 0x0b, 0xb7, 0xd4, 0xff,
    0xd9};
// Byte count of kTest0Jpg (35 rows of 12 bytes plus the final 0xd9).
static const size_t kTest0JpgLen = 421;
// test 1 is J444
// SOF2 header bytes show a progressive, 32x16, 3-component image with 1x1
// sampling factors (4:4:4).
static const uint8_t kTest1Jpg[] = {
    0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 0x4a, 0x46, 0x49, 0x46, 0x00, 0x01,
    0x01, 0x01, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0xff, 0xdb, 0x00, 0x43,
    0x00, 0x10, 0x0b, 0x0c, 0x0e, 0x0c, 0x0a, 0x10, 0x0e, 0x0d, 0x0e, 0x12,
    0x11, 0x10, 0x13, 0x18, 0x28, 0x1a, 0x18, 0x16, 0x16, 0x18, 0x31, 0x23,
    0x25, 0x1d, 0x28, 0x3a, 0x33, 0x3d, 0x3c, 0x39, 0x33, 0x38, 0x37, 0x40,
    0x48, 0x5c, 0x4e, 0x40, 0x44, 0x57, 0x45, 0x37, 0x38, 0x50, 0x6d, 0x51,
    0x57, 0x5f, 0x62, 0x67, 0x68, 0x67, 0x3e, 0x4d, 0x71, 0x79, 0x70, 0x64,
    0x78, 0x5c, 0x65, 0x67, 0x63, 0xff, 0xdb, 0x00, 0x43, 0x01, 0x11, 0x12,
    0x12, 0x18, 0x15, 0x18, 0x2f, 0x1a, 0x1a, 0x2f, 0x63, 0x42, 0x38, 0x42,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0xff, 0xc2, 0x00, 0x11, 0x08, 0x00, 0x10, 0x00, 0x20, 0x03,
    0x01, 0x11, 0x00, 0x02, 0x11, 0x01, 0x03, 0x11, 0x01, 0xff, 0xc4, 0x00,
    0x17, 0x00, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x04, 0x01, 0x02, 0xff, 0xc4,
    0x00, 0x16, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x01, 0x03, 0xff, 0xda,
    0x00, 0x0c, 0x03, 0x01, 0x00, 0x02, 0x10, 0x03, 0x10, 0x00, 0x00, 0x01,
    0x40, 0x8f, 0x26, 0xe8, 0xf4, 0xcc, 0xf9, 0x69, 0x2b, 0x1b, 0x2a, 0xcb,
    0xff, 0xc4, 0x00, 0x1b, 0x10, 0x00, 0x03, 0x00, 0x02, 0x03, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x11,
    0x00, 0x03, 0x10, 0x12, 0x13, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00,
    0x01, 0x05, 0x02, 0x3b, 0x80, 0x6f, 0x56, 0x76, 0x56, 0x23, 0x87, 0x99,
    0x0d, 0x26, 0x62, 0xf6, 0xbf, 0xff, 0xc4, 0x00, 0x19, 0x11, 0x01, 0x00,
    0x03, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x01, 0x00, 0x10, 0x11, 0x02, 0x12, 0xff, 0xda, 0x00, 0x08,
    0x01, 0x03, 0x01, 0x01, 0x3f, 0x01, 0xf1, 0x00, 0x27, 0x45, 0xbb, 0x31,
    0xaf, 0xff, 0xc4, 0x00, 0x1a, 0x11, 0x00, 0x02, 0x03, 0x01, 0x01, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
    0x02, 0x10, 0x11, 0x41, 0x12, 0xff, 0xda, 0x00, 0x08, 0x01, 0x02, 0x01,
    0x01, 0x3f, 0x01, 0xf6, 0x4b, 0x5f, 0x48, 0xb3, 0x69, 0x63, 0x35, 0x72,
    0xbf, 0xff, 0xc4, 0x00, 0x1e, 0x10, 0x00, 0x02, 0x01, 0x03, 0x05, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11,
    0x21, 0x02, 0x12, 0x32, 0x10, 0x31, 0x71, 0x81, 0xa1, 0xff, 0xda, 0x00,
    0x08, 0x01, 0x01, 0x00, 0x06, 0x3f, 0x02, 0x4b, 0xb3, 0x28, 0x32, 0xd2,
    0xed, 0xf9, 0x1d, 0x3e, 0x13, 0x51, 0x73, 0x83, 0xff, 0xc4, 0x00, 0x1c,
    0x10, 0x01, 0x01, 0x01, 0x00, 0x02, 0x03, 0x01, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x11, 0x00, 0x21, 0x51, 0x31, 0x61,
    0x81, 0xf0, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x3f, 0x21,
    0x75, 0x6e, 0x31, 0x94, 0x28, 0xf9, 0x30, 0xdc, 0x27, 0xdb, 0xa9, 0x01,
    0xf3, 0xde, 0x02, 0xa0, 0xed, 0x1e, 0x34, 0x68, 0x23, 0xf9, 0xc6, 0x48,
    0x5d, 0x7a, 0x35, 0x02, 0xf5, 0x6f, 0xff, 0xda, 0x00, 0x0c, 0x03, 0x01,
    0x00, 0x02, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x26, 0x61, 0xd4, 0xff,
    0xc4, 0x00, 0x1a, 0x11, 0x00, 0x03, 0x01, 0x00, 0x03, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x11, 0x21,
    0x31, 0x41, 0x51, 0xff, 0xda, 0x00, 0x08, 0x01, 0x03, 0x01, 0x01, 0x3f,
    0x10, 0x54, 0xa8, 0xbf, 0x50, 0x87, 0xb0, 0x9d, 0x8b, 0xc4, 0x6a, 0x26,
    0x6b, 0x2a, 0x9c, 0x1f, 0xff, 0xc4, 0x00, 0x18, 0x11, 0x01, 0x01, 0x01,
    0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x01, 0x00, 0x11, 0x21, 0x51, 0xff, 0xda, 0x00, 0x08, 0x01, 0x02,
    0x01, 0x01, 0x3f, 0x10, 0x70, 0xe1, 0x3e, 0xd1, 0x8e, 0x0d, 0xe1, 0xb5,
    0xd5, 0x91, 0x76, 0x43, 0x82, 0x45, 0x4c, 0x7b, 0x7f, 0xff, 0xc4, 0x00,
    0x1f, 0x10, 0x01, 0x00, 0x02, 0x01, 0x04, 0x03, 0x01, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x11, 0x31, 0x41, 0x61,
    0x71, 0x91, 0x21, 0x81, 0xd1, 0xb1, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01,
    0x00, 0x01, 0x3f, 0x10, 0x1b, 0x30, 0xe9, 0x58, 0xbe, 0x1a, 0xfd, 0x8a,
    0xeb, 0x8b, 0x34, 0x74, 0x80, 0x4b, 0xb5, 0xd5, 0xab, 0xcd, 0x46, 0x96,
    0x2e, 0xec, 0xbd, 0xaa, 0x78, 0x47, 0x5c, 0x47, 0xa7, 0x30, 0x49, 0xad,
    0x88, 0x7c, 0x40, 0x74, 0x30, 0xff, 0x00, 0x23, 0x1d, 0x03, 0x0b, 0xb7,
    0xd4, 0xff, 0xd9};
// Byte count of kTest1Jpg (61 rows of 12 bytes plus a final 3).
static const size_t kTest1JpgLen = 735;
// test 2 is J420
// SOF2 header bytes show a progressive, 32x16, 3-component image whose
// first component uses 2x2 sampling (0x22), i.e. 4:2:0.
static const uint8_t kTest2Jpg[] = {
    0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 0x4a, 0x46, 0x49, 0x46, 0x00, 0x01,
    0x01, 0x01, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0xff, 0xdb, 0x00, 0x43,
    0x00, 0x10, 0x0b, 0x0c, 0x0e, 0x0c, 0x0a, 0x10, 0x0e, 0x0d, 0x0e, 0x12,
    0x11, 0x10, 0x13, 0x18, 0x28, 0x1a, 0x18, 0x16, 0x16, 0x18, 0x31, 0x23,
    0x25, 0x1d, 0x28, 0x3a, 0x33, 0x3d, 0x3c, 0x39, 0x33, 0x38, 0x37, 0x40,
    0x48, 0x5c, 0x4e, 0x40, 0x44, 0x57, 0x45, 0x37, 0x38, 0x50, 0x6d, 0x51,
    0x57, 0x5f, 0x62, 0x67, 0x68, 0x67, 0x3e, 0x4d, 0x71, 0x79, 0x70, 0x64,
    0x78, 0x5c, 0x65, 0x67, 0x63, 0xff, 0xdb, 0x00, 0x43, 0x01, 0x11, 0x12,
    0x12, 0x18, 0x15, 0x18, 0x2f, 0x1a, 0x1a, 0x2f, 0x63, 0x42, 0x38, 0x42,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0xff, 0xc2, 0x00, 0x11, 0x08, 0x00, 0x10, 0x00, 0x20, 0x03,
    0x01, 0x22, 0x00, 0x02, 0x11, 0x01, 0x03, 0x11, 0x01, 0xff, 0xc4, 0x00,
    0x18, 0x00, 0x00, 0x02, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x05, 0x01, 0x02, 0x04, 0xff,
    0xc4, 0x00, 0x16, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x01, 0x02, 0xff,
    0xda, 0x00, 0x0c, 0x03, 0x01, 0x00, 0x02, 0x10, 0x03, 0x10, 0x00, 0x00,
    0x01, 0x20, 0xe7, 0x28, 0xa3, 0x0b, 0x2e, 0x2d, 0xcf, 0xff, 0xc4, 0x00,
    0x1b, 0x10, 0x00, 0x03, 0x00, 0x02, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x11, 0x00, 0x03, 0x10,
    0x12, 0x13, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x05, 0x02,
    0x3b, 0x80, 0x6f, 0x56, 0x76, 0x56, 0x23, 0x87, 0x99, 0x0d, 0x26, 0x62,
    0xf6, 0xbf, 0xff, 0xc4, 0x00, 0x17, 0x11, 0x01, 0x00, 0x03, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x01, 0x11, 0x21, 0xff, 0xda, 0x00, 0x08, 0x01, 0x03, 0x01, 0x01, 0x3f,
    0x01, 0xc8, 0x53, 0xff, 0xc4, 0x00, 0x16, 0x11, 0x01, 0x01, 0x01, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x11, 0x32, 0xff, 0xda, 0x00, 0x08, 0x01, 0x02, 0x01, 0x01, 0x3f,
    0x01, 0xd2, 0xc7, 0xff, 0xc4, 0x00, 0x1e, 0x10, 0x00, 0x02, 0x01, 0x03,
    0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x11, 0x21, 0x02, 0x12, 0x32, 0x10, 0x31, 0x71, 0x81, 0xa1, 0xff,
    0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x06, 0x3f, 0x02, 0x4b, 0xb3, 0x28,
    0x32, 0xd2, 0xed, 0xf9, 0x1d, 0x3e, 0x13, 0x51, 0x73, 0x83, 0xff, 0xc4,
    0x00, 0x1c, 0x10, 0x01, 0x01, 0x01, 0x00, 0x02, 0x03, 0x01, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x11, 0x00, 0x21, 0x51,
    0x31, 0x61, 0x81, 0xf0, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01,
    0x3f, 0x21, 0x75, 0x6e, 0x31, 0x94, 0x28, 0xf9, 0x30, 0xdc, 0x27, 0xdb,
    0xa9, 0x01, 0xf3, 0xde, 0x02, 0xa0, 0xed, 0x1e, 0x34, 0x68, 0x23, 0xf9,
    0xc6, 0x48, 0x5d, 0x7a, 0x35, 0x02, 0xf5, 0x6f, 0xff, 0xda, 0x00, 0x0c,
    0x03, 0x01, 0x00, 0x02, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x13, 0x5f,
    0xff, 0xc4, 0x00, 0x17, 0x11, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x11,
    0x21, 0xff, 0xda, 0x00, 0x08, 0x01, 0x03, 0x01, 0x01, 0x3f, 0x10, 0x0e,
    0xa1, 0x3a, 0x76, 0xff, 0xc4, 0x00, 0x17, 0x11, 0x01, 0x01, 0x01, 0x01,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x21, 0x11, 0xff, 0xda, 0x00, 0x08, 0x01, 0x02, 0x01, 0x01,
    0x3f, 0x10, 0x57, 0x0b, 0x08, 0x70, 0xdb, 0xff, 0xc4, 0x00, 0x1f, 0x10,
    0x01, 0x00, 0x02, 0x01, 0x04, 0x03, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x11, 0x31, 0x41, 0x61, 0x71, 0x91,
    0x21, 0x81, 0xd1, 0xb1, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01,
    0x3f, 0x10, 0x1b, 0x30, 0xe9, 0x58, 0xbe, 0x1a, 0xfd, 0x8a, 0xeb, 0x8b,
    0x34, 0x74, 0x80, 0x4b, 0xb5, 0xd5, 0xab, 0xcd, 0x46, 0x96, 0x2e, 0xec,
    0xbd, 0xaa, 0x78, 0x47, 0x5c, 0x47, 0xa7, 0x30, 0x49, 0xad, 0x88, 0x7c,
    0x40, 0x74, 0x30, 0xff, 0x00, 0x23, 0x1d, 0x03, 0x0b, 0xb7, 0xd4, 0xff,
    0xd9};
// Byte count of kTest2Jpg (57 rows of 12 bytes plus the final 0xd9).
static const size_t kTest2JpgLen = 685;
  1616. // test 3 is J422
  1617. static const uint8_t kTest3Jpg[] = {
  1618. 0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 0x4a, 0x46, 0x49, 0x46, 0x00, 0x01,
  1619. 0x01, 0x01, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0xff, 0xdb, 0x00, 0x43,
  1620. 0x00, 0x10, 0x0b, 0x0c, 0x0e, 0x0c, 0x0a, 0x10, 0x0e, 0x0d, 0x0e, 0x12,
  1621. 0x11, 0x10, 0x13, 0x18, 0x28, 0x1a, 0x18, 0x16, 0x16, 0x18, 0x31, 0x23,
  1622. 0x25, 0x1d, 0x28, 0x3a, 0x33, 0x3d, 0x3c, 0x39, 0x33, 0x38, 0x37, 0x40,
  1623. 0x48, 0x5c, 0x4e, 0x40, 0x44, 0x57, 0x45, 0x37, 0x38, 0x50, 0x6d, 0x51,
  1624. 0x57, 0x5f, 0x62, 0x67, 0x68, 0x67, 0x3e, 0x4d, 0x71, 0x79, 0x70, 0x64,
  1625. 0x78, 0x5c, 0x65, 0x67, 0x63, 0xff, 0xdb, 0x00, 0x43, 0x01, 0x11, 0x12,
  1626. 0x12, 0x18, 0x15, 0x18, 0x2f, 0x1a, 0x1a, 0x2f, 0x63, 0x42, 0x38, 0x42,
  1627. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1628. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1629. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1630. 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
  1631. 0x63, 0x63, 0xff, 0xc2, 0x00, 0x11, 0x08, 0x00, 0x10, 0x00, 0x20, 0x03,
  1632. 0x01, 0x21, 0x00, 0x02, 0x11, 0x01, 0x03, 0x11, 0x01, 0xff, 0xc4, 0x00,
  1633. 0x17, 0x00, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1634. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x03, 0x04, 0x01, 0x02, 0xff, 0xc4,
  1635. 0x00, 0x17, 0x01, 0x00, 0x03, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1636. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x03, 0x00, 0xff,
  1637. 0xda, 0x00, 0x0c, 0x03, 0x01, 0x00, 0x02, 0x10, 0x03, 0x10, 0x00, 0x00,
  1638. 0x01, 0x43, 0x8d, 0x1f, 0xa2, 0xb3, 0xca, 0x1b, 0x57, 0x0f, 0xff, 0xc4,
  1639. 0x00, 0x1b, 0x10, 0x00, 0x03, 0x00, 0x02, 0x03, 0x00, 0x00, 0x00, 0x00,
  1640. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x11, 0x00, 0x03,
  1641. 0x10, 0x12, 0x13, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x05,
  1642. 0x02, 0x3b, 0x80, 0x6f, 0x56, 0x76, 0x56, 0x23, 0x87, 0x99, 0x0d, 0x26,
  1643. 0x62, 0xf6, 0xbf, 0xff, 0xc4, 0x00, 0x19, 0x11, 0x00, 0x02, 0x03, 0x01,
  1644. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1645. 0x00, 0x01, 0x02, 0x10, 0x11, 0x21, 0xff, 0xda, 0x00, 0x08, 0x01, 0x03,
  1646. 0x01, 0x01, 0x3f, 0x01, 0x51, 0xce, 0x8c, 0x75, 0xff, 0xc4, 0x00, 0x18,
  1647. 0x11, 0x00, 0x03, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1648. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x61, 0x21, 0xff, 0xda,
  1649. 0x00, 0x08, 0x01, 0x02, 0x01, 0x01, 0x3f, 0x01, 0xa6, 0xd9, 0x2f, 0x84,
  1650. 0xe8, 0xf0, 0xff, 0xc4, 0x00, 0x1e, 0x10, 0x00, 0x02, 0x01, 0x03, 0x05,
  1651. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1652. 0x11, 0x21, 0x02, 0x12, 0x32, 0x10, 0x31, 0x71, 0x81, 0xa1, 0xff, 0xda,
  1653. 0x00, 0x08, 0x01, 0x01, 0x00, 0x06, 0x3f, 0x02, 0x4b, 0xb3, 0x28, 0x32,
  1654. 0xd2, 0xed, 0xf9, 0x1d, 0x3e, 0x13, 0x51, 0x73, 0x83, 0xff, 0xc4, 0x00,
  1655. 0x1c, 0x10, 0x01, 0x01, 0x01, 0x00, 0x02, 0x03, 0x01, 0x00, 0x00, 0x00,
  1656. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x11, 0x00, 0x21, 0x51, 0x31,
  1657. 0x61, 0x81, 0xf0, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x3f,
  1658. 0x21, 0x75, 0x6e, 0x31, 0x94, 0x28, 0xf9, 0x30, 0xdc, 0x27, 0xdb, 0xa9,
  1659. 0x01, 0xf3, 0xde, 0x02, 0xa0, 0xed, 0x1e, 0x34, 0x68, 0x23, 0xf9, 0xc6,
  1660. 0x48, 0x5d, 0x7a, 0x35, 0x02, 0xf5, 0x6f, 0xff, 0xda, 0x00, 0x0c, 0x03,
  1661. 0x01, 0x00, 0x02, 0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x2e, 0x45, 0xff,
  1662. 0xc4, 0x00, 0x18, 0x11, 0x00, 0x03, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00,
  1663. 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x11, 0x21,
  1664. 0x31, 0xff, 0xda, 0x00, 0x08, 0x01, 0x03, 0x01, 0x01, 0x3f, 0x10, 0x53,
  1665. 0x50, 0xba, 0x54, 0xc1, 0x67, 0x4f, 0xff, 0xc4, 0x00, 0x18, 0x11, 0x00,
  1666. 0x03, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
  1667. 0x00, 0x00, 0x00, 0x01, 0x11, 0x21, 0x00, 0x10, 0xff, 0xda, 0x00, 0x08,
  1668. 0x01, 0x02, 0x01, 0x01, 0x3f, 0x10, 0x18, 0x81, 0x5c, 0x04, 0x1a, 0xca,
  1669. 0x91, 0xbf, 0xff, 0xc4, 0x00, 0x1f, 0x10, 0x01, 0x00, 0x02, 0x01, 0x04,
  1670. 0x03, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
  1671. 0x00, 0x11, 0x31, 0x41, 0x61, 0x71, 0x91, 0x21, 0x81, 0xd1, 0xb1, 0xff,
  1672. 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x3f, 0x10, 0x1b, 0x30, 0xe9,
  1673. 0x58, 0xbe, 0x1a, 0xfd, 0x8a, 0xeb, 0x8b, 0x34, 0x74, 0x80, 0x4b, 0xb5,
  1674. 0xd5, 0xab, 0xcd, 0x46, 0x96, 0x2e, 0xec, 0xbd, 0xaa, 0x78, 0x47, 0x5c,
  1675. 0x47, 0xa7, 0x30, 0x49, 0xad, 0x88, 0x7c, 0x40, 0x74, 0x30, 0xff, 0x00,
  1676. 0x23, 0x1d, 0x03, 0x0b, 0xb7, 0xd4, 0xff, 0xd9};
  1677. static const size_t kTest3JpgLen = 704;
// test 4 is J422 vertical - not supported
// Opaque JPEG test data; the SOF marker declares 1x2 (vertical) chroma
// subsampling (component sampling byte 0x12), which libyuv does not decode.
// Used only by TestMJPGInfo to exercise the "valid but unsupported" path.
static const uint8_t kTest4Jpg[] = {
    0xff, 0xd8, 0xff, 0xe0, 0x00, 0x10, 0x4a, 0x46, 0x49, 0x46, 0x00, 0x01,
    0x01, 0x01, 0x00, 0x48, 0x00, 0x48, 0x00, 0x00, 0xff, 0xdb, 0x00, 0x43,
    0x00, 0x10, 0x0b, 0x0c, 0x0e, 0x0c, 0x0a, 0x10, 0x0e, 0x0d, 0x0e, 0x12,
    0x11, 0x10, 0x13, 0x18, 0x28, 0x1a, 0x18, 0x16, 0x16, 0x18, 0x31, 0x23,
    0x25, 0x1d, 0x28, 0x3a, 0x33, 0x3d, 0x3c, 0x39, 0x33, 0x38, 0x37, 0x40,
    0x48, 0x5c, 0x4e, 0x40, 0x44, 0x57, 0x45, 0x37, 0x38, 0x50, 0x6d, 0x51,
    0x57, 0x5f, 0x62, 0x67, 0x68, 0x67, 0x3e, 0x4d, 0x71, 0x79, 0x70, 0x64,
    0x78, 0x5c, 0x65, 0x67, 0x63, 0xff, 0xdb, 0x00, 0x43, 0x01, 0x11, 0x12,
    0x12, 0x18, 0x15, 0x18, 0x2f, 0x1a, 0x1a, 0x2f, 0x63, 0x42, 0x38, 0x42,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63, 0x63,
    0x63, 0x63, 0xff, 0xc2, 0x00, 0x11, 0x08, 0x00, 0x10, 0x00, 0x20, 0x03,
    0x01, 0x12, 0x00, 0x02, 0x11, 0x01, 0x03, 0x11, 0x01, 0xff, 0xc4, 0x00,
    0x18, 0x00, 0x00, 0x02, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x05, 0x01, 0x02, 0x03, 0xff,
    0xc4, 0x00, 0x16, 0x01, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x03, 0xff,
    0xda, 0x00, 0x0c, 0x03, 0x01, 0x00, 0x02, 0x10, 0x03, 0x10, 0x00, 0x00,
    0x01, 0xd2, 0x98, 0xe9, 0x03, 0x0c, 0x00, 0x46, 0x21, 0xd9, 0xff, 0xc4,
    0x00, 0x1b, 0x10, 0x00, 0x03, 0x00, 0x02, 0x03, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x02, 0x11, 0x00, 0x03,
    0x10, 0x12, 0x13, 0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x05,
    0x02, 0x3b, 0x80, 0x6f, 0x56, 0x76, 0x56, 0x23, 0x87, 0x99, 0x0d, 0x26,
    0x62, 0xf6, 0xbf, 0xff, 0xc4, 0x00, 0x17, 0x11, 0x01, 0x01, 0x01, 0x01,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x11, 0x01, 0x21, 0xff, 0xda, 0x00, 0x08, 0x01, 0x03, 0x01, 0x01,
    0x3f, 0x01, 0x98, 0xb1, 0xbd, 0x47, 0xff, 0xc4, 0x00, 0x18, 0x11, 0x00,
    0x03, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x01, 0x12, 0x11, 0x21, 0xff, 0xda, 0x00, 0x08,
    0x01, 0x02, 0x01, 0x01, 0x3f, 0x01, 0xb6, 0x35, 0xa2, 0xe1, 0x47, 0xff,
    0xc4, 0x00, 0x1e, 0x10, 0x00, 0x02, 0x01, 0x03, 0x05, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x11, 0x21, 0x02,
    0x12, 0x32, 0x10, 0x31, 0x71, 0x81, 0xa1, 0xff, 0xda, 0x00, 0x08, 0x01,
    0x01, 0x00, 0x06, 0x3f, 0x02, 0x4b, 0xb3, 0x28, 0x32, 0xd2, 0xed, 0xf9,
    0x1d, 0x3e, 0x13, 0x51, 0x73, 0x83, 0xff, 0xc4, 0x00, 0x1c, 0x10, 0x01,
    0x01, 0x01, 0x00, 0x02, 0x03, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x01, 0x11, 0x00, 0x21, 0x51, 0x31, 0x61, 0x81, 0xf0,
    0xff, 0xda, 0x00, 0x08, 0x01, 0x01, 0x00, 0x01, 0x3f, 0x21, 0x75, 0x6e,
    0x31, 0x94, 0x28, 0xf9, 0x30, 0xdc, 0x27, 0xdb, 0xa9, 0x01, 0xf3, 0xde,
    0x02, 0xa0, 0xed, 0x1e, 0x34, 0x68, 0x23, 0xf9, 0xc6, 0x48, 0x5d, 0x7a,
    0x35, 0x02, 0xf5, 0x6f, 0xff, 0xda, 0x00, 0x0c, 0x03, 0x01, 0x00, 0x02,
    0x00, 0x03, 0x00, 0x00, 0x00, 0x10, 0x24, 0xaf, 0xff, 0xc4, 0x00, 0x19,
    0x11, 0x00, 0x03, 0x01, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x11, 0x51, 0x21, 0x31, 0xff,
    0xda, 0x00, 0x08, 0x01, 0x03, 0x01, 0x01, 0x3f, 0x10, 0x59, 0x11, 0xca,
    0x42, 0x60, 0x9f, 0x69, 0xff, 0xc4, 0x00, 0x19, 0x11, 0x00, 0x02, 0x03,
    0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x01, 0x11, 0x21, 0x31, 0x61, 0xff, 0xda, 0x00, 0x08, 0x01,
    0x02, 0x01, 0x01, 0x3f, 0x10, 0xb0, 0xd7, 0x27, 0x51, 0xb6, 0x41, 0xff,
    0xc4, 0x00, 0x1f, 0x10, 0x01, 0x00, 0x02, 0x01, 0x04, 0x03, 0x01, 0x00,
    0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x11, 0x31,
    0x41, 0x61, 0x71, 0x91, 0x21, 0x81, 0xd1, 0xb1, 0xff, 0xda, 0x00, 0x08,
    0x01, 0x01, 0x00, 0x01, 0x3f, 0x10, 0x1b, 0x30, 0xe9, 0x58, 0xbe, 0x1a,
    0xfd, 0x8a, 0xeb, 0x8b, 0x34, 0x74, 0x80, 0x4b, 0xb5, 0xd5, 0xab, 0xcd,
    0x46, 0x96, 0x2e, 0xec, 0xbd, 0xaa, 0x78, 0x47, 0x5c, 0x47, 0xa7, 0x30,
    0x49, 0xad, 0x88, 0x7c, 0x40, 0x74, 0x30, 0xff, 0x00, 0x23, 0x1d, 0x03,
    0x0b, 0xb7, 0xd4, 0xff, 0xd9};
// Byte length of kTest4Jpg above.
static const size_t kTest4JpgLen = 701;
  1740. TEST_F(LibYUVConvertTest, TestMJPGSize) {
  1741. int width = 0;
  1742. int height = 0;
  1743. int ret = MJPGSize(kTest2Jpg, kTest2JpgLen, &width, &height);
  1744. EXPECT_EQ(0, ret);
  1745. printf("test jpeg size %d x %d\n", width, height);
  1746. }
  1747. TEST_F(LibYUVConvertTest, TestMJPGToI420) {
  1748. int width = 0;
  1749. int height = 0;
  1750. int ret = MJPGSize(kTest2Jpg, kTest2JpgLen, &width, &height);
  1751. EXPECT_EQ(0, ret);
  1752. int half_width = (width + 1) / 2;
  1753. int half_height = (height + 1) / 2;
  1754. int benchmark_iterations = benchmark_iterations_ * benchmark_width_ *
  1755. benchmark_height_ / (width * height);
  1756. align_buffer_page_end(dst_y, width * height);
  1757. align_buffer_page_end(dst_u, half_width * half_height);
  1758. align_buffer_page_end(dst_v, half_width * half_height);
  1759. for (int times = 0; times < benchmark_iterations; ++times) {
  1760. ret = MJPGToI420(kTest2Jpg, kTest2JpgLen, dst_y, width, dst_u, half_width,
  1761. dst_v, half_width, width, height, width, height);
  1762. }
  1763. // Expect sucesss
  1764. EXPECT_EQ(0, ret);
  1765. // Test result matches known hash value.
  1766. uint32_t dst_y_hash = HashDjb2(dst_y, width * height, 5381);
  1767. uint32_t dst_u_hash = HashDjb2(dst_u, half_width * half_height, 5381);
  1768. uint32_t dst_v_hash = HashDjb2(dst_v, half_width * half_height, 5381);
  1769. EXPECT_EQ(dst_y_hash, 2682851208u);
  1770. EXPECT_EQ(dst_u_hash, 2501859930u);
  1771. EXPECT_EQ(dst_v_hash, 2126459123u);
  1772. free_aligned_buffer_page_end(dst_y);
  1773. free_aligned_buffer_page_end(dst_u);
  1774. free_aligned_buffer_page_end(dst_v);
  1775. }
  1776. TEST_F(LibYUVConvertTest, TestMJPGToI420_NV21) {
  1777. int width = 0;
  1778. int height = 0;
  1779. int ret = MJPGSize(kTest2Jpg, kTest2JpgLen, &width, &height);
  1780. EXPECT_EQ(0, ret);
  1781. int half_width = (width + 1) / 2;
  1782. int half_height = (height + 1) / 2;
  1783. int benchmark_iterations = benchmark_iterations_ * benchmark_width_ *
  1784. benchmark_height_ / (width * height);
  1785. // Convert to NV21
  1786. align_buffer_page_end(dst_y, width * height);
  1787. align_buffer_page_end(dst_vu, half_width * half_height * 2);
  1788. for (int times = 0; times < benchmark_iterations; ++times) {
  1789. ret = MJPGToNV21(kTest2Jpg, kTest2JpgLen, dst_y, width, dst_vu,
  1790. half_width * 2, width, height, width, height);
  1791. }
  1792. // Expect sucesss
  1793. EXPECT_EQ(0, ret);
  1794. // Convert to I420
  1795. align_buffer_page_end(dst2_y, width * height);
  1796. align_buffer_page_end(dst2_u, half_width * half_height);
  1797. align_buffer_page_end(dst2_v, half_width * half_height);
  1798. for (int times = 0; times < benchmark_iterations; ++times) {
  1799. ret = MJPGToI420(kTest2Jpg, kTest2JpgLen, dst2_y, width, dst2_u, half_width,
  1800. dst2_v, half_width, width, height, width, height);
  1801. }
  1802. // Expect sucesss
  1803. EXPECT_EQ(0, ret);
  1804. // Convert I420 to NV21
  1805. align_buffer_page_end(dst3_y, width * height);
  1806. align_buffer_page_end(dst3_vu, half_width * half_height * 2);
  1807. I420ToNV21(dst2_y, width, dst2_u, half_width, dst2_v, half_width, dst3_y,
  1808. width, dst3_vu, half_width * 2, width, height);
  1809. for (int i = 0; i < width * height; ++i) {
  1810. EXPECT_EQ(dst_y[i], dst3_y[i]);
  1811. }
  1812. for (int i = 0; i < half_width * half_height * 2; ++i) {
  1813. EXPECT_EQ(dst_vu[i], dst3_vu[i]);
  1814. EXPECT_EQ(dst_vu[i], dst3_vu[i]);
  1815. }
  1816. free_aligned_buffer_page_end(dst3_y);
  1817. free_aligned_buffer_page_end(dst3_vu);
  1818. free_aligned_buffer_page_end(dst2_y);
  1819. free_aligned_buffer_page_end(dst2_u);
  1820. free_aligned_buffer_page_end(dst2_v);
  1821. free_aligned_buffer_page_end(dst_y);
  1822. free_aligned_buffer_page_end(dst_vu);
  1823. }
  1824. TEST_F(LibYUVConvertTest, TestMJPGToNV21_420) {
  1825. int width = 0;
  1826. int height = 0;
  1827. int ret = MJPGSize(kTest2Jpg, kTest2JpgLen, &width, &height);
  1828. EXPECT_EQ(0, ret);
  1829. int half_width = (width + 1) / 2;
  1830. int half_height = (height + 1) / 2;
  1831. int benchmark_iterations = benchmark_iterations_ * benchmark_width_ *
  1832. benchmark_height_ / (width * height);
  1833. align_buffer_page_end(dst_y, width * height);
  1834. align_buffer_page_end(dst_uv, half_width * half_height * 2);
  1835. for (int times = 0; times < benchmark_iterations; ++times) {
  1836. ret = MJPGToNV21(kTest2Jpg, kTest2JpgLen, dst_y, width, dst_uv,
  1837. half_width * 2, width, height, width, height);
  1838. }
  1839. // Expect sucesss
  1840. EXPECT_EQ(0, ret);
  1841. // Test result matches known hash value.
  1842. uint32_t dst_y_hash = HashDjb2(dst_y, width * height, 5381);
  1843. uint32_t dst_uv_hash = HashDjb2(dst_uv, half_width * half_height * 2, 5381);
  1844. EXPECT_EQ(dst_y_hash, 2682851208u);
  1845. EXPECT_EQ(dst_uv_hash, 1069662856u);
  1846. free_aligned_buffer_page_end(dst_y);
  1847. free_aligned_buffer_page_end(dst_uv);
  1848. }
  1849. TEST_F(LibYUVConvertTest, TestMJPGToNV21_422) {
  1850. int width = 0;
  1851. int height = 0;
  1852. int ret = MJPGSize(kTest3Jpg, kTest3JpgLen, &width, &height);
  1853. EXPECT_EQ(0, ret);
  1854. int half_width = (width + 1) / 2;
  1855. int half_height = (height + 1) / 2;
  1856. int benchmark_iterations = benchmark_iterations_ * benchmark_width_ *
  1857. benchmark_height_ / (width * height);
  1858. align_buffer_page_end(dst_y, width * height);
  1859. align_buffer_page_end(dst_uv, half_width * half_height * 2);
  1860. for (int times = 0; times < benchmark_iterations; ++times) {
  1861. ret = MJPGToNV21(kTest3Jpg, kTest3JpgLen, dst_y, width, dst_uv,
  1862. half_width * 2, width, height, width, height);
  1863. }
  1864. // Expect sucesss
  1865. EXPECT_EQ(0, ret);
  1866. // Test result matches known hash value.
  1867. uint32_t dst_y_hash = HashDjb2(dst_y, width * height, 5381);
  1868. uint32_t dst_uv_hash = HashDjb2(dst_uv, half_width * half_height * 2, 5381);
  1869. EXPECT_EQ(dst_y_hash, 2682851208u);
  1870. EXPECT_EQ(dst_uv_hash, 3543430771u);
  1871. free_aligned_buffer_page_end(dst_y);
  1872. free_aligned_buffer_page_end(dst_uv);
  1873. }
  1874. TEST_F(LibYUVConvertTest, TestMJPGToNV21_400) {
  1875. int width = 0;
  1876. int height = 0;
  1877. int ret = MJPGSize(kTest0Jpg, kTest0JpgLen, &width, &height);
  1878. EXPECT_EQ(0, ret);
  1879. int half_width = (width + 1) / 2;
  1880. int half_height = (height + 1) / 2;
  1881. int benchmark_iterations = benchmark_iterations_ * benchmark_width_ *
  1882. benchmark_height_ / (width * height);
  1883. align_buffer_page_end(dst_y, width * height);
  1884. align_buffer_page_end(dst_uv, half_width * half_height * 2);
  1885. for (int times = 0; times < benchmark_iterations; ++times) {
  1886. ret = MJPGToNV21(kTest0Jpg, kTest0JpgLen, dst_y, width, dst_uv,
  1887. half_width * 2, width, height, width, height);
  1888. }
  1889. // Expect sucesss
  1890. EXPECT_EQ(0, ret);
  1891. // Test result matches known hash value.
  1892. uint32_t dst_y_hash = HashDjb2(dst_y, width * height, 5381);
  1893. uint32_t dst_uv_hash = HashDjb2(dst_uv, half_width * half_height * 2, 5381);
  1894. EXPECT_EQ(dst_y_hash, 330644005u);
  1895. EXPECT_EQ(dst_uv_hash, 135214341u);
  1896. free_aligned_buffer_page_end(dst_y);
  1897. free_aligned_buffer_page_end(dst_uv);
  1898. }
  1899. TEST_F(LibYUVConvertTest, TestMJPGToNV21_444) {
  1900. int width = 0;
  1901. int height = 0;
  1902. int ret = MJPGSize(kTest1Jpg, kTest1JpgLen, &width, &height);
  1903. EXPECT_EQ(0, ret);
  1904. int half_width = (width + 1) / 2;
  1905. int half_height = (height + 1) / 2;
  1906. int benchmark_iterations = benchmark_iterations_ * benchmark_width_ *
  1907. benchmark_height_ / (width * height);
  1908. align_buffer_page_end(dst_y, width * height);
  1909. align_buffer_page_end(dst_uv, half_width * half_height * 2);
  1910. for (int times = 0; times < benchmark_iterations; ++times) {
  1911. ret = MJPGToNV21(kTest1Jpg, kTest1JpgLen, dst_y, width, dst_uv,
  1912. half_width * 2, width, height, width, height);
  1913. }
  1914. // Expect sucesss
  1915. EXPECT_EQ(0, ret);
  1916. // Test result matches known hash value.
  1917. uint32_t dst_y_hash = HashDjb2(dst_y, width * height, 5381);
  1918. uint32_t dst_uv_hash = HashDjb2(dst_uv, half_width * half_height * 2, 5381);
  1919. EXPECT_EQ(dst_y_hash, 2682851208u);
  1920. EXPECT_EQ(dst_uv_hash, 506143297u);
  1921. free_aligned_buffer_page_end(dst_y);
  1922. free_aligned_buffer_page_end(dst_uv);
  1923. }
  1924. TEST_F(LibYUVConvertTest, TestMJPGToARGB) {
  1925. int width = 0;
  1926. int height = 0;
  1927. int ret = MJPGSize(kTest3Jpg, kTest3JpgLen, &width, &height);
  1928. EXPECT_EQ(0, ret);
  1929. int benchmark_iterations = benchmark_iterations_ * benchmark_width_ *
  1930. benchmark_height_ / (width * height);
  1931. align_buffer_page_end(dst_argb, width * height * 4);
  1932. for (int times = 0; times < benchmark_iterations; ++times) {
  1933. ret = MJPGToARGB(kTest3Jpg, kTest3JpgLen, dst_argb, width * 4, width,
  1934. height, width, height);
  1935. }
  1936. // Expect sucesss
  1937. EXPECT_EQ(0, ret);
  1938. // Test result matches known hash value.
  1939. uint32_t dst_argb_hash = HashDjb2(dst_argb, width * height, 5381);
  1940. EXPECT_EQ(dst_argb_hash, 2355976473u);
  1941. free_aligned_buffer_page_end(dst_argb);
  1942. }
  1943. static int ShowJPegInfo(const uint8_t* sample, size_t sample_size) {
  1944. MJpegDecoder mjpeg_decoder;
  1945. LIBYUV_BOOL ret = mjpeg_decoder.LoadFrame(sample, sample_size);
  1946. int width = mjpeg_decoder.GetWidth();
  1947. int height = mjpeg_decoder.GetHeight();
  1948. // YUV420
  1949. if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
  1950. mjpeg_decoder.GetNumComponents() == 3 &&
  1951. mjpeg_decoder.GetVertSampFactor(0) == 2 &&
  1952. mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
  1953. mjpeg_decoder.GetVertSampFactor(1) == 1 &&
  1954. mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
  1955. mjpeg_decoder.GetVertSampFactor(2) == 1 &&
  1956. mjpeg_decoder.GetHorizSampFactor(2) == 1) {
  1957. printf("JPeg is J420, %dx%d %d bytes\n", width, height,
  1958. static_cast<int>(sample_size));
  1959. // YUV422
  1960. } else if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
  1961. mjpeg_decoder.GetNumComponents() == 3 &&
  1962. mjpeg_decoder.GetVertSampFactor(0) == 1 &&
  1963. mjpeg_decoder.GetHorizSampFactor(0) == 2 &&
  1964. mjpeg_decoder.GetVertSampFactor(1) == 1 &&
  1965. mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
  1966. mjpeg_decoder.GetVertSampFactor(2) == 1 &&
  1967. mjpeg_decoder.GetHorizSampFactor(2) == 1) {
  1968. printf("JPeg is J422, %dx%d %d bytes\n", width, height,
  1969. static_cast<int>(sample_size));
  1970. // YUV444
  1971. } else if (mjpeg_decoder.GetColorSpace() == MJpegDecoder::kColorSpaceYCbCr &&
  1972. mjpeg_decoder.GetNumComponents() == 3 &&
  1973. mjpeg_decoder.GetVertSampFactor(0) == 1 &&
  1974. mjpeg_decoder.GetHorizSampFactor(0) == 1 &&
  1975. mjpeg_decoder.GetVertSampFactor(1) == 1 &&
  1976. mjpeg_decoder.GetHorizSampFactor(1) == 1 &&
  1977. mjpeg_decoder.GetVertSampFactor(2) == 1 &&
  1978. mjpeg_decoder.GetHorizSampFactor(2) == 1) {
  1979. printf("JPeg is J444, %dx%d %d bytes\n", width, height,
  1980. static_cast<int>(sample_size));
  1981. // YUV400
  1982. } else if (mjpeg_decoder.GetColorSpace() ==
  1983. MJpegDecoder::kColorSpaceGrayscale &&
  1984. mjpeg_decoder.GetNumComponents() == 1 &&
  1985. mjpeg_decoder.GetVertSampFactor(0) == 1 &&
  1986. mjpeg_decoder.GetHorizSampFactor(0) == 1) {
  1987. printf("JPeg is J400, %dx%d %d bytes\n", width, height,
  1988. static_cast<int>(sample_size));
  1989. } else {
  1990. // Unknown colorspace.
  1991. printf("JPeg is Unknown colorspace.\n");
  1992. }
  1993. mjpeg_decoder.UnloadFrame();
  1994. return ret;
  1995. }
// Prints the detected format of every embedded JPEG sample.
// ShowJPegInfo returns LoadFrame's result, so 1 means the bitstream's
// headers parsed successfully — including kTest4Jpg, whose subsampling
// libyuv cannot decode but whose headers are still valid.
TEST_F(LibYUVConvertTest, TestMJPGInfo) {
  EXPECT_EQ(1, ShowJPegInfo(kTest0Jpg, kTest0JpgLen));
  EXPECT_EQ(1, ShowJPegInfo(kTest1Jpg, kTest1JpgLen));
  EXPECT_EQ(1, ShowJPegInfo(kTest2Jpg, kTest2JpgLen));
  EXPECT_EQ(1, ShowJPegInfo(kTest3Jpg, kTest3JpgLen));
  EXPECT_EQ(1, ShowJPegInfo(kTest4Jpg,
                            kTest4JpgLen));  // Valid but unsupported.
}
  2004. #endif // HAVE_JPEG
  2005. TEST_F(LibYUVConvertTest, NV12Crop) {
  2006. const int SUBSAMP_X = 2;
  2007. const int SUBSAMP_Y = 2;
  2008. const int kWidth = benchmark_width_;
  2009. const int kHeight = benchmark_height_;
  2010. const int crop_y =
  2011. ((benchmark_height_ - (benchmark_height_ * 360 / 480)) / 2 + 1) & ~1;
  2012. const int kDestWidth = benchmark_width_;
  2013. const int kDestHeight = benchmark_height_ - crop_y * 2;
  2014. const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X);
  2015. const int sample_size =
  2016. kWidth * kHeight + kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y) * 2;
  2017. align_buffer_page_end(src_y, sample_size);
  2018. uint8_t* src_uv = src_y + kWidth * kHeight;
  2019. align_buffer_page_end(dst_y, kDestWidth * kDestHeight);
  2020. align_buffer_page_end(dst_u, SUBSAMPLE(kDestWidth, SUBSAMP_X) *
  2021. SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  2022. align_buffer_page_end(dst_v, SUBSAMPLE(kDestWidth, SUBSAMP_X) *
  2023. SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  2024. align_buffer_page_end(dst_y_2, kDestWidth * kDestHeight);
  2025. align_buffer_page_end(dst_u_2, SUBSAMPLE(kDestWidth, SUBSAMP_X) *
  2026. SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  2027. align_buffer_page_end(dst_v_2, SUBSAMPLE(kDestWidth, SUBSAMP_X) *
  2028. SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  2029. for (int i = 0; i < kHeight * kWidth; ++i) {
  2030. src_y[i] = (fastrand() & 0xff);
  2031. }
  2032. for (int i = 0; i < (SUBSAMPLE(kHeight, SUBSAMP_Y) * kStrideUV) * 2; ++i) {
  2033. src_uv[i] = (fastrand() & 0xff);
  2034. }
  2035. memset(dst_y, 1, kDestWidth * kDestHeight);
  2036. memset(dst_u, 2,
  2037. SUBSAMPLE(kDestWidth, SUBSAMP_X) * SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  2038. memset(dst_v, 3,
  2039. SUBSAMPLE(kDestWidth, SUBSAMP_X) * SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  2040. memset(dst_y_2, 1, kDestWidth * kDestHeight);
  2041. memset(dst_u_2, 2,
  2042. SUBSAMPLE(kDestWidth, SUBSAMP_X) * SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  2043. memset(dst_v_2, 3,
  2044. SUBSAMPLE(kDestWidth, SUBSAMP_X) * SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  2045. ConvertToI420(src_y, sample_size, dst_y_2, kDestWidth, dst_u_2,
  2046. SUBSAMPLE(kDestWidth, SUBSAMP_X), dst_v_2,
  2047. SUBSAMPLE(kDestWidth, SUBSAMP_X), 0, crop_y, kWidth, kHeight,
  2048. kDestWidth, kDestHeight, libyuv::kRotate0, libyuv::FOURCC_NV12);
  2049. NV12ToI420(src_y + crop_y * kWidth, kWidth,
  2050. src_uv + (crop_y / 2) * kStrideUV * 2, kStrideUV * 2, dst_y,
  2051. kDestWidth, dst_u, SUBSAMPLE(kDestWidth, SUBSAMP_X), dst_v,
  2052. SUBSAMPLE(kDestWidth, SUBSAMP_X), kDestWidth, kDestHeight);
  2053. for (int i = 0; i < kDestHeight; ++i) {
  2054. for (int j = 0; j < kDestWidth; ++j) {
  2055. EXPECT_EQ(dst_y[i * kWidth + j], dst_y_2[i * kWidth + j]);
  2056. }
  2057. }
  2058. for (int i = 0; i < SUBSAMPLE(kDestHeight, SUBSAMP_Y); ++i) {
  2059. for (int j = 0; j < SUBSAMPLE(kDestWidth, SUBSAMP_X); ++j) {
  2060. EXPECT_EQ(dst_u[i * SUBSAMPLE(kDestWidth, SUBSAMP_X) + j],
  2061. dst_u_2[i * SUBSAMPLE(kDestWidth, SUBSAMP_X) + j]);
  2062. }
  2063. }
  2064. for (int i = 0; i < SUBSAMPLE(kDestHeight, SUBSAMP_Y); ++i) {
  2065. for (int j = 0; j < SUBSAMPLE(kDestWidth, SUBSAMP_X); ++j) {
  2066. EXPECT_EQ(dst_v[i * SUBSAMPLE(kDestWidth, SUBSAMP_X) + j],
  2067. dst_v_2[i * SUBSAMPLE(kDestWidth, SUBSAMP_X) + j]);
  2068. }
  2069. }
  2070. free_aligned_buffer_page_end(dst_y);
  2071. free_aligned_buffer_page_end(dst_u);
  2072. free_aligned_buffer_page_end(dst_v);
  2073. free_aligned_buffer_page_end(dst_y_2);
  2074. free_aligned_buffer_page_end(dst_u_2);
  2075. free_aligned_buffer_page_end(dst_v_2);
  2076. free_aligned_buffer_page_end(src_y);
  2077. }
  2078. TEST_F(LibYUVConvertTest, I420CropOddY) {
  2079. const int SUBSAMP_X = 2;
  2080. const int SUBSAMP_Y = 2;
  2081. const int kWidth = benchmark_width_;
  2082. const int kHeight = benchmark_height_;
  2083. const int crop_y = 1;
  2084. const int kDestWidth = benchmark_width_;
  2085. const int kDestHeight = benchmark_height_ - crop_y * 2;
  2086. const int kStrideU = SUBSAMPLE(kWidth, SUBSAMP_X);
  2087. const int kStrideV = SUBSAMPLE(kWidth, SUBSAMP_X);
  2088. const int sample_size = kWidth * kHeight +
  2089. kStrideU * SUBSAMPLE(kHeight, SUBSAMP_Y) +
  2090. kStrideV * SUBSAMPLE(kHeight, SUBSAMP_Y);
  2091. align_buffer_page_end(src_y, sample_size);
  2092. uint8_t* src_u = src_y + kWidth * kHeight;
  2093. uint8_t* src_v = src_u + kStrideU * SUBSAMPLE(kHeight, SUBSAMP_Y);
  2094. align_buffer_page_end(dst_y, kDestWidth * kDestHeight);
  2095. align_buffer_page_end(dst_u, SUBSAMPLE(kDestWidth, SUBSAMP_X) *
  2096. SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  2097. align_buffer_page_end(dst_v, SUBSAMPLE(kDestWidth, SUBSAMP_X) *
  2098. SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  2099. for (int i = 0; i < kHeight * kWidth; ++i) {
  2100. src_y[i] = (fastrand() & 0xff);
  2101. }
  2102. for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y) * kStrideU; ++i) {
  2103. src_u[i] = (fastrand() & 0xff);
  2104. }
  2105. for (int i = 0; i < SUBSAMPLE(kHeight, SUBSAMP_Y) * kStrideV; ++i) {
  2106. src_v[i] = (fastrand() & 0xff);
  2107. }
  2108. memset(dst_y, 1, kDestWidth * kDestHeight);
  2109. memset(dst_u, 2,
  2110. SUBSAMPLE(kDestWidth, SUBSAMP_X) * SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  2111. memset(dst_v, 3,
  2112. SUBSAMPLE(kDestWidth, SUBSAMP_X) * SUBSAMPLE(kDestHeight, SUBSAMP_Y));
  2113. MaskCpuFlags(benchmark_cpu_info_);
  2114. for (int i = 0; i < benchmark_iterations_; ++i) {
  2115. ConvertToI420(src_y, sample_size, dst_y, kDestWidth, dst_u,
  2116. SUBSAMPLE(kDestWidth, SUBSAMP_X), dst_v,
  2117. SUBSAMPLE(kDestWidth, SUBSAMP_X), 0, crop_y, kWidth, kHeight,
  2118. kDestWidth, kDestHeight, libyuv::kRotate0,
  2119. libyuv::FOURCC_I420);
  2120. }
  2121. for (int i = 0; i < kDestHeight; ++i) {
  2122. for (int j = 0; j < kDestWidth; ++j) {
  2123. EXPECT_EQ(src_y[crop_y * kWidth + i * kWidth + j],
  2124. dst_y[i * kDestWidth + j]);
  2125. }
  2126. }
  2127. for (int i = 0; i < SUBSAMPLE(kDestHeight, SUBSAMP_Y); ++i) {
  2128. for (int j = 0; j < SUBSAMPLE(kDestWidth, SUBSAMP_X); ++j) {
  2129. EXPECT_EQ(src_u[(crop_y / 2 + i) * kStrideU + j],
  2130. dst_u[i * SUBSAMPLE(kDestWidth, SUBSAMP_X) + j]);
  2131. }
  2132. }
  2133. for (int i = 0; i < SUBSAMPLE(kDestHeight, SUBSAMP_Y); ++i) {
  2134. for (int j = 0; j < SUBSAMPLE(kDestWidth, SUBSAMP_X); ++j) {
  2135. EXPECT_EQ(src_v[(crop_y / 2 + i) * kStrideV + j],
  2136. dst_v[i * SUBSAMPLE(kDestWidth, SUBSAMP_X) + j]);
  2137. }
  2138. }
  2139. free_aligned_buffer_page_end(dst_y);
  2140. free_aligned_buffer_page_end(dst_u);
  2141. free_aligned_buffer_page_end(dst_v);
  2142. free_aligned_buffer_page_end(src_y);
  2143. }
  2144. TEST_F(LibYUVConvertTest, TestYToARGB) {
  2145. uint8_t y[32];
  2146. uint8_t expectedg[32];
  2147. for (int i = 0; i < 32; ++i) {
  2148. y[i] = i * 5 + 17;
  2149. expectedg[i] = static_cast<int>((y[i] - 16) * 1.164f + 0.5f);
  2150. }
  2151. uint8_t argb[32 * 4];
  2152. YToARGB(y, 0, argb, 0, 32, 1);
  2153. for (int i = 0; i < 32; ++i) {
  2154. printf("%2d %d: %d <-> %d,%d,%d,%d\n", i, y[i], expectedg[i],
  2155. argb[i * 4 + 0], argb[i * 4 + 1], argb[i * 4 + 2], argb[i * 4 + 3]);
  2156. }
  2157. for (int i = 0; i < 32; ++i) {
  2158. EXPECT_EQ(expectedg[i], argb[i * 4 + 0]);
  2159. }
  2160. }
// All-zero 4x4 dither table: dithering with it must leave the output
// identical to the undithered conversion (see TestNoDither below).
static const uint8_t kNoDither4x4[16] = {
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
};
  2164. TEST_F(LibYUVConvertTest, TestNoDither) {
  2165. align_buffer_page_end(src_argb, benchmark_width_ * benchmark_height_ * 4);
  2166. align_buffer_page_end(dst_rgb565, benchmark_width_ * benchmark_height_ * 2);
  2167. align_buffer_page_end(dst_rgb565dither,
  2168. benchmark_width_ * benchmark_height_ * 2);
  2169. MemRandomize(src_argb, benchmark_width_ * benchmark_height_ * 4);
  2170. MemRandomize(dst_rgb565, benchmark_width_ * benchmark_height_ * 2);
  2171. MemRandomize(dst_rgb565dither, benchmark_width_ * benchmark_height_ * 2);
  2172. ARGBToRGB565(src_argb, benchmark_width_ * 4, dst_rgb565, benchmark_width_ * 2,
  2173. benchmark_width_, benchmark_height_);
  2174. ARGBToRGB565Dither(src_argb, benchmark_width_ * 4, dst_rgb565dither,
  2175. benchmark_width_ * 2, kNoDither4x4, benchmark_width_,
  2176. benchmark_height_);
  2177. for (int i = 0; i < benchmark_width_ * benchmark_height_ * 2; ++i) {
  2178. EXPECT_EQ(dst_rgb565[i], dst_rgb565dither[i]);
  2179. }
  2180. free_aligned_buffer_page_end(src_argb);
  2181. free_aligned_buffer_page_end(dst_rgb565);
  2182. free_aligned_buffer_page_end(dst_rgb565dither);
  2183. }
// Ordered 4x4 dither for 888 to 565. Values from 0 to 7.
// Tiled over the image by ARGBToRGB565Dither; see TestDither below.
static const uint8_t kDither565_4x4[16] = {
    0, 4, 1, 5, 6, 2, 7, 3, 1, 5, 0, 4, 7, 3, 6, 2,
};
  2188. TEST_F(LibYUVConvertTest, TestDither) {
  2189. align_buffer_page_end(src_argb, benchmark_width_ * benchmark_height_ * 4);
  2190. align_buffer_page_end(dst_rgb565, benchmark_width_ * benchmark_height_ * 2);
  2191. align_buffer_page_end(dst_rgb565dither,
  2192. benchmark_width_ * benchmark_height_ * 2);
  2193. align_buffer_page_end(dst_argb, benchmark_width_ * benchmark_height_ * 4);
  2194. align_buffer_page_end(dst_argbdither,
  2195. benchmark_width_ * benchmark_height_ * 4);
  2196. MemRandomize(src_argb, benchmark_width_ * benchmark_height_ * 4);
  2197. MemRandomize(dst_rgb565, benchmark_width_ * benchmark_height_ * 2);
  2198. MemRandomize(dst_rgb565dither, benchmark_width_ * benchmark_height_ * 2);
  2199. MemRandomize(dst_argb, benchmark_width_ * benchmark_height_ * 4);
  2200. MemRandomize(dst_argbdither, benchmark_width_ * benchmark_height_ * 4);
  2201. ARGBToRGB565(src_argb, benchmark_width_ * 4, dst_rgb565, benchmark_width_ * 2,
  2202. benchmark_width_, benchmark_height_);
  2203. ARGBToRGB565Dither(src_argb, benchmark_width_ * 4, dst_rgb565dither,
  2204. benchmark_width_ * 2, kDither565_4x4, benchmark_width_,
  2205. benchmark_height_);
  2206. RGB565ToARGB(dst_rgb565, benchmark_width_ * 2, dst_argb, benchmark_width_ * 4,
  2207. benchmark_width_, benchmark_height_);
  2208. RGB565ToARGB(dst_rgb565dither, benchmark_width_ * 2, dst_argbdither,
  2209. benchmark_width_ * 4, benchmark_width_, benchmark_height_);
  2210. for (int i = 0; i < benchmark_width_ * benchmark_height_ * 4; ++i) {
  2211. EXPECT_NEAR(dst_argb[i], dst_argbdither[i], 9);
  2212. }
  2213. free_aligned_buffer_page_end(src_argb);
  2214. free_aligned_buffer_page_end(dst_rgb565);
  2215. free_aligned_buffer_page_end(dst_rgb565dither);
  2216. free_aligned_buffer_page_end(dst_argb);
  2217. free_aligned_buffer_page_end(dst_argbdither);
  2218. }
// TESTPLANARTOBID: planar YUV -> FMT_B conversion with dithering (NULL
// dither matrix, i.e. the default). Runs the C-only path once and the
// optimized path benchmark_iterations_ times, then widens both outputs to
// FMT_C so packed 16-bit pixels expand to bytes, and requires the maximum
// per-byte difference to be <= DIFF. N/NEG/OFF select the variant:
// _Any (odd width), _Unaligned (offset buffers), _Invert (negative
// height), _Opt (aligned, full speed).
#define TESTPLANARTOBID(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                        YALIGN, W1280, DIFF, N, NEG, OFF, FMT_C, BPP_C)        \
  TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##Dither##N) {                \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                            \
    const int kHeight = ALIGNINT(benchmark_height_, YALIGN);                   \
    const int kStrideB = ALIGNINT(kWidth * BPP_B, ALIGN);                      \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X);                        \
    const int kSizeUV = kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y);             \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF);                       \
    align_buffer_page_end(src_u, kSizeUV + OFF);                               \
    align_buffer_page_end(src_v, kSizeUV + OFF);                               \
    align_buffer_page_end(dst_argb_c, kStrideB* kHeight + OFF);                \
    align_buffer_page_end(dst_argb_opt, kStrideB* kHeight + OFF);              \
    for (int i = 0; i < kWidth * kHeight; ++i) {                               \
      src_y[i + OFF] = (fastrand() & 0xff);                                    \
    }                                                                          \
    for (int i = 0; i < kSizeUV; ++i) {                                        \
      src_u[i + OFF] = (fastrand() & 0xff);                                    \
      src_v[i + OFF] = (fastrand() & 0xff);                                    \
    }                                                                          \
    /* Distinct fill values so untouched bytes are detected as a mismatch. */  \
    memset(dst_argb_c + OFF, 1, kStrideB * kHeight);                           \
    memset(dst_argb_opt + OFF, 101, kStrideB * kHeight);                       \
    MaskCpuFlags(disable_cpu_flags_);                                          \
    FMT_PLANAR##To##FMT_B##Dither(src_y + OFF, kWidth, src_u + OFF, kStrideUV, \
                                  src_v + OFF, kStrideUV, dst_argb_c + OFF,    \
                                  kStrideB, NULL, kWidth, NEG kHeight);        \
    MaskCpuFlags(benchmark_cpu_info_);                                         \
    for (int i = 0; i < benchmark_iterations_; ++i) {                          \
      FMT_PLANAR##To##FMT_B##Dither(                                           \
          src_y + OFF, kWidth, src_u + OFF, kStrideUV, src_v + OFF, kStrideUV, \
          dst_argb_opt + OFF, kStrideB, NULL, kWidth, NEG kHeight);            \
    }                                                                          \
    int max_diff = 0;                                                          \
    /* Convert to ARGB so 565 is expanded to bytes that can be compared. */    \
    align_buffer_page_end(dst_argb32_c, kWidth* BPP_C* kHeight);               \
    align_buffer_page_end(dst_argb32_opt, kWidth* BPP_C* kHeight);             \
    memset(dst_argb32_c, 2, kWidth* BPP_C* kHeight);                           \
    memset(dst_argb32_opt, 102, kWidth* BPP_C* kHeight);                       \
    FMT_B##To##FMT_C(dst_argb_c + OFF, kStrideB, dst_argb32_c, kWidth * BPP_C, \
                     kWidth, kHeight);                                         \
    FMT_B##To##FMT_C(dst_argb_opt + OFF, kStrideB, dst_argb32_opt,             \
                     kWidth * BPP_C, kWidth, kHeight);                         \
    for (int i = 0; i < kWidth * BPP_C * kHeight; ++i) {                       \
      int abs_diff = abs(static_cast<int>(dst_argb32_c[i]) -                   \
                         static_cast<int>(dst_argb32_opt[i]));                 \
      if (abs_diff > max_diff) {                                               \
        max_diff = abs_diff;                                                   \
      }                                                                        \
    }                                                                          \
    EXPECT_LE(max_diff, DIFF);                                                 \
    free_aligned_buffer_page_end(src_y);                                       \
    free_aligned_buffer_page_end(src_u);                                       \
    free_aligned_buffer_page_end(src_v);                                       \
    free_aligned_buffer_page_end(dst_argb_c);                                  \
    free_aligned_buffer_page_end(dst_argb_opt);                                \
    free_aligned_buffer_page_end(dst_argb32_c);                                \
    free_aligned_buffer_page_end(dst_argb32_opt);                              \
  }
// Instantiates the four standard variants (_Any, _Unaligned, _Invert, _Opt)
// of the dithered planar-to-RGB test above.
#define TESTPLANARTOBD(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                       YALIGN, DIFF, FMT_C, BPP_C)                            \
  TESTPLANARTOBID(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                  YALIGN, benchmark_width_ - 4, DIFF, _Any, +, 0, FMT_C,      \
                  BPP_C)                                                      \
  TESTPLANARTOBID(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                  YALIGN, benchmark_width_, DIFF, _Unaligned, +, 1, FMT_C,    \
                  BPP_C)                                                      \
  TESTPLANARTOBID(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                  YALIGN, benchmark_width_, DIFF, _Invert, -, 0, FMT_C,       \
                  BPP_C)                                                      \
  TESTPLANARTOBID(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                  YALIGN, benchmark_width_, DIFF, _Opt, +, 0, FMT_C, BPP_C)
#ifdef LITTLE_ENDIAN_TEST
// Packed 16-bit RGB565 is byte-order dependent; only tested little endian.
TESTPLANARTOBD(I420, 2, 2, RGB565, 2, 2, 1, 9, ARGB, 4)
#endif
// TESTPTOB: verifies a one-step packed-YUV (YUY2/UYVY) to NV12 conversion
// against a two-step reference (packed -> I420 -> NV12). Y planes must
// match the source conversion and the reference exactly; the interleaved
// UV plane must match the reference exactly.
#define TESTPTOB(NAME, UYVYTOI420, UYVYTONV12)                                \
  TEST_F(LibYUVConvertTest, NAME) {                                           \
    const int kWidth = benchmark_width_;                                      \
    const int kHeight = benchmark_height_;                                    \
                                                                              \
    align_buffer_page_end(orig_uyvy, 4 * SUBSAMPLE(kWidth, 2) * kHeight);     \
    align_buffer_page_end(orig_y, kWidth* kHeight);                           \
    align_buffer_page_end(orig_u,                                             \
                          SUBSAMPLE(kWidth, 2) * SUBSAMPLE(kHeight, 2));      \
    align_buffer_page_end(orig_v,                                             \
                          SUBSAMPLE(kWidth, 2) * SUBSAMPLE(kHeight, 2));      \
                                                                              \
    align_buffer_page_end(dst_y_orig, kWidth* kHeight);                       \
    align_buffer_page_end(dst_uv_orig,                                        \
                          2 * SUBSAMPLE(kWidth, 2) * SUBSAMPLE(kHeight, 2));  \
                                                                              \
    align_buffer_page_end(dst_y, kWidth* kHeight);                            \
    align_buffer_page_end(dst_uv,                                             \
                          2 * SUBSAMPLE(kWidth, 2) * SUBSAMPLE(kHeight, 2));  \
                                                                              \
    MemRandomize(orig_uyvy, 4 * SUBSAMPLE(kWidth, 2) * kHeight);              \
                                                                              \
    /* Convert UYVY to NV12 in 2 steps for reference */                       \
    libyuv::UYVYTOI420(orig_uyvy, 4 * SUBSAMPLE(kWidth, 2), orig_y, kWidth,   \
                       orig_u, SUBSAMPLE(kWidth, 2), orig_v,                  \
                       SUBSAMPLE(kWidth, 2), kWidth, kHeight);                \
    libyuv::I420ToNV12(orig_y, kWidth, orig_u, SUBSAMPLE(kWidth, 2), orig_v,  \
                       SUBSAMPLE(kWidth, 2), dst_y_orig, kWidth, dst_uv_orig, \
                       2 * SUBSAMPLE(kWidth, 2), kWidth, kHeight);            \
                                                                              \
    /* Convert to NV12 */                                                     \
    for (int i = 0; i < benchmark_iterations_; ++i) {                         \
      libyuv::UYVYTONV12(orig_uyvy, 4 * SUBSAMPLE(kWidth, 2), dst_y, kWidth,  \
                         dst_uv, 2 * SUBSAMPLE(kWidth, 2), kWidth, kHeight);  \
    }                                                                         \
                                                                              \
    for (int i = 0; i < kWidth * kHeight; ++i) {                              \
      EXPECT_EQ(orig_y[i], dst_y[i]);                                         \
    }                                                                         \
    for (int i = 0; i < kWidth * kHeight; ++i) {                              \
      EXPECT_EQ(dst_y_orig[i], dst_y[i]);                                     \
    }                                                                         \
    for (int i = 0; i < 2 * SUBSAMPLE(kWidth, 2) * SUBSAMPLE(kHeight, 2);     \
         ++i) {                                                               \
      EXPECT_EQ(dst_uv_orig[i], dst_uv[i]);                                   \
    }                                                                         \
                                                                              \
    free_aligned_buffer_page_end(orig_uyvy);                                  \
    free_aligned_buffer_page_end(orig_y);                                     \
    free_aligned_buffer_page_end(orig_u);                                     \
    free_aligned_buffer_page_end(orig_v);                                     \
    free_aligned_buffer_page_end(dst_y_orig);                                 \
    free_aligned_buffer_page_end(dst_uv_orig);                                \
    free_aligned_buffer_page_end(dst_y);                                      \
    free_aligned_buffer_page_end(dst_uv);                                     \
  }
// One-step YUY2/UYVY -> NV12 checked against the two-step reference above.
TESTPTOB(TestYUY2ToNV12, YUY2ToI420, YUY2ToNV12)
TESTPTOB(TestUYVYToNV12, UYVYToI420, UYVYToNV12)
// Transitive tests. A to B to C is same as A to C.
// TESTPLANARTOEI: converts planar YUV (A) directly to FMT_C in one step,
// and via FMT_B in two steps, then requires the two FMT_C outputs to be
// identical byte for byte.
#define TESTPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
                       W1280, N, NEG, OFF, FMT_C, BPP_C)                      \
  TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##_##FMT_C##N) {             \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                           \
    const int kHeight = benchmark_height_;                                    \
    const int kStrideB = SUBSAMPLE(kWidth, SUB_B) * BPP_B;                    \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X);                       \
    const int kSizeUV = kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y);            \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF);                      \
    align_buffer_page_end(src_u, kSizeUV + OFF);                              \
    align_buffer_page_end(src_v, kSizeUV + OFF);                              \
    align_buffer_page_end(dst_argb_b, kStrideB* kHeight + OFF);               \
    for (int i = 0; i < kWidth * kHeight; ++i) {                              \
      src_y[i + OFF] = (fastrand() & 0xff);                                   \
    }                                                                         \
    for (int i = 0; i < kSizeUV; ++i) {                                       \
      src_u[i + OFF] = (fastrand() & 0xff);                                   \
      src_v[i + OFF] = (fastrand() & 0xff);                                   \
    }                                                                         \
    memset(dst_argb_b + OFF, 1, kStrideB * kHeight);                          \
    for (int i = 0; i < benchmark_iterations_; ++i) {                         \
      FMT_PLANAR##To##FMT_B(src_y + OFF, kWidth, src_u + OFF, kStrideUV,      \
                            src_v + OFF, kStrideUV, dst_argb_b + OFF,         \
                            kStrideB, kWidth, NEG kHeight);                   \
    }                                                                         \
    /* Convert to a 3rd format in 1 step and 2 steps and compare */           \
    const int kStrideC = kWidth * BPP_C;                                      \
    align_buffer_page_end(dst_argb_c, kStrideC* kHeight + OFF);               \
    align_buffer_page_end(dst_argb_bc, kStrideC* kHeight + OFF);              \
    memset(dst_argb_c + OFF, 2, kStrideC * kHeight);                          \
    memset(dst_argb_bc + OFF, 3, kStrideC * kHeight);                         \
    FMT_PLANAR##To##FMT_C(src_y + OFF, kWidth, src_u + OFF, kStrideUV,        \
                          src_v + OFF, kStrideUV, dst_argb_c + OFF, kStrideC, \
                          kWidth, NEG kHeight);                               \
    /* Convert B to C */                                                      \
    FMT_B##To##FMT_C(dst_argb_b + OFF, kStrideB, dst_argb_bc + OFF, kStrideC, \
                     kWidth, kHeight);                                        \
    for (int i = 0; i < kStrideC * kHeight; ++i) {                            \
      EXPECT_EQ(dst_argb_c[i + OFF], dst_argb_bc[i + OFF]);                   \
    }                                                                         \
    free_aligned_buffer_page_end(src_y);                                      \
    free_aligned_buffer_page_end(src_u);                                      \
    free_aligned_buffer_page_end(src_v);                                      \
    free_aligned_buffer_page_end(dst_argb_b);                                 \
    free_aligned_buffer_page_end(dst_argb_c);                                 \
    free_aligned_buffer_page_end(dst_argb_bc);                                \
  }
// Instantiates the four standard variants (_Any, _Unaligned, _Invert, _Opt)
// of the transitive planar-to-RGB test above.
#define TESTPLANARTOE(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
                      FMT_C, BPP_C)                                          \
  TESTPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                 benchmark_width_ - 4, _Any, +, 0, FMT_C, BPP_C)             \
  TESTPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                 benchmark_width_, _Unaligned, +, 1, FMT_C, BPP_C)           \
  TESTPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                 benchmark_width_, _Invert, -, 0, FMT_C, BPP_C)              \
  TESTPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                 benchmark_width_, _Opt, +, 0, FMT_C, BPP_C)
// 4:2:0 sources to 32-bit and 24-bit destinations.
TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, ABGR, 4)
TESTPLANARTOE(J420, 2, 2, ARGB, 1, 4, ARGB, 4)
TESTPLANARTOE(J420, 2, 2, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(H420, 2, 2, ARGB, 1, 4, ARGB, 4)
TESTPLANARTOE(H420, 2, 2, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(I420, 2, 2, BGRA, 1, 4, ARGB, 4)
TESTPLANARTOE(I420, 2, 2, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(I420, 2, 2, RGBA, 1, 4, ARGB, 4)
TESTPLANARTOE(I420, 2, 2, RGB24, 1, 3, ARGB, 4)
TESTPLANARTOE(I420, 2, 2, RAW, 1, 3, RGB24, 3)
TESTPLANARTOE(I420, 2, 2, RGB24, 1, 3, RAW, 3)
TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, RAW, 3)
TESTPLANARTOE(I420, 2, 2, RAW, 1, 3, ARGB, 4)
TESTPLANARTOE(H420, 2, 2, RGB24, 1, 3, ARGB, 4)
TESTPLANARTOE(H420, 2, 2, RAW, 1, 3, RGB24, 3)
TESTPLANARTOE(H420, 2, 2, RGB24, 1, 3, RAW, 3)
TESTPLANARTOE(H420, 2, 2, ARGB, 1, 4, RAW, 3)
TESTPLANARTOE(H420, 2, 2, RAW, 1, 3, ARGB, 4)
#ifdef LITTLE_ENDIAN_TEST
// Packed 16-bit destinations are byte-order dependent.
TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, RGB565, 2)
TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, ARGB1555, 2)
TESTPLANARTOE(I420, 2, 2, ARGB, 1, 4, ARGB4444, 2)
TESTPLANARTOE(I422, 2, 1, ARGB, 1, 4, RGB565, 2)
#endif
// 4:2:2 and 4:4:4 sources.
TESTPLANARTOE(J422, 2, 1, ARGB, 1, 4, ARGB, 4)
TESTPLANARTOE(J422, 2, 1, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(H422, 2, 1, ARGB, 1, 4, ARGB, 4)
TESTPLANARTOE(H422, 2, 1, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(I422, 2, 1, BGRA, 1, 4, ARGB, 4)
TESTPLANARTOE(I422, 2, 1, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(I422, 2, 1, RGBA, 1, 4, ARGB, 4)
TESTPLANARTOE(I444, 1, 1, ARGB, 1, 4, ARGB, 4)
TESTPLANARTOE(J444, 1, 1, ARGB, 1, 4, ARGB, 4)
TESTPLANARTOE(I444, 1, 1, ABGR, 1, 4, ARGB, 4)
TESTPLANARTOE(I420, 2, 2, YUY2, 2, 4, ARGB, 4)
TESTPLANARTOE(I420, 2, 2, UYVY, 2, 4, ARGB, 4)
TESTPLANARTOE(I422, 2, 1, YUY2, 2, 4, ARGB, 4)
TESTPLANARTOE(I422, 2, 1, UYVY, 2, 4, ARGB, 4)
// TESTQPLANARTOEI: transitive test for planar YUV with an alpha plane
// (e.g. I420Alpha). Converts directly to FMT_C and via FMT_B; the two
// FMT_C results must match exactly. ATTEN is passed through as the final
// argument of the conversion (alpha attenuation / premultiply flag).
#define TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
                        W1280, N, NEG, OFF, FMT_C, BPP_C, ATTEN)               \
  TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##_##FMT_C##N) {              \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                            \
    const int kHeight = benchmark_height_;                                     \
    const int kStrideB = SUBSAMPLE(kWidth, SUB_B) * BPP_B;                     \
    const int kSizeUV =                                                        \
        SUBSAMPLE(kWidth, SUBSAMP_X) * SUBSAMPLE(kHeight, SUBSAMP_Y);          \
    align_buffer_page_end(src_y, kWidth* kHeight + OFF);                       \
    align_buffer_page_end(src_u, kSizeUV + OFF);                               \
    align_buffer_page_end(src_v, kSizeUV + OFF);                               \
    align_buffer_page_end(src_a, kWidth* kHeight + OFF);                       \
    align_buffer_page_end(dst_argb_b, kStrideB* kHeight + OFF);                \
    for (int i = 0; i < kWidth * kHeight; ++i) {                               \
      src_y[i + OFF] = (fastrand() & 0xff);                                    \
      src_a[i + OFF] = (fastrand() & 0xff);                                    \
    }                                                                          \
    for (int i = 0; i < kSizeUV; ++i) {                                        \
      src_u[i + OFF] = (fastrand() & 0xff);                                    \
      src_v[i + OFF] = (fastrand() & 0xff);                                    \
    }                                                                          \
    memset(dst_argb_b + OFF, 1, kStrideB * kHeight);                           \
    for (int i = 0; i < benchmark_iterations_; ++i) {                          \
      FMT_PLANAR##To##FMT_B(                                                   \
          src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SUBSAMP_X),      \
          src_v + OFF, SUBSAMPLE(kWidth, SUBSAMP_X), src_a + OFF, kWidth,      \
          dst_argb_b + OFF, kStrideB, kWidth, NEG kHeight, ATTEN);             \
    }                                                                          \
    /* Convert to a 3rd format in 1 step and 2 steps and compare */            \
    const int kStrideC = kWidth * BPP_C;                                       \
    align_buffer_page_end(dst_argb_c, kStrideC* kHeight + OFF);                \
    align_buffer_page_end(dst_argb_bc, kStrideC* kHeight + OFF);               \
    memset(dst_argb_c + OFF, 2, kStrideC * kHeight);                           \
    memset(dst_argb_bc + OFF, 3, kStrideC * kHeight);                          \
    FMT_PLANAR##To##FMT_C(                                                     \
        src_y + OFF, kWidth, src_u + OFF, SUBSAMPLE(kWidth, SUBSAMP_X),        \
        src_v + OFF, SUBSAMPLE(kWidth, SUBSAMP_X), src_a + OFF, kWidth,        \
        dst_argb_c + OFF, kStrideC, kWidth, NEG kHeight, ATTEN);               \
    /* Convert B to C */                                                       \
    FMT_B##To##FMT_C(dst_argb_b + OFF, kStrideB, dst_argb_bc + OFF, kStrideC,  \
                     kWidth, kHeight);                                         \
    for (int i = 0; i < kStrideC * kHeight; ++i) {                             \
      EXPECT_EQ(dst_argb_c[i + OFF], dst_argb_bc[i + OFF]);                    \
    }                                                                          \
    free_aligned_buffer_page_end(src_y);                                       \
    free_aligned_buffer_page_end(src_u);                                       \
    free_aligned_buffer_page_end(src_v);                                       \
    free_aligned_buffer_page_end(src_a);                                       \
    free_aligned_buffer_page_end(dst_argb_b);                                  \
    free_aligned_buffer_page_end(dst_argb_c);                                  \
    free_aligned_buffer_page_end(dst_argb_bc);                                 \
  }
// Five variants: the standard four plus _Premult, which enables the
// attenuation (ATTEN = 1) argument.
#define TESTQPLANARTOE(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B, \
                       FMT_C, BPP_C)                                          \
  TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                  benchmark_width_ - 4, _Any, +, 0, FMT_C, BPP_C, 0)          \
  TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                  benchmark_width_, _Unaligned, +, 1, FMT_C, BPP_C, 0)        \
  TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                  benchmark_width_, _Invert, -, 0, FMT_C, BPP_C, 0)           \
  TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                  benchmark_width_, _Opt, +, 0, FMT_C, BPP_C, 0)              \
  TESTQPLANARTOEI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, SUB_B, BPP_B,      \
                  benchmark_width_, _Premult, +, 0, FMT_C, BPP_C, 1)
TESTQPLANARTOE(I420Alpha, 2, 2, ARGB, 1, 4, ABGR, 4)
TESTQPLANARTOE(I420Alpha, 2, 2, ABGR, 1, 4, ARGB, 4)
// TESTPLANETOEI: transitive test for single-plane RGB formats. A -> C
// directly must match A -> B -> C. R, G and B bytes must match exactly;
// alpha is compared within 64 -- presumably because the 30-bit formats used
// with this macro carry only 2 alpha bits (see instantiations below).
#define TESTPLANETOEI(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B, W1280, N, NEG, \
                      OFF, FMT_C, BPP_C)                                       \
  TEST_F(LibYUVConvertTest, FMT_A##To##FMT_B##_##FMT_C##N) {                   \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                            \
    const int kHeight = benchmark_height_;                                     \
    const int kStrideA = SUBSAMPLE(kWidth, SUB_A) * BPP_A;                     \
    const int kStrideB = SUBSAMPLE(kWidth, SUB_B) * BPP_B;                     \
    align_buffer_page_end(src_argb_a, kStrideA* kHeight + OFF);                \
    align_buffer_page_end(dst_argb_b, kStrideB* kHeight + OFF);                \
    MemRandomize(src_argb_a + OFF, kStrideA * kHeight);                        \
    memset(dst_argb_b + OFF, 1, kStrideB * kHeight);                           \
    for (int i = 0; i < benchmark_iterations_; ++i) {                          \
      FMT_A##To##FMT_B(src_argb_a + OFF, kStrideA, dst_argb_b + OFF, kStrideB, \
                       kWidth, NEG kHeight);                                   \
    }                                                                          \
    /* Convert to a 3rd format in 1 step and 2 steps and compare */            \
    const int kStrideC = kWidth * BPP_C;                                       \
    align_buffer_page_end(dst_argb_c, kStrideC* kHeight + OFF);                \
    align_buffer_page_end(dst_argb_bc, kStrideC* kHeight + OFF);               \
    memset(dst_argb_c + OFF, 2, kStrideC * kHeight);                           \
    memset(dst_argb_bc + OFF, 3, kStrideC * kHeight);                          \
    FMT_A##To##FMT_C(src_argb_a + OFF, kStrideA, dst_argb_c + OFF, kStrideC,   \
                     kWidth, NEG kHeight);                                     \
    /* Convert B to C */                                                       \
    FMT_B##To##FMT_C(dst_argb_b + OFF, kStrideB, dst_argb_bc + OFF, kStrideC,  \
                     kWidth, kHeight);                                         \
    /* Step by 4 bytes per pixel: B, G, R exact; alpha byte within 64. */      \
    for (int i = 0; i < kStrideC * kHeight; i += 4) {                          \
      EXPECT_EQ(dst_argb_c[i + OFF + 0], dst_argb_bc[i + OFF + 0]);            \
      EXPECT_EQ(dst_argb_c[i + OFF + 1], dst_argb_bc[i + OFF + 1]);            \
      EXPECT_EQ(dst_argb_c[i + OFF + 2], dst_argb_bc[i + OFF + 2]);            \
      EXPECT_NEAR(dst_argb_c[i + OFF + 3], dst_argb_bc[i + OFF + 3], 64);      \
    }                                                                          \
    free_aligned_buffer_page_end(src_argb_a);                                  \
    free_aligned_buffer_page_end(dst_argb_b);                                  \
    free_aligned_buffer_page_end(dst_argb_c);                                  \
    free_aligned_buffer_page_end(dst_argb_bc);                                 \
  }
// Instantiates the four standard variants of the single-plane transitive
// test above.
#define TESTPLANETOE(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B, FMT_C, BPP_C) \
  TESTPLANETOEI(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B,                    \
                benchmark_width_ - 4, _Any, +, 0, FMT_C, BPP_C)              \
  TESTPLANETOEI(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B, benchmark_width_,  \
                _Unaligned, +, 1, FMT_C, BPP_C)                              \
  TESTPLANETOEI(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B, benchmark_width_,  \
                _Invert, -, 0, FMT_C, BPP_C)                                 \
  TESTPLANETOEI(FMT_A, SUB_A, BPP_A, FMT_B, SUB_B, BPP_B, benchmark_width_,  \
                _Opt, +, 0, FMT_C, BPP_C)
// Caveat: Destination needs to be 4 bytes
#ifdef LITTLE_ENDIAN_TEST
TESTPLANETOE(ARGB, 1, 4, AR30, 1, 4, ARGB, 4)
TESTPLANETOE(ABGR, 1, 4, AR30, 1, 4, ABGR, 4)
TESTPLANETOE(AR30, 1, 4, ARGB, 1, 4, ABGR, 4)
TESTPLANETOE(AR30, 1, 4, ABGR, 1, 4, ARGB, 4)
TESTPLANETOE(ARGB, 1, 4, AB30, 1, 4, ARGB, 4)
TESTPLANETOE(ABGR, 1, 4, AB30, 1, 4, ABGR, 4)
TESTPLANETOE(AB30, 1, 4, ARGB, 1, 4, ABGR, 4)
TESTPLANETOE(AB30, 1, 4, ABGR, 1, 4, ARGB, 4)
#endif
  2569. TEST_F(LibYUVConvertTest, RotateWithARGBSource) {
  2570. // 2x2 frames
  2571. uint32_t src[4];
  2572. uint32_t dst[4];
  2573. // some random input
  2574. src[0] = 0x11000000;
  2575. src[1] = 0x00450000;
  2576. src[2] = 0x00009f00;
  2577. src[3] = 0x000000ff;
  2578. // zeros on destination
  2579. dst[0] = 0x00000000;
  2580. dst[1] = 0x00000000;
  2581. dst[2] = 0x00000000;
  2582. dst[3] = 0x00000000;
  2583. int r = ConvertToARGB(reinterpret_cast<uint8_t*>(src),
  2584. 16, // input size
  2585. reinterpret_cast<uint8_t*>(dst),
  2586. 8, // destination stride
  2587. 0, // crop_x
  2588. 0, // crop_y
  2589. 2, // width
  2590. 2, // height
  2591. 2, // crop width
  2592. 2, // crop height
  2593. kRotate90, FOURCC_ARGB);
  2594. EXPECT_EQ(r, 0);
  2595. // 90 degrees rotation, no conversion
  2596. EXPECT_EQ(dst[0], src[2]);
  2597. EXPECT_EQ(dst[1], src[0]);
  2598. EXPECT_EQ(dst[2], src[3]);
  2599. EXPECT_EQ(dst[3], src[1]);
  2600. }
  2601. #ifdef HAS_ARGBTOAR30ROW_AVX2
  2602. TEST_F(LibYUVConvertTest, ARGBToAR30Row_Opt) {
  2603. // ARGBToAR30Row_AVX2 expects a multiple of 8 pixels.
  2604. const int kPixels = (benchmark_width_ * benchmark_height_ + 7) & ~7;
  2605. align_buffer_page_end(src, kPixels * 4);
  2606. align_buffer_page_end(dst_opt, kPixels * 4);
  2607. align_buffer_page_end(dst_c, kPixels * 4);
  2608. MemRandomize(src, kPixels * 4);
  2609. memset(dst_opt, 0, kPixels * 4);
  2610. memset(dst_c, 1, kPixels * 4);
  2611. ARGBToAR30Row_C(src, dst_c, kPixels);
  2612. int has_avx2 = TestCpuFlag(kCpuHasAVX2);
  2613. int has_ssse3 = TestCpuFlag(kCpuHasSSSE3);
  2614. for (int i = 0; i < benchmark_iterations_; ++i) {
  2615. if (has_avx2) {
  2616. ARGBToAR30Row_AVX2(src, dst_opt, kPixels);
  2617. } else if (has_ssse3) {
  2618. ARGBToAR30Row_SSSE3(src, dst_opt, kPixels);
  2619. } else {
  2620. ARGBToAR30Row_C(src, dst_opt, kPixels);
  2621. }
  2622. }
  2623. for (int i = 0; i < kPixels * 4; ++i) {
  2624. EXPECT_EQ(dst_opt[i], dst_c[i]);
  2625. }
  2626. free_aligned_buffer_page_end(src);
  2627. free_aligned_buffer_page_end(dst_opt);
  2628. free_aligned_buffer_page_end(dst_c);
  2629. }
  2630. #endif // HAS_ARGBTOAR30ROW_AVX2
  2631. #ifdef HAS_ABGRTOAR30ROW_AVX2
  2632. TEST_F(LibYUVConvertTest, ABGRToAR30Row_Opt) {
  2633. // ABGRToAR30Row_AVX2 expects a multiple of 8 pixels.
  2634. const int kPixels = (benchmark_width_ * benchmark_height_ + 7) & ~7;
  2635. align_buffer_page_end(src, kPixels * 4);
  2636. align_buffer_page_end(dst_opt, kPixels * 4);
  2637. align_buffer_page_end(dst_c, kPixels * 4);
  2638. MemRandomize(src, kPixels * 4);
  2639. memset(dst_opt, 0, kPixels * 4);
  2640. memset(dst_c, 1, kPixels * 4);
  2641. ABGRToAR30Row_C(src, dst_c, kPixels);
  2642. int has_avx2 = TestCpuFlag(kCpuHasAVX2);
  2643. int has_ssse3 = TestCpuFlag(kCpuHasSSSE3);
  2644. for (int i = 0; i < benchmark_iterations_; ++i) {
  2645. if (has_avx2) {
  2646. ABGRToAR30Row_AVX2(src, dst_opt, kPixels);
  2647. } else if (has_ssse3) {
  2648. ABGRToAR30Row_SSSE3(src, dst_opt, kPixels);
  2649. } else {
  2650. ABGRToAR30Row_C(src, dst_opt, kPixels);
  2651. }
  2652. }
  2653. for (int i = 0; i < kPixels * 4; ++i) {
  2654. EXPECT_EQ(dst_opt[i], dst_c[i]);
  2655. }
  2656. free_aligned_buffer_page_end(src);
  2657. free_aligned_buffer_page_end(dst_opt);
  2658. free_aligned_buffer_page_end(dst_c);
  2659. }
  2660. #endif // HAS_ABGRTOAR30ROW_AVX2
// TODO(fbarchard): Fix clamping issue affected by U channel.
// TESTPLANAR16TOBI: 10-bit planar YUV (uint16_t samples masked to 0..0x3ff)
// to RGB. Runs the C-only path once and the optimized path
// benchmark_iterations_ times; max per-byte diff must be <= DIFF.
#define TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, \
                         ALIGN, YALIGN, W1280, DIFF, N, NEG, SOFF, DOFF) \
  TEST_F(LibYUVConvertTest, FMT_PLANAR##To##FMT_B##N) {                  \
    const int kWidth = ((W1280) > 0) ? (W1280) : 1;                      \
    const int kHeight = ALIGNINT(benchmark_height_, YALIGN);             \
    const int kStrideB = ALIGNINT(kWidth * BPP_B, ALIGN);                \
    const int kStrideUV = SUBSAMPLE(kWidth, SUBSAMP_X);                  \
    const int kSizeUV = kStrideUV * SUBSAMPLE(kHeight, SUBSAMP_Y);       \
    const int kBpc = 2; /* bytes per channel: 16-bit samples */          \
    align_buffer_page_end(src_y, kWidth* kHeight* kBpc + SOFF);          \
    align_buffer_page_end(src_u, kSizeUV* kBpc + SOFF);                  \
    align_buffer_page_end(src_v, kSizeUV* kBpc + SOFF);                  \
    align_buffer_page_end(dst_argb_c, kStrideB* kHeight + DOFF);         \
    align_buffer_page_end(dst_argb_opt, kStrideB* kHeight + DOFF);       \
    for (int i = 0; i < kWidth * kHeight; ++i) {                         \
      reinterpret_cast<uint16_t*>(src_y + SOFF)[i] = (fastrand() & 0x3ff); \
    }                                                                    \
    for (int i = 0; i < kSizeUV; ++i) {                                  \
      reinterpret_cast<uint16_t*>(src_u + SOFF)[i] = (fastrand() & 0x3ff); \
      reinterpret_cast<uint16_t*>(src_v + SOFF)[i] = (fastrand() & 0x3ff); \
    }                                                                    \
    memset(dst_argb_c + DOFF, 1, kStrideB * kHeight);                    \
    memset(dst_argb_opt + DOFF, 101, kStrideB * kHeight);                \
    MaskCpuFlags(disable_cpu_flags_);                                    \
    FMT_PLANAR##To##FMT_B(                                               \
        reinterpret_cast<uint16_t*>(src_y + SOFF), kWidth,               \
        reinterpret_cast<uint16_t*>(src_u + SOFF), kStrideUV,            \
        reinterpret_cast<uint16_t*>(src_v + SOFF), kStrideUV,            \
        dst_argb_c + DOFF, kStrideB, kWidth, NEG kHeight);               \
    MaskCpuFlags(benchmark_cpu_info_);                                   \
    for (int i = 0; i < benchmark_iterations_; ++i) {                    \
      FMT_PLANAR##To##FMT_B(                                             \
          reinterpret_cast<uint16_t*>(src_y + SOFF), kWidth,             \
          reinterpret_cast<uint16_t*>(src_u + SOFF), kStrideUV,          \
          reinterpret_cast<uint16_t*>(src_v + SOFF), kStrideUV,          \
          dst_argb_opt + DOFF, kStrideB, kWidth, NEG kHeight);           \
    }                                                                    \
    int max_diff = 0;                                                    \
    for (int i = 0; i < kWidth * BPP_B * kHeight; ++i) {                 \
      int abs_diff = abs(static_cast<int>(dst_argb_c[i + DOFF]) -        \
                         static_cast<int>(dst_argb_opt[i + DOFF]));      \
      if (abs_diff > max_diff) {                                         \
        max_diff = abs_diff;                                             \
      }                                                                  \
    }                                                                    \
    EXPECT_LE(max_diff, DIFF);                                           \
    free_aligned_buffer_page_end(src_y);                                 \
    free_aligned_buffer_page_end(src_u);                                 \
    free_aligned_buffer_page_end(src_v);                                 \
    free_aligned_buffer_page_end(dst_argb_c);                            \
    free_aligned_buffer_page_end(dst_argb_opt);                          \
  }
// Instantiates the four standard variants of the 10-bit planar test above.
#define TESTPLANAR16TOB(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN, \
                        YALIGN, DIFF)                                          \
  TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                   YALIGN, benchmark_width_ - 4, DIFF, _Any, +, 0, 0)          \
  TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                   YALIGN, benchmark_width_, DIFF, _Unaligned, +, 1, 1)        \
  TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                   YALIGN, benchmark_width_, DIFF, _Invert, -, 0, 0)           \
  TESTPLANAR16TOBI(FMT_PLANAR, SUBSAMP_X, SUBSAMP_Y, FMT_B, BPP_B, ALIGN,      \
                   YALIGN, benchmark_width_, DIFF, _Opt, +, 0, 0)
// I010/H010 are 10-bit 4:2:0 sources.
TESTPLANAR16TOB(I010, 2, 2, ARGB, 4, 4, 1, 2)
TESTPLANAR16TOB(I010, 2, 2, ABGR, 4, 4, 1, 2)
#ifdef LITTLE_ENDIAN_TEST
// Packed 30-bit destinations are byte-order dependent.
TESTPLANAR16TOB(I010, 2, 2, AR30, 4, 4, 1, 2)
TESTPLANAR16TOB(I010, 2, 2, AB30, 4, 4, 1, 2)
#endif
TESTPLANAR16TOB(H010, 2, 2, ARGB, 4, 4, 1, 2)
TESTPLANAR16TOB(H010, 2, 2, ABGR, 4, 4, 1, 2)
#ifdef LITTLE_ENDIAN_TEST
TESTPLANAR16TOB(H010, 2, 2, AR30, 4, 4, 1, 2)
TESTPLANAR16TOB(H010, 2, 2, AB30, 4, 4, 1, 2)
#endif
  2736. static int Clamp(int y) {
  2737. if (y < 0) {
  2738. y = 0;
  2739. }
  2740. if (y > 255) {
  2741. y = 255;
  2742. }
  2743. return y;
  2744. }
  2745. static int Clamp10(int y) {
  2746. if (y < 0) {
  2747. y = 0;
  2748. }
  2749. if (y > 1023) {
  2750. y = 1023;
  2751. }
  2752. return y;
  2753. }
  2754. // Test 8 bit YUV to 8 bit RGB
  2755. TEST_F(LibYUVConvertTest, TestH420ToARGB) {
  2756. const int kSize = 256;
  2757. int histogram_b[256];
  2758. int histogram_g[256];
  2759. int histogram_r[256];
  2760. memset(histogram_b, 0, sizeof(histogram_b));
  2761. memset(histogram_g, 0, sizeof(histogram_g));
  2762. memset(histogram_r, 0, sizeof(histogram_r));
  2763. align_buffer_page_end(orig_yuv, kSize + kSize / 2 * 2);
  2764. align_buffer_page_end(argb_pixels, kSize * 4);
  2765. uint8_t* orig_y = orig_yuv;
  2766. uint8_t* orig_u = orig_y + kSize;
  2767. uint8_t* orig_v = orig_u + kSize / 2;
  2768. // Test grey scale
  2769. for (int i = 0; i < kSize; ++i) {
  2770. orig_y[i] = i;
  2771. }
  2772. for (int i = 0; i < kSize / 2; ++i) {
  2773. orig_u[i] = 128; // 128 is 0.
  2774. orig_v[i] = 128;
  2775. }
  2776. H420ToARGB(orig_y, 0, orig_u, 0, orig_v, 0, argb_pixels, 0, kSize, 1);
  2777. for (int i = 0; i < kSize; ++i) {
  2778. int b = argb_pixels[i * 4 + 0];
  2779. int g = argb_pixels[i * 4 + 1];
  2780. int r = argb_pixels[i * 4 + 2];
  2781. int a = argb_pixels[i * 4 + 3];
  2782. ++histogram_b[b];
  2783. ++histogram_g[g];
  2784. ++histogram_r[r];
  2785. int expected_y = Clamp(static_cast<int>((i - 16) * 1.164f));
  2786. EXPECT_NEAR(b, expected_y, 1);
  2787. EXPECT_NEAR(g, expected_y, 1);
  2788. EXPECT_NEAR(r, expected_y, 1);
  2789. EXPECT_EQ(a, 255);
  2790. }
  2791. int count_b = 0;
  2792. int count_g = 0;
  2793. int count_r = 0;
  2794. for (int i = 0; i < kSize; ++i) {
  2795. if (histogram_b[i]) {
  2796. ++count_b;
  2797. }
  2798. if (histogram_g[i]) {
  2799. ++count_g;
  2800. }
  2801. if (histogram_r[i]) {
  2802. ++count_r;
  2803. }
  2804. }
  2805. printf("uniques: B %d, G, %d, R %d\n", count_b, count_g, count_r);
  2806. free_aligned_buffer_page_end(orig_yuv);
  2807. free_aligned_buffer_page_end(argb_pixels);
  2808. }
  2809. // Test 10 bit YUV to 8 bit RGB
  2810. TEST_F(LibYUVConvertTest, TestH010ToARGB) {
  2811. const int kSize = 1024;
  2812. int histogram_b[1024];
  2813. int histogram_g[1024];
  2814. int histogram_r[1024];
  2815. memset(histogram_b, 0, sizeof(histogram_b));
  2816. memset(histogram_g, 0, sizeof(histogram_g));
  2817. memset(histogram_r, 0, sizeof(histogram_r));
  2818. align_buffer_page_end(orig_yuv, kSize * 2 + kSize / 2 * 2 * 2);
  2819. align_buffer_page_end(argb_pixels, kSize * 4);
  2820. uint16_t* orig_y = reinterpret_cast<uint16_t*>(orig_yuv);
  2821. uint16_t* orig_u = orig_y + kSize;
  2822. uint16_t* orig_v = orig_u + kSize / 2;
  2823. // Test grey scale
  2824. for (int i = 0; i < kSize; ++i) {
  2825. orig_y[i] = i;
  2826. }
  2827. for (int i = 0; i < kSize / 2; ++i) {
  2828. orig_u[i] = 512; // 512 is 0.
  2829. orig_v[i] = 512;
  2830. }
  2831. H010ToARGB(orig_y, 0, orig_u, 0, orig_v, 0, argb_pixels, 0, kSize, 1);
  2832. for (int i = 0; i < kSize; ++i) {
  2833. int b = argb_pixels[i * 4 + 0];
  2834. int g = argb_pixels[i * 4 + 1];
  2835. int r = argb_pixels[i * 4 + 2];
  2836. int a = argb_pixels[i * 4 + 3];
  2837. ++histogram_b[b];
  2838. ++histogram_g[g];
  2839. ++histogram_r[r];
  2840. int expected_y = Clamp(static_cast<int>((i - 64) * 1.164f / 4));
  2841. EXPECT_NEAR(b, expected_y, 1);
  2842. EXPECT_NEAR(g, expected_y, 1);
  2843. EXPECT_NEAR(r, expected_y, 1);
  2844. EXPECT_EQ(a, 255);
  2845. }
  2846. int count_b = 0;
  2847. int count_g = 0;
  2848. int count_r = 0;
  2849. for (int i = 0; i < kSize; ++i) {
  2850. if (histogram_b[i]) {
  2851. ++count_b;
  2852. }
  2853. if (histogram_g[i]) {
  2854. ++count_g;
  2855. }
  2856. if (histogram_r[i]) {
  2857. ++count_r;
  2858. }
  2859. }
  2860. printf("uniques: B %d, G, %d, R %d\n", count_b, count_g, count_r);
  2861. free_aligned_buffer_page_end(orig_yuv);
  2862. free_aligned_buffer_page_end(argb_pixels);
  2863. }
  2864. // Test 10 bit YUV to 10 bit RGB
  2865. // Caveat: Result is near due to float rounding in expected
  2866. // result.
  2867. TEST_F(LibYUVConvertTest, TestH010ToAR30) {
  2868. const int kSize = 1024;
  2869. int histogram_b[1024];
  2870. int histogram_g[1024];
  2871. int histogram_r[1024];
  2872. memset(histogram_b, 0, sizeof(histogram_b));
  2873. memset(histogram_g, 0, sizeof(histogram_g));
  2874. memset(histogram_r, 0, sizeof(histogram_r));
  2875. align_buffer_page_end(orig_yuv, kSize * 2 + kSize / 2 * 2 * 2);
  2876. align_buffer_page_end(ar30_pixels, kSize * 4);
  2877. uint16_t* orig_y = reinterpret_cast<uint16_t*>(orig_yuv);
  2878. uint16_t* orig_u = orig_y + kSize;
  2879. uint16_t* orig_v = orig_u + kSize / 2;
  2880. // Test grey scale
  2881. for (int i = 0; i < kSize; ++i) {
  2882. orig_y[i] = i;
  2883. }
  2884. for (int i = 0; i < kSize / 2; ++i) {
  2885. orig_u[i] = 512; // 512 is 0.
  2886. orig_v[i] = 512;
  2887. }
  2888. H010ToAR30(orig_y, 0, orig_u, 0, orig_v, 0, ar30_pixels, 0, kSize, 1);
  2889. for (int i = 0; i < kSize; ++i) {
  2890. int b10 = reinterpret_cast<uint32_t*>(ar30_pixels)[i] & 1023;
  2891. int g10 = (reinterpret_cast<uint32_t*>(ar30_pixels)[i] >> 10) & 1023;
  2892. int r10 = (reinterpret_cast<uint32_t*>(ar30_pixels)[i] >> 20) & 1023;
  2893. int a2 = (reinterpret_cast<uint32_t*>(ar30_pixels)[i] >> 30) & 3;
  2894. ++histogram_b[b10];
  2895. ++histogram_g[g10];
  2896. ++histogram_r[r10];
  2897. int expected_y = Clamp10(static_cast<int>((i - 64) * 1.164f));
  2898. EXPECT_NEAR(b10, expected_y, 4);
  2899. EXPECT_NEAR(g10, expected_y, 4);
  2900. EXPECT_NEAR(r10, expected_y, 4);
  2901. EXPECT_EQ(a2, 3);
  2902. }
  2903. int count_b = 0;
  2904. int count_g = 0;
  2905. int count_r = 0;
  2906. for (int i = 0; i < kSize; ++i) {
  2907. if (histogram_b[i]) {
  2908. ++count_b;
  2909. }
  2910. if (histogram_g[i]) {
  2911. ++count_g;
  2912. }
  2913. if (histogram_r[i]) {
  2914. ++count_r;
  2915. }
  2916. }
  2917. printf("uniques: B %d, G, %d, R %d\n", count_b, count_g, count_r);
  2918. free_aligned_buffer_page_end(orig_yuv);
  2919. free_aligned_buffer_page_end(ar30_pixels);
  2920. }
  2921. // Test 10 bit YUV to 10 bit RGB
  2922. // Caveat: Result is near due to float rounding in expected
  2923. // result.
  2924. TEST_F(LibYUVConvertTest, TestH010ToAB30) {
  2925. const int kSize = 1024;
  2926. int histogram_b[1024];
  2927. int histogram_g[1024];
  2928. int histogram_r[1024];
  2929. memset(histogram_b, 0, sizeof(histogram_b));
  2930. memset(histogram_g, 0, sizeof(histogram_g));
  2931. memset(histogram_r, 0, sizeof(histogram_r));
  2932. align_buffer_page_end(orig_yuv, kSize * 2 + kSize / 2 * 2 * 2);
  2933. align_buffer_page_end(ab30_pixels, kSize * 4);
  2934. uint16_t* orig_y = reinterpret_cast<uint16_t*>(orig_yuv);
  2935. uint16_t* orig_u = orig_y + kSize;
  2936. uint16_t* orig_v = orig_u + kSize / 2;
  2937. // Test grey scale
  2938. for (int i = 0; i < kSize; ++i) {
  2939. orig_y[i] = i;
  2940. }
  2941. for (int i = 0; i < kSize / 2; ++i) {
  2942. orig_u[i] = 512; // 512 is 0.
  2943. orig_v[i] = 512;
  2944. }
  2945. H010ToAB30(orig_y, 0, orig_u, 0, orig_v, 0, ab30_pixels, 0, kSize, 1);
  2946. for (int i = 0; i < kSize; ++i) {
  2947. int r10 = reinterpret_cast<uint32_t*>(ab30_pixels)[i] & 1023;
  2948. int g10 = (reinterpret_cast<uint32_t*>(ab30_pixels)[i] >> 10) & 1023;
  2949. int b10 = (reinterpret_cast<uint32_t*>(ab30_pixels)[i] >> 20) & 1023;
  2950. int a2 = (reinterpret_cast<uint32_t*>(ab30_pixels)[i] >> 30) & 3;
  2951. ++histogram_b[b10];
  2952. ++histogram_g[g10];
  2953. ++histogram_r[r10];
  2954. int expected_y = Clamp10(static_cast<int>((i - 64) * 1.164f));
  2955. EXPECT_NEAR(b10, expected_y, 4);
  2956. EXPECT_NEAR(g10, expected_y, 4);
  2957. EXPECT_NEAR(r10, expected_y, 4);
  2958. EXPECT_EQ(a2, 3);
  2959. }
  2960. int count_b = 0;
  2961. int count_g = 0;
  2962. int count_r = 0;
  2963. for (int i = 0; i < kSize; ++i) {
  2964. if (histogram_b[i]) {
  2965. ++count_b;
  2966. }
  2967. if (histogram_g[i]) {
  2968. ++count_g;
  2969. }
  2970. if (histogram_r[i]) {
  2971. ++count_r;
  2972. }
  2973. }
  2974. printf("uniques: B %d, G, %d, R %d\n", count_b, count_g, count_r);
  2975. free_aligned_buffer_page_end(orig_yuv);
  2976. free_aligned_buffer_page_end(ab30_pixels);
  2977. }
  2978. // Test 8 bit YUV to 10 bit RGB
  2979. TEST_F(LibYUVConvertTest, TestH420ToAR30) {
  2980. const int kSize = 256;
  2981. const int kHistSize = 1024;
  2982. int histogram_b[kHistSize];
  2983. int histogram_g[kHistSize];
  2984. int histogram_r[kHistSize];
  2985. memset(histogram_b, 0, sizeof(histogram_b));
  2986. memset(histogram_g, 0, sizeof(histogram_g));
  2987. memset(histogram_r, 0, sizeof(histogram_r));
  2988. align_buffer_page_end(orig_yuv, kSize + kSize / 2 * 2);
  2989. align_buffer_page_end(ar30_pixels, kSize * 4);
  2990. uint8_t* orig_y = orig_yuv;
  2991. uint8_t* orig_u = orig_y + kSize;
  2992. uint8_t* orig_v = orig_u + kSize / 2;
  2993. // Test grey scale
  2994. for (int i = 0; i < kSize; ++i) {
  2995. orig_y[i] = i;
  2996. }
  2997. for (int i = 0; i < kSize / 2; ++i) {
  2998. orig_u[i] = 128; // 128 is 0.
  2999. orig_v[i] = 128;
  3000. }
  3001. H420ToAR30(orig_y, 0, orig_u, 0, orig_v, 0, ar30_pixels, 0, kSize, 1);
  3002. for (int i = 0; i < kSize; ++i) {
  3003. int b10 = reinterpret_cast<uint32_t*>(ar30_pixels)[i] & 1023;
  3004. int g10 = (reinterpret_cast<uint32_t*>(ar30_pixels)[i] >> 10) & 1023;
  3005. int r10 = (reinterpret_cast<uint32_t*>(ar30_pixels)[i] >> 20) & 1023;
  3006. int a2 = (reinterpret_cast<uint32_t*>(ar30_pixels)[i] >> 30) & 3;
  3007. ++histogram_b[b10];
  3008. ++histogram_g[g10];
  3009. ++histogram_r[r10];
  3010. int expected_y = Clamp10(static_cast<int>((i - 16) * 1.164f * 4.f));
  3011. EXPECT_NEAR(b10, expected_y, 4);
  3012. EXPECT_NEAR(g10, expected_y, 4);
  3013. EXPECT_NEAR(r10, expected_y, 4);
  3014. EXPECT_EQ(a2, 3);
  3015. }
  3016. int count_b = 0;
  3017. int count_g = 0;
  3018. int count_r = 0;
  3019. for (int i = 0; i < kHistSize; ++i) {
  3020. if (histogram_b[i]) {
  3021. ++count_b;
  3022. }
  3023. if (histogram_g[i]) {
  3024. ++count_g;
  3025. }
  3026. if (histogram_r[i]) {
  3027. ++count_r;
  3028. }
  3029. }
  3030. printf("uniques: B %d, G, %d, R %d\n", count_b, count_g, count_r);
  3031. free_aligned_buffer_page_end(orig_yuv);
  3032. free_aligned_buffer_page_end(ar30_pixels);
  3033. }
  3034. // Test RGB24 to ARGB and back to RGB24
  3035. TEST_F(LibYUVConvertTest, TestARGBToRGB24) {
  3036. const int kSize = 256;
  3037. align_buffer_page_end(orig_rgb24, kSize * 3);
  3038. align_buffer_page_end(argb_pixels, kSize * 4);
  3039. align_buffer_page_end(dest_rgb24, kSize * 3);
  3040. // Test grey scale
  3041. for (int i = 0; i < kSize * 3; ++i) {
  3042. orig_rgb24[i] = i;
  3043. }
  3044. RGB24ToARGB(orig_rgb24, 0, argb_pixels, 0, kSize, 1);
  3045. ARGBToRGB24(argb_pixels, 0, dest_rgb24, 0, kSize, 1);
  3046. for (int i = 0; i < kSize * 3; ++i) {
  3047. EXPECT_EQ(orig_rgb24[i], dest_rgb24[i]);
  3048. }
  3049. free_aligned_buffer_page_end(orig_rgb24);
  3050. free_aligned_buffer_page_end(argb_pixels);
  3051. free_aligned_buffer_page_end(dest_rgb24);
  3052. }
  3053. } // namespace libyuv