2
0

onyx_if.c 180 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437
  1. /*
  2. * Copyright (c) 2010 The WebM project authors. All Rights Reserved.
  3. *
  4. * Use of this source code is governed by a BSD-style license
  5. * that can be found in the LICENSE file in the root of the source
  6. * tree. An additional intellectual property rights grant can be found
  7. * in the file PATENTS. All contributing project authors may
  8. * be found in the AUTHORS file in the root of the source tree.
  9. */
  10. #include "vpx_config.h"
  11. #include "./vpx_scale_rtcd.h"
  12. #include "./vpx_dsp_rtcd.h"
  13. #include "./vp8_rtcd.h"
  14. #include "bitstream.h"
  15. #include "vp8/common/onyxc_int.h"
  16. #include "vp8/common/blockd.h"
  17. #include "onyx_int.h"
  18. #include "vp8/common/systemdependent.h"
  19. #include "vp8/common/vp8_skin_detection.h"
  20. #include "vp8/encoder/quantize.h"
  21. #include "vp8/common/alloccommon.h"
  22. #include "mcomp.h"
  23. #include "firstpass.h"
  24. #include "vpx_dsp/psnr.h"
  25. #include "vpx_scale/vpx_scale.h"
  26. #include "vp8/common/extend.h"
  27. #include "ratectrl.h"
  28. #include "vp8/common/quant_common.h"
  29. #include "segmentation.h"
  30. #if CONFIG_POSTPROC
  31. #include "vp8/common/postproc.h"
  32. #endif
  33. #include "vpx_mem/vpx_mem.h"
  34. #include "vp8/common/reconintra.h"
  35. #include "vp8/common/swapyv12buffer.h"
  36. #include "vp8/common/threading.h"
  37. #include "vpx_ports/system_state.h"
  38. #include "vpx_ports/vpx_timer.h"
  39. #include "vpx_util/vpx_write_yuv_frame.h"
  40. #if ARCH_ARM
  41. #include "vpx_ports/arm.h"
  42. #endif
  43. #if CONFIG_MULTI_RES_ENCODING
  44. #include "mr_dissim.h"
  45. #endif
  46. #include "encodeframe.h"
  47. #if CONFIG_MULTITHREAD
  48. #include "ethreading.h"
  49. #endif
  50. #include "picklpf.h"
  51. #if !CONFIG_REALTIME_ONLY
  52. #include "temporal_filter.h"
  53. #endif
  54. #include <assert.h>
  55. #include <math.h>
  56. #include <stdio.h>
  57. #include <limits.h>
  58. #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
  59. extern int vp8_update_coef_context(VP8_COMP *cpi);
  60. #endif
  61. extern void vp8_deblock_frame(YV12_BUFFER_CONFIG *source,
  62. YV12_BUFFER_CONFIG *post, int filt_lvl,
  63. int low_var_thresh, int flag);
  64. extern unsigned int vp8_get_processor_freq();
  65. int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest);
  66. static void set_default_lf_deltas(VP8_COMP *cpi);
  67. extern const int vp8_gf_interval_table[101];
  68. #if CONFIG_INTERNAL_STATS
  69. #include "math.h"
  70. #include "vpx_dsp/ssim.h"
  71. #endif
  72. #ifdef OUTPUT_YUV_SRC
  73. FILE *yuv_file;
  74. #endif
  75. #ifdef OUTPUT_YUV_DENOISED
  76. FILE *yuv_denoised_file;
  77. #endif
  78. #ifdef OUTPUT_YUV_SKINMAP
  79. static FILE *yuv_skinmap_file = NULL;
  80. #endif
  81. #if 0
  82. FILE *framepsnr;
  83. FILE *kf_list;
  84. FILE *keyfile;
  85. #endif
  86. #if 0
  87. extern int skip_true_count;
  88. extern int skip_false_count;
  89. #endif
  90. #ifdef SPEEDSTATS
  91. unsigned int frames_at_speed[16] = { 0, 0, 0, 0, 0, 0, 0, 0,
  92. 0, 0, 0, 0, 0, 0, 0, 0 };
  93. unsigned int tot_pm = 0;
  94. unsigned int cnt_pm = 0;
  95. unsigned int tot_ef = 0;
  96. unsigned int cnt_ef = 0;
  97. #endif
  98. #ifdef MODE_STATS
  99. extern unsigned __int64 Sectionbits[50];
  100. extern int y_modes[5];
  101. extern int uv_modes[4];
  102. extern int b_modes[10];
  103. extern int inter_y_modes[10];
  104. extern int inter_uv_modes[4];
  105. extern unsigned int inter_b_modes[15];
  106. #endif
  107. extern const int vp8_bits_per_mb[2][QINDEX_RANGE];
  108. extern const int qrounding_factors[129];
  109. extern const int qzbin_factors[129];
  110. extern void vp8cx_init_quantizer(VP8_COMP *cpi);
  111. extern const int vp8cx_base_skip_false_prob[128];
  112. /* Tables relating active max Q to active min Q */
  113. static const unsigned char kf_low_motion_minq[QINDEX_RANGE] = {
  114. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  115. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  116. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1,
  117. 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 4, 4, 4, 5, 5, 5,
  118. 5, 5, 6, 6, 6, 6, 7, 7, 8, 8, 8, 8, 9, 9, 10, 10, 10, 10, 11,
  119. 11, 11, 11, 12, 12, 13, 13, 13, 13, 14, 14, 15, 15, 15, 15, 16, 16, 16, 16,
  120. 17, 17, 18, 18, 18, 18, 19, 20, 20, 21, 21, 22, 23, 23
  121. };
  122. static const unsigned char kf_high_motion_minq[QINDEX_RANGE] = {
  123. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
  124. 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1,
  125. 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 5,
  126. 5, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 8, 8, 8, 8, 9, 9, 10, 10,
  127. 10, 10, 11, 11, 11, 11, 12, 12, 13, 13, 13, 13, 14, 14, 15, 15, 15, 15, 16,
  128. 16, 16, 16, 17, 17, 18, 18, 18, 18, 19, 19, 20, 20, 20, 20, 21, 21, 21, 21,
  129. 22, 22, 23, 23, 24, 25, 25, 26, 26, 27, 28, 28, 29, 30
  130. };
  131. static const unsigned char gf_low_motion_minq[QINDEX_RANGE] = {
  132. 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3,
  133. 3, 4, 4, 4, 4, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8,
  134. 8, 8, 9, 9, 9, 9, 10, 10, 10, 10, 11, 11, 12, 12, 13, 13, 14, 14, 15,
  135. 15, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21, 21, 22, 22, 23, 23, 24, 24,
  136. 25, 25, 26, 26, 27, 27, 28, 28, 29, 29, 30, 30, 31, 31, 32, 32, 33, 33, 34,
  137. 34, 35, 35, 36, 36, 37, 37, 38, 38, 39, 39, 40, 40, 41, 41, 42, 42, 43, 44,
  138. 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58
  139. };
  140. static const unsigned char gf_mid_motion_minq[QINDEX_RANGE] = {
  141. 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 2, 2, 3, 3, 3, 4, 4, 4, 5,
  142. 5, 5, 6, 6, 6, 7, 7, 7, 8, 8, 8, 9, 9, 9, 10, 10, 10, 10, 11,
  143. 11, 11, 12, 12, 12, 12, 13, 13, 13, 14, 14, 14, 15, 15, 16, 16, 17, 17, 18,
  144. 18, 19, 19, 20, 20, 21, 21, 22, 22, 23, 23, 24, 24, 25, 25, 26, 26, 27, 27,
  145. 28, 28, 29, 29, 30, 30, 31, 31, 32, 32, 33, 33, 34, 34, 35, 35, 36, 36, 37,
  146. 37, 38, 39, 39, 40, 40, 41, 41, 42, 42, 43, 43, 44, 45, 46, 47, 48, 49, 50,
  147. 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64
  148. };
  149. static const unsigned char gf_high_motion_minq[QINDEX_RANGE] = {
  150. 0, 0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 5,
  151. 5, 5, 6, 6, 6, 7, 7, 7, 8, 8, 8, 9, 9, 9, 10, 10, 10, 11, 11,
  152. 12, 12, 13, 13, 14, 14, 15, 15, 16, 16, 17, 17, 18, 18, 19, 19, 20, 20, 21,
  153. 21, 22, 22, 23, 23, 24, 24, 25, 25, 26, 26, 27, 27, 28, 28, 29, 29, 30, 30,
  154. 31, 31, 32, 32, 33, 33, 34, 34, 35, 35, 36, 36, 37, 37, 38, 38, 39, 39, 40,
  155. 40, 41, 41, 42, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56,
  156. 57, 58, 59, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80
  157. };
  158. static const unsigned char inter_minq[QINDEX_RANGE] = {
  159. 0, 0, 1, 1, 2, 3, 3, 4, 4, 5, 6, 6, 7, 8, 8, 9, 9, 10, 11,
  160. 11, 12, 13, 13, 14, 15, 15, 16, 17, 17, 18, 19, 20, 20, 21, 22, 22, 23, 24,
  161. 24, 25, 26, 27, 27, 28, 29, 30, 30, 31, 32, 33, 33, 34, 35, 36, 36, 37, 38,
  162. 39, 39, 40, 41, 42, 42, 43, 44, 45, 46, 46, 47, 48, 49, 50, 50, 51, 52, 53,
  163. 54, 55, 55, 56, 57, 58, 59, 60, 60, 61, 62, 63, 64, 65, 66, 67, 67, 68, 69,
  164. 70, 71, 72, 73, 74, 75, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 86,
  165. 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100
  166. };
  167. #ifdef PACKET_TESTING
  168. extern FILE *vpxlogc;
  169. #endif
/* Snapshot all layer-dependent coding state from the encoder instance into
 * the context slot for the currently active temporal layer
 * (cpi->layer_context[cpi->current_layer]), so it can be re-applied later by
 * restore_layer_context() when the encoder switches back to this layer.
 * Covers rate-control targets, buffer levels, quality/Q tracking, rate
 * correction factors, loop-filter level and reference-frame usage counts. */
static void save_layer_context(VP8_COMP *cpi) {
  LAYER_CONTEXT *lc = &cpi->layer_context[cpi->current_layer];

  /* Save layer dependent coding state */
  lc->target_bandwidth = cpi->target_bandwidth;
  lc->starting_buffer_level = cpi->oxcf.starting_buffer_level;
  lc->optimal_buffer_level = cpi->oxcf.optimal_buffer_level;
  lc->maximum_buffer_size = cpi->oxcf.maximum_buffer_size;
  lc->starting_buffer_level_in_ms = cpi->oxcf.starting_buffer_level_in_ms;
  lc->optimal_buffer_level_in_ms = cpi->oxcf.optimal_buffer_level_in_ms;
  lc->maximum_buffer_size_in_ms = cpi->oxcf.maximum_buffer_size_in_ms;
  lc->buffer_level = cpi->buffer_level;
  lc->bits_off_target = cpi->bits_off_target;
  lc->total_actual_bits = cpi->total_actual_bits;
  /* Both the hard clamp range (worst/best) and the per-frame active range
   * are saved here; the restore side must mirror this set. */
  lc->worst_quality = cpi->worst_quality;
  lc->active_worst_quality = cpi->active_worst_quality;
  lc->best_quality = cpi->best_quality;
  lc->active_best_quality = cpi->active_best_quality;
  lc->ni_av_qi = cpi->ni_av_qi;
  lc->ni_tot_qi = cpi->ni_tot_qi;
  lc->ni_frames = cpi->ni_frames;
  lc->avg_frame_qindex = cpi->avg_frame_qindex;
  lc->rate_correction_factor = cpi->rate_correction_factor;
  lc->key_frame_rate_correction_factor = cpi->key_frame_rate_correction_factor;
  lc->gf_rate_correction_factor = cpi->gf_rate_correction_factor;
  lc->zbin_over_quant = cpi->mb.zbin_over_quant;
  lc->inter_frame_target = cpi->inter_frame_target;
  lc->total_byte_count = cpi->total_byte_count;
  lc->filter_level = cpi->common.filter_level;
  lc->frames_since_last_drop_overshoot = cpi->frames_since_last_drop_overshoot;
  lc->force_maxqp = cpi->force_maxqp;
  lc->last_frame_percent_intra = cpi->last_frame_percent_intra;
  lc->last_q[0] = cpi->last_q[0];
  lc->last_q[1] = cpi->last_q[1];
  /* Array copy: per-reference-frame macroblock usage counters. */
  memcpy(lc->count_mb_ref_frame_usage, cpi->mb.count_mb_ref_frame_usage,
         sizeof(cpi->mb.count_mb_ref_frame_usage));
}
/* Copy a temporal layer's saved rate-control and coding state from
 * cpi->layer_context[layer] back into the encoder instance.  This is the
 * exact mirror of save_layer_context(): every field restored here must
 * correspond one-for-one with a field saved there.
 */
static void restore_layer_context(VP8_COMP *cpi, const int layer) {
  LAYER_CONTEXT *lc = &cpi->layer_context[layer];

  /* Restore layer dependent coding state */
  cpi->current_layer = layer;
  cpi->target_bandwidth = lc->target_bandwidth;
  cpi->oxcf.target_bandwidth = lc->target_bandwidth;
  /* Buffer levels are tracked both in bits and in milliseconds. */
  cpi->oxcf.starting_buffer_level = lc->starting_buffer_level;
  cpi->oxcf.optimal_buffer_level = lc->optimal_buffer_level;
  cpi->oxcf.maximum_buffer_size = lc->maximum_buffer_size;
  cpi->oxcf.starting_buffer_level_in_ms = lc->starting_buffer_level_in_ms;
  cpi->oxcf.optimal_buffer_level_in_ms = lc->optimal_buffer_level_in_ms;
  cpi->oxcf.maximum_buffer_size_in_ms = lc->maximum_buffer_size_in_ms;
  cpi->buffer_level = lc->buffer_level;
  cpi->bits_off_target = lc->bits_off_target;
  cpi->total_actual_bits = lc->total_actual_bits;
  /* Quantizer selection and rate-correction state. */
  cpi->active_worst_quality = lc->active_worst_quality;
  cpi->active_best_quality = lc->active_best_quality;
  cpi->ni_av_qi = lc->ni_av_qi;
  cpi->ni_tot_qi = lc->ni_tot_qi;
  cpi->ni_frames = lc->ni_frames;
  cpi->avg_frame_qindex = lc->avg_frame_qindex;
  cpi->rate_correction_factor = lc->rate_correction_factor;
  cpi->key_frame_rate_correction_factor = lc->key_frame_rate_correction_factor;
  cpi->gf_rate_correction_factor = lc->gf_rate_correction_factor;
  cpi->mb.zbin_over_quant = lc->zbin_over_quant;
  cpi->inter_frame_target = lc->inter_frame_target;
  cpi->total_byte_count = lc->total_byte_count;
  cpi->common.filter_level = lc->filter_level;
  cpi->frames_since_last_drop_overshoot = lc->frames_since_last_drop_overshoot;
  cpi->force_maxqp = lc->force_maxqp;
  cpi->last_frame_percent_intra = lc->last_frame_percent_intra;
  cpi->last_q[0] = lc->last_q[0];
  cpi->last_q[1] = lc->last_q[1];

  memcpy(cpi->mb.count_mb_ref_frame_usage, lc->count_mb_ref_frame_usage,
         sizeof(cpi->mb.count_mb_ref_frame_usage));
}
  242. static int rescale(int val, int num, int denom) {
  243. int64_t llnum = num;
  244. int64_t llden = denom;
  245. int64_t llval = val;
  246. return (int)(llval * llnum / llden);
  247. }
  248. static void init_temporal_layer_context(VP8_COMP *cpi, VP8_CONFIG *oxcf,
  249. const int layer,
  250. double prev_layer_framerate) {
  251. LAYER_CONTEXT *lc = &cpi->layer_context[layer];
  252. lc->framerate = cpi->output_framerate / cpi->oxcf.rate_decimator[layer];
  253. lc->target_bandwidth = cpi->oxcf.target_bitrate[layer] * 1000;
  254. lc->starting_buffer_level_in_ms = oxcf->starting_buffer_level;
  255. lc->optimal_buffer_level_in_ms = oxcf->optimal_buffer_level;
  256. lc->maximum_buffer_size_in_ms = oxcf->maximum_buffer_size;
  257. lc->starting_buffer_level =
  258. rescale((int)(oxcf->starting_buffer_level), lc->target_bandwidth, 1000);
  259. if (oxcf->optimal_buffer_level == 0) {
  260. lc->optimal_buffer_level = lc->target_bandwidth / 8;
  261. } else {
  262. lc->optimal_buffer_level =
  263. rescale((int)(oxcf->optimal_buffer_level), lc->target_bandwidth, 1000);
  264. }
  265. if (oxcf->maximum_buffer_size == 0) {
  266. lc->maximum_buffer_size = lc->target_bandwidth / 8;
  267. } else {
  268. lc->maximum_buffer_size =
  269. rescale((int)(oxcf->maximum_buffer_size), lc->target_bandwidth, 1000);
  270. }
  271. /* Work out the average size of a frame within this layer */
  272. if (layer > 0) {
  273. lc->avg_frame_size_for_layer =
  274. (int)((cpi->oxcf.target_bitrate[layer] -
  275. cpi->oxcf.target_bitrate[layer - 1]) *
  276. 1000 / (lc->framerate - prev_layer_framerate));
  277. }
  278. lc->active_worst_quality = cpi->oxcf.worst_allowed_q;
  279. lc->active_best_quality = cpi->oxcf.best_allowed_q;
  280. lc->avg_frame_qindex = cpi->oxcf.worst_allowed_q;
  281. lc->buffer_level = lc->starting_buffer_level;
  282. lc->bits_off_target = lc->starting_buffer_level;
  283. lc->total_actual_bits = 0;
  284. lc->ni_av_qi = 0;
  285. lc->ni_tot_qi = 0;
  286. lc->ni_frames = 0;
  287. lc->rate_correction_factor = 1.0;
  288. lc->key_frame_rate_correction_factor = 1.0;
  289. lc->gf_rate_correction_factor = 1.0;
  290. lc->inter_frame_target = 0;
  291. }
// Upon a run-time change in temporal layers, reset the layer context parameters
// for any "new" layers. For "existing" layers, let them inherit the parameters
// from the previous layer state (at the same layer #). In future we may want
// to better map the previous layer state(s) to the "new" ones.
static void reset_temporal_layer_change(VP8_COMP *cpi, VP8_CONFIG *oxcf,
                                        const int prev_num_layers) {
  int i;
  double prev_layer_framerate = 0;
  const int curr_num_layers = cpi->oxcf.number_of_layers;
  // If the previous state was 1 layer, get current layer context from cpi.
  // We need this to set the layer context for the new layers below.
  if (prev_num_layers == 1) {
    cpi->current_layer = 0;
    save_layer_context(cpi);
  }
  for (i = 0; i < curr_num_layers; ++i) {
    LAYER_CONTEXT *lc = &cpi->layer_context[i];
    // Layers beyond the previous count are brand new: build them from the
    // configuration from scratch.
    if (i >= prev_num_layers) {
      init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
    }
    // The initial buffer levels are set based on their starting levels.
    // We could set the buffer levels based on the previous state (normalized
    // properly by the layer bandwidths) but we would need to keep track of
    // the previous set of layer bandwidths (i.e., target_bitrate[i])
    // before the layer change. For now, reset to the starting levels.
    // NOTE(review): ms * kbps yields bits here (target_bitrate appears to be
    // in kbps, cf. the *1000 in init_temporal_layer_context) — confirm.
    lc->buffer_level =
        cpi->oxcf.starting_buffer_level_in_ms * cpi->oxcf.target_bitrate[i];
    lc->bits_off_target = lc->buffer_level;
    // TODO(marpan): Should we set the rate_correction_factor and
    // active_worst/best_quality to values derived from the previous layer
    // state (to smooth-out quality dips/rate fluctuation at transition)?
    // We need to treat the 1 layer case separately: oxcf.target_bitrate[i]
    // is not set for 1 layer, and the restore_layer_context/save_context()
    // are not called in the encoding loop, so we need to call it here to
    // pass the layer context state to |cpi|.
    if (curr_num_layers == 1) {
      lc->target_bandwidth = cpi->oxcf.target_bandwidth;
      // target_bandwidth is in bits/s, hence the extra /1000 versus above.
      lc->buffer_level =
          cpi->oxcf.starting_buffer_level_in_ms * lc->target_bandwidth / 1000;
      lc->bits_off_target = lc->buffer_level;
      restore_layer_context(cpi, 0);
    }
    prev_layer_framerate = cpi->output_framerate / cpi->oxcf.rate_decimator[i];
  }
}
  337. static void setup_features(VP8_COMP *cpi) {
  338. // If segmentation enabled set the update flags
  339. if (cpi->mb.e_mbd.segmentation_enabled) {
  340. cpi->mb.e_mbd.update_mb_segmentation_map = 1;
  341. cpi->mb.e_mbd.update_mb_segmentation_data = 1;
  342. } else {
  343. cpi->mb.e_mbd.update_mb_segmentation_map = 0;
  344. cpi->mb.e_mbd.update_mb_segmentation_data = 0;
  345. }
  346. cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 0;
  347. cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
  348. memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
  349. memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
  350. memset(cpi->mb.e_mbd.last_ref_lf_deltas, 0,
  351. sizeof(cpi->mb.e_mbd.ref_lf_deltas));
  352. memset(cpi->mb.e_mbd.last_mode_lf_deltas, 0,
  353. sizeof(cpi->mb.e_mbd.mode_lf_deltas));
  354. set_default_lf_deltas(cpi);
  355. }
  356. static void dealloc_raw_frame_buffers(VP8_COMP *cpi);
  357. void vp8_initialize_enc(void) {
  358. static volatile int init_done = 0;
  359. if (!init_done) {
  360. vpx_dsp_rtcd();
  361. vp8_init_intra_predictors();
  362. init_done = 1;
  363. }
  364. }
  365. static void dealloc_compressor_data(VP8_COMP *cpi) {
  366. vpx_free(cpi->tplist);
  367. cpi->tplist = NULL;
  368. /* Delete last frame MV storage buffers */
  369. vpx_free(cpi->lfmv);
  370. cpi->lfmv = 0;
  371. vpx_free(cpi->lf_ref_frame_sign_bias);
  372. cpi->lf_ref_frame_sign_bias = 0;
  373. vpx_free(cpi->lf_ref_frame);
  374. cpi->lf_ref_frame = 0;
  375. /* Delete sementation map */
  376. vpx_free(cpi->segmentation_map);
  377. cpi->segmentation_map = 0;
  378. vpx_free(cpi->active_map);
  379. cpi->active_map = 0;
  380. vp8_de_alloc_frame_buffers(&cpi->common);
  381. vp8_yv12_de_alloc_frame_buffer(&cpi->pick_lf_lvl_frame);
  382. vp8_yv12_de_alloc_frame_buffer(&cpi->scaled_source);
  383. dealloc_raw_frame_buffers(cpi);
  384. vpx_free(cpi->tok);
  385. cpi->tok = 0;
  386. /* Structure used to monitor GF usage */
  387. vpx_free(cpi->gf_active_flags);
  388. cpi->gf_active_flags = 0;
  389. /* Activity mask based per mb zbin adjustments */
  390. vpx_free(cpi->mb_activity_map);
  391. cpi->mb_activity_map = 0;
  392. vpx_free(cpi->mb.pip);
  393. cpi->mb.pip = 0;
  394. #if CONFIG_MULTITHREAD
  395. vpx_free(cpi->mt_current_mb_col);
  396. cpi->mt_current_mb_col = NULL;
  397. #endif
  398. }
  399. static void enable_segmentation(VP8_COMP *cpi) {
  400. /* Set the appropriate feature bit */
  401. cpi->mb.e_mbd.segmentation_enabled = 1;
  402. cpi->mb.e_mbd.update_mb_segmentation_map = 1;
  403. cpi->mb.e_mbd.update_mb_segmentation_data = 1;
  404. }
  405. static void disable_segmentation(VP8_COMP *cpi) {
  406. /* Clear the appropriate feature bit */
  407. cpi->mb.e_mbd.segmentation_enabled = 0;
  408. }
  409. /* Valid values for a segment are 0 to 3
  410. * Segmentation map is arrange as [Rows][Columns]
  411. */
  412. static void set_segmentation_map(VP8_COMP *cpi,
  413. unsigned char *segmentation_map) {
  414. /* Copy in the new segmentation map */
  415. memcpy(cpi->segmentation_map, segmentation_map,
  416. (cpi->common.mb_rows * cpi->common.mb_cols));
  417. /* Signal that the map should be updated. */
  418. cpi->mb.e_mbd.update_mb_segmentation_map = 1;
  419. cpi->mb.e_mbd.update_mb_segmentation_data = 1;
  420. }
  421. /* The values given for each segment can be either deltas (from the default
  422. * value chosen for the frame) or absolute values.
  423. *
  424. * Valid range for abs values is:
  425. * (0-127 for MB_LVL_ALT_Q), (0-63 for SEGMENT_ALT_LF)
  426. * Valid range for delta values are:
  427. * (+/-127 for MB_LVL_ALT_Q), (+/-63 for SEGMENT_ALT_LF)
  428. *
  429. * abs_delta = SEGMENT_DELTADATA (deltas)
  430. * abs_delta = SEGMENT_ABSDATA (use the absolute values given).
  431. *
  432. */
  433. static void set_segment_data(VP8_COMP *cpi, signed char *feature_data,
  434. unsigned char abs_delta) {
  435. cpi->mb.e_mbd.mb_segement_abs_delta = abs_delta;
  436. memcpy(cpi->segment_feature_data, feature_data,
  437. sizeof(cpi->segment_feature_data));
  438. }
/* A simple function to cyclically refresh the background at a lower Q.
 *
 * Walks the frame's macroblocks starting from cyclic_refresh_mode_index,
 * marking up to |block_count| candidate background MBs as segment 1 so they
 * are coded at the (lower) cyclic_refresh_q.  The walk wraps around the
 * frame and resumes where it left off on the next call.
 */
static void cyclic_background_refresh(VP8_COMP *cpi, int Q, int lf_adjustment) {
  unsigned char *seg_map = cpi->segmentation_map;
  signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
  int i;
  int block_count = cpi->cyclic_refresh_mode_max_mbs_perframe;
  int mbs_in_frame = cpi->common.mb_rows * cpi->common.mb_cols;

  /* Refresh blocks at roughly half the frame quantizer. */
  cpi->cyclic_refresh_q = Q / 2;

  if (cpi->oxcf.screen_content_mode) {
    // Modify quality ramp-up based on Q. Above some Q level, increase the
    // number of blocks to be refreshed, and reduce it below the threshold.
    // Turn-off under certain conditions (i.e., away from key frame, and if
    // we are at good quality (low Q) and most of the blocks were
    // skipped-encoded
    // in previous frame.
    int qp_thresh = (cpi->oxcf.screen_content_mode == 2) ? 80 : 100;
    if (Q >= qp_thresh) {
      /* High Q: refresh 10% of the frame per call. */
      cpi->cyclic_refresh_mode_max_mbs_perframe =
          (cpi->common.mb_rows * cpi->common.mb_cols) / 10;
    } else if (cpi->frames_since_key > 250 && Q < 20 &&
               cpi->mb.skip_true_count > (int)(0.95 * mbs_in_frame)) {
      /* Good quality and almost everything skipped: refresh nothing. */
      cpi->cyclic_refresh_mode_max_mbs_perframe = 0;
    } else {
      /* Default: refresh 5% of the frame per call. */
      cpi->cyclic_refresh_mode_max_mbs_perframe =
          (cpi->common.mb_rows * cpi->common.mb_cols) / 20;
    }
    block_count = cpi->cyclic_refresh_mode_max_mbs_perframe;
  }

  // Set every macroblock to be eligible for update.
  // For key frame this will reset seg map to 0.
  memset(cpi->segmentation_map, 0, mbs_in_frame);

  if (cpi->common.frame_type != KEY_FRAME && block_count > 0) {
    /* Cycle through the macro_block rows */
    /* MB loop to set local segmentation map */
    i = cpi->cyclic_refresh_mode_index;
    assert(i < mbs_in_frame);
    do {
      /* If the MB is as a candidate for clean up then mark it for
       * possible boost/refresh (segment 1) The segment id may get
       * reset to 0 later if the MB gets coded anything other than
       * last frame 0,0 as only (last frame 0,0) MBs are eligible for
       * refresh : that is to say Mbs likely to be background blocks.
       */
      if (cpi->cyclic_refresh_map[i] == 0) {
        seg_map[i] = 1;
        block_count--;
      } else if (cpi->cyclic_refresh_map[i] < 0) {
        /* Negative entries count down toward eligibility. */
        cpi->cyclic_refresh_map[i]++;
      }

      i++;
      if (i == mbs_in_frame) i = 0; /* wrap around the frame */

    } while (block_count && i != cpi->cyclic_refresh_mode_index);

    /* Remember where to resume on the next call. */
    cpi->cyclic_refresh_mode_index = i;

#if CONFIG_TEMPORAL_DENOISING
    if (cpi->oxcf.noise_sensitivity > 0) {
      if (cpi->denoiser.denoiser_mode == kDenoiserOnYUVAggressive &&
          Q < (int)cpi->denoiser.denoise_pars.qp_thresh &&
          (cpi->frames_since_key >
           2 * cpi->denoiser.denoise_pars.consec_zerolast)) {
        // Under aggressive denoising, use segmentation to turn off loop
        // filter below some qp thresh. The filter is reduced for all
        // blocks that have been encoded as ZEROMV LAST x frames in a row,
        // where x is set by cpi->denoiser.denoise_pars.consec_zerolast.
        // This is to avoid "dot" artifacts that can occur from repeated
        // loop filtering on noisy input source.
        cpi->cyclic_refresh_q = Q;
        // lf_adjustment = -MAX_LOOP_FILTER;
        lf_adjustment = -40;
        for (i = 0; i < mbs_in_frame; ++i) {
          seg_map[i] = (cpi->consec_zero_last[i] >
                        cpi->denoiser.denoise_pars.consec_zerolast)
                           ? 1
                           : 0;
        }
      }
    }
#endif
  }

  /* Activate segmentation. */
  cpi->mb.e_mbd.update_mb_segmentation_map = 1;
  cpi->mb.e_mbd.update_mb_segmentation_data = 1;
  enable_segmentation(cpi);

  /* Set up the quant segment data */
  feature_data[MB_LVL_ALT_Q][0] = 0;
  feature_data[MB_LVL_ALT_Q][1] = (cpi->cyclic_refresh_q - Q);
  feature_data[MB_LVL_ALT_Q][2] = 0;
  feature_data[MB_LVL_ALT_Q][3] = 0;

  /* Set up the loop segment data */
  feature_data[MB_LVL_ALT_LF][0] = 0;
  feature_data[MB_LVL_ALT_LF][1] = lf_adjustment;
  feature_data[MB_LVL_ALT_LF][2] = 0;
  feature_data[MB_LVL_ALT_LF][3] = 0;

  /* Initialise the feature data structure */
  set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
}
/* Classify each macroblock of the source frame as skin (1) / non-skin (0)
 * into cpi->skin_map, one entry per MB in row-major order.  Frames up to
 * CIF size are scored on 8x8 sub-blocks, larger frames on 16x16.  A second
 * pass removes isolated decisions over the interior of the frame.
 */
static void compute_skin_map(VP8_COMP *cpi) {
  int mb_row, mb_col, num_bl;
  VP8_COMMON *cm = &cpi->common;
  const uint8_t *src_y = cpi->Source->y_buffer;
  const uint8_t *src_u = cpi->Source->u_buffer;
  const uint8_t *src_v = cpi->Source->v_buffer;
  const int src_ystride = cpi->Source->y_stride;
  const int src_uvstride = cpi->Source->uv_stride;

  /* Use the finer 8x8 granularity only for small (<= 352x288) frames. */
  const SKIN_DETECTION_BLOCK_SIZE bsize =
      (cm->Width * cm->Height <= 352 * 288) ? SKIN_8X8 : SKIN_16X16;

  for (mb_row = 0; mb_row < cm->mb_rows; mb_row++) {
    num_bl = 0;
    for (mb_col = 0; mb_col < cm->mb_cols; mb_col++) {
      const int bl_index = mb_row * cm->mb_cols + mb_col;
      cpi->skin_map[bl_index] =
          vp8_compute_skin_block(src_y, src_u, src_v, src_ystride, src_uvstride,
                                 bsize, cpi->consec_zero_last[bl_index], 0);
      num_bl++;
      /* Advance one macroblock to the right (16 luma / 8 chroma pixels). */
      src_y += 16;
      src_u += 8;
      src_v += 8;
    }
    /* Step down 16 luma (8 chroma) rows and rewind to the row start. */
    src_y += (src_ystride << 4) - (num_bl << 4);
    src_u += (src_uvstride << 3) - (num_bl << 3);
    src_v += (src_uvstride << 3) - (num_bl << 3);
  }

  // Remove isolated skin blocks (none of its neighbors are skin) and isolated
  // non-skin blocks (all of its neighbors are skin). Skip the boundary.
  // Note: num_neighbor scans the full 3x3 window, so it includes the center
  // block itself in the count.
  for (mb_row = 1; mb_row < cm->mb_rows - 1; mb_row++) {
    for (mb_col = 1; mb_col < cm->mb_cols - 1; mb_col++) {
      const int bl_index = mb_row * cm->mb_cols + mb_col;
      int num_neighbor = 0;
      int mi, mj;
      int non_skin_threshold = 8;

      for (mi = -1; mi <= 1; mi += 1) {
        for (mj = -1; mj <= 1; mj += 1) {
          int bl_neighbor_index = (mb_row + mi) * cm->mb_cols + mb_col + mj;
          if (cpi->skin_map[bl_neighbor_index]) num_neighbor++;
        }
      }

      if (cpi->skin_map[bl_index] && num_neighbor < 2)
        cpi->skin_map[bl_index] = 0;
      if (!cpi->skin_map[bl_index] && num_neighbor == non_skin_threshold)
        cpi->skin_map[bl_index] = 1;
    }
  }
}
  581. static void set_default_lf_deltas(VP8_COMP *cpi) {
  582. cpi->mb.e_mbd.mode_ref_lf_delta_enabled = 1;
  583. cpi->mb.e_mbd.mode_ref_lf_delta_update = 1;
  584. memset(cpi->mb.e_mbd.ref_lf_deltas, 0, sizeof(cpi->mb.e_mbd.ref_lf_deltas));
  585. memset(cpi->mb.e_mbd.mode_lf_deltas, 0, sizeof(cpi->mb.e_mbd.mode_lf_deltas));
  586. /* Test of ref frame deltas */
  587. cpi->mb.e_mbd.ref_lf_deltas[INTRA_FRAME] = 2;
  588. cpi->mb.e_mbd.ref_lf_deltas[LAST_FRAME] = 0;
  589. cpi->mb.e_mbd.ref_lf_deltas[GOLDEN_FRAME] = -2;
  590. cpi->mb.e_mbd.ref_lf_deltas[ALTREF_FRAME] = -2;
  591. cpi->mb.e_mbd.mode_lf_deltas[0] = 4; /* BPRED */
  592. if (cpi->oxcf.Mode == MODE_REALTIME) {
  593. cpi->mb.e_mbd.mode_lf_deltas[1] = -12; /* Zero */
  594. } else {
  595. cpi->mb.e_mbd.mode_lf_deltas[1] = -2; /* Zero */
  596. }
  597. cpi->mb.e_mbd.mode_lf_deltas[2] = 2; /* New mv */
  598. cpi->mb.e_mbd.mode_lf_deltas[3] = 4; /* Split mv */
  599. }
  600. /* Convenience macros for mapping speed and mode into a continuous
  601. * range
  602. */
  603. #define GOOD(x) ((x) + 1)
  604. #define RT(x) ((x) + 7)
  605. static int speed_map(int speed, const int *map) {
  606. int res;
  607. do {
  608. res = *map++;
  609. } while (speed >= *map++);
  610. return res;
  611. }
/* Speed-feature lookup tables consumed by speed_map().  Each table is a
 * flat sequence of (value, next-speed-limit) pairs: the value whose
 * following limit exceeds the mapped speed is selected.  GOOD(x) and RT(x)
 * place good-quality and real-time speeds on a single continuous axis, and
 * every table is terminated with an INT_MAX limit. */
static const int thresh_mult_map_znn[] = {
  /* map common to zero, nearest, and near */
  0, GOOD(2), 1500, GOOD(3), 2000, RT(0), 1000, RT(2), 2000, INT_MAX
};

static const int thresh_mult_map_vhpred[] = { 1000, GOOD(2), 1500,    GOOD(3),
                                              2000, RT(0),   1000,    RT(1),
                                              2000, RT(7),   INT_MAX, INT_MAX };

static const int thresh_mult_map_bpred[] = { 2000,    GOOD(0), 2500, GOOD(2),
                                             5000,    GOOD(3), 7500, RT(0),
                                             2500,    RT(1),   5000, RT(6),
                                             INT_MAX, INT_MAX };

static const int thresh_mult_map_tm[] = { 1000,    GOOD(2), 1500, GOOD(3),
                                          2000,    RT(0),   0,    RT(1),
                                          1000,    RT(2),   2000, RT(7),
                                          INT_MAX, INT_MAX };

static const int thresh_mult_map_new1[] = { 1000,  GOOD(2), 2000,
                                            RT(0), 2000,    INT_MAX };

static const int thresh_mult_map_new2[] = { 1000, GOOD(2), 2000, GOOD(3),
                                            2500, GOOD(5), 4000, RT(0),
                                            2000, RT(2),   2500, RT(5),
                                            4000, INT_MAX };

static const int thresh_mult_map_split1[] = {
  2500,  GOOD(0), 1700,  GOOD(2), 10000, GOOD(3), 25000, GOOD(4), INT_MAX,
  RT(0), 5000,    RT(1), 10000,   RT(2), 25000,   RT(3), INT_MAX, INT_MAX
};

static const int thresh_mult_map_split2[] = {
  5000,  GOOD(0), 4500,  GOOD(2), 20000, GOOD(3), 50000, GOOD(4), INT_MAX,
  RT(0), 10000,   RT(1), 20000,   RT(2), 50000,   RT(3), INT_MAX, INT_MAX
};

static const int mode_check_freq_map_zn2[] = {
  /* {zero,nearest}{2,3} */
  0, RT(10), 1 << 1, RT(11), 1 << 2, RT(12), 1 << 3, INT_MAX
};

static const int mode_check_freq_map_vhbpred[] = { 0, GOOD(5), 2, RT(0),
                                                   0, RT(3),   2, RT(5),
                                                   4, INT_MAX };

static const int mode_check_freq_map_near2[] = {
  0,      GOOD(5), 2,      RT(0),  0,      RT(3),  2,
  RT(10), 1 << 2,  RT(11), 1 << 3, RT(12), 1 << 4, INT_MAX
};

static const int mode_check_freq_map_new1[] = {
  0, RT(10), 1 << 1, RT(11), 1 << 2, RT(12), 1 << 3, INT_MAX
};

static const int mode_check_freq_map_new2[] = { 0,      GOOD(5), 4,      RT(0),
                                                0,      RT(3),   4,      RT(10),
                                                1 << 3, RT(11),  1 << 4, RT(12),
                                                1 << 5, INT_MAX };

static const int mode_check_freq_map_split1[] = { 0, GOOD(2), 2, GOOD(3),
                                                  7, RT(1),   2, RT(2),
                                                  7, INT_MAX };

static const int mode_check_freq_map_split2[] = { 0, GOOD(1), 2,  GOOD(2),
                                                  4, GOOD(3), 15, RT(1),
                                                  4, RT(2),   15, INT_MAX };
  665. void vp8_set_speed_features(VP8_COMP *cpi) {
  666. SPEED_FEATURES *sf = &cpi->sf;
  667. int Mode = cpi->compressor_speed;
  668. int Speed = cpi->Speed;
  669. int Speed2;
  670. int i;
  671. VP8_COMMON *cm = &cpi->common;
  672. int last_improved_quant = sf->improved_quant;
  673. int ref_frames;
  674. /* Initialise default mode frequency sampling variables */
  675. for (i = 0; i < MAX_MODES; ++i) {
  676. cpi->mode_check_freq[i] = 0;
  677. }
  678. cpi->mb.mbs_tested_so_far = 0;
  679. cpi->mb.mbs_zero_last_dot_suppress = 0;
  680. /* best quality defaults */
  681. sf->RD = 1;
  682. sf->search_method = NSTEP;
  683. sf->improved_quant = 1;
  684. sf->improved_dct = 1;
  685. sf->auto_filter = 1;
  686. sf->recode_loop = 1;
  687. sf->quarter_pixel_search = 1;
  688. sf->half_pixel_search = 1;
  689. sf->iterative_sub_pixel = 1;
  690. sf->optimize_coefficients = 1;
  691. sf->use_fastquant_for_pick = 0;
  692. sf->no_skip_block4x4_search = 1;
  693. sf->first_step = 0;
  694. sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
  695. sf->improved_mv_pred = 1;
  696. /* default thresholds to 0 */
  697. for (i = 0; i < MAX_MODES; ++i) sf->thresh_mult[i] = 0;
  698. /* Count enabled references */
  699. ref_frames = 1;
  700. if (cpi->ref_frame_flags & VP8_LAST_FRAME) ref_frames++;
  701. if (cpi->ref_frame_flags & VP8_GOLD_FRAME) ref_frames++;
  702. if (cpi->ref_frame_flags & VP8_ALTR_FRAME) ref_frames++;
  703. /* Convert speed to continuous range, with clamping */
  704. if (Mode == 0) {
  705. Speed = 0;
  706. } else if (Mode == 2) {
  707. Speed = RT(Speed);
  708. } else {
  709. if (Speed > 5) Speed = 5;
  710. Speed = GOOD(Speed);
  711. }
  712. sf->thresh_mult[THR_ZERO1] = sf->thresh_mult[THR_NEAREST1] =
  713. sf->thresh_mult[THR_NEAR1] = sf->thresh_mult[THR_DC] = 0; /* always */
  714. sf->thresh_mult[THR_ZERO2] = sf->thresh_mult[THR_ZERO3] =
  715. sf->thresh_mult[THR_NEAREST2] = sf->thresh_mult[THR_NEAREST3] =
  716. sf->thresh_mult[THR_NEAR2] = sf->thresh_mult[THR_NEAR3] =
  717. speed_map(Speed, thresh_mult_map_znn);
  718. sf->thresh_mult[THR_V_PRED] = sf->thresh_mult[THR_H_PRED] =
  719. speed_map(Speed, thresh_mult_map_vhpred);
  720. sf->thresh_mult[THR_B_PRED] = speed_map(Speed, thresh_mult_map_bpred);
  721. sf->thresh_mult[THR_TM] = speed_map(Speed, thresh_mult_map_tm);
  722. sf->thresh_mult[THR_NEW1] = speed_map(Speed, thresh_mult_map_new1);
  723. sf->thresh_mult[THR_NEW2] = sf->thresh_mult[THR_NEW3] =
  724. speed_map(Speed, thresh_mult_map_new2);
  725. sf->thresh_mult[THR_SPLIT1] = speed_map(Speed, thresh_mult_map_split1);
  726. sf->thresh_mult[THR_SPLIT2] = sf->thresh_mult[THR_SPLIT3] =
  727. speed_map(Speed, thresh_mult_map_split2);
  728. // Special case for temporal layers.
  729. // Reduce the thresholds for zero/nearest/near for GOLDEN, if GOLDEN is
  730. // used as second reference. We don't modify thresholds for ALTREF case
  731. // since ALTREF is usually used as long-term reference in temporal layers.
  732. if ((cpi->Speed <= 6) && (cpi->oxcf.number_of_layers > 1) &&
  733. (cpi->ref_frame_flags & VP8_LAST_FRAME) &&
  734. (cpi->ref_frame_flags & VP8_GOLD_FRAME)) {
  735. if (cpi->closest_reference_frame == GOLDEN_FRAME) {
  736. sf->thresh_mult[THR_ZERO2] = sf->thresh_mult[THR_ZERO2] >> 3;
  737. sf->thresh_mult[THR_NEAREST2] = sf->thresh_mult[THR_NEAREST2] >> 3;
  738. sf->thresh_mult[THR_NEAR2] = sf->thresh_mult[THR_NEAR2] >> 3;
  739. } else {
  740. sf->thresh_mult[THR_ZERO2] = sf->thresh_mult[THR_ZERO2] >> 1;
  741. sf->thresh_mult[THR_NEAREST2] = sf->thresh_mult[THR_NEAREST2] >> 1;
  742. sf->thresh_mult[THR_NEAR2] = sf->thresh_mult[THR_NEAR2] >> 1;
  743. }
  744. }
  745. cpi->mode_check_freq[THR_ZERO1] = cpi->mode_check_freq[THR_NEAREST1] =
  746. cpi->mode_check_freq[THR_NEAR1] = cpi->mode_check_freq[THR_TM] =
  747. cpi->mode_check_freq[THR_DC] = 0; /* always */
  748. cpi->mode_check_freq[THR_ZERO2] = cpi->mode_check_freq[THR_ZERO3] =
  749. cpi->mode_check_freq[THR_NEAREST2] = cpi->mode_check_freq[THR_NEAREST3] =
  750. speed_map(Speed, mode_check_freq_map_zn2);
  751. cpi->mode_check_freq[THR_NEAR2] = cpi->mode_check_freq[THR_NEAR3] =
  752. speed_map(Speed, mode_check_freq_map_near2);
  753. cpi->mode_check_freq[THR_V_PRED] = cpi->mode_check_freq[THR_H_PRED] =
  754. cpi->mode_check_freq[THR_B_PRED] =
  755. speed_map(Speed, mode_check_freq_map_vhbpred);
  756. // For real-time mode at speed 10 keep the mode_check_freq threshold
  757. // for NEW1 similar to that of speed 9.
  758. Speed2 = Speed;
  759. if (cpi->Speed == 10 && Mode == 2) Speed2 = RT(9);
  760. cpi->mode_check_freq[THR_NEW1] = speed_map(Speed2, mode_check_freq_map_new1);
  761. cpi->mode_check_freq[THR_NEW2] = cpi->mode_check_freq[THR_NEW3] =
  762. speed_map(Speed, mode_check_freq_map_new2);
  763. cpi->mode_check_freq[THR_SPLIT1] =
  764. speed_map(Speed, mode_check_freq_map_split1);
  765. cpi->mode_check_freq[THR_SPLIT2] = cpi->mode_check_freq[THR_SPLIT3] =
  766. speed_map(Speed, mode_check_freq_map_split2);
  767. Speed = cpi->Speed;
  768. switch (Mode) {
  769. #if !CONFIG_REALTIME_ONLY
  770. case 0: /* best quality mode */
  771. sf->first_step = 0;
  772. sf->max_step_search_steps = MAX_MVSEARCH_STEPS;
  773. break;
  774. case 1:
  775. case 3:
  776. if (Speed > 0) {
  777. /* Disable coefficient optimization above speed 0 */
  778. sf->optimize_coefficients = 0;
  779. sf->use_fastquant_for_pick = 1;
  780. sf->no_skip_block4x4_search = 0;
  781. sf->first_step = 1;
  782. }
  783. if (Speed > 2) {
  784. sf->improved_quant = 0;
  785. sf->improved_dct = 0;
  786. /* Only do recode loop on key frames, golden frames and
  787. * alt ref frames
  788. */
  789. sf->recode_loop = 2;
  790. }
  791. if (Speed > 3) {
  792. sf->auto_filter = 1;
  793. sf->recode_loop = 0; /* recode loop off */
  794. sf->RD = 0; /* Turn rd off */
  795. }
  796. if (Speed > 4) {
  797. sf->auto_filter = 0; /* Faster selection of loop filter */
  798. }
  799. break;
  800. #endif
  801. case 2:
  802. sf->optimize_coefficients = 0;
  803. sf->recode_loop = 0;
  804. sf->auto_filter = 1;
  805. sf->iterative_sub_pixel = 1;
  806. sf->search_method = NSTEP;
  807. if (Speed > 0) {
  808. sf->improved_quant = 0;
  809. sf->improved_dct = 0;
  810. sf->use_fastquant_for_pick = 1;
  811. sf->no_skip_block4x4_search = 0;
  812. sf->first_step = 1;
  813. }
  814. if (Speed > 2) sf->auto_filter = 0; /* Faster selection of loop filter */
  815. if (Speed > 3) {
  816. sf->RD = 0;
  817. sf->auto_filter = 1;
  818. }
  819. if (Speed > 4) {
  820. sf->auto_filter = 0; /* Faster selection of loop filter */
  821. sf->search_method = HEX;
  822. sf->iterative_sub_pixel = 0;
  823. }
  824. if (Speed > 6) {
  825. unsigned int sum = 0;
  826. unsigned int total_mbs = cm->MBs;
  827. int thresh;
  828. unsigned int total_skip;
  829. int min = 2000;
  830. if (cpi->oxcf.encode_breakout > 2000) min = cpi->oxcf.encode_breakout;
  831. min >>= 7;
  832. for (i = 0; i < min; ++i) {
  833. sum += cpi->mb.error_bins[i];
  834. }
  835. total_skip = sum;
  836. sum = 0;
  837. /* i starts from 2 to make sure thresh started from 2048 */
  838. for (; i < 1024; ++i) {
  839. sum += cpi->mb.error_bins[i];
  840. if (10 * sum >=
  841. (unsigned int)(cpi->Speed - 6) * (total_mbs - total_skip)) {
  842. break;
  843. }
  844. }
  845. i--;
  846. thresh = (i << 7);
  847. if (thresh < 2000) thresh = 2000;
  848. if (ref_frames > 1) {
  849. sf->thresh_mult[THR_NEW1] = thresh;
  850. sf->thresh_mult[THR_NEAREST1] = thresh >> 1;
  851. sf->thresh_mult[THR_NEAR1] = thresh >> 1;
  852. }
  853. if (ref_frames > 2) {
  854. sf->thresh_mult[THR_NEW2] = thresh << 1;
  855. sf->thresh_mult[THR_NEAREST2] = thresh;
  856. sf->thresh_mult[THR_NEAR2] = thresh;
  857. }
  858. if (ref_frames > 3) {
  859. sf->thresh_mult[THR_NEW3] = thresh << 1;
  860. sf->thresh_mult[THR_NEAREST3] = thresh;
  861. sf->thresh_mult[THR_NEAR3] = thresh;
  862. }
  863. sf->improved_mv_pred = 0;
  864. }
  865. if (Speed > 8) sf->quarter_pixel_search = 0;
  866. if (cm->version == 0) {
  867. cm->filter_type = NORMAL_LOOPFILTER;
  868. if (Speed >= 14) cm->filter_type = SIMPLE_LOOPFILTER;
  869. } else {
  870. cm->filter_type = SIMPLE_LOOPFILTER;
  871. }
  872. /* This has a big hit on quality. Last resort */
  873. if (Speed >= 15) sf->half_pixel_search = 0;
  874. memset(cpi->mb.error_bins, 0, sizeof(cpi->mb.error_bins));
  875. }; /* switch */
  876. /* Slow quant, dct and trellis not worthwhile for first pass
  877. * so make sure they are always turned off.
  878. */
  879. if (cpi->pass == 1) {
  880. sf->improved_quant = 0;
  881. sf->optimize_coefficients = 0;
  882. sf->improved_dct = 0;
  883. }
  884. if (cpi->sf.search_method == NSTEP) {
  885. vp8_init3smotion_compensation(&cpi->mb,
  886. cm->yv12_fb[cm->lst_fb_idx].y_stride);
  887. } else if (cpi->sf.search_method == DIAMOND) {
  888. vp8_init_dsmotion_compensation(&cpi->mb,
  889. cm->yv12_fb[cm->lst_fb_idx].y_stride);
  890. }
  891. if (cpi->sf.improved_dct) {
  892. cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
  893. cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
  894. } else {
  895. /* No fast FDCT defined for any platform at this time. */
  896. cpi->mb.short_fdct8x4 = vp8_short_fdct8x4;
  897. cpi->mb.short_fdct4x4 = vp8_short_fdct4x4;
  898. }
  899. cpi->mb.short_walsh4x4 = vp8_short_walsh4x4;
  900. if (cpi->sf.improved_quant) {
  901. cpi->mb.quantize_b = vp8_regular_quantize_b;
  902. } else {
  903. cpi->mb.quantize_b = vp8_fast_quantize_b;
  904. }
  905. if (cpi->sf.improved_quant != last_improved_quant) vp8cx_init_quantizer(cpi);
  906. if (cpi->sf.iterative_sub_pixel == 1) {
  907. cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step_iteratively;
  908. } else if (cpi->sf.quarter_pixel_search) {
  909. cpi->find_fractional_mv_step = vp8_find_best_sub_pixel_step;
  910. } else if (cpi->sf.half_pixel_search) {
  911. cpi->find_fractional_mv_step = vp8_find_best_half_pixel_step;
  912. } else {
  913. cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
  914. }
  915. if (cpi->sf.optimize_coefficients == 1 && cpi->pass != 1) {
  916. cpi->mb.optimize = 1;
  917. } else {
  918. cpi->mb.optimize = 0;
  919. }
  920. if (cpi->common.full_pixel) {
  921. cpi->find_fractional_mv_step = vp8_skip_fractional_mv_step;
  922. }
  923. #ifdef SPEEDSTATS
  924. frames_at_speed[cpi->Speed]++;
  925. #endif
  926. }
  927. #undef GOOD
  928. #undef RT
/* Allocate the lookahead (lagged source) buffers and, when temporal
 * filtering is compiled in, the macroblock-aligned alt-ref working buffer.
 * On failure, raises VPX_CODEC_MEM_ERROR via vpx_internal_error(). */
static void alloc_raw_frame_buffers(VP8_COMP *cpi) {
#if VP8_TEMPORAL_ALT_REF
  /* Round dimensions up to whole macroblocks (multiples of 16). */
  int width = (cpi->oxcf.Width + 15) & ~15;
  int height = (cpi->oxcf.Height + 15) & ~15;
#endif

  cpi->lookahead = vp8_lookahead_init(cpi->oxcf.Width, cpi->oxcf.Height,
                                      cpi->oxcf.lag_in_frames);
  if (!cpi->lookahead) {
    vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate lag buffers");
  }

#if VP8_TEMPORAL_ALT_REF
  if (vp8_yv12_alloc_frame_buffer(&cpi->alt_ref_buffer, width, height,
                                  VP8BORDERINPIXELS)) {
    vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate altref buffer");
  }
#endif
}
/* Free the buffers created by alloc_raw_frame_buffers(). */
static void dealloc_raw_frame_buffers(VP8_COMP *cpi) {
#if VP8_TEMPORAL_ALT_REF
  vp8_yv12_de_alloc_frame_buffer(&cpi->alt_ref_buffer);
#endif
  vp8_lookahead_destroy(cpi->lookahead);
}
  954. static int vp8_alloc_partition_data(VP8_COMP *cpi) {
  955. vpx_free(cpi->mb.pip);
  956. cpi->mb.pip =
  957. vpx_calloc((cpi->common.mb_cols + 1) * (cpi->common.mb_rows + 1),
  958. sizeof(PARTITION_INFO));
  959. if (!cpi->mb.pip) return 1;
  960. cpi->mb.pi = cpi->mb.pip + cpi->common.mode_info_stride + 1;
  961. return 0;
  962. }
/* (Re)allocate all per-frame encoder working storage for the current coded
 * frame size: common frame buffers, partition info, the token buffer,
 * GF-usage/activity/MV-prediction maps, segmentation and active maps,
 * per-row sync state (multithread builds) and the temporal denoiser.
 * Allocation failures are reported through vpx_internal_error().
 */
void vp8_alloc_compressor_data(VP8_COMP *cpi) {
  VP8_COMMON *cm = &cpi->common;
  int width = cm->Width;
  int height = cm->Height;
  if (vp8_alloc_frame_buffers(cm, width, height)) {
    vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate frame buffers");
  }
  if (vp8_alloc_partition_data(cpi)) {
    vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate partition data");
  }
  /* Round the working dimensions up to a whole number of 16x16 MBs. */
  if ((width & 0xf) != 0) width += 16 - (width & 0xf);
  if ((height & 0xf) != 0) height += 16 - (height & 0xf);
  if (vp8_yv12_alloc_frame_buffer(&cpi->pick_lf_lvl_frame, width, height,
                                  VP8BORDERINPIXELS)) {
    vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate last frame buffer");
  }
  if (vp8_yv12_alloc_frame_buffer(&cpi->scaled_source, width, height,
                                  VP8BORDERINPIXELS)) {
    vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate scaled source buffer");
  }
  vpx_free(cpi->tok);
  {
#if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
    unsigned int tokens = 8 * 24 * 16; /* one MB for each thread */
#else
    /* Worst-case sizing: 24 * 16 tokens per macroblock. */
    unsigned int tokens = cm->mb_rows * cm->mb_cols * 24 * 16;
#endif
    CHECK_MEM_ERROR(cpi->tok, vpx_calloc(tokens, sizeof(*cpi->tok)));
  }
  /* Data used for real time vc mode to see if gf needs refreshing */
  cpi->zeromv_count = 0;
  /* Structures used to monitor GF usage */
  vpx_free(cpi->gf_active_flags);
  CHECK_MEM_ERROR(
      cpi->gf_active_flags,
      vpx_calloc(sizeof(*cpi->gf_active_flags), cm->mb_rows * cm->mb_cols));
  cpi->gf_active_count = cm->mb_rows * cm->mb_cols;
  vpx_free(cpi->mb_activity_map);
  CHECK_MEM_ERROR(
      cpi->mb_activity_map,
      vpx_calloc(sizeof(*cpi->mb_activity_map), cm->mb_rows * cm->mb_cols));
  /* allocate memory for storing last frame's MVs for MV prediction.
   * (mb_rows/cols + 2 gives a one-MB border on every side.) */
  vpx_free(cpi->lfmv);
  CHECK_MEM_ERROR(cpi->lfmv, vpx_calloc((cm->mb_rows + 2) * (cm->mb_cols + 2),
                                        sizeof(*cpi->lfmv)));
  vpx_free(cpi->lf_ref_frame_sign_bias);
  CHECK_MEM_ERROR(cpi->lf_ref_frame_sign_bias,
                  vpx_calloc((cm->mb_rows + 2) * (cm->mb_cols + 2),
                             sizeof(*cpi->lf_ref_frame_sign_bias)));
  vpx_free(cpi->lf_ref_frame);
  CHECK_MEM_ERROR(cpi->lf_ref_frame,
                  vpx_calloc((cm->mb_rows + 2) * (cm->mb_cols + 2),
                             sizeof(*cpi->lf_ref_frame)));
  /* Create the encoder segmentation map and set all entries to 0 */
  vpx_free(cpi->segmentation_map);
  CHECK_MEM_ERROR(
      cpi->segmentation_map,
      vpx_calloc(cm->mb_rows * cm->mb_cols, sizeof(*cpi->segmentation_map)));
  cpi->cyclic_refresh_mode_index = 0;
  vpx_free(cpi->active_map);
  CHECK_MEM_ERROR(cpi->active_map, vpx_calloc(cm->mb_rows * cm->mb_cols,
                                              sizeof(*cpi->active_map)));
  /* Every macroblock starts out marked active. */
  memset(cpi->active_map, 1, (cm->mb_rows * cm->mb_cols));
#if CONFIG_MULTITHREAD
  /* Wider frames allow a larger inter-thread row-sync distance. */
  if (width < 640) {
    cpi->mt_sync_range = 1;
  } else if (width <= 1280) {
    cpi->mt_sync_range = 4;
  } else if (width <= 2560) {
    cpi->mt_sync_range = 8;
  } else {
    cpi->mt_sync_range = 16;
  }
  if (cpi->oxcf.multi_threaded > 1) {
    int i;
    vpx_free(cpi->mt_current_mb_col);
    CHECK_MEM_ERROR(cpi->mt_current_mb_col,
                    vpx_malloc(sizeof(*cpi->mt_current_mb_col) * cm->mb_rows));
    for (i = 0; i < cm->mb_rows; ++i)
      vpx_atomic_init(&cpi->mt_current_mb_col[i], 0);
  }
#endif
  vpx_free(cpi->tplist);
  CHECK_MEM_ERROR(cpi->tplist, vpx_malloc(sizeof(TOKENLIST) * cm->mb_rows));
#if CONFIG_TEMPORAL_DENOISING
  if (cpi->oxcf.noise_sensitivity > 0) {
    vp8_denoiser_free(&cpi->denoiser);
    if (vp8_denoiser_allocate(&cpi->denoiser, width, height, cm->mb_rows,
                              cm->mb_cols, cpi->oxcf.noise_sensitivity)) {
      vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                         "Failed to allocate denoiser");
    }
  }
#endif
}
/* Quant MOD: maps the user-visible quantizer range [0, 63] onto the
 * codec's internal quantizer index range [0, 127] (monotonic table).
 */
static const int q_trans[] = {
  0,  1,  2,  3,  4,  5,  7,  8,  9,  10, 12, 13, 15, 17, 18, 19,
  20, 21, 23, 24, 25, 26, 27, 28, 29, 30, 31, 33, 35, 37, 39, 41,
  43, 45, 47, 49, 51, 53, 55, 57, 59, 61, 64, 67, 70, 73, 76, 79,
  82, 85, 88, 91, 94, 97, 100, 103, 106, 109, 112, 115, 118, 121, 124, 127,
};
  1069. int vp8_reverse_trans(int x) {
  1070. int i;
  1071. for (i = 0; i < 64; ++i) {
  1072. if (q_trans[i] >= x) return i;
  1073. }
  1074. return 63;
  1075. }
  1076. void vp8_new_framerate(VP8_COMP *cpi, double framerate) {
  1077. if (framerate < .1) framerate = 30;
  1078. cpi->framerate = framerate;
  1079. cpi->output_framerate = framerate;
  1080. cpi->per_frame_bandwidth =
  1081. (int)(cpi->oxcf.target_bandwidth / cpi->output_framerate);
  1082. cpi->av_per_frame_bandwidth = cpi->per_frame_bandwidth;
  1083. cpi->min_frame_bandwidth = (int)(cpi->av_per_frame_bandwidth *
  1084. cpi->oxcf.two_pass_vbrmin_section / 100);
  1085. /* Set Maximum gf/arf interval */
  1086. cpi->max_gf_interval = ((int)(cpi->output_framerate / 2.0) + 2);
  1087. if (cpi->max_gf_interval < 12) cpi->max_gf_interval = 12;
  1088. /* Extended interval for genuinely static scenes */
  1089. cpi->twopass.static_scene_max_gf_interval = cpi->key_frame_frequency >> 1;
  1090. /* Special conditions when altr ref frame enabled in lagged compress mode */
  1091. if (cpi->oxcf.play_alternate && cpi->oxcf.lag_in_frames) {
  1092. if (cpi->max_gf_interval > cpi->oxcf.lag_in_frames - 1) {
  1093. cpi->max_gf_interval = cpi->oxcf.lag_in_frames - 1;
  1094. }
  1095. if (cpi->twopass.static_scene_max_gf_interval >
  1096. cpi->oxcf.lag_in_frames - 1) {
  1097. cpi->twopass.static_scene_max_gf_interval = cpi->oxcf.lag_in_frames - 1;
  1098. }
  1099. }
  1100. if (cpi->max_gf_interval > cpi->twopass.static_scene_max_gf_interval) {
  1101. cpi->max_gf_interval = cpi->twopass.static_scene_max_gf_interval;
  1102. }
  1103. }
/* One-time initialization of a freshly created compressor from the user
 * configuration: sets defaults, guesses an initial frame rate from the
 * timebase, then delegates the shared reconfiguration logic to
 * vp8_change_config() and seeds the rate-control state.
 */
static void init_config(VP8_COMP *cpi, VP8_CONFIG *oxcf) {
  VP8_COMMON *cm = &cpi->common;
  cpi->oxcf = *oxcf;
  cpi->auto_gold = 1;
  cpi->auto_adjust_gold_quantizer = 1;
  cm->version = oxcf->Version;
  vp8_setup_version(cm);
  /* Frame rate is not available on the first frame, as it's derived from
   * the observed timestamps. The actual value used here doesn't matter
   * too much, as it will adapt quickly.
   */
  if (oxcf->timebase.num > 0) {
    cpi->framerate =
        (double)(oxcf->timebase.den) / (double)(oxcf->timebase.num);
  } else {
    cpi->framerate = 30;
  }
  /* If the reciprocal of the timebase seems like a reasonable framerate,
   * then use that as a guess, otherwise use 30.
   */
  if (cpi->framerate > 180) cpi->framerate = 30;
  cpi->ref_framerate = cpi->framerate;
  /* Start with all three reference frames usable. */
  cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME;
  cm->refresh_golden_frame = 0;
  cm->refresh_last_frame = 1;
  cm->refresh_entropy_probs = 1;
  /* change includes all joint functionality */
  vp8_change_config(cpi, oxcf);
  /* Initialize active best and worst q and average q values. */
  cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
  cpi->active_best_quality = cpi->oxcf.best_allowed_q;
  cpi->avg_frame_qindex = cpi->oxcf.worst_allowed_q;
  /* Initialise the starting buffer levels */
  cpi->buffer_level = cpi->oxcf.starting_buffer_level;
  cpi->bits_off_target = cpi->oxcf.starting_buffer_level;
  /* Seed the rolling rate-control averages with the per-frame budget. */
  cpi->rolling_target_bits = cpi->av_per_frame_bandwidth;
  cpi->rolling_actual_bits = cpi->av_per_frame_bandwidth;
  cpi->long_rolling_target_bits = cpi->av_per_frame_bandwidth;
  cpi->long_rolling_actual_bits = cpi->av_per_frame_bandwidth;
  cpi->total_actual_bits = 0;
  cpi->total_target_vs_actual = 0;
  /* Temporal scalability: initialize one context per layer. */
  if (cpi->oxcf.number_of_layers > 1) {
    unsigned int i;
    double prev_layer_framerate = 0;
    for (i = 0; i < cpi->oxcf.number_of_layers; ++i) {
      init_temporal_layer_context(cpi, oxcf, i, prev_layer_framerate);
      prev_layer_framerate =
          cpi->output_framerate / cpi->oxcf.rate_decimator[i];
    }
  }
#if VP8_TEMPORAL_ALT_REF
  {
    /* Precompute fixed-point reciprocals (0x80000 / i) so the temporal
     * filter can divide by multiplication. */
    int i;
    cpi->fixed_divide[0] = 0;
    for (i = 1; i < 512; ++i) cpi->fixed_divide[i] = 0x80000 / i;
  }
#endif
}
  1163. static void update_layer_contexts(VP8_COMP *cpi) {
  1164. VP8_CONFIG *oxcf = &cpi->oxcf;
  1165. /* Update snapshots of the layer contexts to reflect new parameters */
  1166. if (oxcf->number_of_layers > 1) {
  1167. unsigned int i;
  1168. double prev_layer_framerate = 0;
  1169. assert(oxcf->number_of_layers <= VPX_TS_MAX_LAYERS);
  1170. for (i = 0; i < oxcf->number_of_layers && i < VPX_TS_MAX_LAYERS; ++i) {
  1171. LAYER_CONTEXT *lc = &cpi->layer_context[i];
  1172. lc->framerate = cpi->ref_framerate / oxcf->rate_decimator[i];
  1173. lc->target_bandwidth = oxcf->target_bitrate[i] * 1000;
  1174. lc->starting_buffer_level = rescale(
  1175. (int)oxcf->starting_buffer_level_in_ms, lc->target_bandwidth, 1000);
  1176. if (oxcf->optimal_buffer_level == 0) {
  1177. lc->optimal_buffer_level = lc->target_bandwidth / 8;
  1178. } else {
  1179. lc->optimal_buffer_level = rescale(
  1180. (int)oxcf->optimal_buffer_level_in_ms, lc->target_bandwidth, 1000);
  1181. }
  1182. if (oxcf->maximum_buffer_size == 0) {
  1183. lc->maximum_buffer_size = lc->target_bandwidth / 8;
  1184. } else {
  1185. lc->maximum_buffer_size = rescale((int)oxcf->maximum_buffer_size_in_ms,
  1186. lc->target_bandwidth, 1000);
  1187. }
  1188. /* Work out the average size of a frame within this layer */
  1189. if (i > 0) {
  1190. lc->avg_frame_size_for_layer =
  1191. (int)((oxcf->target_bitrate[i] - oxcf->target_bitrate[i - 1]) *
  1192. 1000 / (lc->framerate - prev_layer_framerate));
  1193. }
  1194. prev_layer_framerate = lc->framerate;
  1195. }
  1196. }
  1197. }
/* Apply a (possibly updated) encoder configuration to an existing
 * compressor.  Re-derives pass/speed settings, quantizer limits, buffer
 * levels, frame-rate-dependent rate-control values and temporal-layer
 * state, and reallocates frame buffers if the coded size changed.
 * Also called once from init_config() at creation time.
 */
void vp8_change_config(VP8_COMP *cpi, VP8_CONFIG *oxcf) {
  VP8_COMMON *cm = &cpi->common;
  int last_w, last_h;
  unsigned int prev_number_of_layers;
  if (!cpi) return;
  if (!oxcf) return;
  if (cm->version != oxcf->Version) {
    cm->version = oxcf->Version;
    vp8_setup_version(cm);
  }
  /* Remember the previous geometry and layer count so changes can be
   * detected after the new config is copied in. */
  last_w = cpi->oxcf.Width;
  last_h = cpi->oxcf.Height;
  prev_number_of_layers = cpi->oxcf.number_of_layers;
  cpi->oxcf = *oxcf;
  /* Map the requested mode onto pass number and compressor speed,
   * clamping cpu_used to the range valid for that mode. */
  switch (cpi->oxcf.Mode) {
    case MODE_REALTIME:
      cpi->pass = 0;
      cpi->compressor_speed = 2;
      if (cpi->oxcf.cpu_used < -16) {
        cpi->oxcf.cpu_used = -16;
      }
      if (cpi->oxcf.cpu_used > 16) cpi->oxcf.cpu_used = 16;
      break;
    case MODE_GOODQUALITY:
      cpi->pass = 0;
      cpi->compressor_speed = 1;
      if (cpi->oxcf.cpu_used < -5) {
        cpi->oxcf.cpu_used = -5;
      }
      if (cpi->oxcf.cpu_used > 5) cpi->oxcf.cpu_used = 5;
      break;
    case MODE_BESTQUALITY:
      cpi->pass = 0;
      cpi->compressor_speed = 0;
      break;
    case MODE_FIRSTPASS:
      cpi->pass = 1;
      cpi->compressor_speed = 1;
      break;
    case MODE_SECONDPASS:
      cpi->pass = 2;
      cpi->compressor_speed = 1;
      if (cpi->oxcf.cpu_used < -5) {
        cpi->oxcf.cpu_used = -5;
      }
      if (cpi->oxcf.cpu_used > 5) cpi->oxcf.cpu_used = 5;
      break;
    case MODE_SECONDPASS_BEST:
      cpi->pass = 2;
      cpi->compressor_speed = 0;
      break;
  }
  if (cpi->pass == 0) cpi->auto_worst_q = 1;
  /* Translate user quantizer settings (0-63) to internal indices. */
  cpi->oxcf.worst_allowed_q = q_trans[oxcf->worst_allowed_q];
  cpi->oxcf.best_allowed_q = q_trans[oxcf->best_allowed_q];
  cpi->oxcf.cq_level = q_trans[cpi->oxcf.cq_level];
  if (oxcf->fixed_q >= 0) {
    /* Fixed-Q mode: negative per-frame-type values fall back to index 0. */
    if (oxcf->worst_allowed_q < 0) {
      cpi->oxcf.fixed_q = q_trans[0];
    } else {
      cpi->oxcf.fixed_q = q_trans[oxcf->worst_allowed_q];
    }
    if (oxcf->alt_q < 0) {
      cpi->oxcf.alt_q = q_trans[0];
    } else {
      cpi->oxcf.alt_q = q_trans[oxcf->alt_q];
    }
    if (oxcf->key_q < 0) {
      cpi->oxcf.key_q = q_trans[0];
    } else {
      cpi->oxcf.key_q = q_trans[oxcf->key_q];
    }
    if (oxcf->gold_q < 0) {
      cpi->oxcf.gold_q = q_trans[0];
    } else {
      cpi->oxcf.gold_q = q_trans[oxcf->gold_q];
    }
  }
  cpi->ext_refresh_frame_flags_pending = 0;
  cpi->baseline_gf_interval =
      cpi->oxcf.alt_freq ? cpi->oxcf.alt_freq : DEFAULT_GF_INTERVAL;
  // GF behavior for 1 pass CBR, used when error_resilience is off.
  if (!cpi->oxcf.error_resilient_mode &&
      cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER &&
      cpi->oxcf.Mode == MODE_REALTIME)
    cpi->baseline_gf_interval = cpi->gf_interval_onepass_cbr;
#if (CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
  cpi->oxcf.token_partitions = 3;
#endif
  if (cpi->oxcf.token_partitions >= 0 && cpi->oxcf.token_partitions <= 3) {
    cm->multi_token_partition = (TOKEN_PARTITION)cpi->oxcf.token_partitions;
  }
  setup_features(cpi);
  if (!cpi->use_roi_static_threshold) {
    int i;
    for (i = 0; i < MAX_MB_SEGMENTS; ++i) {
      cpi->segment_encode_breakout[i] = cpi->oxcf.encode_breakout;
    }
  }
  /* At the moment the first order values may not be > MAXQ */
  if (cpi->oxcf.fixed_q > MAXQ) cpi->oxcf.fixed_q = MAXQ;
  /* local file playback mode == really big buffer */
  if (cpi->oxcf.end_usage == USAGE_LOCAL_FILE_PLAYBACK) {
    cpi->oxcf.starting_buffer_level = 60000;
    cpi->oxcf.optimal_buffer_level = 60000;
    cpi->oxcf.maximum_buffer_size = 240000;
    cpi->oxcf.starting_buffer_level_in_ms = 60000;
    cpi->oxcf.optimal_buffer_level_in_ms = 60000;
    cpi->oxcf.maximum_buffer_size_in_ms = 240000;
  }
  /* Convert target bandwidth from Kbit/s to Bit/s */
  cpi->oxcf.target_bandwidth *= 1000;
  /* Rescale buffer levels relative to the (now bits/s) bandwidth. */
  cpi->oxcf.starting_buffer_level = rescale(
      (int)cpi->oxcf.starting_buffer_level, cpi->oxcf.target_bandwidth, 1000);
  /* Set or reset optimal and maximum buffer levels.
   * A zero value means "derive from the bitrate" (1/8 second worth). */
  if (cpi->oxcf.optimal_buffer_level == 0) {
    cpi->oxcf.optimal_buffer_level = cpi->oxcf.target_bandwidth / 8;
  } else {
    cpi->oxcf.optimal_buffer_level = rescale(
        (int)cpi->oxcf.optimal_buffer_level, cpi->oxcf.target_bandwidth, 1000);
  }
  if (cpi->oxcf.maximum_buffer_size == 0) {
    cpi->oxcf.maximum_buffer_size = cpi->oxcf.target_bandwidth / 8;
  } else {
    cpi->oxcf.maximum_buffer_size = rescale((int)cpi->oxcf.maximum_buffer_size,
                                            cpi->oxcf.target_bandwidth, 1000);
  }
  // Under a configuration change, where maximum_buffer_size may change,
  // keep buffer level clipped to the maximum allowed buffer size.
  if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size) {
    cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
    cpi->buffer_level = cpi->bits_off_target;
  }
  /* Set up frame rate and related parameters rate control values. */
  vp8_new_framerate(cpi, cpi->framerate);
  /* Set absolute upper and lower quality limits */
  cpi->worst_quality = cpi->oxcf.worst_allowed_q;
  cpi->best_quality = cpi->oxcf.best_allowed_q;
  /* active values should only be modified if out of new range */
  if (cpi->active_worst_quality > cpi->oxcf.worst_allowed_q) {
    cpi->active_worst_quality = cpi->oxcf.worst_allowed_q;
  }
  /* less likely */
  else if (cpi->active_worst_quality < cpi->oxcf.best_allowed_q) {
    cpi->active_worst_quality = cpi->oxcf.best_allowed_q;
  }
  if (cpi->active_best_quality < cpi->oxcf.best_allowed_q) {
    cpi->active_best_quality = cpi->oxcf.best_allowed_q;
  }
  /* less likely */
  else if (cpi->active_best_quality > cpi->oxcf.worst_allowed_q) {
    cpi->active_best_quality = cpi->oxcf.worst_allowed_q;
  }
  cpi->buffered_mode = cpi->oxcf.optimal_buffer_level > 0;
  cpi->cq_target_quality = cpi->oxcf.cq_level;
  /* Only allow dropped frames in buffered mode */
  cpi->drop_frames_allowed = cpi->oxcf.allow_df && cpi->buffered_mode;
  cpi->target_bandwidth = cpi->oxcf.target_bandwidth;
  // Check if the number of temporal layers has changed, and if so reset the
  // pattern counter and set/initialize the temporal layer context for the
  // new layer configuration.
  if (cpi->oxcf.number_of_layers != prev_number_of_layers) {
    // If the number of temporal layers are changed we must start at the
    // base of the pattern cycle, so set the layer id to 0 and reset
    // the temporal pattern counter.
    if (cpi->temporal_layer_id > 0) {
      cpi->temporal_layer_id = 0;
    }
    cpi->temporal_pattern_counter = 0;
    reset_temporal_layer_change(cpi, oxcf, prev_number_of_layers);
  }
  if (!cpi->initial_width) {
    cpi->initial_width = cpi->oxcf.Width;
    cpi->initial_height = cpi->oxcf.Height;
  }
  cm->Width = cpi->oxcf.Width;
  cm->Height = cpi->oxcf.Height;
  assert(cm->Width <= cpi->initial_width);
  assert(cm->Height <= cpi->initial_height);
  /* TODO(jkoleszar): if an internal spatial resampling is active,
   * and we downsize the input image, maybe we should clear the
   * internal scale immediately rather than waiting for it to
   * correct.
   */
  /* VP8 sharpness level mapping 0-7 (vs 0-10 in general VPx dialogs) */
  if (cpi->oxcf.Sharpness > 7) cpi->oxcf.Sharpness = 7;
  cm->sharpness_level = cpi->oxcf.Sharpness;
  if (cm->horiz_scale != NORMAL || cm->vert_scale != NORMAL) {
    int hr, hs, vr, vs;
    Scale2Ratio(cm->horiz_scale, &hr, &hs);
    Scale2Ratio(cm->vert_scale, &vr, &vs);
    /* always go to the next whole number */
    cm->Width = (hs - 1 + cpi->oxcf.Width * hr) / hs;
    cm->Height = (vs - 1 + cpi->oxcf.Height * vr) / vs;
  }
  if (last_w != cpi->oxcf.Width || last_h != cpi->oxcf.Height) {
    cpi->force_next_frame_intra = 1;
  }
  /* Reallocate everything when the coded (16-aligned) size no longer
   * matches the existing last-frame buffer, or nothing was allocated. */
  if (((cm->Width + 15) & ~15) != cm->yv12_fb[cm->lst_fb_idx].y_width ||
      ((cm->Height + 15) & ~15) != cm->yv12_fb[cm->lst_fb_idx].y_height ||
      cm->yv12_fb[cm->lst_fb_idx].y_width == 0) {
    dealloc_raw_frame_buffers(cpi);
    alloc_raw_frame_buffers(cpi);
    vp8_alloc_compressor_data(cpi);
  }
  if (cpi->oxcf.fixed_q >= 0) {
    cpi->last_q[0] = cpi->oxcf.fixed_q;
    cpi->last_q[1] = cpi->oxcf.fixed_q;
  }
  cpi->Speed = cpi->oxcf.cpu_used;
  /* force to allowlag to 0 if lag_in_frames is 0; */
  if (cpi->oxcf.lag_in_frames == 0) {
    cpi->oxcf.allow_lag = 0;
  }
  /* Limit on lag buffers as these are not currently dynamically allocated */
  else if (cpi->oxcf.lag_in_frames > MAX_LAG_BUFFERS) {
    cpi->oxcf.lag_in_frames = MAX_LAG_BUFFERS;
  }
  /* YX Temp */
  cpi->alt_ref_source = NULL;
  cpi->is_src_frame_alt_ref = 0;
#if CONFIG_TEMPORAL_DENOISING
  if (cpi->oxcf.noise_sensitivity) {
    if (!cpi->denoiser.yv12_mc_running_avg.buffer_alloc) {
      int width = (cpi->oxcf.Width + 15) & ~15;
      int height = (cpi->oxcf.Height + 15) & ~15;
      if (vp8_denoiser_allocate(&cpi->denoiser, width, height, cm->mb_rows,
                                cm->mb_cols, cpi->oxcf.noise_sensitivity)) {
        vpx_internal_error(&cpi->common.error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate denoiser");
      }
    }
  }
#endif
#if 0
  /* Experimental RD Code */
  cpi->frame_distortion = 0;
  cpi->last_frame_distortion = 0;
#endif
}
  1438. #ifndef M_LOG2_E
  1439. #define M_LOG2_E 0.693147180559945309417
  1440. #endif
  1441. #define log2f(x) (log(x) / (float)M_LOG2_E)
  1442. static void cal_mvsadcosts(int *mvsadcost[2]) {
  1443. int i = 1;
  1444. mvsadcost[0][0] = 300;
  1445. mvsadcost[1][0] = 300;
  1446. do {
  1447. double z = 256 * (2 * (log2f(8 * i) + .6));
  1448. mvsadcost[0][i] = (int)z;
  1449. mvsadcost[1][i] = (int)z;
  1450. mvsadcost[0][-i] = (int)z;
  1451. mvsadcost[1][-i] = (int)z;
  1452. } while (++i <= mvfp_max);
  1453. }
/* Allocate and fully initialize a compressor instance from the given
 * configuration.  Returns the new VP8_COMP, or 0 on failure (including
 * any internal error raised through the setjmp error handler below,
 * in which case partially built state is torn down first).
 */
struct VP8_COMP *vp8_create_compressor(VP8_CONFIG *oxcf) {
  int i;
  VP8_COMP *cpi;
  VP8_COMMON *cm;
  cpi = vpx_memalign(32, sizeof(VP8_COMP));
  /* Check that the CPI instance is valid */
  if (!cpi) return 0;
  cm = &cpi->common;
  memset(cpi, 0, sizeof(VP8_COMP));
  /* Error handler: vpx_internal_error() during setup lands here. */
  if (setjmp(cm->error.jmp)) {
    cpi->common.error.setjmp = 0;
    vp8_remove_compressor(&cpi);
    return 0;
  }
  cpi->common.error.setjmp = 1;
  CHECK_MEM_ERROR(cpi->mb.ss, vpx_calloc(sizeof(search_site),
                                         (MAX_MVSEARCH_STEPS * 8) + 1));
  vp8_create_common(&cpi->common);
  init_config(cpi, oxcf);
  memcpy(cpi->base_skip_false_prob, vp8cx_base_skip_false_prob,
         sizeof(vp8cx_base_skip_false_prob));
  /* Counters and default entropy-related probabilities. */
  cpi->common.current_video_frame = 0;
  cpi->temporal_pattern_counter = 0;
  cpi->temporal_layer_id = -1;
  cpi->kf_overspend_bits = 0;
  cpi->kf_bitrate_adjustment = 0;
  cpi->frames_till_gf_update_due = 0;
  cpi->gf_overspend_bits = 0;
  cpi->non_gf_bitrate_adjustment = 0;
  cpi->prob_last_coded = 128;
  cpi->prob_gf_coded = 128;
  cpi->prob_intra_coded = 63;
  /* Prime the recent reference frame usage counters.
   * Hereafter they will be maintained as a sort of moving average
   */
  cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
  cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
  cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
  cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;
  /* Set reference frame sign bias for ALTREF frame to 1 (for now) */
  cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;
  cpi->twopass.gf_decay_rate = 0;
  cpi->baseline_gf_interval = DEFAULT_GF_INTERVAL;
  cpi->gold_is_last = 0;
  cpi->alt_is_last = 0;
  cpi->gold_is_alt = 0;
  cpi->active_map_enabled = 0;
  cpi->use_roi_static_threshold = 0;
#if 0
  /* Experimental code for lagged and one pass */
  /* Initialise one_pass GF frames stats */
  /* Update stats used for GF selection */
  if (cpi->pass == 0)
  {
    cpi->one_pass_frame_index = 0;
    for (i = 0; i < MAX_LAG_BUFFERS; ++i)
    {
      cpi->one_pass_frame_stats[i].frames_so_far = 0;
      cpi->one_pass_frame_stats[i].frame_intra_error = 0.0;
      cpi->one_pass_frame_stats[i].frame_coded_error = 0.0;
      cpi->one_pass_frame_stats[i].frame_pcnt_inter = 0.0;
      cpi->one_pass_frame_stats[i].frame_pcnt_motion = 0.0;
      cpi->one_pass_frame_stats[i].frame_mvr = 0.0;
      cpi->one_pass_frame_stats[i].frame_mvr_abs = 0.0;
      cpi->one_pass_frame_stats[i].frame_mvc = 0.0;
      cpi->one_pass_frame_stats[i].frame_mvc_abs = 0.0;
    }
  }
#endif
  cpi->mse_source_denoised = 0;
  /* Should we use the cyclic refresh method.
   * Currently there is no external control for this.
   * Enable it for error_resilient_mode, or for 1 pass CBR mode.
   */
  cpi->cyclic_refresh_mode_enabled =
      (cpi->oxcf.error_resilient_mode ||
       (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER &&
        cpi->oxcf.Mode <= 2));
  /* Per-frame refresh budget depends on the number of temporal layers. */
  cpi->cyclic_refresh_mode_max_mbs_perframe =
      (cpi->common.mb_rows * cpi->common.mb_cols) / 7;
  if (cpi->oxcf.number_of_layers == 1) {
    cpi->cyclic_refresh_mode_max_mbs_perframe =
        (cpi->common.mb_rows * cpi->common.mb_cols) / 20;
  } else if (cpi->oxcf.number_of_layers == 2) {
    cpi->cyclic_refresh_mode_max_mbs_perframe =
        (cpi->common.mb_rows * cpi->common.mb_cols) / 10;
  }
  cpi->cyclic_refresh_mode_index = 0;
  cpi->cyclic_refresh_q = 32;
  // GF behavior for 1 pass CBR, used when error_resilience is off.
  cpi->gf_update_onepass_cbr = 0;
  cpi->gf_noboost_onepass_cbr = 0;
  if (!cpi->oxcf.error_resilient_mode &&
      cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER && cpi->oxcf.Mode <= 2) {
    cpi->gf_update_onepass_cbr = 1;
    cpi->gf_noboost_onepass_cbr = 1;
    /* Interval derived from the refresh budget, clamped to [6, 40]. */
    cpi->gf_interval_onepass_cbr =
        cpi->cyclic_refresh_mode_max_mbs_perframe > 0
            ? (2 * (cpi->common.mb_rows * cpi->common.mb_cols) /
               cpi->cyclic_refresh_mode_max_mbs_perframe)
            : 10;
    cpi->gf_interval_onepass_cbr =
        VPXMIN(40, VPXMAX(6, cpi->gf_interval_onepass_cbr));
    cpi->baseline_gf_interval = cpi->gf_interval_onepass_cbr;
  }
  if (cpi->cyclic_refresh_mode_enabled) {
    CHECK_MEM_ERROR(cpi->cyclic_refresh_map,
                    vpx_calloc((cpi->common.mb_rows * cpi->common.mb_cols), 1));
  } else {
    cpi->cyclic_refresh_map = (signed char *)NULL;
  }
  /* Per-macroblock maps used by mode decision and rate control. */
  CHECK_MEM_ERROR(cpi->skin_map, vpx_calloc(cm->mb_rows * cm->mb_cols,
                                            sizeof(cpi->skin_map[0])));
  CHECK_MEM_ERROR(cpi->consec_zero_last,
                  vpx_calloc(cm->mb_rows * cm->mb_cols, 1));
  CHECK_MEM_ERROR(cpi->consec_zero_last_mvbias,
                  vpx_calloc((cpi->common.mb_rows * cpi->common.mb_cols), 1));
  /* Initialize the feed-forward activity masking. */
  cpi->activity_avg = 90 << 12;
  /* Give a sensible default for the first frame. */
  cpi->frames_since_key = 8;
  cpi->key_frame_frequency = cpi->oxcf.key_freq;
  cpi->this_key_frame_forced = 0;
  cpi->next_key_frame_forced = 0;
  cpi->source_alt_ref_pending = 0;
  cpi->source_alt_ref_active = 0;
  cpi->common.refresh_alt_ref_frame = 0;
  cpi->force_maxqp = 0;
  cpi->frames_since_last_drop_overshoot = 0;
  cpi->b_calculate_psnr = CONFIG_INTERNAL_STATS;
#if CONFIG_INTERNAL_STATS
  cpi->b_calculate_ssimg = 0;
  cpi->count = 0;
  cpi->bytes = 0;
  if (cpi->b_calculate_psnr) {
    cpi->total_sq_error = 0.0;
    cpi->total_sq_error2 = 0.0;
    cpi->total_y = 0.0;
    cpi->total_u = 0.0;
    cpi->total_v = 0.0;
    cpi->total = 0.0;
    cpi->totalp_y = 0.0;
    cpi->totalp_u = 0.0;
    cpi->totalp_v = 0.0;
    cpi->totalp = 0.0;
    cpi->tot_recode_hits = 0;
    cpi->summed_quality = 0;
    cpi->summed_weights = 0;
  }
#endif
  cpi->first_time_stamp_ever = 0x7FFFFFFF;
  cpi->frames_till_gf_update_due = 0;
  cpi->key_frame_count = 1;
  cpi->ni_av_qi = cpi->oxcf.worst_allowed_q;
  cpi->ni_tot_qi = 0;
  cpi->ni_frames = 0;
  cpi->total_byte_count = 0;
  cpi->drop_frame = 0;
  /* Rate-correction factors start at unity. */
  cpi->rate_correction_factor = 1.0;
  cpi->key_frame_rate_correction_factor = 1.0;
  cpi->gf_rate_correction_factor = 1.0;
  cpi->twopass.est_max_qcorrection_factor = 1.0;
  for (i = 0; i < KEY_FRAME_CONTEXT; ++i) {
    cpi->prior_key_frame_distance[i] = (int)cpi->output_framerate;
  }
  /* Optional debug dump files (compile-time switches). */
#ifdef OUTPUT_YUV_SRC
  yuv_file = fopen("bd.yuv", "ab");
#endif
#ifdef OUTPUT_YUV_DENOISED
  yuv_denoised_file = fopen("denoised.yuv", "ab");
#endif
#ifdef OUTPUT_YUV_SKINMAP
  yuv_skinmap_file = fopen("skinmap.yuv", "wb");
#endif
#if 0
  framepsnr = fopen("framepsnr.stt", "a");
  kf_list = fopen("kf_list.stt", "w");
#endif
  cpi->output_pkt_list = oxcf->output_pkt_list;
#if !CONFIG_REALTIME_ONLY
  if (cpi->pass == 1) {
    vp8_init_first_pass(cpi);
  } else if (cpi->pass == 2) {
    /* Wire up the caller-supplied first-pass stats buffer. */
    size_t packet_sz = sizeof(FIRSTPASS_STATS);
    int packets = (int)(oxcf->two_pass_stats_in.sz / packet_sz);
    cpi->twopass.stats_in_start = oxcf->two_pass_stats_in.buf;
    cpi->twopass.stats_in = cpi->twopass.stats_in_start;
    cpi->twopass.stats_in_end =
        (void *)((char *)cpi->twopass.stats_in + (packets - 1) * packet_sz);
    vp8_init_second_pass(cpi);
  }
#endif
  if (cpi->compressor_speed == 2) {
    cpi->avg_encode_time = 0;
    cpi->avg_pick_mode_time = 0;
  }
  vp8_set_speed_features(cpi);
  /* Set starting values of RD threshold multipliers (128 = *1) */
  for (i = 0; i < MAX_MODES; ++i) {
    cpi->mb.rd_thresh_mult[i] = 128;
  }
#if CONFIG_MULTITHREAD
  if (vp8cx_create_encoder_threads(cpi)) {
    vp8_remove_compressor(&cpi);
    return 0;
  }
#endif
  /* Per-block-size SAD/variance function pointers used by motion search. */
  cpi->fn_ptr[BLOCK_16X16].sdf = vpx_sad16x16;
  cpi->fn_ptr[BLOCK_16X16].vf = vpx_variance16x16;
  cpi->fn_ptr[BLOCK_16X16].svf = vpx_sub_pixel_variance16x16;
  cpi->fn_ptr[BLOCK_16X16].sdx3f = vpx_sad16x16x3;
  cpi->fn_ptr[BLOCK_16X16].sdx8f = vpx_sad16x16x8;
  cpi->fn_ptr[BLOCK_16X16].sdx4df = vpx_sad16x16x4d;
  cpi->fn_ptr[BLOCK_16X8].sdf = vpx_sad16x8;
  cpi->fn_ptr[BLOCK_16X8].vf = vpx_variance16x8;
  cpi->fn_ptr[BLOCK_16X8].svf = vpx_sub_pixel_variance16x8;
  cpi->fn_ptr[BLOCK_16X8].sdx3f = vpx_sad16x8x3;
  cpi->fn_ptr[BLOCK_16X8].sdx8f = vpx_sad16x8x8;
  cpi->fn_ptr[BLOCK_16X8].sdx4df = vpx_sad16x8x4d;
  cpi->fn_ptr[BLOCK_8X16].sdf = vpx_sad8x16;
  cpi->fn_ptr[BLOCK_8X16].vf = vpx_variance8x16;
  cpi->fn_ptr[BLOCK_8X16].svf = vpx_sub_pixel_variance8x16;
  cpi->fn_ptr[BLOCK_8X16].sdx3f = vpx_sad8x16x3;
  cpi->fn_ptr[BLOCK_8X16].sdx8f = vpx_sad8x16x8;
  cpi->fn_ptr[BLOCK_8X16].sdx4df = vpx_sad8x16x4d;
  cpi->fn_ptr[BLOCK_8X8].sdf = vpx_sad8x8;
  cpi->fn_ptr[BLOCK_8X8].vf = vpx_variance8x8;
  cpi->fn_ptr[BLOCK_8X8].svf = vpx_sub_pixel_variance8x8;
  cpi->fn_ptr[BLOCK_8X8].sdx3f = vpx_sad8x8x3;
  cpi->fn_ptr[BLOCK_8X8].sdx8f = vpx_sad8x8x8;
  cpi->fn_ptr[BLOCK_8X8].sdx4df = vpx_sad8x8x4d;
  cpi->fn_ptr[BLOCK_4X4].sdf = vpx_sad4x4;
  cpi->fn_ptr[BLOCK_4X4].vf = vpx_variance4x4;
  cpi->fn_ptr[BLOCK_4X4].svf = vpx_sub_pixel_variance4x4;
  cpi->fn_ptr[BLOCK_4X4].sdx3f = vpx_sad4x4x3;
  cpi->fn_ptr[BLOCK_4X4].sdx8f = vpx_sad4x4x8;
  cpi->fn_ptr[BLOCK_4X4].sdx4df = vpx_sad4x4x4d;
#if ARCH_X86 || ARCH_X86_64
  cpi->fn_ptr[BLOCK_16X16].copymem = vp8_copy32xn;
  cpi->fn_ptr[BLOCK_16X8].copymem = vp8_copy32xn;
  cpi->fn_ptr[BLOCK_8X16].copymem = vp8_copy32xn;
  cpi->fn_ptr[BLOCK_8X8].copymem = vp8_copy32xn;
  cpi->fn_ptr[BLOCK_4X4].copymem = vp8_copy32xn;
#endif
  cpi->full_search_sad = vp8_full_search_sad;
  cpi->diamond_search_sad = vp8_diamond_search_sad;
  cpi->refining_search_sad = vp8_refining_search_sad;
  /* make sure frame 1 is okay */
  cpi->mb.error_bins[0] = cpi->common.MBs;
  /* vp8cx_init_quantizer() is first called here. Add check in
   * vp8cx_frame_init_quantizer() so that vp8cx_init_quantizer is only
   * called later when needed. This will avoid unnecessary calls of
   * vp8cx_init_quantizer() for every frame.
   */
  vp8cx_init_quantizer(cpi);
  vp8_loop_filter_init(cm);
  cpi->common.error.setjmp = 0;
#if CONFIG_MULTI_RES_ENCODING
  /* Calculate # of MBs in a row in lower-resolution level image. */
  if (cpi->oxcf.mr_encoder_id > 0) vp8_cal_low_res_mb_cols(cpi);
#endif
  /* setup RD costs to MACROBLOCK struct
   * (pointers are offset so negative MV components index validly). */
  cpi->mb.mvcost[0] = &cpi->rd_costs.mvcosts[0][mv_max + 1];
  cpi->mb.mvcost[1] = &cpi->rd_costs.mvcosts[1][mv_max + 1];
  cpi->mb.mvsadcost[0] = &cpi->rd_costs.mvsadcosts[0][mvfp_max + 1];
  cpi->mb.mvsadcost[1] = &cpi->rd_costs.mvsadcosts[1][mvfp_max + 1];
  cal_mvsadcosts(cpi->mb.mvsadcost);
  cpi->mb.mbmode_cost = cpi->rd_costs.mbmode_cost;
  cpi->mb.intra_uv_mode_cost = cpi->rd_costs.intra_uv_mode_cost;
  cpi->mb.bmode_costs = cpi->rd_costs.bmode_costs;
  cpi->mb.inter_bmode_costs = cpi->rd_costs.inter_bmode_costs;
  cpi->mb.token_costs = cpi->rd_costs.token_costs;
  /* setup block ptrs & offsets */
  vp8_setup_block_ptrs(&cpi->mb);
  vp8_setup_block_dptrs(&cpi->mb.e_mbd);
  return cpi;
}
/* Tears down a compressor instance created by vp8_create_compressor().
 *
 * If at least one frame was encoded, performs end-of-stream work first
 * (two-pass finalization and the optional compile-time stats dumps),
 * then stops worker threads, frees the denoiser, releases every
 * encoder-owned allocation and NULLs the caller's handle.
 * Safe to call with *comp == NULL.
 */
void vp8_remove_compressor(VP8_COMP **comp) {
  VP8_COMP *cpi = *comp;

  if (!cpi) return;

  /* NOTE(review): the `cpi &&` below is redundant after the early return
   * above; kept byte-for-byte. */
  if (cpi && (cpi->common.current_video_frame > 0)) {
#if !CONFIG_REALTIME_ONLY
    if (cpi->pass == 2) {
      vp8_end_second_pass(cpi);
    }
#endif

#if CONFIG_INTERNAL_STATS
    /* Append whole-encode bitrate/PSNR/SSIM summary rows to "opsnr.stt". */
    if (cpi->pass != 1) {
      FILE *f = fopen("opsnr.stt", "a");
      double time_encoded =
          (cpi->last_end_time_stamp_seen - cpi->first_time_stamp_ever) /
          10000000.000;
      double dr = (double)cpi->bytes * 8.0 / 1000.0 / time_encoded;

      if (cpi->b_calculate_psnr) {
        if (cpi->oxcf.number_of_layers > 1) {
          int i;

          /* One summary row per temporal layer. */
          fprintf(f,
                  "Layer\tBitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t"
                  "GLPsnrP\tVPXSSIM\n");
          for (i = 0; i < (int)cpi->oxcf.number_of_layers; ++i) {
            double dr =
                (double)cpi->bytes_in_layer[i] * 8.0 / 1000.0 / time_encoded;
            /* 3/2 samples per pixel: full-size Y plus quarter-size U, V. */
            double samples = 3.0 / 2 * cpi->frames_in_layer[i] *
                             cpi->common.Width * cpi->common.Height;
            double total_psnr =
                vpx_sse_to_psnr(samples, 255.0, cpi->total_error2[i]);
            double total_psnr2 =
                vpx_sse_to_psnr(samples, 255.0, cpi->total_error2_p[i]);
            double total_ssim =
                100 * pow(cpi->sum_ssim[i] / cpi->sum_weights[i], 8.0);

            fprintf(f,
                    "%5d\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
                    "%7.3f\t%7.3f\n",
                    i, dr, cpi->sum_psnr[i] / cpi->frames_in_layer[i],
                    total_psnr, cpi->sum_psnr_p[i] / cpi->frames_in_layer[i],
                    total_psnr2, total_ssim);
          }
        } else {
          /* Single-layer summary for the whole clip. */
          double samples =
              3.0 / 2 * cpi->count * cpi->common.Width * cpi->common.Height;
          double total_psnr =
              vpx_sse_to_psnr(samples, 255.0, cpi->total_sq_error);
          double total_psnr2 =
              vpx_sse_to_psnr(samples, 255.0, cpi->total_sq_error2);
          double total_ssim =
              100 * pow(cpi->summed_quality / cpi->summed_weights, 8.0);

          fprintf(f,
                  "Bitrate\tAVGPsnr\tGLBPsnr\tAVPsnrP\t"
                  "GLPsnrP\tVPXSSIM\n");
          fprintf(f,
                  "%7.3f\t%7.3f\t%7.3f\t%7.3f\t%7.3f\t"
                  "%7.3f\n",
                  dr, cpi->total / cpi->count, total_psnr,
                  cpi->totalp / cpi->count, total_psnr2, total_ssim);
        }
      }

      fclose(f);
#if 0
      f = fopen("qskip.stt", "a");
      fprintf(f, "minq:%d -maxq:%d skiptrue:skipfalse = %d:%d\n", cpi->oxcf.best_allowed_q, cpi->oxcf.worst_allowed_q, skiptruecount, skipfalsecount);
      fclose(f);
#endif
    }
#endif

#ifdef SPEEDSTATS
    /* Dump frames-per-speed-setting counters (realtime mode only). */
    if (cpi->compressor_speed == 2) {
      int i;
      FILE *f = fopen("cxspeed.stt", "a");
      cnt_pm /= cpi->common.MBs;

      for (i = 0; i < 16; ++i) fprintf(f, "%5d", frames_at_speed[i]);

      fprintf(f, "\n");
      fclose(f);
    }
#endif

#ifdef MODE_STATS
    /* Dump accumulated intra/inter prediction-mode usage counters. */
    {
      extern int count_mb_seg[4];
      FILE *f = fopen("modes.stt", "a");
      double dr = (double)cpi->framerate * (double)bytes * (double)8 /
                  (double)count / (double)1000;
      fprintf(f, "intra_mode in Intra Frames:\n");
      fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d\n", y_modes[0], y_modes[1],
              y_modes[2], y_modes[3], y_modes[4]);
      fprintf(f, "UV:%8d, %8d, %8d, %8d\n", uv_modes[0], uv_modes[1],
              uv_modes[2], uv_modes[3]);
      fprintf(f, "B: ");
      {
        int i;

        for (i = 0; i < 10; ++i) fprintf(f, "%8d, ", b_modes[i]);

        fprintf(f, "\n");
      }

      fprintf(f, "Modes in Inter Frames:\n");
      fprintf(f, "Y: %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d, %8d\n",
              inter_y_modes[0], inter_y_modes[1], inter_y_modes[2],
              inter_y_modes[3], inter_y_modes[4], inter_y_modes[5],
              inter_y_modes[6], inter_y_modes[7], inter_y_modes[8],
              inter_y_modes[9]);
      fprintf(f, "UV:%8d, %8d, %8d, %8d\n", inter_uv_modes[0],
              inter_uv_modes[1], inter_uv_modes[2], inter_uv_modes[3]);
      fprintf(f, "B: ");
      {
        int i;

        for (i = 0; i < 15; ++i) fprintf(f, "%8d, ", inter_b_modes[i]);

        fprintf(f, "\n");
      }
      fprintf(f, "P:%8d, %8d, %8d, %8d\n", count_mb_seg[0], count_mb_seg[1],
              count_mb_seg[2], count_mb_seg[3]);
      fprintf(f, "PB:%8d, %8d, %8d, %8d\n", inter_b_modes[LEFT4X4],
              inter_b_modes[ABOVE4X4], inter_b_modes[ZERO4X4],
              inter_b_modes[NEW4X4]);
      fclose(f);
    }
#endif

#if defined(SECTIONBITS_OUTPUT)
    if (0) {
      int i;
      FILE *f = fopen("tokenbits.stt", "a");

      for (i = 0; i < 28; ++i) fprintf(f, "%8d", (int)(Sectionbits[i] / 256));

      fprintf(f, "\n");
      fclose(f);
    }
#endif

#if 0
    {
      printf("\n_pick_loop_filter_level:%d\n", cpi->time_pick_lpf / 1000);
      printf("\n_frames recive_data encod_mb_row compress_frame Total\n");
      printf("%6d %10ld %10ld %10ld %10ld\n", cpi->common.current_video_frame, cpi->time_receive_data / 1000, cpi->time_encode_mb_row / 1000, cpi->time_compress_data / 1000, (cpi->time_receive_data + cpi->time_compress_data) / 1000);
    }
#endif
  }

#if CONFIG_MULTITHREAD
  vp8cx_remove_encoder_threads(cpi);
#endif

#if CONFIG_TEMPORAL_DENOISING
  vp8_denoiser_free(&cpi->denoiser);
#endif
  /* Release every encoder-owned buffer, then the instance itself. */
  dealloc_compressor_data(cpi);
  vpx_free(cpi->mb.ss);
  vpx_free(cpi->tok);
  vpx_free(cpi->skin_map);
  vpx_free(cpi->cyclic_refresh_map);
  vpx_free(cpi->consec_zero_last);
  vpx_free(cpi->consec_zero_last_mvbias);

  vp8_remove_common(&cpi->common);
  vpx_free(cpi);
  *comp = 0;

#ifdef OUTPUT_YUV_SRC
  fclose(yuv_file);
#endif
#ifdef OUTPUT_YUV_DENOISED
  fclose(yuv_denoised_file);
#endif
#ifdef OUTPUT_YUV_SKINMAP
  fclose(yuv_skinmap_file);
#endif

#if 0
  if (keyfile)
    fclose(keyfile);
  if (framepsnr)
    fclose(framepsnr);
  if (kf_list)
    fclose(kf_list);
#endif
}
  1898. static uint64_t calc_plane_error(unsigned char *orig, int orig_stride,
  1899. unsigned char *recon, int recon_stride,
  1900. unsigned int cols, unsigned int rows) {
  1901. unsigned int row, col;
  1902. uint64_t total_sse = 0;
  1903. int diff;
  1904. for (row = 0; row + 16 <= rows; row += 16) {
  1905. for (col = 0; col + 16 <= cols; col += 16) {
  1906. unsigned int sse;
  1907. vpx_mse16x16(orig + col, orig_stride, recon + col, recon_stride, &sse);
  1908. total_sse += sse;
  1909. }
  1910. /* Handle odd-sized width */
  1911. if (col < cols) {
  1912. unsigned int border_row, border_col;
  1913. unsigned char *border_orig = orig;
  1914. unsigned char *border_recon = recon;
  1915. for (border_row = 0; border_row < 16; ++border_row) {
  1916. for (border_col = col; border_col < cols; ++border_col) {
  1917. diff = border_orig[border_col] - border_recon[border_col];
  1918. total_sse += diff * diff;
  1919. }
  1920. border_orig += orig_stride;
  1921. border_recon += recon_stride;
  1922. }
  1923. }
  1924. orig += orig_stride * 16;
  1925. recon += recon_stride * 16;
  1926. }
  1927. /* Handle odd-sized height */
  1928. for (; row < rows; ++row) {
  1929. for (col = 0; col < cols; ++col) {
  1930. diff = orig[col] - recon[col];
  1931. total_sse += diff * diff;
  1932. }
  1933. orig += orig_stride;
  1934. recon += recon_stride;
  1935. }
  1936. vpx_clear_system_state();
  1937. return total_sse;
  1938. }
/* Emits a VPX_CODEC_PSNR_PKT for the frame just encoded.
 *
 * sse[0]/samples[0] accumulate the whole frame (Y + U + V); indices
 * 1..3 hold the individual Y, U and V plane figures.  PSNR values are
 * computed against a peak sample value of 255.
 */
static void generate_psnr_packet(VP8_COMP *cpi) {
  YV12_BUFFER_CONFIG *orig = cpi->Source;
  YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
  struct vpx_codec_cx_pkt pkt;
  uint64_t sse;
  int i;
  unsigned int width = cpi->common.Width;
  unsigned int height = cpi->common.Height;

  pkt.kind = VPX_CODEC_PSNR_PKT;
  /* Luma plane at full resolution. */
  sse = calc_plane_error(orig->y_buffer, orig->y_stride, recon->y_buffer,
                         recon->y_stride, width, height);
  pkt.data.psnr.sse[0] = sse;
  pkt.data.psnr.sse[1] = sse;
  pkt.data.psnr.samples[0] = width * height;
  pkt.data.psnr.samples[1] = width * height;

  /* Chroma planes are half resolution, rounded up for odd dimensions. */
  width = (width + 1) / 2;
  height = (height + 1) / 2;

  sse = calc_plane_error(orig->u_buffer, orig->uv_stride, recon->u_buffer,
                         recon->uv_stride, width, height);
  pkt.data.psnr.sse[0] += sse;
  pkt.data.psnr.sse[2] = sse;
  pkt.data.psnr.samples[0] += width * height;
  pkt.data.psnr.samples[2] = width * height;

  sse = calc_plane_error(orig->v_buffer, orig->uv_stride, recon->v_buffer,
                         recon->uv_stride, width, height);
  pkt.data.psnr.sse[0] += sse;
  pkt.data.psnr.sse[3] = sse;
  pkt.data.psnr.samples[0] += width * height;
  pkt.data.psnr.samples[3] = width * height;

  for (i = 0; i < 4; ++i) {
    pkt.data.psnr.psnr[i] = vpx_sse_to_psnr(pkt.data.psnr.samples[i], 255.0,
                                            (double)(pkt.data.psnr.sse[i]));
  }

  vpx_codec_pkt_list_add(cpi->output_pkt_list, &pkt);
}
  1974. int vp8_use_as_reference(VP8_COMP *cpi, int ref_frame_flags) {
  1975. if (ref_frame_flags > 7) return -1;
  1976. cpi->ref_frame_flags = ref_frame_flags;
  1977. return 0;
  1978. }
  1979. int vp8_update_reference(VP8_COMP *cpi, int ref_frame_flags) {
  1980. if (ref_frame_flags > 7) return -1;
  1981. cpi->common.refresh_golden_frame = 0;
  1982. cpi->common.refresh_alt_ref_frame = 0;
  1983. cpi->common.refresh_last_frame = 0;
  1984. if (ref_frame_flags & VP8_LAST_FRAME) cpi->common.refresh_last_frame = 1;
  1985. if (ref_frame_flags & VP8_GOLD_FRAME) cpi->common.refresh_golden_frame = 1;
  1986. if (ref_frame_flags & VP8_ALTR_FRAME) cpi->common.refresh_alt_ref_frame = 1;
  1987. cpi->ext_refresh_frame_flags_pending = 1;
  1988. return 0;
  1989. }
  1990. int vp8_get_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag,
  1991. YV12_BUFFER_CONFIG *sd) {
  1992. VP8_COMMON *cm = &cpi->common;
  1993. int ref_fb_idx;
  1994. if (ref_frame_flag == VP8_LAST_FRAME) {
  1995. ref_fb_idx = cm->lst_fb_idx;
  1996. } else if (ref_frame_flag == VP8_GOLD_FRAME) {
  1997. ref_fb_idx = cm->gld_fb_idx;
  1998. } else if (ref_frame_flag == VP8_ALTR_FRAME) {
  1999. ref_fb_idx = cm->alt_fb_idx;
  2000. } else {
  2001. return -1;
  2002. }
  2003. vp8_yv12_copy_frame(&cm->yv12_fb[ref_fb_idx], sd);
  2004. return 0;
  2005. }
  2006. int vp8_set_reference(VP8_COMP *cpi, enum vpx_ref_frame_type ref_frame_flag,
  2007. YV12_BUFFER_CONFIG *sd) {
  2008. VP8_COMMON *cm = &cpi->common;
  2009. int ref_fb_idx;
  2010. if (ref_frame_flag == VP8_LAST_FRAME) {
  2011. ref_fb_idx = cm->lst_fb_idx;
  2012. } else if (ref_frame_flag == VP8_GOLD_FRAME) {
  2013. ref_fb_idx = cm->gld_fb_idx;
  2014. } else if (ref_frame_flag == VP8_ALTR_FRAME) {
  2015. ref_fb_idx = cm->alt_fb_idx;
  2016. } else {
  2017. return -1;
  2018. }
  2019. vp8_yv12_copy_frame(sd, &cm->yv12_fb[ref_fb_idx]);
  2020. return 0;
  2021. }
  2022. int vp8_update_entropy(VP8_COMP *cpi, int update) {
  2023. VP8_COMMON *cm = &cpi->common;
  2024. cm->refresh_entropy_probs = update;
  2025. return 0;
  2026. }
  2027. static void scale_and_extend_source(YV12_BUFFER_CONFIG *sd, VP8_COMP *cpi) {
  2028. VP8_COMMON *cm = &cpi->common;
  2029. /* are we resizing the image */
  2030. if (cm->horiz_scale != 0 || cm->vert_scale != 0) {
  2031. #if CONFIG_SPATIAL_RESAMPLING
  2032. int hr, hs, vr, vs;
  2033. int tmp_height;
  2034. if (cm->vert_scale == 3) {
  2035. tmp_height = 9;
  2036. } else {
  2037. tmp_height = 11;
  2038. }
  2039. Scale2Ratio(cm->horiz_scale, &hr, &hs);
  2040. Scale2Ratio(cm->vert_scale, &vr, &vs);
  2041. vpx_scale_frame(sd, &cpi->scaled_source, cm->temp_scale_frame.y_buffer,
  2042. tmp_height, hs, hr, vs, vr, 0);
  2043. vp8_yv12_extend_frame_borders(&cpi->scaled_source);
  2044. cpi->Source = &cpi->scaled_source;
  2045. #endif
  2046. } else {
  2047. cpi->Source = sd;
  2048. }
  2049. }
/* One-pass CBR spatial-resampling decision, evaluated at key frames.
 *
 * Steps the horizontal/vertical scale factors down a notch when the rate
 * buffer is below the resample-down watermark, or back up when above the
 * resample-up watermark.  Returns 1 if the coded frame size changed (the
 * compressor buffers were reallocated and the source rescaled), else 0.
 * Compiled to a no-op without CONFIG_SPATIAL_RESAMPLING.
 */
static int resize_key_frame(VP8_COMP *cpi) {
#if CONFIG_SPATIAL_RESAMPLING
  VP8_COMMON *cm = &cpi->common;

  /* Do we need to apply resampling for one pass cbr.
   * In one pass this is more limited than in two pass cbr.
   * The test and any change is only made once per key frame sequence.
   */
  if (cpi->oxcf.allow_spatial_resampling &&
      (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)) {
    int hr, hs, vr, vs;
    int new_width, new_height;

    /* If we are below the resample DOWN watermark then scale down a
     * notch.
     */
    if (cpi->buffer_level < (cpi->oxcf.resample_down_water_mark *
                             cpi->oxcf.optimal_buffer_level / 100)) {
      cm->horiz_scale =
          (cm->horiz_scale < ONETWO) ? cm->horiz_scale + 1 : ONETWO;
      cm->vert_scale = (cm->vert_scale < ONETWO) ? cm->vert_scale + 1 : ONETWO;
    }
    /* Should we now start scaling back up */
    else if (cpi->buffer_level > (cpi->oxcf.resample_up_water_mark *
                                  cpi->oxcf.optimal_buffer_level / 100)) {
      cm->horiz_scale =
          (cm->horiz_scale > NORMAL) ? cm->horiz_scale - 1 : NORMAL;
      cm->vert_scale = (cm->vert_scale > NORMAL) ? cm->vert_scale - 1 : NORMAL;
    }

    /* Get the new height and width */
    Scale2Ratio(cm->horiz_scale, &hr, &hs);
    Scale2Ratio(cm->vert_scale, &vr, &vs);
    /* Round up so no source pixels are dropped. */
    new_width = ((hs - 1) + (cpi->oxcf.Width * hr)) / hs;
    new_height = ((vs - 1) + (cpi->oxcf.Height * vr)) / vs;

    /* If the image size has changed we need to reallocate the buffers
     * and resample the source image
     */
    if ((cm->Width != new_width) || (cm->Height != new_height)) {
      cm->Width = new_width;
      cm->Height = new_height;
      vp8_alloc_compressor_data(cpi);
      scale_and_extend_source(cpi->un_scaled_source, cpi);
      return 1;
    }
  }
#endif
  return 0;
}
/* Book-keeping after an alt-ref (ARF) update frame has been encoded:
 * charges its bits to gf_overspend_bits for gradual recovery over the
 * GF interval, resets the GF-usage map, and marks the ARF active. */
static void update_alt_ref_frame_stats(VP8_COMP *cpi) {
  VP8_COMMON *cm = &cpi->common;

  /* Select an interval before next GF or altref */
  if (!cpi->auto_gold) cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL;

  if ((cpi->pass != 2) && cpi->frames_till_gf_update_due) {
    cpi->current_gf_interval = cpi->frames_till_gf_update_due;

    /* Set the bits per frame that we should try and recover in
     * subsequent inter frames to account for the extra GF spend...
     * note that this does not apply for GF updates that occur
     * coincident with a key frame as the extra cost of key frames is
     * dealt with elsewhere.
     */
    cpi->gf_overspend_bits += cpi->projected_frame_size;
    cpi->non_gf_bitrate_adjustment =
        cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
  }

  /* Update data structure that monitors level of reference to last GF */
  memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
  cpi->gf_active_count = cm->mb_rows * cm->mb_cols;

  /* this frame refreshes means next frames don't unless specified by user */
  cpi->frames_since_golden = 0;

  /* Clear the alternate reference update pending flag. */
  cpi->source_alt_ref_pending = 0;

  /* Set the alternate reference frame active flag */
  cpi->source_alt_ref_active = 1;
}
/* Book-keeping after each encoded frame for golden-frame rate control:
 * on a GF refresh, charges GF overspend for later recovery and resets
 * the GF-usage counters; otherwise decrements the GF/ARF countdowns and
 * accumulates per-reference usage statistics. */
static void update_golden_frame_stats(VP8_COMP *cpi) {
  VP8_COMMON *cm = &cpi->common;

  /* Update the Golden frame usage counts. */
  if (cm->refresh_golden_frame) {
    /* Select an interval before next GF */
    if (!cpi->auto_gold) cpi->frames_till_gf_update_due = DEFAULT_GF_INTERVAL;

    if ((cpi->pass != 2) && (cpi->frames_till_gf_update_due > 0)) {
      cpi->current_gf_interval = cpi->frames_till_gf_update_due;

      /* Set the bits per frame that we should try and recover in
       * subsequent inter frames to account for the extra GF spend...
       * note that this does not apply for GF updates that occur
       * coincident with a key frame as the extra cost of key frames
       * is dealt with elsewhere.
       */
      if ((cm->frame_type != KEY_FRAME) && !cpi->source_alt_ref_active) {
        /* Calculate GF bits to be recovered
         * Projected size - av frame bits available for inter
         * frames for clip as a whole
         */
        cpi->gf_overspend_bits +=
            (cpi->projected_frame_size - cpi->inter_frame_target);
      }

      cpi->non_gf_bitrate_adjustment =
          cpi->gf_overspend_bits / cpi->frames_till_gf_update_due;
    }

    /* Update data structure that monitors level of reference to last GF */
    memset(cpi->gf_active_flags, 1, (cm->mb_rows * cm->mb_cols));
    cpi->gf_active_count = cm->mb_rows * cm->mb_cols;

    /* this frame refreshes means next frames don't unless specified by
     * user
     */
    cm->refresh_golden_frame = 0;
    cpi->frames_since_golden = 0;

    /* Reset per-reference usage counters for the new GF group. */
    cpi->recent_ref_frame_usage[INTRA_FRAME] = 1;
    cpi->recent_ref_frame_usage[LAST_FRAME] = 1;
    cpi->recent_ref_frame_usage[GOLDEN_FRAME] = 1;
    cpi->recent_ref_frame_usage[ALTREF_FRAME] = 1;

    /* ******** Fixed Q test code only ************ */
    /* If we are going to use the ALT reference for the next group of
     * frames set a flag to say so.
     */
    if (cpi->oxcf.fixed_q >= 0 && cpi->oxcf.play_alternate &&
        !cpi->common.refresh_alt_ref_frame) {
      cpi->source_alt_ref_pending = 1;
      cpi->frames_till_gf_update_due = cpi->baseline_gf_interval;
    }

    if (!cpi->source_alt_ref_pending) cpi->source_alt_ref_active = 0;

    /* Decrement count down till next gf */
    if (cpi->frames_till_gf_update_due > 0) cpi->frames_till_gf_update_due--;

  } else if (!cpi->common.refresh_alt_ref_frame) {
    /* Decrement count down till next gf */
    if (cpi->frames_till_gf_update_due > 0) cpi->frames_till_gf_update_due--;

    if (cpi->frames_till_alt_ref_frame) cpi->frames_till_alt_ref_frame--;

    cpi->frames_since_golden++;

    /* Accumulate reference usage from the second frame after the GF on. */
    if (cpi->frames_since_golden > 1) {
      cpi->recent_ref_frame_usage[INTRA_FRAME] +=
          cpi->mb.count_mb_ref_frame_usage[INTRA_FRAME];
      cpi->recent_ref_frame_usage[LAST_FRAME] +=
          cpi->mb.count_mb_ref_frame_usage[LAST_FRAME];
      cpi->recent_ref_frame_usage[GOLDEN_FRAME] +=
          cpi->mb.count_mb_ref_frame_usage[GOLDEN_FRAME];
      cpi->recent_ref_frame_usage[ALTREF_FRAME] +=
          cpi->mb.count_mb_ref_frame_usage[ALTREF_FRAME];
    }
  }
}
/* This function updates the reference frame probability estimates that
 * will be used during mode selection
 *
 * Key frames force fixed probabilities; otherwise the probabilities are
 * nudged based on which reference was just refreshed and how long ago
 * the golden frame was updated (single-layer encodes only).
 */
static void update_rd_ref_frame_probs(VP8_COMP *cpi) {
  VP8_COMMON *cm = &cpi->common;

  const int *const rfct = cpi->mb.count_mb_ref_frame_usage;
  const int rf_intra = rfct[INTRA_FRAME];
  const int rf_inter =
      rfct[LAST_FRAME] + rfct[GOLDEN_FRAME] + rfct[ALTREF_FRAME];

  if (cm->frame_type == KEY_FRAME) {
    cpi->prob_intra_coded = 255;
    cpi->prob_last_coded = 128;
    cpi->prob_gf_coded = 128;
  } else if (!(rf_intra + rf_inter)) {
    /* No MB reference counts available: fall back to defaults. */
    cpi->prob_intra_coded = 63;
    cpi->prob_last_coded = 128;
    cpi->prob_gf_coded = 128;
  }

  /* update reference frame costs since we can do better than what we got
   * last frame.
   */
  if (cpi->oxcf.number_of_layers == 1) {
    if (cpi->common.refresh_alt_ref_frame) {
      cpi->prob_intra_coded += 40;
      if (cpi->prob_intra_coded > 255) cpi->prob_intra_coded = 255;
      cpi->prob_last_coded = 200;
      cpi->prob_gf_coded = 1;
    } else if (cpi->frames_since_golden == 0) {
      cpi->prob_last_coded = 214;
    } else if (cpi->frames_since_golden == 1) {
      cpi->prob_last_coded = 192;
      cpi->prob_gf_coded = 220;
    } else if (cpi->source_alt_ref_active) {
      cpi->prob_gf_coded -= 20;
      if (cpi->prob_gf_coded < 10) cpi->prob_gf_coded = 10;
    }

    if (!cpi->source_alt_ref_active) cpi->prob_gf_coded = 255;
  }
}
  2227. #if !CONFIG_REALTIME_ONLY
/* 1 = key, 0 = inter */
/* Heuristic key-frame decision (non-first-pass), driven by the fraction
 * of intra-coded MBs in this frame relative to the previous frame, and
 * in fast realtime mode by relative changes in intra/prediction error. */
static int decide_key_frame(VP8_COMP *cpi) {
  VP8_COMMON *cm = &cpi->common;

  int code_key_frame = 0;

  cpi->kf_boost = 0;

  /* At very high speed settings never insert key frames from here. */
  if (cpi->Speed > 11) return 0;

  /* Clear down mmx registers */
  vpx_clear_system_state();

  if ((cpi->compressor_speed == 2) && (cpi->Speed >= 5) && (cpi->sf.RD == 0)) {
    /* Relative change in intra and prediction error since last frame. */
    double change = 1.0 *
                    abs((int)(cpi->mb.intra_error - cpi->last_intra_error)) /
                    (1 + cpi->last_intra_error);
    double change2 =
        1.0 *
        abs((int)(cpi->mb.prediction_error - cpi->last_prediction_error)) /
        (1 + cpi->last_prediction_error);
    double minerror = cm->MBs * 256;

    cpi->last_intra_error = cpi->mb.intra_error;
    cpi->last_prediction_error = cpi->mb.prediction_error;

    /* Key frame when intra error is low relative to prediction error,
     * prediction error is non-trivial and either error jumped sharply. */
    if (10 * cpi->mb.intra_error / (1 + cpi->mb.prediction_error) < 15 &&
        cpi->mb.prediction_error > minerror &&
        (change > .25 || change2 > .25)) {
      /*(change > 1.4 || change < .75)&& cpi->this_frame_percent_intra >
       * cpi->last_frame_percent_intra + 3*/
      return 1;
    }

    return 0;
  }

  /* If the following are true we might as well code a key frame */
  if (((cpi->this_frame_percent_intra == 100) &&
       (cpi->this_frame_percent_intra > (cpi->last_frame_percent_intra + 2))) ||
      ((cpi->this_frame_percent_intra > 95) &&
       (cpi->this_frame_percent_intra >=
        (cpi->last_frame_percent_intra + 5)))) {
    code_key_frame = 1;
  }
  /* in addition if the following are true and this is not a golden frame
   * then code a key frame Note that on golden frames there often seems
   * to be a pop in intra usage anyway hence this restriction is
   * designed to prevent spurious key frames. The Intra pop needs to be
   * investigated.
   */
  else if (((cpi->this_frame_percent_intra > 60) &&
            (cpi->this_frame_percent_intra >
             (cpi->last_frame_percent_intra * 2))) ||
           ((cpi->this_frame_percent_intra > 75) &&
            (cpi->this_frame_percent_intra >
             (cpi->last_frame_percent_intra * 3 / 2))) ||
           ((cpi->this_frame_percent_intra > 90) &&
            (cpi->this_frame_percent_intra >
             (cpi->last_frame_percent_intra + 10)))) {
    if (!cm->refresh_golden_frame) code_key_frame = 1;
  }

  return code_key_frame;
}
  2283. static void Pass1Encode(VP8_COMP *cpi, size_t *size, unsigned char *dest,
  2284. unsigned int *frame_flags) {
  2285. (void)size;
  2286. (void)dest;
  2287. (void)frame_flags;
  2288. vp8_set_quantizer(cpi, 26);
  2289. vp8_first_pass(cpi);
  2290. }
  2291. #endif
#if 0
/* Debug-only helper (compiled out): dumps a frame's Y, U and V planes as
 * raw files cx\y%04d.raw, cx\u%04d.raw and cx\v%04d.raw, one row at a
 * time, for offline inspection. */
void write_cx_frame_to_file(YV12_BUFFER_CONFIG *frame, int this_frame)
{
    /* write the frame */
    FILE *yframe;
    int i;
    char filename[255];

    sprintf(filename, "cx\\y%04d.raw", this_frame);
    yframe = fopen(filename, "wb");

    for (i = 0; i < frame->y_height; ++i)
      fwrite(frame->y_buffer + i * frame->y_stride, frame->y_width, 1, yframe);

    fclose(yframe);
    sprintf(filename, "cx\\u%04d.raw", this_frame);
    yframe = fopen(filename, "wb");

    for (i = 0; i < frame->uv_height; ++i)
      fwrite(frame->u_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);

    fclose(yframe);
    sprintf(filename, "cx\\v%04d.raw", this_frame);
    yframe = fopen(filename, "wb");

    for (i = 0; i < frame->uv_height; ++i)
      fwrite(frame->v_buffer + i * frame->uv_stride, frame->uv_width, 1, yframe);

    fclose(yframe);
}
#endif
  2316. #if !CONFIG_REALTIME_ONLY
/* Function to test for conditions that indicate we should loop
 * back and recode a frame.
 *
 * Returns 1 when the projected size overshoots high_limit with room to
 * raise q, undershoots low_limit with room to lower q, or — in
 * constrained-quality mode — undershoots relative to the CQ targets.
 * May lower cpi->active_best_quality as a side effect in the severe
 * CQ-undershoot case.
 */
static int recode_loop_test(VP8_COMP *cpi, int high_limit, int low_limit, int q,
                            int maxq, int minq) {
  int force_recode = 0;
  VP8_COMMON *cm = &cpi->common;

  /* Is frame recode allowed at all
   * Yes if either recode mode 1 is selected or mode two is selected
   * and the frame is a key frame. golden frame or alt_ref_frame
   */
  if ((cpi->sf.recode_loop == 1) ||
      ((cpi->sf.recode_loop == 2) &&
       ((cm->frame_type == KEY_FRAME) || cm->refresh_golden_frame ||
        cm->refresh_alt_ref_frame))) {
    /* General over and under shoot tests */
    if (((cpi->projected_frame_size > high_limit) && (q < maxq)) ||
        ((cpi->projected_frame_size < low_limit) && (q > minq))) {
      force_recode = 1;
    }
    /* Special Constrained quality tests */
    else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) {
      /* Undershoot and below auto cq level */
      if ((q > cpi->cq_target_quality) &&
          (cpi->projected_frame_size < ((cpi->this_frame_target * 7) >> 3))) {
        force_recode = 1;
      }
      /* Severe undershoot and between auto and user cq level */
      else if ((q > cpi->oxcf.cq_level) &&
               (cpi->projected_frame_size < cpi->min_frame_bandwidth) &&
               (cpi->active_best_quality > cpi->oxcf.cq_level)) {
        force_recode = 1;
        cpi->active_best_quality = cpi->oxcf.cq_level;
      }
    }
  }

  return force_recode;
}
  2355. #endif // !CONFIG_REALTIME_ONLY
  2356. static void update_reference_frames(VP8_COMP *cpi) {
  2357. VP8_COMMON *cm = &cpi->common;
  2358. YV12_BUFFER_CONFIG *yv12_fb = cm->yv12_fb;
  2359. /* At this point the new frame has been encoded.
  2360. * If any buffer copy / swapping is signaled it should be done here.
  2361. */
  2362. if (cm->frame_type == KEY_FRAME) {
  2363. yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME | VP8_ALTR_FRAME;
  2364. yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
  2365. yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
  2366. cm->alt_fb_idx = cm->gld_fb_idx = cm->new_fb_idx;
  2367. cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame;
  2368. cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame;
  2369. } else {
  2370. if (cm->refresh_alt_ref_frame) {
  2371. assert(!cm->copy_buffer_to_arf);
  2372. cm->yv12_fb[cm->new_fb_idx].flags |= VP8_ALTR_FRAME;
  2373. cm->yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
  2374. cm->alt_fb_idx = cm->new_fb_idx;
  2375. cpi->current_ref_frames[ALTREF_FRAME] = cm->current_video_frame;
  2376. } else if (cm->copy_buffer_to_arf) {
  2377. assert(!(cm->copy_buffer_to_arf & ~0x3));
  2378. if (cm->copy_buffer_to_arf == 1) {
  2379. if (cm->alt_fb_idx != cm->lst_fb_idx) {
  2380. yv12_fb[cm->lst_fb_idx].flags |= VP8_ALTR_FRAME;
  2381. yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
  2382. cm->alt_fb_idx = cm->lst_fb_idx;
  2383. cpi->current_ref_frames[ALTREF_FRAME] =
  2384. cpi->current_ref_frames[LAST_FRAME];
  2385. }
  2386. } else {
  2387. if (cm->alt_fb_idx != cm->gld_fb_idx) {
  2388. yv12_fb[cm->gld_fb_idx].flags |= VP8_ALTR_FRAME;
  2389. yv12_fb[cm->alt_fb_idx].flags &= ~VP8_ALTR_FRAME;
  2390. cm->alt_fb_idx = cm->gld_fb_idx;
  2391. cpi->current_ref_frames[ALTREF_FRAME] =
  2392. cpi->current_ref_frames[GOLDEN_FRAME];
  2393. }
  2394. }
  2395. }
  2396. if (cm->refresh_golden_frame) {
  2397. assert(!cm->copy_buffer_to_gf);
  2398. cm->yv12_fb[cm->new_fb_idx].flags |= VP8_GOLD_FRAME;
  2399. cm->yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
  2400. cm->gld_fb_idx = cm->new_fb_idx;
  2401. cpi->current_ref_frames[GOLDEN_FRAME] = cm->current_video_frame;
  2402. } else if (cm->copy_buffer_to_gf) {
  2403. assert(!(cm->copy_buffer_to_arf & ~0x3));
  2404. if (cm->copy_buffer_to_gf == 1) {
  2405. if (cm->gld_fb_idx != cm->lst_fb_idx) {
  2406. yv12_fb[cm->lst_fb_idx].flags |= VP8_GOLD_FRAME;
  2407. yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
  2408. cm->gld_fb_idx = cm->lst_fb_idx;
  2409. cpi->current_ref_frames[GOLDEN_FRAME] =
  2410. cpi->current_ref_frames[LAST_FRAME];
  2411. }
  2412. } else {
  2413. if (cm->alt_fb_idx != cm->gld_fb_idx) {
  2414. yv12_fb[cm->alt_fb_idx].flags |= VP8_GOLD_FRAME;
  2415. yv12_fb[cm->gld_fb_idx].flags &= ~VP8_GOLD_FRAME;
  2416. cm->gld_fb_idx = cm->alt_fb_idx;
  2417. cpi->current_ref_frames[GOLDEN_FRAME] =
  2418. cpi->current_ref_frames[ALTREF_FRAME];
  2419. }
  2420. }
  2421. }
  2422. }
  2423. if (cm->refresh_last_frame) {
  2424. cm->yv12_fb[cm->new_fb_idx].flags |= VP8_LAST_FRAME;
  2425. cm->yv12_fb[cm->lst_fb_idx].flags &= ~VP8_LAST_FRAME;
  2426. cm->lst_fb_idx = cm->new_fb_idx;
  2427. cpi->current_ref_frames[LAST_FRAME] = cm->current_video_frame;
  2428. }
  2429. #if CONFIG_TEMPORAL_DENOISING
  2430. if (cpi->oxcf.noise_sensitivity) {
  2431. /* we shouldn't have to keep multiple copies as we know in advance which
  2432. * buffer we should start - for now to get something up and running
  2433. * I've chosen to copy the buffers
  2434. */
  2435. if (cm->frame_type == KEY_FRAME) {
  2436. int i;
  2437. for (i = LAST_FRAME; i < MAX_REF_FRAMES; ++i)
  2438. vp8_yv12_copy_frame(cpi->Source, &cpi->denoiser.yv12_running_avg[i]);
  2439. } else {
  2440. vp8_yv12_extend_frame_borders(
  2441. &cpi->denoiser.yv12_running_avg[INTRA_FRAME]);
  2442. if (cm->refresh_alt_ref_frame || cm->copy_buffer_to_arf) {
  2443. vp8_yv12_copy_frame(&cpi->denoiser.yv12_running_avg[INTRA_FRAME],
  2444. &cpi->denoiser.yv12_running_avg[ALTREF_FRAME]);
  2445. }
  2446. if (cm->refresh_golden_frame || cm->copy_buffer_to_gf) {
  2447. vp8_yv12_copy_frame(&cpi->denoiser.yv12_running_avg[INTRA_FRAME],
  2448. &cpi->denoiser.yv12_running_avg[GOLDEN_FRAME]);
  2449. }
  2450. if (cm->refresh_last_frame) {
  2451. vp8_yv12_copy_frame(&cpi->denoiser.yv12_running_avg[INTRA_FRAME],
  2452. &cpi->denoiser.yv12_running_avg[LAST_FRAME]);
  2453. }
  2454. }
  2455. if (cpi->oxcf.noise_sensitivity == 4)
  2456. vp8_yv12_copy_frame(cpi->Source, &cpi->denoiser.yv12_last_source);
  2457. }
  2458. #endif
  2459. }
  2460. static int measure_square_diff_partial(YV12_BUFFER_CONFIG *source,
  2461. YV12_BUFFER_CONFIG *dest,
  2462. VP8_COMP *cpi) {
  2463. int i, j;
  2464. int Total = 0;
  2465. int num_blocks = 0;
  2466. int skip = 2;
  2467. int min_consec_zero_last = 10;
  2468. int tot_num_blocks = (source->y_height * source->y_width) >> 8;
  2469. unsigned char *src = source->y_buffer;
  2470. unsigned char *dst = dest->y_buffer;
  2471. /* Loop through the Y plane, every |skip| blocks along rows and colmumns,
  2472. * summing the square differences, and only for blocks that have been
  2473. * zero_last mode at least |x| frames in a row.
  2474. */
  2475. for (i = 0; i < source->y_height; i += 16 * skip) {
  2476. int block_index_row = (i >> 4) * cpi->common.mb_cols;
  2477. for (j = 0; j < source->y_width; j += 16 * skip) {
  2478. int index = block_index_row + (j >> 4);
  2479. if (cpi->consec_zero_last[index] >= min_consec_zero_last) {
  2480. unsigned int sse;
  2481. Total += vpx_mse16x16(src + j, source->y_stride, dst + j,
  2482. dest->y_stride, &sse);
  2483. num_blocks++;
  2484. }
  2485. }
  2486. src += 16 * skip * source->y_stride;
  2487. dst += 16 * skip * dest->y_stride;
  2488. }
  2489. // Only return non-zero if we have at least ~1/16 samples for estimate.
  2490. if (num_blocks > (tot_num_blocks >> 4)) {
  2491. assert(num_blocks != 0);
  2492. return num_blocks ? (Total / num_blocks) : 0;
  2493. } else {
  2494. return 0;
  2495. }
  2496. }
#if CONFIG_TEMPORAL_DENOISING
/* Adaptively switch the temporal denoiser between normal (kDenoiserOnYUV)
 * and aggressive (kDenoiserOnYUVAggressive) mode, based on a recursive
 * estimate of the normalized source noise (nmse), the running average QP,
 * and the target bitrate. Called periodically (every 8 frames); a mode
 * switch is only considered after num_mode_change samples accumulate.
 */
static void process_denoiser_mode_change(VP8_COMP *cpi) {
  const VP8_COMMON *const cm = &cpi->common;
  int i, j;
  int total = 0;       // Accumulated normalized mse over sampled blocks.
  int num_blocks = 0;  // Number of blocks that passed the sampling filters.
  // Number of blocks skipped along row/column in computing the
  // nmse (normalized mean square error) of source.
  int skip = 2;
  // Only select blocks for computing nmse that have been encoded
  // as ZERO LAST min_consec_zero_last frames in a row.
  // Scale with number of temporal layers.
  int min_consec_zero_last = 12 / cpi->oxcf.number_of_layers;
  // Decision is tested for changing the denoising mode every
  // num_mode_change times this function is called. Note that this
  // function called every 8 frames, so (8 * num_mode_change) is number
  // of frames where denoising mode change is tested for switch.
  int num_mode_change = 20;
  // Framerate factor, to compensate for larger mse at lower framerates.
  // Use ref_framerate, which is full source framerate for temporal layers.
  // TODO(marpan): Adjust this factor.
  int fac_framerate = cpi->ref_framerate < 25.0f ? 80 : 100;
  int tot_num_blocks = cm->mb_rows * cm->mb_cols;
  int ystride = cpi->Source->y_stride;
  unsigned char *src = cpi->Source->y_buffer;
  unsigned char *dst = cpi->denoiser.yv12_last_source.y_buffer;
  // Flat mid-gray 16x16 block; used with stride 0 below so the same 16
  // bytes are re-read for every row, measuring the block's own contrast.
  static const unsigned char const_source[16] = { 128, 128, 128, 128, 128, 128,
                                                  128, 128, 128, 128, 128, 128,
                                                  128, 128, 128, 128 };
  int bandwidth = (int)(cpi->target_bandwidth);
  // For temporal layers, use full bandwidth (top layer).
  if (cpi->oxcf.number_of_layers > 1) {
    LAYER_CONTEXT *lc = &cpi->layer_context[cpi->oxcf.number_of_layers - 1];
    bandwidth = (int)(lc->target_bandwidth);
  }
  // Loop through the Y plane, every skip blocks along rows and columns,
  // summing the normalized mean square error, only for blocks that have
  // been encoded as ZEROMV LAST at least min_consec_zero_last least frames in
  // a row and have small sum difference between current and previous frame.
  // Normalization here is by the contrast of the current frame block.
  for (i = 0; i < cm->Height; i += 16 * skip) {
    int block_index_row = (i >> 4) * cm->mb_cols;
    for (j = 0; j < cm->Width; j += 16 * skip) {
      int index = block_index_row + (j >> 4);
      if (cpi->consec_zero_last[index] >= min_consec_zero_last) {
        unsigned int sse;
        const unsigned int var =
            vpx_variance16x16(src + j, ystride, dst + j, ystride, &sse);
        // Only consider this block as valid for noise measurement
        // if the sum_diff average of the current and previous frame
        // is small (to avoid effects from lighting change).
        // (sse - var) == (sum_diff^2) >> 8, so this bounds the mean
        // luma shift between the two frames.
        if ((sse - var) < 128) {
          unsigned int sse2;
          // Variance against the flat gray block = contrast of this block.
          const unsigned int act =
              vpx_variance16x16(src + j, ystride, const_source, 0, &sse2);
          if (act > 0) total += sse / act;
          num_blocks++;
        }
      }
    }
    src += 16 * skip * ystride;
    dst += 16 * skip * ystride;
  }
  // Compensate for lower framerates (larger frame-to-frame mse).
  total = total * fac_framerate / 100;
  // Only consider this frame as valid sample if we have computed nmse over
  // at least ~1/16 blocks, and Total > 0 (Total == 0 can happen if the
  // application inputs duplicate frames, or contrast is all zero).
  if (total > 0 && (num_blocks > (tot_num_blocks >> 4))) {
    // Update the recursive mean square source_diff.
    total = (total << 8) / num_blocks;
    if (cpi->denoiser.nmse_source_diff_count == 0) {
      // First sample in new interval.
      cpi->denoiser.nmse_source_diff = total;
      cpi->denoiser.qp_avg = cm->base_qindex;
    } else {
      // For subsequent samples, use average with weight ~1/4 for new sample.
      cpi->denoiser.nmse_source_diff =
          (int)((total + 3 * cpi->denoiser.nmse_source_diff) >> 2);
      cpi->denoiser.qp_avg =
          (int)((cm->base_qindex + 3 * cpi->denoiser.qp_avg) >> 2);
    }
    cpi->denoiser.nmse_source_diff_count++;
  }
  // Check for changing the denoiser mode, when we have obtained #samples =
  // num_mode_change. Condition the change also on the bitrate and QP.
  if (cpi->denoiser.nmse_source_diff_count == num_mode_change) {
    // Check for going up: from normal to aggressive mode.
    // Requires high measured noise AND low-enough QP AND high-enough bitrate.
    if ((cpi->denoiser.denoiser_mode == kDenoiserOnYUV) &&
        (cpi->denoiser.nmse_source_diff >
         cpi->denoiser.threshold_aggressive_mode) &&
        (cpi->denoiser.qp_avg < cpi->denoiser.qp_threshold_up &&
         bandwidth > cpi->denoiser.bitrate_threshold)) {
      vp8_denoiser_set_parameters(&cpi->denoiser, kDenoiserOnYUVAggressive);
    } else {
      // Check for going down: from aggressive to normal mode.
      // Either the noise estimate dropped, or QP/bitrate no longer justify
      // the aggressive setting.
      if (((cpi->denoiser.denoiser_mode == kDenoiserOnYUVAggressive) &&
           (cpi->denoiser.nmse_source_diff <
            cpi->denoiser.threshold_aggressive_mode)) ||
          ((cpi->denoiser.denoiser_mode == kDenoiserOnYUVAggressive) &&
           (cpi->denoiser.qp_avg > cpi->denoiser.qp_threshold_down ||
            bandwidth < cpi->denoiser.bitrate_threshold))) {
        vp8_denoiser_set_parameters(&cpi->denoiser, kDenoiserOnYUV);
      }
    }
    // Reset metric and counter for next interval.
    cpi->denoiser.nmse_source_diff = 0;
    cpi->denoiser.qp_avg = 0;
    cpi->denoiser.nmse_source_diff_count = 0;
  }
}
#endif
/* Pick the loop filter level for the current frame and, when the frame
 * updates at least one reference buffer, apply the loop filter to it.
 * Also extends the frame borders of the frame to be shown.
 * In multithreaded mode this signals h_event_end_lpf once filter_level is
 * chosen, so the level must be final before that point.
 */
void vp8_loopfilter_frame(VP8_COMP *cpi, VP8_COMMON *cm) {
  const FRAME_TYPE frame_type = cm->frame_type;
  /* 1 if this frame refreshes any of last/golden/altref buffers. */
  int update_any_ref_buffers = 1;
  if (cpi->common.refresh_last_frame == 0 &&
      cpi->common.refresh_golden_frame == 0 &&
      cpi->common.refresh_alt_ref_frame == 0) {
    update_any_ref_buffers = 0;
  }
  if (cm->no_lpf) {
    cm->filter_level = 0;
  } else {
    struct vpx_usec_timer timer;
    vpx_clear_system_state();
    vpx_usec_timer_start(&timer);
    if (cpi->sf.auto_filter == 0) {
#if CONFIG_TEMPORAL_DENOISING
      if (cpi->oxcf.noise_sensitivity && cm->frame_type != KEY_FRAME) {
        // Use the denoised buffer for selecting base loop filter level.
        // Denoised signal for current frame is stored in INTRA_FRAME.
        // No denoising on key frames.
        vp8cx_pick_filter_level_fast(
            &cpi->denoiser.yv12_running_avg[INTRA_FRAME], cpi);
      } else {
        vp8cx_pick_filter_level_fast(cpi->Source, cpi);
      }
#else
      vp8cx_pick_filter_level_fast(cpi->Source, cpi);
#endif
    } else {
#if CONFIG_TEMPORAL_DENOISING
      if (cpi->oxcf.noise_sensitivity && cm->frame_type != KEY_FRAME) {
        // Use the denoised buffer for selecting base loop filter level.
        // Denoised signal for current frame is stored in INTRA_FRAME.
        // No denoising on key frames.
        vp8cx_pick_filter_level(&cpi->denoiser.yv12_running_avg[INTRA_FRAME],
                                cpi);
      } else {
        vp8cx_pick_filter_level(cpi->Source, cpi);
      }
#else
      vp8cx_pick_filter_level(cpi->Source, cpi);
#endif
    }
    if (cm->filter_level > 0) {
      vp8cx_set_alt_lf_level(cpi, cm->filter_level);
    }
    vpx_usec_timer_mark(&timer);
    /* Accumulate time spent choosing the filter level (stats). */
    cpi->time_pick_lpf += vpx_usec_timer_elapsed(&timer);
  }
#if CONFIG_MULTITHREAD
  if (vpx_atomic_load_acquire(&cpi->b_multi_threaded)) {
    sem_post(&cpi->h_event_end_lpf); /* signal that we have set filter_level */
  }
#endif
  // No need to apply loop-filter if the encoded frame does not update
  // any reference buffers.
  if (cm->filter_level > 0 && update_any_ref_buffers) {
    vp8_loop_filter_frame(cm, &cpi->mb.e_mbd, frame_type);
  }
  vp8_yv12_extend_frame_borders(cm->frame_to_show);
}
  2669. static void encode_frame_to_data_rate(VP8_COMP *cpi, size_t *size,
  2670. unsigned char *dest,
  2671. unsigned char *dest_end,
  2672. unsigned int *frame_flags) {
  2673. int Q;
  2674. int frame_over_shoot_limit;
  2675. int frame_under_shoot_limit;
  2676. int Loop = 0;
  2677. int loop_count;
  2678. VP8_COMMON *cm = &cpi->common;
  2679. int active_worst_qchanged = 0;
  2680. #if !CONFIG_REALTIME_ONLY
  2681. int q_low;
  2682. int q_high;
  2683. int zbin_oq_high;
  2684. int zbin_oq_low = 0;
  2685. int top_index;
  2686. int bottom_index;
  2687. int overshoot_seen = 0;
  2688. int undershoot_seen = 0;
  2689. #endif
  2690. int drop_mark = (int)(cpi->oxcf.drop_frames_water_mark *
  2691. cpi->oxcf.optimal_buffer_level / 100);
  2692. int drop_mark75 = drop_mark * 2 / 3;
  2693. int drop_mark50 = drop_mark / 4;
  2694. int drop_mark25 = drop_mark / 8;
  2695. /* Clear down mmx registers to allow floating point in what follows */
  2696. vpx_clear_system_state();
  2697. if (cpi->force_next_frame_intra) {
  2698. cm->frame_type = KEY_FRAME; /* delayed intra frame */
  2699. cpi->force_next_frame_intra = 0;
  2700. }
  2701. /* For an alt ref frame in 2 pass we skip the call to the second pass
  2702. * function that sets the target bandwidth
  2703. */
  2704. switch (cpi->pass) {
  2705. #if !CONFIG_REALTIME_ONLY
  2706. case 2:
  2707. if (cpi->common.refresh_alt_ref_frame) {
  2708. /* Per frame bit target for the alt ref frame */
  2709. cpi->per_frame_bandwidth = cpi->twopass.gf_bits;
  2710. /* per second target bitrate */
  2711. cpi->target_bandwidth =
  2712. (int)(cpi->twopass.gf_bits * cpi->output_framerate);
  2713. }
  2714. break;
  2715. #endif // !CONFIG_REALTIME_ONLY
  2716. default:
  2717. cpi->per_frame_bandwidth =
  2718. (int)(cpi->target_bandwidth / cpi->output_framerate);
  2719. break;
  2720. }
  2721. /* Default turn off buffer to buffer copying */
  2722. cm->copy_buffer_to_gf = 0;
  2723. cm->copy_buffer_to_arf = 0;
  2724. /* Clear zbin over-quant value and mode boost values. */
  2725. cpi->mb.zbin_over_quant = 0;
  2726. cpi->mb.zbin_mode_boost = 0;
  2727. /* Enable or disable mode based tweaking of the zbin
  2728. * For 2 Pass Only used where GF/ARF prediction quality
  2729. * is above a threshold
  2730. */
  2731. cpi->mb.zbin_mode_boost_enabled = 1;
  2732. if (cpi->pass == 2) {
  2733. if (cpi->gfu_boost <= 400) {
  2734. cpi->mb.zbin_mode_boost_enabled = 0;
  2735. }
  2736. }
  2737. /* Current default encoder behaviour for the altref sign bias */
  2738. if (cpi->source_alt_ref_active) {
  2739. cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 1;
  2740. } else {
  2741. cpi->common.ref_frame_sign_bias[ALTREF_FRAME] = 0;
  2742. }
  2743. /* Check to see if a key frame is signaled
  2744. * For two pass with auto key frame enabled cm->frame_type may already
  2745. * be set, but not for one pass.
  2746. */
  2747. if ((cm->current_video_frame == 0) || (cm->frame_flags & FRAMEFLAGS_KEY) ||
  2748. (cpi->oxcf.auto_key &&
  2749. (cpi->frames_since_key % cpi->key_frame_frequency == 0))) {
  2750. /* Key frame from VFW/auto-keyframe/first frame */
  2751. cm->frame_type = KEY_FRAME;
  2752. #if CONFIG_TEMPORAL_DENOISING
  2753. if (cpi->oxcf.noise_sensitivity == 4) {
  2754. // For adaptive mode, reset denoiser to normal mode on key frame.
  2755. vp8_denoiser_set_parameters(&cpi->denoiser, kDenoiserOnYUV);
  2756. }
  2757. #endif
  2758. }
  2759. #if CONFIG_MULTI_RES_ENCODING
  2760. if (cpi->oxcf.mr_total_resolutions > 1) {
  2761. LOWER_RES_FRAME_INFO *low_res_frame_info =
  2762. (LOWER_RES_FRAME_INFO *)cpi->oxcf.mr_low_res_mode_info;
  2763. if (cpi->oxcf.mr_encoder_id) {
  2764. // Check if lower resolution is available for motion vector reuse.
  2765. if (cm->frame_type != KEY_FRAME) {
  2766. cpi->mr_low_res_mv_avail = 1;
  2767. cpi->mr_low_res_mv_avail &= !(low_res_frame_info->is_frame_dropped);
  2768. if (cpi->ref_frame_flags & VP8_LAST_FRAME)
  2769. cpi->mr_low_res_mv_avail &=
  2770. (cpi->current_ref_frames[LAST_FRAME] ==
  2771. low_res_frame_info->low_res_ref_frames[LAST_FRAME]);
  2772. if (cpi->ref_frame_flags & VP8_GOLD_FRAME)
  2773. cpi->mr_low_res_mv_avail &=
  2774. (cpi->current_ref_frames[GOLDEN_FRAME] ==
  2775. low_res_frame_info->low_res_ref_frames[GOLDEN_FRAME]);
  2776. // Don't use altref to determine whether low res is available.
  2777. // TODO (marpan): Should we make this type of condition on a
  2778. // per-reference frame basis?
  2779. /*
  2780. if (cpi->ref_frame_flags & VP8_ALTR_FRAME)
  2781. cpi->mr_low_res_mv_avail &= (cpi->current_ref_frames[ALTREF_FRAME]
  2782. == low_res_frame_info->low_res_ref_frames[ALTREF_FRAME]);
  2783. */
  2784. }
  2785. // Disable motion vector reuse (i.e., disable any usage of the low_res)
  2786. // if the previous lower stream is skipped/disabled.
  2787. if (low_res_frame_info->skip_encoding_prev_stream) {
  2788. cpi->mr_low_res_mv_avail = 0;
  2789. }
  2790. }
  2791. // This stream is not skipped (i.e., it's being encoded), so set this skip
  2792. // flag to 0. This is needed for the next stream (i.e., which is the next
  2793. // frame to be encoded).
  2794. low_res_frame_info->skip_encoding_prev_stream = 0;
  2795. // On a key frame: For the lowest resolution, keep track of the key frame
  2796. // counter value. For the higher resolutions, reset the current video
  2797. // frame counter to that of the lowest resolution.
  2798. // This is done to the handle the case where we may stop/start encoding
  2799. // higher layer(s). The restart-encoding of higher layer is only signaled
  2800. // by a key frame for now.
  2801. // TODO (marpan): Add flag to indicate restart-encoding of higher layer.
  2802. if (cm->frame_type == KEY_FRAME) {
  2803. if (cpi->oxcf.mr_encoder_id) {
  2804. // If the initial starting value of the buffer level is zero (this can
  2805. // happen because we may have not started encoding this higher stream),
  2806. // then reset it to non-zero value based on |starting_buffer_level|.
  2807. if (cpi->common.current_video_frame == 0 && cpi->buffer_level == 0) {
  2808. unsigned int i;
  2809. cpi->bits_off_target = cpi->oxcf.starting_buffer_level;
  2810. cpi->buffer_level = cpi->oxcf.starting_buffer_level;
  2811. for (i = 0; i < cpi->oxcf.number_of_layers; ++i) {
  2812. LAYER_CONTEXT *lc = &cpi->layer_context[i];
  2813. lc->bits_off_target = lc->starting_buffer_level;
  2814. lc->buffer_level = lc->starting_buffer_level;
  2815. }
  2816. }
  2817. cpi->common.current_video_frame =
  2818. low_res_frame_info->key_frame_counter_value;
  2819. } else {
  2820. low_res_frame_info->key_frame_counter_value =
  2821. cpi->common.current_video_frame;
  2822. }
  2823. }
  2824. }
  2825. #endif
  2826. // Find the reference frame closest to the current frame.
  2827. cpi->closest_reference_frame = LAST_FRAME;
  2828. if (cm->frame_type != KEY_FRAME) {
  2829. int i;
  2830. MV_REFERENCE_FRAME closest_ref = INTRA_FRAME;
  2831. if (cpi->ref_frame_flags & VP8_LAST_FRAME) {
  2832. closest_ref = LAST_FRAME;
  2833. } else if (cpi->ref_frame_flags & VP8_GOLD_FRAME) {
  2834. closest_ref = GOLDEN_FRAME;
  2835. } else if (cpi->ref_frame_flags & VP8_ALTR_FRAME) {
  2836. closest_ref = ALTREF_FRAME;
  2837. }
  2838. for (i = 1; i <= 3; ++i) {
  2839. vpx_ref_frame_type_t ref_frame_type =
  2840. (vpx_ref_frame_type_t)((i == 3) ? 4 : i);
  2841. if (cpi->ref_frame_flags & ref_frame_type) {
  2842. if ((cm->current_video_frame - cpi->current_ref_frames[i]) <
  2843. (cm->current_video_frame - cpi->current_ref_frames[closest_ref])) {
  2844. closest_ref = i;
  2845. }
  2846. }
  2847. }
  2848. cpi->closest_reference_frame = closest_ref;
  2849. }
  2850. /* Set various flags etc to special state if it is a key frame */
  2851. if (cm->frame_type == KEY_FRAME) {
  2852. int i;
  2853. // Set the loop filter deltas and segmentation map update
  2854. setup_features(cpi);
  2855. /* The alternate reference frame cannot be active for a key frame */
  2856. cpi->source_alt_ref_active = 0;
  2857. /* Reset the RD threshold multipliers to default of * 1 (128) */
  2858. for (i = 0; i < MAX_MODES; ++i) {
  2859. cpi->mb.rd_thresh_mult[i] = 128;
  2860. }
  2861. // Reset the zero_last counter to 0 on key frame.
  2862. memset(cpi->consec_zero_last, 0, cm->mb_rows * cm->mb_cols);
  2863. memset(cpi->consec_zero_last_mvbias, 0,
  2864. (cpi->common.mb_rows * cpi->common.mb_cols));
  2865. }
  2866. #if 0
  2867. /* Experimental code for lagged compress and one pass
  2868. * Initialise one_pass GF frames stats
  2869. * Update stats used for GF selection
  2870. */
  2871. {
  2872. cpi->one_pass_frame_index = cm->current_video_frame % MAX_LAG_BUFFERS;
  2873. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frames_so_far = 0;
  2874. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_intra_error = 0.0;
  2875. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_coded_error = 0.0;
  2876. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_inter = 0.0;
  2877. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_pcnt_motion = 0.0;
  2878. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr = 0.0;
  2879. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvr_abs = 0.0;
  2880. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc = 0.0;
  2881. cpi->one_pass_frame_stats[cpi->one_pass_frame_index ].frame_mvc_abs = 0.0;
  2882. }
  2883. #endif
  2884. update_rd_ref_frame_probs(cpi);
  2885. if (cpi->drop_frames_allowed) {
  2886. /* The reset to decimation 0 is only done here for one pass.
  2887. * Once it is set two pass leaves decimation on till the next kf.
  2888. */
  2889. if ((cpi->buffer_level > drop_mark) && (cpi->decimation_factor > 0)) {
  2890. cpi->decimation_factor--;
  2891. }
  2892. if (cpi->buffer_level > drop_mark75 && cpi->decimation_factor > 0) {
  2893. cpi->decimation_factor = 1;
  2894. } else if (cpi->buffer_level < drop_mark25 &&
  2895. (cpi->decimation_factor == 2 || cpi->decimation_factor == 3)) {
  2896. cpi->decimation_factor = 3;
  2897. } else if (cpi->buffer_level < drop_mark50 &&
  2898. (cpi->decimation_factor == 1 || cpi->decimation_factor == 2)) {
  2899. cpi->decimation_factor = 2;
  2900. } else if (cpi->buffer_level < drop_mark75 &&
  2901. (cpi->decimation_factor == 0 || cpi->decimation_factor == 1)) {
  2902. cpi->decimation_factor = 1;
  2903. }
  2904. }
  2905. /* The following decimates the frame rate according to a regular
  2906. * pattern (i.e. to 1/2 or 2/3 frame rate) This can be used to help
  2907. * prevent buffer under-run in CBR mode. Alternatively it might be
  2908. * desirable in some situations to drop frame rate but throw more bits
  2909. * at each frame.
  2910. *
  2911. * Note that dropping a key frame can be problematic if spatial
  2912. * resampling is also active
  2913. */
  2914. if (cpi->decimation_factor > 0) {
  2915. switch (cpi->decimation_factor) {
  2916. case 1:
  2917. cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 3 / 2;
  2918. break;
  2919. case 2:
  2920. cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
  2921. break;
  2922. case 3:
  2923. cpi->per_frame_bandwidth = cpi->per_frame_bandwidth * 5 / 4;
  2924. break;
  2925. }
  2926. /* Note that we should not throw out a key frame (especially when
  2927. * spatial resampling is enabled).
  2928. */
  2929. if (cm->frame_type == KEY_FRAME) {
  2930. cpi->decimation_count = cpi->decimation_factor;
  2931. } else if (cpi->decimation_count > 0) {
  2932. cpi->decimation_count--;
  2933. cpi->bits_off_target += cpi->av_per_frame_bandwidth;
  2934. if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size) {
  2935. cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
  2936. }
  2937. #if CONFIG_MULTI_RES_ENCODING
  2938. vp8_store_drop_frame_info(cpi);
  2939. #endif
  2940. cm->current_video_frame++;
  2941. cpi->frames_since_key++;
  2942. cpi->ext_refresh_frame_flags_pending = 0;
  2943. // We advance the temporal pattern for dropped frames.
  2944. cpi->temporal_pattern_counter++;
  2945. #if CONFIG_INTERNAL_STATS
  2946. cpi->count++;
  2947. #endif
  2948. cpi->buffer_level = cpi->bits_off_target;
  2949. if (cpi->oxcf.number_of_layers > 1) {
  2950. unsigned int i;
  2951. /* Propagate bits saved by dropping the frame to higher
  2952. * layers
  2953. */
  2954. for (i = cpi->current_layer + 1; i < cpi->oxcf.number_of_layers; ++i) {
  2955. LAYER_CONTEXT *lc = &cpi->layer_context[i];
  2956. lc->bits_off_target += (int)(lc->target_bandwidth / lc->framerate);
  2957. if (lc->bits_off_target > lc->maximum_buffer_size) {
  2958. lc->bits_off_target = lc->maximum_buffer_size;
  2959. }
  2960. lc->buffer_level = lc->bits_off_target;
  2961. }
  2962. }
  2963. return;
  2964. } else {
  2965. cpi->decimation_count = cpi->decimation_factor;
  2966. }
  2967. } else {
  2968. cpi->decimation_count = 0;
  2969. }
  2970. /* Decide how big to make the frame */
  2971. if (!vp8_pick_frame_size(cpi)) {
  2972. /*TODO: 2 drop_frame and return code could be put together. */
  2973. #if CONFIG_MULTI_RES_ENCODING
  2974. vp8_store_drop_frame_info(cpi);
  2975. #endif
  2976. cm->current_video_frame++;
  2977. cpi->frames_since_key++;
  2978. cpi->ext_refresh_frame_flags_pending = 0;
  2979. // We advance the temporal pattern for dropped frames.
  2980. cpi->temporal_pattern_counter++;
  2981. return;
  2982. }
  2983. /* Reduce active_worst_allowed_q for CBR if our buffer is getting too full.
  2984. * This has a knock on effect on active best quality as well.
  2985. * For CBR if the buffer reaches its maximum level then we can no longer
  2986. * save up bits for later frames so we might as well use them up
  2987. * on the current frame.
  2988. */
  2989. if ((cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER) &&
  2990. (cpi->buffer_level >= cpi->oxcf.optimal_buffer_level) &&
  2991. cpi->buffered_mode) {
  2992. /* Max adjustment is 1/4 */
  2993. int Adjustment = cpi->active_worst_quality / 4;
  2994. if (Adjustment) {
  2995. int buff_lvl_step;
  2996. if (cpi->buffer_level < cpi->oxcf.maximum_buffer_size) {
  2997. buff_lvl_step = (int)((cpi->oxcf.maximum_buffer_size -
  2998. cpi->oxcf.optimal_buffer_level) /
  2999. Adjustment);
  3000. if (buff_lvl_step) {
  3001. Adjustment =
  3002. (int)((cpi->buffer_level - cpi->oxcf.optimal_buffer_level) /
  3003. buff_lvl_step);
  3004. } else {
  3005. Adjustment = 0;
  3006. }
  3007. }
  3008. cpi->active_worst_quality -= Adjustment;
  3009. if (cpi->active_worst_quality < cpi->active_best_quality) {
  3010. cpi->active_worst_quality = cpi->active_best_quality;
  3011. }
  3012. }
  3013. }
  3014. /* Set an active best quality and if necessary active worst quality
  3015. * There is some odd behavior for one pass here that needs attention.
  3016. */
  3017. if ((cpi->pass == 2) || (cpi->ni_frames > 150)) {
  3018. vpx_clear_system_state();
  3019. Q = cpi->active_worst_quality;
  3020. if (cm->frame_type == KEY_FRAME) {
  3021. if (cpi->pass == 2) {
  3022. if (cpi->gfu_boost > 600) {
  3023. cpi->active_best_quality = kf_low_motion_minq[Q];
  3024. } else {
  3025. cpi->active_best_quality = kf_high_motion_minq[Q];
  3026. }
  3027. /* Special case for key frames forced because we have reached
  3028. * the maximum key frame interval. Here force the Q to a range
  3029. * based on the ambient Q to reduce the risk of popping
  3030. */
  3031. if (cpi->this_key_frame_forced) {
  3032. if (cpi->active_best_quality > cpi->avg_frame_qindex * 7 / 8) {
  3033. cpi->active_best_quality = cpi->avg_frame_qindex * 7 / 8;
  3034. } else if (cpi->active_best_quality<cpi->avg_frame_qindex>> 2) {
  3035. cpi->active_best_quality = cpi->avg_frame_qindex >> 2;
  3036. }
  3037. }
  3038. }
  3039. /* One pass more conservative */
  3040. else {
  3041. cpi->active_best_quality = kf_high_motion_minq[Q];
  3042. }
  3043. }
  3044. else if (cpi->oxcf.number_of_layers == 1 &&
  3045. (cm->refresh_golden_frame || cpi->common.refresh_alt_ref_frame)) {
  3046. /* Use the lower of cpi->active_worst_quality and recent
  3047. * average Q as basis for GF/ARF Q limit unless last frame was
  3048. * a key frame.
  3049. */
  3050. if ((cpi->frames_since_key > 1) &&
  3051. (cpi->avg_frame_qindex < cpi->active_worst_quality)) {
  3052. Q = cpi->avg_frame_qindex;
  3053. }
  3054. /* For constrained quality dont allow Q less than the cq level */
  3055. if ((cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
  3056. (Q < cpi->cq_target_quality)) {
  3057. Q = cpi->cq_target_quality;
  3058. }
  3059. if (cpi->pass == 2) {
  3060. if (cpi->gfu_boost > 1000) {
  3061. cpi->active_best_quality = gf_low_motion_minq[Q];
  3062. } else if (cpi->gfu_boost < 400) {
  3063. cpi->active_best_quality = gf_high_motion_minq[Q];
  3064. } else {
  3065. cpi->active_best_quality = gf_mid_motion_minq[Q];
  3066. }
  3067. /* Constrained quality use slightly lower active best. */
  3068. if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) {
  3069. cpi->active_best_quality = cpi->active_best_quality * 15 / 16;
  3070. }
  3071. }
  3072. /* One pass more conservative */
  3073. else {
  3074. cpi->active_best_quality = gf_high_motion_minq[Q];
  3075. }
  3076. } else {
  3077. cpi->active_best_quality = inter_minq[Q];
  3078. /* For the constant/constrained quality mode we dont want
  3079. * q to fall below the cq level.
  3080. */
  3081. if ((cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
  3082. (cpi->active_best_quality < cpi->cq_target_quality)) {
  3083. /* If we are strongly undershooting the target rate in the last
  3084. * frames then use the user passed in cq value not the auto
  3085. * cq value.
  3086. */
  3087. if (cpi->rolling_actual_bits < cpi->min_frame_bandwidth) {
  3088. cpi->active_best_quality = cpi->oxcf.cq_level;
  3089. } else {
  3090. cpi->active_best_quality = cpi->cq_target_quality;
  3091. }
  3092. }
  3093. }
  3094. /* If CBR and the buffer is as full then it is reasonable to allow
  3095. * higher quality on the frames to prevent bits just going to waste.
  3096. */
  3097. if (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER) {
  3098. /* Note that the use of >= here elliminates the risk of a devide
  3099. * by 0 error in the else if clause
  3100. */
  3101. if (cpi->buffer_level >= cpi->oxcf.maximum_buffer_size) {
  3102. cpi->active_best_quality = cpi->best_quality;
  3103. } else if (cpi->buffer_level > cpi->oxcf.optimal_buffer_level) {
  3104. int Fraction =
  3105. (int)(((cpi->buffer_level - cpi->oxcf.optimal_buffer_level) * 128) /
  3106. (cpi->oxcf.maximum_buffer_size -
  3107. cpi->oxcf.optimal_buffer_level));
  3108. int min_qadjustment =
  3109. ((cpi->active_best_quality - cpi->best_quality) * Fraction) / 128;
  3110. cpi->active_best_quality -= min_qadjustment;
  3111. }
  3112. }
  3113. }
  3114. /* Make sure constrained quality mode limits are adhered to for the first
  3115. * few frames of one pass encodes
  3116. */
  3117. else if (cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) {
  3118. if ((cm->frame_type == KEY_FRAME) || cm->refresh_golden_frame ||
  3119. cpi->common.refresh_alt_ref_frame) {
  3120. cpi->active_best_quality = cpi->best_quality;
  3121. } else if (cpi->active_best_quality < cpi->cq_target_quality) {
  3122. cpi->active_best_quality = cpi->cq_target_quality;
  3123. }
  3124. }
  3125. /* Clip the active best and worst quality values to limits */
  3126. if (cpi->active_worst_quality > cpi->worst_quality) {
  3127. cpi->active_worst_quality = cpi->worst_quality;
  3128. }
  3129. if (cpi->active_best_quality < cpi->best_quality) {
  3130. cpi->active_best_quality = cpi->best_quality;
  3131. }
  3132. if (cpi->active_worst_quality < cpi->active_best_quality) {
  3133. cpi->active_worst_quality = cpi->active_best_quality;
  3134. }
  3135. /* Determine initial Q to try */
  3136. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3137. #if !CONFIG_REALTIME_ONLY
  3138. /* Set highest allowed value for Zbin over quant */
  3139. if (cm->frame_type == KEY_FRAME) {
  3140. zbin_oq_high = 0;
  3141. } else if ((cpi->oxcf.number_of_layers == 1) &&
  3142. ((cm->refresh_alt_ref_frame ||
  3143. (cm->refresh_golden_frame && !cpi->source_alt_ref_active)))) {
  3144. zbin_oq_high = 16;
  3145. } else {
  3146. zbin_oq_high = ZBIN_OQ_MAX;
  3147. }
  3148. #endif
  3149. compute_skin_map(cpi);
  3150. /* Setup background Q adjustment for error resilient mode.
  3151. * For multi-layer encodes only enable this for the base layer.
  3152. */
  3153. if (cpi->cyclic_refresh_mode_enabled) {
  3154. // Special case for screen_content_mode with golden frame updates.
  3155. int disable_cr_gf =
  3156. (cpi->oxcf.screen_content_mode == 2 && cm->refresh_golden_frame);
  3157. if (cpi->current_layer == 0 && cpi->force_maxqp == 0 && !disable_cr_gf) {
  3158. cyclic_background_refresh(cpi, Q, 0);
  3159. } else {
  3160. disable_segmentation(cpi);
  3161. }
  3162. }
  3163. vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit,
  3164. &frame_over_shoot_limit);
  3165. #if !CONFIG_REALTIME_ONLY
  3166. /* Limit Q range for the adaptive loop. */
  3167. bottom_index = cpi->active_best_quality;
  3168. top_index = cpi->active_worst_quality;
  3169. q_low = cpi->active_best_quality;
  3170. q_high = cpi->active_worst_quality;
  3171. #endif
  3172. vp8_save_coding_context(cpi);
  3173. loop_count = 0;
  3174. scale_and_extend_source(cpi->un_scaled_source, cpi);
  3175. #if CONFIG_TEMPORAL_DENOISING && CONFIG_POSTPROC
  3176. // Option to apply spatial blur under the aggressive or adaptive
  3177. // (temporal denoising) mode.
  3178. if (cpi->oxcf.noise_sensitivity >= 3) {
  3179. if (cpi->denoiser.denoise_pars.spatial_blur != 0) {
  3180. vp8_de_noise(cm, cpi->Source, cpi->Source,
  3181. cpi->denoiser.denoise_pars.spatial_blur, 1, 0, 0);
  3182. }
  3183. }
  3184. #endif
  3185. #if !(CONFIG_REALTIME_ONLY) && CONFIG_POSTPROC && !(CONFIG_TEMPORAL_DENOISING)
  3186. if (cpi->oxcf.noise_sensitivity > 0) {
  3187. unsigned char *src;
  3188. int l = 0;
  3189. switch (cpi->oxcf.noise_sensitivity) {
  3190. case 1: l = 20; break;
  3191. case 2: l = 40; break;
  3192. case 3: l = 60; break;
  3193. case 4: l = 80; break;
  3194. case 5: l = 100; break;
  3195. case 6: l = 150; break;
  3196. }
  3197. if (cm->frame_type == KEY_FRAME) {
  3198. vp8_de_noise(cm, cpi->Source, cpi->Source, l, 1, 0, 1);
  3199. } else {
  3200. vp8_de_noise(cm, cpi->Source, cpi->Source, l, 1, 0, 1);
  3201. src = cpi->Source->y_buffer;
  3202. if (cpi->Source->y_stride < 0) {
  3203. src += cpi->Source->y_stride * (cpi->Source->y_height - 1);
  3204. }
  3205. }
  3206. }
  3207. #endif
  3208. #ifdef OUTPUT_YUV_SRC
  3209. vpx_write_yuv_frame(yuv_file, cpi->Source);
  3210. #endif
  3211. do {
  3212. vpx_clear_system_state();
  3213. vp8_set_quantizer(cpi, Q);
  3214. /* setup skip prob for costing in mode/mv decision */
  3215. if (cpi->common.mb_no_coeff_skip) {
  3216. cpi->prob_skip_false = cpi->base_skip_false_prob[Q];
  3217. if (cm->frame_type != KEY_FRAME) {
  3218. if (cpi->common.refresh_alt_ref_frame) {
  3219. if (cpi->last_skip_false_probs[2] != 0) {
  3220. cpi->prob_skip_false = cpi->last_skip_false_probs[2];
  3221. }
  3222. /*
  3223. if(cpi->last_skip_false_probs[2]!=0 && abs(Q-
  3224. cpi->last_skip_probs_q[2])<=16 )
  3225. cpi->prob_skip_false = cpi->last_skip_false_probs[2];
  3226. else if (cpi->last_skip_false_probs[2]!=0)
  3227. cpi->prob_skip_false = (cpi->last_skip_false_probs[2] +
  3228. cpi->prob_skip_false ) / 2;
  3229. */
  3230. } else if (cpi->common.refresh_golden_frame) {
  3231. if (cpi->last_skip_false_probs[1] != 0) {
  3232. cpi->prob_skip_false = cpi->last_skip_false_probs[1];
  3233. }
  3234. /*
  3235. if(cpi->last_skip_false_probs[1]!=0 && abs(Q-
  3236. cpi->last_skip_probs_q[1])<=16 )
  3237. cpi->prob_skip_false = cpi->last_skip_false_probs[1];
  3238. else if (cpi->last_skip_false_probs[1]!=0)
  3239. cpi->prob_skip_false = (cpi->last_skip_false_probs[1] +
  3240. cpi->prob_skip_false ) / 2;
  3241. */
  3242. } else {
  3243. if (cpi->last_skip_false_probs[0] != 0) {
  3244. cpi->prob_skip_false = cpi->last_skip_false_probs[0];
  3245. }
  3246. /*
  3247. if(cpi->last_skip_false_probs[0]!=0 && abs(Q-
  3248. cpi->last_skip_probs_q[0])<=16 )
  3249. cpi->prob_skip_false = cpi->last_skip_false_probs[0];
  3250. else if(cpi->last_skip_false_probs[0]!=0)
  3251. cpi->prob_skip_false = (cpi->last_skip_false_probs[0] +
  3252. cpi->prob_skip_false ) / 2;
  3253. */
  3254. }
  3255. /* as this is for cost estimate, let's make sure it does not
* go extreme either way
  3257. */
  3258. if (cpi->prob_skip_false < 5) cpi->prob_skip_false = 5;
  3259. if (cpi->prob_skip_false > 250) cpi->prob_skip_false = 250;
  3260. if (cpi->oxcf.number_of_layers == 1 && cpi->is_src_frame_alt_ref) {
  3261. cpi->prob_skip_false = 1;
  3262. }
  3263. }
  3264. #if 0
  3265. if (cpi->pass != 1)
  3266. {
  3267. FILE *f = fopen("skip.stt", "a");
  3268. fprintf(f, "%d, %d, %4d ", cpi->common.refresh_golden_frame, cpi->common.refresh_alt_ref_frame, cpi->prob_skip_false);
  3269. fclose(f);
  3270. }
  3271. #endif
  3272. }
  3273. if (cm->frame_type == KEY_FRAME) {
  3274. if (resize_key_frame(cpi)) {
  3275. /* If the frame size has changed, need to reset Q, quantizer,
  3276. * and background refresh.
  3277. */
  3278. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3279. if (cpi->cyclic_refresh_mode_enabled) {
  3280. if (cpi->current_layer == 0) {
  3281. cyclic_background_refresh(cpi, Q, 0);
  3282. } else {
  3283. disable_segmentation(cpi);
  3284. }
  3285. }
  3286. // Reset the zero_last counter to 0 on key frame.
  3287. memset(cpi->consec_zero_last, 0, cm->mb_rows * cm->mb_cols);
  3288. memset(cpi->consec_zero_last_mvbias, 0,
  3289. (cpi->common.mb_rows * cpi->common.mb_cols));
  3290. vp8_set_quantizer(cpi, Q);
  3291. }
  3292. vp8_setup_key_frame(cpi);
  3293. }
  3294. #if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
  3295. {
  3296. if (cpi->oxcf.error_resilient_mode) cm->refresh_entropy_probs = 0;
  3297. if (cpi->oxcf.error_resilient_mode & VPX_ERROR_RESILIENT_PARTITIONS) {
  3298. if (cm->frame_type == KEY_FRAME) cm->refresh_entropy_probs = 1;
  3299. }
  3300. if (cm->refresh_entropy_probs == 0) {
  3301. /* save a copy for later refresh */
  3302. memcpy(&cm->lfc, &cm->fc, sizeof(cm->fc));
  3303. }
  3304. vp8_update_coef_context(cpi);
  3305. vp8_update_coef_probs(cpi);
  3306. /* transform / motion compensation build reconstruction frame
  3307. * +pack coef partitions
  3308. */
  3309. vp8_encode_frame(cpi);
  3310. /* cpi->projected_frame_size is not needed for RT mode */
  3311. }
  3312. #else
  3313. /* transform / motion compensation build reconstruction frame */
  3314. vp8_encode_frame(cpi);
  3315. if (cpi->pass == 0 && cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER) {
  3316. if (vp8_drop_encodedframe_overshoot(cpi, Q)) return;
  3317. if (cm->frame_type != KEY_FRAME)
  3318. cpi->last_pred_err_mb =
  3319. (int)(cpi->mb.prediction_error / cpi->common.MBs);
  3320. }
  3321. cpi->projected_frame_size -= vp8_estimate_entropy_savings(cpi);
  3322. cpi->projected_frame_size =
  3323. (cpi->projected_frame_size > 0) ? cpi->projected_frame_size : 0;
  3324. #endif
  3325. vpx_clear_system_state();
  3326. /* Test to see if the stats generated for this frame indicate that
  3327. * we should have coded a key frame (assuming that we didn't)!
  3328. */
  3329. if (cpi->pass != 2 && cpi->oxcf.auto_key && cm->frame_type != KEY_FRAME &&
  3330. cpi->compressor_speed != 2) {
  3331. #if !CONFIG_REALTIME_ONLY
  3332. if (decide_key_frame(cpi)) {
  3333. /* Reset all our sizing numbers and recode */
  3334. cm->frame_type = KEY_FRAME;
  3335. vp8_pick_frame_size(cpi);
  3336. /* Clear the Alt reference frame active flag when we have
  3337. * a key frame
  3338. */
  3339. cpi->source_alt_ref_active = 0;
  3340. // Set the loop filter deltas and segmentation map update
  3341. setup_features(cpi);
  3342. vp8_restore_coding_context(cpi);
  3343. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3344. vp8_compute_frame_size_bounds(cpi, &frame_under_shoot_limit,
  3345. &frame_over_shoot_limit);
  3346. /* Limit Q range for the adaptive loop. */
  3347. bottom_index = cpi->active_best_quality;
  3348. top_index = cpi->active_worst_quality;
  3349. q_low = cpi->active_best_quality;
  3350. q_high = cpi->active_worst_quality;
  3351. loop_count++;
  3352. Loop = 1;
  3353. continue;
  3354. }
  3355. #endif
  3356. }
  3357. vpx_clear_system_state();
  3358. if (frame_over_shoot_limit == 0) frame_over_shoot_limit = 1;
  3359. /* Are we are overshooting and up against the limit of active max Q. */
  3360. if (((cpi->pass != 2) ||
  3361. (cpi->oxcf.end_usage == USAGE_STREAM_FROM_SERVER)) &&
  3362. (Q == cpi->active_worst_quality) &&
  3363. (cpi->active_worst_quality < cpi->worst_quality) &&
  3364. (cpi->projected_frame_size > frame_over_shoot_limit)) {
  3365. int over_size_percent =
  3366. ((cpi->projected_frame_size - frame_over_shoot_limit) * 100) /
  3367. frame_over_shoot_limit;
  3368. /* If so is there any scope for relaxing it */
  3369. while ((cpi->active_worst_quality < cpi->worst_quality) &&
  3370. (over_size_percent > 0)) {
  3371. cpi->active_worst_quality++;
  3372. /* Assume 1 qstep = about 4% on frame size. */
  3373. over_size_percent = (int)(over_size_percent * 0.96);
  3374. }
  3375. #if !CONFIG_REALTIME_ONLY
  3376. top_index = cpi->active_worst_quality;
  3377. #endif // !CONFIG_REALTIME_ONLY
  3378. /* If we have updated the active max Q do not call
  3379. * vp8_update_rate_correction_factors() this loop.
  3380. */
  3381. active_worst_qchanged = 1;
  3382. } else {
  3383. active_worst_qchanged = 0;
  3384. }
  3385. #if CONFIG_REALTIME_ONLY
  3386. Loop = 0;
  3387. #else
  3388. /* Special case handling for forced key frames */
  3389. if ((cm->frame_type == KEY_FRAME) && cpi->this_key_frame_forced) {
  3390. int last_q = Q;
  3391. int kf_err = vp8_calc_ss_err(cpi->Source, &cm->yv12_fb[cm->new_fb_idx]);
  3392. /* The key frame is not good enough */
  3393. if (kf_err > ((cpi->ambient_err * 7) >> 3)) {
  3394. /* Lower q_high */
  3395. q_high = (Q > q_low) ? (Q - 1) : q_low;
  3396. /* Adjust Q */
  3397. Q = (q_high + q_low) >> 1;
  3398. }
  3399. /* The key frame is much better than the previous frame */
  3400. else if (kf_err < (cpi->ambient_err >> 1)) {
  3401. /* Raise q_low */
  3402. q_low = (Q < q_high) ? (Q + 1) : q_high;
  3403. /* Adjust Q */
  3404. Q = (q_high + q_low + 1) >> 1;
  3405. }
  3406. /* Clamp Q to upper and lower limits: */
  3407. if (Q > q_high) {
  3408. Q = q_high;
  3409. } else if (Q < q_low) {
  3410. Q = q_low;
  3411. }
  3412. Loop = Q != last_q;
  3413. }
  3414. /* Is the projected frame size out of range and are we allowed
  3415. * to attempt to recode.
  3416. */
  3417. else if (recode_loop_test(cpi, frame_over_shoot_limit,
  3418. frame_under_shoot_limit, Q, top_index,
  3419. bottom_index)) {
  3420. int last_q = Q;
  3421. int Retries = 0;
  3422. /* Frame size out of permitted range. Update correction factor
  3423. * & compute new Q to try...
  3424. */
  3425. /* Frame is too large */
  3426. if (cpi->projected_frame_size > cpi->this_frame_target) {
  3427. /* Raise Qlow as to at least the current value */
  3428. q_low = (Q < q_high) ? (Q + 1) : q_high;
  3429. /* If we are using over quant do the same for zbin_oq_low */
  3430. if (cpi->mb.zbin_over_quant > 0) {
  3431. zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high)
  3432. ? (cpi->mb.zbin_over_quant + 1)
  3433. : zbin_oq_high;
  3434. }
  3435. if (undershoot_seen) {
  3436. /* Update rate_correction_factor unless
  3437. * cpi->active_worst_quality has changed.
  3438. */
  3439. if (!active_worst_qchanged) {
  3440. vp8_update_rate_correction_factors(cpi, 1);
  3441. }
  3442. Q = (q_high + q_low + 1) / 2;
  3443. /* Adjust cpi->zbin_over_quant (only allowed when Q
  3444. * is max)
  3445. */
  3446. if (Q < MAXQ) {
  3447. cpi->mb.zbin_over_quant = 0;
  3448. } else {
  3449. zbin_oq_low = (cpi->mb.zbin_over_quant < zbin_oq_high)
  3450. ? (cpi->mb.zbin_over_quant + 1)
  3451. : zbin_oq_high;
  3452. cpi->mb.zbin_over_quant = (zbin_oq_high + zbin_oq_low) / 2;
  3453. }
  3454. } else {
  3455. /* Update rate_correction_factor unless
  3456. * cpi->active_worst_quality has changed.
  3457. */
  3458. if (!active_worst_qchanged) {
  3459. vp8_update_rate_correction_factors(cpi, 0);
  3460. }
  3461. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3462. while (((Q < q_low) || (cpi->mb.zbin_over_quant < zbin_oq_low)) &&
  3463. (Retries < 10)) {
  3464. vp8_update_rate_correction_factors(cpi, 0);
  3465. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3466. Retries++;
  3467. }
  3468. }
  3469. overshoot_seen = 1;
  3470. }
  3471. /* Frame is too small */
  3472. else {
  3473. if (cpi->mb.zbin_over_quant == 0) {
  3474. /* Lower q_high if not using over quant */
  3475. q_high = (Q > q_low) ? (Q - 1) : q_low;
  3476. } else {
  3477. /* else lower zbin_oq_high */
  3478. zbin_oq_high = (cpi->mb.zbin_over_quant > zbin_oq_low)
  3479. ? (cpi->mb.zbin_over_quant - 1)
  3480. : zbin_oq_low;
  3481. }
  3482. if (overshoot_seen) {
  3483. /* Update rate_correction_factor unless
  3484. * cpi->active_worst_quality has changed.
  3485. */
  3486. if (!active_worst_qchanged) {
  3487. vp8_update_rate_correction_factors(cpi, 1);
  3488. }
  3489. Q = (q_high + q_low) / 2;
  3490. /* Adjust cpi->zbin_over_quant (only allowed when Q
  3491. * is max)
  3492. */
  3493. if (Q < MAXQ) {
  3494. cpi->mb.zbin_over_quant = 0;
  3495. } else {
  3496. cpi->mb.zbin_over_quant = (zbin_oq_high + zbin_oq_low) / 2;
  3497. }
  3498. } else {
  3499. /* Update rate_correction_factor unless
  3500. * cpi->active_worst_quality has changed.
  3501. */
  3502. if (!active_worst_qchanged) {
  3503. vp8_update_rate_correction_factors(cpi, 0);
  3504. }
  3505. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3506. /* Special case reset for qlow for constrained quality.
  3507. * This should only trigger where there is very substantial
  3508. * undershoot on a frame and the auto cq level is above
  3509. * the user passsed in value.
  3510. */
  3511. if ((cpi->oxcf.end_usage == USAGE_CONSTRAINED_QUALITY) &&
  3512. (Q < q_low)) {
  3513. q_low = Q;
  3514. }
  3515. while (((Q > q_high) || (cpi->mb.zbin_over_quant > zbin_oq_high)) &&
  3516. (Retries < 10)) {
  3517. vp8_update_rate_correction_factors(cpi, 0);
  3518. Q = vp8_regulate_q(cpi, cpi->this_frame_target);
  3519. Retries++;
  3520. }
  3521. }
  3522. undershoot_seen = 1;
  3523. }
  3524. /* Clamp Q to upper and lower limits: */
  3525. if (Q > q_high) {
  3526. Q = q_high;
  3527. } else if (Q < q_low) {
  3528. Q = q_low;
  3529. }
  3530. /* Clamp cpi->zbin_over_quant */
  3531. cpi->mb.zbin_over_quant = (cpi->mb.zbin_over_quant < zbin_oq_low)
  3532. ? zbin_oq_low
  3533. : (cpi->mb.zbin_over_quant > zbin_oq_high)
  3534. ? zbin_oq_high
  3535. : cpi->mb.zbin_over_quant;
  3536. Loop = Q != last_q;
  3537. } else {
  3538. Loop = 0;
  3539. }
  3540. #endif // CONFIG_REALTIME_ONLY
  3541. if (cpi->is_src_frame_alt_ref) Loop = 0;
  3542. if (Loop == 1) {
  3543. vp8_restore_coding_context(cpi);
  3544. loop_count++;
  3545. #if CONFIG_INTERNAL_STATS
  3546. cpi->tot_recode_hits++;
  3547. #endif
  3548. }
  3549. } while (Loop == 1);
  3550. #if defined(DROP_UNCODED_FRAMES)
  3551. /* if there are no coded macroblocks at all drop this frame */
  3552. if (cpi->common.MBs == cpi->mb.skip_true_count &&
  3553. (cpi->drop_frame_count & 7) != 7 && cm->frame_type != KEY_FRAME) {
  3554. cpi->common.current_video_frame++;
  3555. cpi->frames_since_key++;
  3556. cpi->drop_frame_count++;
  3557. cpi->ext_refresh_frame_flags_pending = 0;
  3558. // We advance the temporal pattern for dropped frames.
  3559. cpi->temporal_pattern_counter++;
  3560. return;
  3561. }
  3562. cpi->drop_frame_count = 0;
  3563. #endif
  3564. #if 0
  3565. /* Experimental code for lagged and one pass
  3566. * Update stats used for one pass GF selection
  3567. */
  3568. {
  3569. cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_coded_error = (double)cpi->prediction_error;
  3570. cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_intra_error = (double)cpi->intra_error;
  3571. cpi->one_pass_frame_stats[cpi->one_pass_frame_index].frame_pcnt_inter = (double)(100 - cpi->this_frame_percent_intra) / 100.0;
  3572. }
  3573. #endif
  3574. /* Special case code to reduce pulsing when key frames are forced at a
  3575. * fixed interval. Note the reconstruction error if it is the frame before
  3576. * the force key frame
  3577. */
  3578. if (cpi->next_key_frame_forced && (cpi->twopass.frames_to_key == 0)) {
  3579. cpi->ambient_err =
  3580. vp8_calc_ss_err(cpi->Source, &cm->yv12_fb[cm->new_fb_idx]);
  3581. }
  3582. /* This frame's MVs are saved and will be used in next frame's MV predictor.
  3583. * Last frame has one more line(add to bottom) and one more column(add to
  3584. * right) than cm->mip. The edge elements are initialized to 0.
  3585. */
  3586. #if CONFIG_MULTI_RES_ENCODING
  3587. if (!cpi->oxcf.mr_encoder_id && cm->show_frame)
  3588. #else
  3589. if (cm->show_frame) /* do not save for altref frame */
  3590. #endif
  3591. {
  3592. int mb_row;
  3593. int mb_col;
  3594. /* Point to beginning of allocated MODE_INFO arrays. */
  3595. MODE_INFO *tmp = cm->mip;
  3596. if (cm->frame_type != KEY_FRAME) {
  3597. for (mb_row = 0; mb_row < cm->mb_rows + 1; ++mb_row) {
  3598. for (mb_col = 0; mb_col < cm->mb_cols + 1; ++mb_col) {
  3599. if (tmp->mbmi.ref_frame != INTRA_FRAME) {
  3600. cpi->lfmv[mb_col + mb_row * (cm->mode_info_stride + 1)].as_int =
  3601. tmp->mbmi.mv.as_int;
  3602. }
  3603. cpi->lf_ref_frame_sign_bias[mb_col +
  3604. mb_row * (cm->mode_info_stride + 1)] =
  3605. cm->ref_frame_sign_bias[tmp->mbmi.ref_frame];
  3606. cpi->lf_ref_frame[mb_col + mb_row * (cm->mode_info_stride + 1)] =
  3607. tmp->mbmi.ref_frame;
  3608. tmp++;
  3609. }
  3610. }
  3611. }
  3612. }
  3613. /* Count last ref frame 0,0 usage on current encoded frame. */
  3614. {
  3615. int mb_row;
  3616. int mb_col;
  3617. /* Point to beginning of MODE_INFO arrays. */
  3618. MODE_INFO *tmp = cm->mi;
  3619. cpi->zeromv_count = 0;
  3620. if (cm->frame_type != KEY_FRAME) {
  3621. for (mb_row = 0; mb_row < cm->mb_rows; ++mb_row) {
  3622. for (mb_col = 0; mb_col < cm->mb_cols; ++mb_col) {
  3623. if (tmp->mbmi.mode == ZEROMV && tmp->mbmi.ref_frame == LAST_FRAME) {
  3624. cpi->zeromv_count++;
  3625. }
  3626. tmp++;
  3627. }
  3628. tmp++;
  3629. }
  3630. }
  3631. }
  3632. #if CONFIG_MULTI_RES_ENCODING
  3633. vp8_cal_dissimilarity(cpi);
  3634. #endif
  3635. /* Update the GF useage maps.
  3636. * This is done after completing the compression of a frame when all
  3637. * modes etc. are finalized but before loop filter
  3638. */
  3639. if (cpi->oxcf.number_of_layers == 1) {
  3640. vp8_update_gf_useage_maps(cpi, cm, &cpi->mb);
  3641. }
  3642. if (cm->frame_type == KEY_FRAME) cm->refresh_last_frame = 1;
  3643. #if 0
  3644. {
  3645. FILE *f = fopen("gfactive.stt", "a");
  3646. fprintf(f, "%8d %8d %8d %8d %8d\n", cm->current_video_frame, (100 * cpi->gf_active_count) / (cpi->common.mb_rows * cpi->common.mb_cols), cpi->this_iiratio, cpi->next_iiratio, cm->refresh_golden_frame);
  3647. fclose(f);
  3648. }
  3649. #endif
  3650. /* For inter frames the current default behavior is that when
  3651. * cm->refresh_golden_frame is set we copy the old GF over to the ARF buffer
  3652. * This is purely an encoder decision at present.
  3653. * Avoid this behavior when refresh flags are set by the user.
  3654. */
  3655. if (!cpi->oxcf.error_resilient_mode && cm->refresh_golden_frame &&
  3656. !cpi->ext_refresh_frame_flags_pending) {
  3657. cm->copy_buffer_to_arf = 2;
  3658. } else {
  3659. cm->copy_buffer_to_arf = 0;
  3660. }
  3661. cm->frame_to_show = &cm->yv12_fb[cm->new_fb_idx];
  3662. #if CONFIG_TEMPORAL_DENOISING
  3663. // Get some measure of the amount of noise, by measuring the (partial) mse
  3664. // between source and denoised buffer, for y channel. Partial refers to
  3665. // computing the sse for a sub-sample of the frame (i.e., skip x blocks along
  3666. // row/column),
  3667. // and only for blocks in that set that are consecutive ZEROMV_LAST mode.
  3668. // Do this every ~8 frames, to further reduce complexity.
  3669. // TODO(marpan): Keep this for now for the case cpi->oxcf.noise_sensitivity <
  3670. // 4,
  3671. // should be removed in favor of the process_denoiser_mode_change() function
  3672. // below.
  3673. if (cpi->oxcf.noise_sensitivity > 0 && cpi->oxcf.noise_sensitivity < 4 &&
  3674. !cpi->oxcf.screen_content_mode && cpi->frames_since_key % 8 == 0 &&
  3675. cm->frame_type != KEY_FRAME) {
  3676. cpi->mse_source_denoised = measure_square_diff_partial(
  3677. &cpi->denoiser.yv12_running_avg[INTRA_FRAME], cpi->Source, cpi);
  3678. }
  3679. // For the adaptive denoising mode (noise_sensitivity == 4), sample the mse
  3680. // of source diff (between current and previous frame), and determine if we
  3681. // should switch the denoiser mode. Sampling refers to computing the mse for
  3682. // a sub-sample of the frame (i.e., skip x blocks along row/column), and
  3683. // only for blocks in that set that have used ZEROMV LAST, along with some
  3684. // constraint on the sum diff between blocks. This process is called every
  3685. // ~8 frames, to further reduce complexity.
  3686. if (cpi->oxcf.noise_sensitivity == 4 && !cpi->oxcf.screen_content_mode &&
  3687. cpi->frames_since_key % 8 == 0 && cm->frame_type != KEY_FRAME) {
  3688. process_denoiser_mode_change(cpi);
  3689. }
  3690. #endif
  3691. #ifdef OUTPUT_YUV_SKINMAP
  3692. if (cpi->common.current_video_frame > 1) {
  3693. vp8_compute_skin_map(cpi, yuv_skinmap_file);
  3694. }
  3695. #endif
  3696. #if CONFIG_MULTITHREAD
  3697. if (vpx_atomic_load_acquire(&cpi->b_multi_threaded)) {
  3698. /* start loopfilter in separate thread */
  3699. sem_post(&cpi->h_event_start_lpf);
  3700. cpi->b_lpf_running = 1;
  3701. /* wait for the filter_level to be picked so that we can continue with
  3702. * stream packing */
  3703. sem_wait(&cpi->h_event_end_lpf);
  3704. } else
  3705. #endif
  3706. {
  3707. vp8_loopfilter_frame(cpi, cm);
  3708. }
  3709. update_reference_frames(cpi);
  3710. #ifdef OUTPUT_YUV_DENOISED
  3711. vpx_write_yuv_frame(yuv_denoised_file,
  3712. &cpi->denoiser.yv12_running_avg[INTRA_FRAME]);
  3713. #endif
  3714. #if !(CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING)
  3715. if (cpi->oxcf.error_resilient_mode) {
  3716. cm->refresh_entropy_probs = 0;
  3717. }
  3718. #endif
  3719. /* build the bitstream */
  3720. vp8_pack_bitstream(cpi, dest, dest_end, size);
  3721. /* Move storing frame_type out of the above loop since it is also
  3722. * needed in motion search besides loopfilter */
  3723. cm->last_frame_type = cm->frame_type;
  3724. /* Update rate control heuristics */
  3725. cpi->total_byte_count += (*size);
  3726. cpi->projected_frame_size = (int)(*size) << 3;
  3727. if (cpi->oxcf.number_of_layers > 1) {
  3728. unsigned int i;
  3729. for (i = cpi->current_layer + 1; i < cpi->oxcf.number_of_layers; ++i) {
  3730. cpi->layer_context[i].total_byte_count += (*size);
  3731. }
  3732. }
  3733. if (!active_worst_qchanged) vp8_update_rate_correction_factors(cpi, 2);
  3734. cpi->last_q[cm->frame_type] = cm->base_qindex;
  3735. if (cm->frame_type == KEY_FRAME) {
  3736. vp8_adjust_key_frame_context(cpi);
  3737. }
  3738. /* Keep a record of ambient average Q. */
  3739. if (cm->frame_type != KEY_FRAME) {
  3740. cpi->avg_frame_qindex =
  3741. (2 + 3 * cpi->avg_frame_qindex + cm->base_qindex) >> 2;
  3742. }
  3743. /* Keep a record from which we can calculate the average Q excluding
  3744. * GF updates and key frames
  3745. */
  3746. if ((cm->frame_type != KEY_FRAME) &&
  3747. ((cpi->oxcf.number_of_layers > 1) ||
  3748. (!cm->refresh_golden_frame && !cm->refresh_alt_ref_frame))) {
  3749. cpi->ni_frames++;
  3750. /* Calculate the average Q for normal inter frames (not key or GFU
  3751. * frames).
  3752. */
  3753. if (cpi->pass == 2) {
  3754. cpi->ni_tot_qi += Q;
  3755. cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
  3756. } else {
  3757. /* Damp value for first few frames */
  3758. if (cpi->ni_frames > 150) {
  3759. cpi->ni_tot_qi += Q;
  3760. cpi->ni_av_qi = (cpi->ni_tot_qi / cpi->ni_frames);
  3761. }
  3762. /* For one pass, early in the clip ... average the current frame Q
  3763. * value with the worstq entered by the user as a dampening measure
  3764. */
  3765. else {
  3766. cpi->ni_tot_qi += Q;
  3767. cpi->ni_av_qi =
  3768. ((cpi->ni_tot_qi / cpi->ni_frames) + cpi->worst_quality + 1) / 2;
  3769. }
  3770. /* If the average Q is higher than what was used in the last
  3771. * frame (after going through the recode loop to keep the frame
  3772. * size within range) then use the last frame value - 1. The -1
  3773. * is designed to stop Q and hence the data rate, from
  3774. * progressively falling away during difficult sections, but at
  3775. * the same time reduce the number of itterations around the
  3776. * recode loop.
  3777. */
  3778. if (Q > cpi->ni_av_qi) cpi->ni_av_qi = Q - 1;
  3779. }
  3780. }
  3781. /* Update the buffer level variable. */
  3782. /* Non-viewable frames are a special case and are treated as pure overhead. */
  3783. if (!cm->show_frame) {
  3784. cpi->bits_off_target -= cpi->projected_frame_size;
  3785. } else {
  3786. cpi->bits_off_target +=
  3787. cpi->av_per_frame_bandwidth - cpi->projected_frame_size;
  3788. }
  3789. /* Clip the buffer level to the maximum specified buffer size */
  3790. if (cpi->bits_off_target > cpi->oxcf.maximum_buffer_size) {
  3791. cpi->bits_off_target = cpi->oxcf.maximum_buffer_size;
  3792. }
  3793. // If the frame dropper is not enabled, don't let the buffer level go below
  3794. // some threshold, given here by -|maximum_buffer_size|. For now we only do
  3795. // this for screen content input.
  3796. if (cpi->drop_frames_allowed == 0 && cpi->oxcf.screen_content_mode &&
  3797. cpi->bits_off_target < -cpi->oxcf.maximum_buffer_size) {
  3798. cpi->bits_off_target = -cpi->oxcf.maximum_buffer_size;
  3799. }
  3800. /* Rolling monitors of whether we are over or underspending used to
  3801. * help regulate min and Max Q in two pass.
  3802. */
  3803. cpi->rolling_target_bits =
  3804. ((cpi->rolling_target_bits * 3) + cpi->this_frame_target + 2) / 4;
  3805. cpi->rolling_actual_bits =
  3806. ((cpi->rolling_actual_bits * 3) + cpi->projected_frame_size + 2) / 4;
  3807. cpi->long_rolling_target_bits =
  3808. ((cpi->long_rolling_target_bits * 31) + cpi->this_frame_target + 16) / 32;
  3809. cpi->long_rolling_actual_bits =
  3810. ((cpi->long_rolling_actual_bits * 31) + cpi->projected_frame_size + 16) /
  3811. 32;
  3812. /* Actual bits spent */
  3813. cpi->total_actual_bits += cpi->projected_frame_size;
  3814. /* Debug stats */
  3815. cpi->total_target_vs_actual +=
  3816. (cpi->this_frame_target - cpi->projected_frame_size);
  3817. cpi->buffer_level = cpi->bits_off_target;
  3818. /* Propagate values to higher temporal layers */
  3819. if (cpi->oxcf.number_of_layers > 1) {
  3820. unsigned int i;
  3821. for (i = cpi->current_layer + 1; i < cpi->oxcf.number_of_layers; ++i) {
  3822. LAYER_CONTEXT *lc = &cpi->layer_context[i];
  3823. int bits_off_for_this_layer = (int)(lc->target_bandwidth / lc->framerate -
  3824. cpi->projected_frame_size);
  3825. lc->bits_off_target += bits_off_for_this_layer;
  3826. /* Clip buffer level to maximum buffer size for the layer */
  3827. if (lc->bits_off_target > lc->maximum_buffer_size) {
  3828. lc->bits_off_target = lc->maximum_buffer_size;
  3829. }
  3830. lc->total_actual_bits += cpi->projected_frame_size;
  3831. lc->total_target_vs_actual += bits_off_for_this_layer;
  3832. lc->buffer_level = lc->bits_off_target;
  3833. }
  3834. }
  3835. /* Update bits left to the kf and gf groups to account for overshoot
  3836. * or undershoot on these frames
  3837. */
  3838. if (cm->frame_type == KEY_FRAME) {
  3839. cpi->twopass.kf_group_bits +=
  3840. cpi->this_frame_target - cpi->projected_frame_size;
  3841. if (cpi->twopass.kf_group_bits < 0) cpi->twopass.kf_group_bits = 0;
  3842. } else if (cm->refresh_golden_frame || cm->refresh_alt_ref_frame) {
  3843. cpi->twopass.gf_group_bits +=
  3844. cpi->this_frame_target - cpi->projected_frame_size;
  3845. if (cpi->twopass.gf_group_bits < 0) cpi->twopass.gf_group_bits = 0;
  3846. }
  3847. if (cm->frame_type != KEY_FRAME) {
  3848. if (cpi->common.refresh_alt_ref_frame) {
  3849. cpi->last_skip_false_probs[2] = cpi->prob_skip_false;
  3850. cpi->last_skip_probs_q[2] = cm->base_qindex;
  3851. } else if (cpi->common.refresh_golden_frame) {
  3852. cpi->last_skip_false_probs[1] = cpi->prob_skip_false;
  3853. cpi->last_skip_probs_q[1] = cm->base_qindex;
  3854. } else {
  3855. cpi->last_skip_false_probs[0] = cpi->prob_skip_false;
  3856. cpi->last_skip_probs_q[0] = cm->base_qindex;
  3857. /* update the baseline */
  3858. cpi->base_skip_false_prob[cm->base_qindex] = cpi->prob_skip_false;
  3859. }
  3860. }
  3861. #if 0 && CONFIG_INTERNAL_STATS
  3862. {
  3863. FILE *f = fopen("tmp.stt", "a");
  3864. vpx_clear_system_state();
  3865. if (cpi->twopass.total_left_stats.coded_error != 0.0)
  3866. fprintf(f, "%10d %10d %10d %10d %10d %10"PRId64" %10"PRId64
  3867. "%10"PRId64" %10d %6d %6d %6d %6d %5d %5d %5d %8d "
  3868. "%8.2lf %"PRId64" %10.3lf %10"PRId64" %8d\n",
  3869. cpi->common.current_video_frame, cpi->this_frame_target,
  3870. cpi->projected_frame_size,
  3871. (cpi->projected_frame_size - cpi->this_frame_target),
  3872. cpi->total_target_vs_actual,
  3873. cpi->buffer_level,
  3874. (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
  3875. cpi->total_actual_bits, cm->base_qindex,
  3876. cpi->active_best_quality, cpi->active_worst_quality,
  3877. cpi->ni_av_qi, cpi->cq_target_quality,
  3878. cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
  3879. cm->frame_type, cpi->gfu_boost,
  3880. cpi->twopass.est_max_qcorrection_factor,
  3881. cpi->twopass.bits_left,
  3882. cpi->twopass.total_left_stats.coded_error,
  3883. (double)cpi->twopass.bits_left /
  3884. cpi->twopass.total_left_stats.coded_error,
  3885. cpi->tot_recode_hits);
  3886. else
  3887. fprintf(f, "%10d %10d %10d %10d %10d %10"PRId64" %10"PRId64
  3888. "%10"PRId64" %10d %6d %6d %6d %6d %5d %5d %5d %8d "
  3889. "%8.2lf %"PRId64" %10.3lf %8d\n",
  3890. cpi->common.current_video_frame, cpi->this_frame_target,
  3891. cpi->projected_frame_size,
  3892. (cpi->projected_frame_size - cpi->this_frame_target),
  3893. cpi->total_target_vs_actual,
  3894. cpi->buffer_level,
  3895. (cpi->oxcf.starting_buffer_level-cpi->bits_off_target),
  3896. cpi->total_actual_bits, cm->base_qindex,
  3897. cpi->active_best_quality, cpi->active_worst_quality,
  3898. cpi->ni_av_qi, cpi->cq_target_quality,
  3899. cm->refresh_golden_frame, cm->refresh_alt_ref_frame,
  3900. cm->frame_type, cpi->gfu_boost,
  3901. cpi->twopass.est_max_qcorrection_factor,
  3902. cpi->twopass.bits_left,
  3903. cpi->twopass.total_left_stats.coded_error,
  3904. cpi->tot_recode_hits);
  3905. fclose(f);
  3906. {
  3907. FILE *fmodes = fopen("Modes.stt", "a");
  3908. fprintf(fmodes, "%6d:%1d:%1d:%1d ",
  3909. cpi->common.current_video_frame,
  3910. cm->frame_type, cm->refresh_golden_frame,
  3911. cm->refresh_alt_ref_frame);
  3912. fprintf(fmodes, "\n");
  3913. fclose(fmodes);
  3914. }
  3915. }
  3916. #endif
  3917. cpi->ext_refresh_frame_flags_pending = 0;
  3918. if (cm->refresh_golden_frame == 1) {
  3919. cm->frame_flags = cm->frame_flags | FRAMEFLAGS_GOLDEN;
  3920. } else {
  3921. cm->frame_flags = cm->frame_flags & ~FRAMEFLAGS_GOLDEN;
  3922. }
  3923. if (cm->refresh_alt_ref_frame == 1) {
  3924. cm->frame_flags = cm->frame_flags | FRAMEFLAGS_ALTREF;
  3925. } else {
  3926. cm->frame_flags = cm->frame_flags & ~FRAMEFLAGS_ALTREF;
  3927. }
  3928. if (cm->refresh_last_frame & cm->refresh_golden_frame) { /* both refreshed */
  3929. cpi->gold_is_last = 1;
  3930. } else if (cm->refresh_last_frame ^ cm->refresh_golden_frame) {
  3931. /* 1 refreshed but not the other */
  3932. cpi->gold_is_last = 0;
  3933. }
  3934. if (cm->refresh_last_frame & cm->refresh_alt_ref_frame) { /* both refreshed */
  3935. cpi->alt_is_last = 1;
  3936. } else if (cm->refresh_last_frame ^ cm->refresh_alt_ref_frame) {
  3937. /* 1 refreshed but not the other */
  3938. cpi->alt_is_last = 0;
  3939. }
  3940. if (cm->refresh_alt_ref_frame &
  3941. cm->refresh_golden_frame) { /* both refreshed */
  3942. cpi->gold_is_alt = 1;
  3943. } else if (cm->refresh_alt_ref_frame ^ cm->refresh_golden_frame) {
  3944. /* 1 refreshed but not the other */
  3945. cpi->gold_is_alt = 0;
  3946. }
  3947. cpi->ref_frame_flags = VP8_ALTR_FRAME | VP8_GOLD_FRAME | VP8_LAST_FRAME;
  3948. if (cpi->gold_is_last) cpi->ref_frame_flags &= ~VP8_GOLD_FRAME;
  3949. if (cpi->alt_is_last) cpi->ref_frame_flags &= ~VP8_ALTR_FRAME;
  3950. if (cpi->gold_is_alt) cpi->ref_frame_flags &= ~VP8_ALTR_FRAME;
  3951. if (!cpi->oxcf.error_resilient_mode) {
  3952. if (cpi->oxcf.play_alternate && cm->refresh_alt_ref_frame &&
  3953. (cm->frame_type != KEY_FRAME)) {
  3954. /* Update the alternate reference frame stats as appropriate. */
  3955. update_alt_ref_frame_stats(cpi);
  3956. } else {
  3957. /* Update the Golden frame stats as appropriate. */
  3958. update_golden_frame_stats(cpi);
  3959. }
  3960. }
  3961. if (cm->frame_type == KEY_FRAME) {
  3962. /* Tell the caller that the frame was coded as a key frame */
  3963. *frame_flags = cm->frame_flags | FRAMEFLAGS_KEY;
  3964. /* As this frame is a key frame the next defaults to an inter frame. */
  3965. cm->frame_type = INTER_FRAME;
  3966. cpi->last_frame_percent_intra = 100;
  3967. } else {
  3968. *frame_flags = cm->frame_flags & ~FRAMEFLAGS_KEY;
  3969. cpi->last_frame_percent_intra = cpi->this_frame_percent_intra;
  3970. }
  3971. /* Clear the one shot update flags for segmentation map and mode/ref
  3972. * loop filter deltas.
  3973. */
  3974. cpi->mb.e_mbd.update_mb_segmentation_map = 0;
  3975. cpi->mb.e_mbd.update_mb_segmentation_data = 0;
  3976. cpi->mb.e_mbd.mode_ref_lf_delta_update = 0;
  3977. /* Dont increment frame counters if this was an altref buffer update
  3978. * not a real frame
  3979. */
  3980. if (cm->show_frame) {
  3981. cm->current_video_frame++;
  3982. cpi->frames_since_key++;
  3983. cpi->temporal_pattern_counter++;
  3984. }
  3985. #if 0
  3986. {
  3987. char filename[512];
  3988. FILE *recon_file;
  3989. sprintf(filename, "enc%04d.yuv", (int) cm->current_video_frame);
  3990. recon_file = fopen(filename, "wb");
  3991. fwrite(cm->yv12_fb[cm->lst_fb_idx].buffer_alloc,
  3992. cm->yv12_fb[cm->lst_fb_idx].frame_size, 1, recon_file);
  3993. fclose(recon_file);
  3994. }
  3995. #endif
  3996. /* DEBUG */
  3997. /* vpx_write_yuv_frame("encoder_recon.yuv", cm->frame_to_show); */
  3998. }
  3999. #if !CONFIG_REALTIME_ONLY
  4000. static void Pass2Encode(VP8_COMP *cpi, size_t *size, unsigned char *dest,
  4001. unsigned char *dest_end, unsigned int *frame_flags) {
  4002. if (!cpi->common.refresh_alt_ref_frame) vp8_second_pass(cpi);
  4003. encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags);
  4004. cpi->twopass.bits_left -= 8 * (int)(*size);
  4005. if (!cpi->common.refresh_alt_ref_frame) {
  4006. double two_pass_min_rate =
  4007. (double)(cpi->oxcf.target_bandwidth *
  4008. cpi->oxcf.two_pass_vbrmin_section / 100);
  4009. cpi->twopass.bits_left += (int64_t)(two_pass_min_rate / cpi->framerate);
  4010. }
  4011. }
  4012. #endif
  4013. int vp8_receive_raw_frame(VP8_COMP *cpi, unsigned int frame_flags,
  4014. YV12_BUFFER_CONFIG *sd, int64_t time_stamp,
  4015. int64_t end_time) {
  4016. struct vpx_usec_timer timer;
  4017. int res = 0;
  4018. vpx_usec_timer_start(&timer);
  4019. /* Reinit the lookahead buffer if the frame size changes */
  4020. if (sd->y_width != cpi->oxcf.Width || sd->y_height != cpi->oxcf.Height) {
  4021. assert(cpi->oxcf.lag_in_frames < 2);
  4022. dealloc_raw_frame_buffers(cpi);
  4023. alloc_raw_frame_buffers(cpi);
  4024. }
  4025. if (vp8_lookahead_push(cpi->lookahead, sd, time_stamp, end_time, frame_flags,
  4026. cpi->active_map_enabled ? cpi->active_map : NULL)) {
  4027. res = -1;
  4028. }
  4029. vpx_usec_timer_mark(&timer);
  4030. cpi->time_receive_data += vpx_usec_timer_elapsed(&timer);
  4031. return res;
  4032. }
  4033. static int frame_is_reference(const VP8_COMP *cpi) {
  4034. const VP8_COMMON *cm = &cpi->common;
  4035. const MACROBLOCKD *xd = &cpi->mb.e_mbd;
  4036. return cm->frame_type == KEY_FRAME || cm->refresh_last_frame ||
  4037. cm->refresh_golden_frame || cm->refresh_alt_ref_frame ||
  4038. cm->copy_buffer_to_gf || cm->copy_buffer_to_arf ||
  4039. cm->refresh_entropy_probs || xd->mode_ref_lf_delta_update ||
  4040. xd->update_mb_segmentation_map || xd->update_mb_segmentation_data;
  4041. }
/* Produce one compressed frame.
 *
 * Selects the next source frame to code (possibly a temporally filtered
 * alt-ref frame peeked ahead in the lookahead queue), updates the frame
 * rate estimate from incoming timestamps, restores per-layer state for
 * temporal scalability, dispatches to the appropriate encode pass, and
 * finally updates entropy-context snapshots, droppable-frame state and
 * (under CONFIG_INTERNAL_STATS) PSNR/SSIM accounting.
 *
 * Outputs: *size (compressed bytes, 0 if no frame produced), *dest,
 * *time_stamp / *time_end (source timestamps), *frame_flags.
 * Returns 0 on success, -1 when cpi is NULL or no source is available.
 */
int vp8_get_compressed_data(VP8_COMP *cpi, unsigned int *frame_flags,
                            size_t *size, unsigned char *dest,
                            unsigned char *dest_end, int64_t *time_stamp,
                            int64_t *time_end, int flush) {
  VP8_COMMON *cm;
  struct vpx_usec_timer tsctimer;
  struct vpx_usec_timer ticktimer;
  struct vpx_usec_timer cmptimer;
  YV12_BUFFER_CONFIG *force_src_buffer = NULL;

  if (!cpi) return -1;

  cm = &cpi->common;

  vpx_usec_timer_start(&cmptimer);

  cpi->source = NULL;
#if !CONFIG_REALTIME_ONLY
  /* Should we code an alternate reference frame */
  if (cpi->oxcf.error_resilient_mode == 0 && cpi->oxcf.play_alternate &&
      cpi->source_alt_ref_pending) {
    if ((cpi->source = vp8_lookahead_peek(
             cpi->lookahead, cpi->frames_till_gf_update_due, PEEK_FORWARD))) {
      cpi->alt_ref_source = cpi->source;
      if (cpi->oxcf.arnr_max_frames > 0) {
        /* Temporal-filter the frames around the GF update point into
         * alt_ref_buffer and encode from that instead of the raw source.
         */
        vp8_temporal_filter_prepare_c(cpi, cpi->frames_till_gf_update_due);
        force_src_buffer = &cpi->alt_ref_buffer;
      }
      cpi->frames_till_alt_ref_frame = cpi->frames_till_gf_update_due;
      /* An alt-ref frame only refreshes the alt-ref buffer and is never
       * shown.
       */
      cm->refresh_alt_ref_frame = 1;
      cm->refresh_golden_frame = 0;
      cm->refresh_last_frame = 0;
      cm->show_frame = 0;
      /* Clear Pending alt Ref flag. */
      cpi->source_alt_ref_pending = 0;
      cpi->is_src_frame_alt_ref = 0;
    }
  }
#endif

  if (!cpi->source) {
    /* Read last frame source if we are encoding first pass. */
    if (cpi->pass == 1 && cm->current_video_frame > 0) {
      if ((cpi->last_source =
               vp8_lookahead_peek(cpi->lookahead, 1, PEEK_BACKWARD)) == NULL) {
        return -1;
      }
    }

    if ((cpi->source = vp8_lookahead_pop(cpi->lookahead, flush))) {
      cm->show_frame = 1;

      /* The popped frame may be the one previously coded as the
       * (invisible) alt-ref; if so it is shown but not re-coded fully.
       */
      cpi->is_src_frame_alt_ref =
          cpi->alt_ref_source && (cpi->source == cpi->alt_ref_source);

      if (cpi->is_src_frame_alt_ref) cpi->alt_ref_source = NULL;
    }
  }

  if (cpi->source) {
    cpi->Source = force_src_buffer ? force_src_buffer : &cpi->source->img;
    cpi->un_scaled_source = cpi->Source;
    *time_stamp = cpi->source->ts_start;
    *time_end = cpi->source->ts_end;
    *frame_flags = cpi->source->flags;

    if (cpi->pass == 1 && cm->current_video_frame > 0) {
      cpi->last_frame_unscaled_source = &cpi->last_source->img;
    }
  } else {
    /* No source available: report a zero-size frame and bail. */
    *size = 0;
#if !CONFIG_REALTIME_ONLY
    if (flush && cpi->pass == 1 && !cpi->twopass.first_pass_done) {
      vp8_end_first_pass(cpi); /* get last stats packet */
      cpi->twopass.first_pass_done = 1;
    }
#endif
    return -1;
  }

  if (cpi->source->ts_start < cpi->first_time_stamp_ever) {
    cpi->first_time_stamp_ever = cpi->source->ts_start;
    cpi->last_end_time_stamp_seen = cpi->source->ts_start;
  }

  /* adjust frame rates based on timestamps given */
  if (cm->show_frame) {
    int64_t this_duration;
    int step = 0;

    if (cpi->source->ts_start == cpi->first_time_stamp_ever) {
      this_duration = cpi->source->ts_end - cpi->source->ts_start;
      step = 1;
    } else {
      int64_t last_duration;

      this_duration = cpi->source->ts_end - cpi->last_end_time_stamp_seen;
      last_duration = cpi->last_end_time_stamp_seen - cpi->last_time_stamp_seen;
      /* do a step update if the duration changes by 10% */
      if (last_duration) {
        step = (int)(((this_duration - last_duration) * 10 / last_duration));
      }
    }

    if (this_duration) {
      if (step) {
        /* Duration changed significantly: reset the rate estimate.
         * Timestamps are in units of 1/10,000,000 second.
         */
        cpi->ref_framerate = 10000000.0 / this_duration;
      } else {
        double avg_duration, interval;

        /* Average this frame's rate into the last second's average
         * frame rate. If we haven't seen 1 second yet, then average
         * over the whole interval seen.
         */
        interval = (double)(cpi->source->ts_end - cpi->first_time_stamp_ever);
        if (interval > 10000000.0) interval = 10000000;

        avg_duration = 10000000.0 / cpi->ref_framerate;
        avg_duration *= (interval - avg_duration + this_duration);
        avg_duration /= interval;

        cpi->ref_framerate = 10000000.0 / avg_duration;
      }
#if CONFIG_MULTI_RES_ENCODING
      if (cpi->oxcf.mr_total_resolutions > 1) {
        LOWER_RES_FRAME_INFO *low_res_frame_info =
            (LOWER_RES_FRAME_INFO *)cpi->oxcf.mr_low_res_mode_info;
        // Frame rate should be the same for all spatial layers in
        // multi-res-encoding (simulcast), so we constrain the frame for
        // higher layers to be that of lowest resolution. This is needed
        // as the application may decide to skip encoding a high layer and
        // then start again, in which case a big jump in time-stamps will
        // be received for that high layer, which will yield an incorrect
        // frame rate (from time-stamp adjustment in above calculation).
        if (cpi->oxcf.mr_encoder_id) {
          if (!low_res_frame_info->skip_encoding_base_stream)
            cpi->ref_framerate = low_res_frame_info->low_res_framerate;
        } else {
          // Keep track of frame rate for lowest resolution.
          low_res_frame_info->low_res_framerate = cpi->ref_framerate;
          // The base stream is being encoded so set skip flag to 0.
          low_res_frame_info->skip_encoding_base_stream = 0;
        }
      }
#endif
      if (cpi->oxcf.number_of_layers > 1) {
        unsigned int i;

        /* Update frame rates for each layer */
        assert(cpi->oxcf.number_of_layers <= VPX_TS_MAX_LAYERS);
        for (i = 0; i < cpi->oxcf.number_of_layers && i < VPX_TS_MAX_LAYERS;
             ++i) {
          LAYER_CONTEXT *lc = &cpi->layer_context[i];
          lc->framerate = cpi->ref_framerate / cpi->oxcf.rate_decimator[i];
        }
      } else {
        vp8_new_framerate(cpi, cpi->ref_framerate);
      }
    }

    cpi->last_time_stamp_seen = cpi->source->ts_start;
    cpi->last_end_time_stamp_seen = cpi->source->ts_end;
  }

  if (cpi->oxcf.number_of_layers > 1) {
    int layer;

    update_layer_contexts(cpi);

    /* Restore layer specific context & set frame rate */
    if (cpi->temporal_layer_id >= 0) {
      layer = cpi->temporal_layer_id;
    } else {
      layer =
          cpi->oxcf
              .layer_id[cpi->temporal_pattern_counter % cpi->oxcf.periodicity];
    }
    restore_layer_context(cpi, layer);
    vp8_new_framerate(cpi, cpi->layer_context[layer].framerate);
  }

  if (cpi->compressor_speed == 2) {
    vpx_usec_timer_start(&tsctimer);
    vpx_usec_timer_start(&ticktimer);
  }

  cpi->lf_zeromv_pct = (cpi->zeromv_count * 100) / cm->MBs;

#if CONFIG_REALTIME_ONLY & CONFIG_ONTHEFLY_BITPACKING
  {
    /* Carve the output buffer into one control partition (1/10 of dest)
     * plus num_part equal token partitions for on-the-fly bitpacking.
     */
    int i;
    const int num_part = (1 << cm->multi_token_partition);
    /* the available bytes in dest */
    const unsigned long dest_size = dest_end - dest;
    const int tok_part_buff_size = (dest_size * 9) / (10 * num_part);

    unsigned char *dp = dest;

    cpi->partition_d[0] = dp;
    dp += dest_size / 10; /* reserve 1/10 for control partition */
    cpi->partition_d_end[0] = dp;

    for (i = 0; i < num_part; ++i) {
      cpi->partition_d[i + 1] = dp;
      dp += tok_part_buff_size;
      cpi->partition_d_end[i + 1] = dp;
    }
  }
#endif

  /* start with a 0 size frame */
  *size = 0;

  /* Clear down mmx registers */
  vpx_clear_system_state();

  cm->frame_type = INTER_FRAME;
  cm->frame_flags = *frame_flags;

#if 0
  if (cm->refresh_alt_ref_frame)
  {
    cm->refresh_golden_frame = 0;
    cm->refresh_last_frame = 0;
  }
  else
  {
    cm->refresh_golden_frame = 0;
    cm->refresh_last_frame = 1;
  }
#endif

  /* find a free buffer for the new frame */
  {
    int i = 0;
    for (; i < NUM_YV12_BUFFERS; ++i) {
      if (!cm->yv12_fb[i].flags) {
        cm->new_fb_idx = i;
        break;
      }
    }

    assert(i < NUM_YV12_BUFFERS);
  }
  /* Dispatch on encode pass; realtime builds always take the default
   * single-pass path.
   */
  switch (cpi->pass) {
#if !CONFIG_REALTIME_ONLY
    case 1: Pass1Encode(cpi, size, dest, frame_flags); break;
    case 2: Pass2Encode(cpi, size, dest, dest_end, frame_flags); break;
#endif  // !CONFIG_REALTIME_ONLY
    default:
      encode_frame_to_data_rate(cpi, size, dest, dest_end, frame_flags);
      break;
  }

  if (cpi->compressor_speed == 2) {
    unsigned int duration, duration2;
    vpx_usec_timer_mark(&tsctimer);
    vpx_usec_timer_mark(&ticktimer);

    duration = (int)(vpx_usec_timer_elapsed(&ticktimer));
    duration2 = (unsigned int)((double)duration / 2);

    /* Exponential moving averages (7/8 old, 1/8 new) of encode and
     * pick-mode times, used by speed-2 adaptive speed selection.
     */
    if (cm->frame_type != KEY_FRAME) {
      if (cpi->avg_encode_time == 0) {
        cpi->avg_encode_time = duration;
      } else {
        cpi->avg_encode_time = (7 * cpi->avg_encode_time + duration) >> 3;
      }
    }

    if (duration2) {
      {
        if (cpi->avg_pick_mode_time == 0) {
          cpi->avg_pick_mode_time = duration2;
        } else {
          cpi->avg_pick_mode_time =
              (7 * cpi->avg_pick_mode_time + duration2) >> 3;
        }
      }
    }
  }

  /* If this frame did not update the entropy probs, roll the coding
   * context back to the saved copy.
   */
  if (cm->refresh_entropy_probs == 0) {
    memcpy(&cm->fc, &cm->lfc, sizeof(cm->fc));
  }

  /* Save the contexts separately for alt ref, gold and last. */
  /* (TODO jbb -> Optimize this with pointers to avoid extra copies. ) */
  if (cm->refresh_alt_ref_frame) memcpy(&cpi->lfc_a, &cm->fc, sizeof(cm->fc));

  if (cm->refresh_golden_frame) memcpy(&cpi->lfc_g, &cm->fc, sizeof(cm->fc));

  if (cm->refresh_last_frame) memcpy(&cpi->lfc_n, &cm->fc, sizeof(cm->fc));

  /* if its a dropped frame honor the requests on subsequent frames */
  if (*size > 0) {
    cpi->droppable = !frame_is_reference(cpi);

    /* return to normal state */
    cm->refresh_entropy_probs = 1;
    cm->refresh_alt_ref_frame = 0;
    cm->refresh_golden_frame = 0;
    cm->refresh_last_frame = 1;
    cm->frame_type = INTER_FRAME;
  }

  /* Save layer specific state */
  if (cpi->oxcf.number_of_layers > 1) save_layer_context(cpi);

  vpx_usec_timer_mark(&cmptimer);
  cpi->time_compress_data += vpx_usec_timer_elapsed(&cmptimer);

  if (cpi->b_calculate_psnr && cpi->pass != 1 && cm->show_frame) {
    generate_psnr_packet(cpi);
  }

#if CONFIG_INTERNAL_STATS

  if (cpi->pass != 1) {
    cpi->bytes += *size;

    if (cm->show_frame) {
      cpi->common.show_frame_mi = cpi->common.mi;
      cpi->count++;

      if (cpi->b_calculate_psnr) {
        uint64_t ye, ue, ve;
        double frame_psnr;
        YV12_BUFFER_CONFIG *orig = cpi->Source;
        YV12_BUFFER_CONFIG *recon = cpi->common.frame_to_show;
        unsigned int y_width = cpi->common.Width;
        unsigned int y_height = cpi->common.Height;
        unsigned int uv_width = (y_width + 1) / 2;
        unsigned int uv_height = (y_height + 1) / 2;
        int y_samples = y_height * y_width;
        int uv_samples = uv_height * uv_width;
        int t_samples = y_samples + 2 * uv_samples;
        double sq_error;

        /* Per-plane SSE between the raw source and the reconstruction. */
        ye = calc_plane_error(orig->y_buffer, orig->y_stride, recon->y_buffer,
                              recon->y_stride, y_width, y_height);

        ue = calc_plane_error(orig->u_buffer, orig->uv_stride, recon->u_buffer,
                              recon->uv_stride, uv_width, uv_height);

        ve = calc_plane_error(orig->v_buffer, orig->uv_stride, recon->v_buffer,
                              recon->uv_stride, uv_width, uv_height);

        sq_error = (double)(ye + ue + ve);

        frame_psnr = vpx_sse_to_psnr(t_samples, 255.0, sq_error);

        cpi->total_y += vpx_sse_to_psnr(y_samples, 255.0, (double)ye);
        cpi->total_u += vpx_sse_to_psnr(uv_samples, 255.0, (double)ue);
        cpi->total_v += vpx_sse_to_psnr(uv_samples, 255.0, (double)ve);
        cpi->total_sq_error += sq_error;
        cpi->total += frame_psnr;
#if CONFIG_POSTPROC
        {
          /* Repeat the measurement against the deblocked output
           * ("p" statistics), plus SSIM.
           */
          YV12_BUFFER_CONFIG *pp = &cm->post_proc_buffer;
          double sq_error2;
          double frame_psnr2, frame_ssim2 = 0;
          double weight = 0;

          vp8_deblock(cm, cm->frame_to_show, &cm->post_proc_buffer,
                      cm->filter_level * 10 / 6, 1, 0);
          vpx_clear_system_state();

          ye = calc_plane_error(orig->y_buffer, orig->y_stride, pp->y_buffer,
                                pp->y_stride, y_width, y_height);

          ue = calc_plane_error(orig->u_buffer, orig->uv_stride, pp->u_buffer,
                                pp->uv_stride, uv_width, uv_height);

          ve = calc_plane_error(orig->v_buffer, orig->uv_stride, pp->v_buffer,
                                pp->uv_stride, uv_width, uv_height);

          sq_error2 = (double)(ye + ue + ve);

          frame_psnr2 = vpx_sse_to_psnr(t_samples, 255.0, sq_error2);

          cpi->totalp_y += vpx_sse_to_psnr(y_samples, 255.0, (double)ye);
          cpi->totalp_u += vpx_sse_to_psnr(uv_samples, 255.0, (double)ue);
          cpi->totalp_v += vpx_sse_to_psnr(uv_samples, 255.0, (double)ve);
          cpi->total_sq_error2 += sq_error2;
          cpi->totalp += frame_psnr2;

          frame_ssim2 =
              vpx_calc_ssim(cpi->Source, &cm->post_proc_buffer, &weight);

          cpi->summed_quality += frame_ssim2 * weight;
          cpi->summed_weights += weight;

          /* Accumulate the same stats into this layer and every layer
           * above it (temporal layers are cumulative).
           */
          if (cpi->oxcf.number_of_layers > 1) {
            unsigned int i;

            for (i = cpi->current_layer; i < cpi->oxcf.number_of_layers; ++i) {
              cpi->frames_in_layer[i]++;

              cpi->bytes_in_layer[i] += *size;
              cpi->sum_psnr[i] += frame_psnr;
              cpi->sum_psnr_p[i] += frame_psnr2;
              cpi->total_error2[i] += sq_error;
              cpi->total_error2_p[i] += sq_error2;
              cpi->sum_ssim[i] += frame_ssim2 * weight;
              cpi->sum_weights[i] += weight;
            }
          }
        }
#endif
      }
    }
  }

#if 0
  if (cpi->common.frame_type != 0 && cpi->common.base_qindex == cpi->oxcf.worst_allowed_q)
  {
    skiptruecount += cpi->skip_true_count;
    skipfalsecount += cpi->skip_false_count;
  }
#endif
#if 0
  if (cpi->pass != 1)
  {
    FILE *f = fopen("skip.stt", "a");
    fprintf(f, "frame:%4d flags:%4x Q:%4d P:%4d Size:%5d\n", cpi->common.current_video_frame, *frame_flags, cpi->common.base_qindex, cpi->prob_skip_false, *size);

    if (cpi->is_src_frame_alt_ref == 1)
      fprintf(f, "skipcount: %4d framesize: %d\n", cpi->skip_true_count , *size);

    fclose(f);
  }
#endif
#endif

  cpi->common.error.setjmp = 0;

#if CONFIG_MULTITHREAD
  /* wait for the lpf thread done */
  if (vpx_atomic_load_acquire(&cpi->b_multi_threaded) && cpi->b_lpf_running) {
    sem_wait(&cpi->h_event_end_lpf);
    cpi->b_lpf_running = 0;
  }
#endif

  return 0;
}
  4413. int vp8_get_preview_raw_frame(VP8_COMP *cpi, YV12_BUFFER_CONFIG *dest,
  4414. vp8_ppflags_t *flags) {
  4415. if (cpi->common.refresh_alt_ref_frame) {
  4416. return -1;
  4417. } else {
  4418. int ret;
  4419. #if CONFIG_POSTPROC
  4420. cpi->common.show_frame_mi = cpi->common.mi;
  4421. ret = vp8_post_proc_frame(&cpi->common, dest, flags);
  4422. #else
  4423. (void)flags;
  4424. if (cpi->common.frame_to_show) {
  4425. *dest = *cpi->common.frame_to_show;
  4426. dest->y_width = cpi->common.Width;
  4427. dest->y_height = cpi->common.Height;
  4428. dest->uv_height = cpi->common.Height / 2;
  4429. ret = 0;
  4430. } else {
  4431. ret = -1;
  4432. }
  4433. #endif
  4434. vpx_clear_system_state();
  4435. return ret;
  4436. }
  4437. }
  4438. int vp8_set_roimap(VP8_COMP *cpi, unsigned char *map, unsigned int rows,
  4439. unsigned int cols, int delta_q[4], int delta_lf[4],
  4440. unsigned int threshold[4]) {
  4441. signed char feature_data[MB_LVL_MAX][MAX_MB_SEGMENTS];
  4442. int internal_delta_q[MAX_MB_SEGMENTS];
  4443. const int range = 63;
  4444. int i;
  4445. // Check number of rows and columns match
  4446. if (cpi->common.mb_rows != (int)rows || cpi->common.mb_cols != (int)cols) {
  4447. return -1;
  4448. }
  4449. // Range check the delta Q values and convert the external Q range values
  4450. // to internal ones.
  4451. if ((abs(delta_q[0]) > range) || (abs(delta_q[1]) > range) ||
  4452. (abs(delta_q[2]) > range) || (abs(delta_q[3]) > range)) {
  4453. return -1;
  4454. }
  4455. // Range check the delta lf values
  4456. if ((abs(delta_lf[0]) > range) || (abs(delta_lf[1]) > range) ||
  4457. (abs(delta_lf[2]) > range) || (abs(delta_lf[3]) > range)) {
  4458. return -1;
  4459. }
  4460. // Also disable segmentation if no deltas are specified.
  4461. if (!map || (delta_q[0] == 0 && delta_q[1] == 0 && delta_q[2] == 0 &&
  4462. delta_q[3] == 0 && delta_lf[0] == 0 && delta_lf[1] == 0 &&
  4463. delta_lf[2] == 0 && delta_lf[3] == 0 && threshold[0] == 0 &&
  4464. threshold[1] == 0 && threshold[2] == 0 && threshold[3] == 0)) {
  4465. disable_segmentation(cpi);
  4466. return 0;
  4467. }
  4468. // Translate the external delta q values to internal values.
  4469. for (i = 0; i < MAX_MB_SEGMENTS; ++i) {
  4470. internal_delta_q[i] =
  4471. (delta_q[i] >= 0) ? q_trans[delta_q[i]] : -q_trans[-delta_q[i]];
  4472. }
  4473. /* Set the segmentation Map */
  4474. set_segmentation_map(cpi, map);
  4475. /* Activate segmentation. */
  4476. enable_segmentation(cpi);
  4477. /* Set up the quant segment data */
  4478. feature_data[MB_LVL_ALT_Q][0] = internal_delta_q[0];
  4479. feature_data[MB_LVL_ALT_Q][1] = internal_delta_q[1];
  4480. feature_data[MB_LVL_ALT_Q][2] = internal_delta_q[2];
  4481. feature_data[MB_LVL_ALT_Q][3] = internal_delta_q[3];
  4482. /* Set up the loop segment data s */
  4483. feature_data[MB_LVL_ALT_LF][0] = delta_lf[0];
  4484. feature_data[MB_LVL_ALT_LF][1] = delta_lf[1];
  4485. feature_data[MB_LVL_ALT_LF][2] = delta_lf[2];
  4486. feature_data[MB_LVL_ALT_LF][3] = delta_lf[3];
  4487. cpi->segment_encode_breakout[0] = threshold[0];
  4488. cpi->segment_encode_breakout[1] = threshold[1];
  4489. cpi->segment_encode_breakout[2] = threshold[2];
  4490. cpi->segment_encode_breakout[3] = threshold[3];
  4491. /* Initialise the feature data structure */
  4492. set_segment_data(cpi, &feature_data[0][0], SEGMENT_DELTADATA);
  4493. if (threshold[0] != 0 || threshold[1] != 0 || threshold[2] != 0 ||
  4494. threshold[3] != 0)
  4495. cpi->use_roi_static_threshold = 1;
  4496. cpi->cyclic_refresh_mode_enabled = 0;
  4497. return 0;
  4498. }
  4499. int vp8_set_active_map(VP8_COMP *cpi, unsigned char *map, unsigned int rows,
  4500. unsigned int cols) {
  4501. if ((int)rows == cpi->common.mb_rows && (int)cols == cpi->common.mb_cols) {
  4502. if (map) {
  4503. memcpy(cpi->active_map, map, rows * cols);
  4504. cpi->active_map_enabled = 1;
  4505. } else {
  4506. cpi->active_map_enabled = 0;
  4507. }
  4508. return 0;
  4509. } else {
  4510. return -1;
  4511. }
  4512. }
  4513. int vp8_set_internal_size(VP8_COMP *cpi, VPX_SCALING horiz_mode,
  4514. VPX_SCALING vert_mode) {
  4515. if (horiz_mode <= ONETWO) {
  4516. cpi->common.horiz_scale = horiz_mode;
  4517. } else {
  4518. return -1;
  4519. }
  4520. if (vert_mode <= ONETWO) {
  4521. cpi->common.vert_scale = vert_mode;
  4522. } else {
  4523. return -1;
  4524. }
  4525. return 0;
  4526. }
  4527. int vp8_calc_ss_err(YV12_BUFFER_CONFIG *source, YV12_BUFFER_CONFIG *dest) {
  4528. int i, j;
  4529. int Total = 0;
  4530. unsigned char *src = source->y_buffer;
  4531. unsigned char *dst = dest->y_buffer;
  4532. /* Loop through the Y plane raw and reconstruction data summing
  4533. * (square differences)
  4534. */
  4535. for (i = 0; i < source->y_height; i += 16) {
  4536. for (j = 0; j < source->y_width; j += 16) {
  4537. unsigned int sse;
  4538. Total += vpx_mse16x16(src + j, source->y_stride, dst + j, dest->y_stride,
  4539. &sse);
  4540. }
  4541. src += 16 * source->y_stride;
  4542. dst += 16 * dest->y_stride;
  4543. }
  4544. return Total;
  4545. }
/* Return the base quantizer index of the most recently coded frame. */
int vp8_get_quantizer(VP8_COMP *cpi) { return cpi->common.base_qindex; }