/*
 *  Copyright 2011 The LibYuv Project Authors. All rights reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "libyuv/row.h"

#include <string.h>  // For memcpy and memset.

#include "libyuv/basic_types.h"

#ifdef __cplusplus
namespace libyuv {
extern "C" {
#endif

// llvm x86 is poor at ternary operator, so use branchless min/max.
#define USE_BRANCHLESS 1
#if USE_BRANCHLESS
// Clamp negative values to 0 without branching: -(v) >> 31 is an all-ones
// mask when v is positive and zero when v is negative (arithmetic shift of
// the sign bit), so the AND passes v through or zeroes it.
static __inline int32 clamp0(int32 v) {
  return ((-(v) >> 31) & (v));
}

// Clamp values above 255 down to 255 without branching: (255 - v) >> 31 is
// an all-ones mask when v > 255, which ORed with v then masked to 8 bits
// yields 255; otherwise v is returned unchanged (for v in [0, 255]).
static __inline int32 clamp255(int32 v) {
  return (((255 - (v)) >> 31) | (v)) & 255;
}

// Clamp a signed value into the byte range [0, 255].
static __inline uint32 Clamp(int32 val) {
  int v = clamp0(val);
  return static_cast<uint32>(clamp255(v));
}

// Branchless absolute value: m is 0 for v >= 0 and -1 for v < 0, so
// (v + m) ^ m negates v exactly when it is negative.
// NOTE(review): relies on two's complement and arithmetic right shift of a
// negative value, which is implementation-defined in standard C++ but holds
// on all compilers libyuv targets.
static __inline uint32 Abs(int32 v) {
  int m = v >> 31;
  return (v + m) ^ m;
}
#else  // USE_BRANCHLESS
// Clamp negative values to 0.
static __inline int32 clamp0(int32 v) {
  return (v < 0) ? 0 : v;
}

// Clamp values above 255 down to 255.
static __inline int32 clamp255(int32 v) {
  return (v > 255) ? 255 : v;
}

// Clamp a signed value into the byte range [0, 255].
static __inline uint32 Clamp(int32 val) {
  int v = clamp0(val);
  return static_cast<uint32>(clamp255(v));
}

// Absolute value.
static __inline uint32 Abs(int32 v) {
  return (v < 0) ? -v : v;
}
#endif  // USE_BRANCHLESS

// For generating reciprocal table.
// for (int i = 0; i <= 4096; ++i) {
//   uint32 f = (i >= 1) ? (uint32)(4294967296.0f / (float)i + .5f) : 0;
//   printf("%uu, ", f);
//   if ((i % 6) == 5) {
//     printf("\n  ");
//   }
// }
// Fixed point 0.32 reciprocal table.
extern const uint32 kRecipTable[4097] = { 0u, 0xffffffffu, 2147483648u, 1431655808u, 1073741824u, 858993472u, 715827904u, 613566784u, 536870912u, 477218592u, 429496736u, 390451584u, 357913952u, 330382112u, 306783392u, 286331168u, 268435456u, 252645136u, 238609296u, 226050912u, 214748368u, 204522256u, 195225792u, 186737712u, 178956976u, 171798688u, 165191056u, 159072864u, 153391696u, 148102320u, 143165584u, 138547328u, 134217728u, 130150528u, 126322568u, 122713352u, 119304648u, 116080200u, 113025456u, 110127368u, 107374184u, 104755296u, 102261128u, 99882960u, 97612896u, 95443720u, 93368856u, 91382280u, 89478488u, 87652392u, 85899344u, 84215048u, 82595528u, 81037120u, 79536432u, 78090312u, 76695848u, 75350304u, 74051160u, 72796056u, 71582792u, 70409296u, 69273664u, 68174088u, 67108864u, 66076420u, 65075264u, 64103988u, 63161284u, 62245904u, 61356676u, 60492496u, 59652324u, 58835168u, 58040100u, 57266232u, 56512728u, 55778796u, 55063684u, 54366676u, 53687092u, 53024288u, 52377648u, 51746592u, 51130564u, 50529028u, 49941480u, 49367440u, 48806448u, 48258060u, 47721860u, 47197444u, 46684428u, 46182444u, 45691140u, 45210184u, 44739244u, 44278012u, 43826196u, 43383508u, 42949672u, 42524428u, 42107524u, 41698712u, 41297764u, 40904452u, 40518560u, 40139880u, 39768216u, 39403368u, 39045156u, 38693400u, 38347924u, 38008560u, 37675152u, 37347540u, 37025580u, 36709124u, 36398028u, 36092164u, 35791396u, 35495596u, 35204648u, 34918432u, 34636832u, 34359740u, 34087044u, 33818640u, 33554432u, 33294320u, 33038210u, 32786010u, 32537632u, 32292988u, 32051994u, 31814572u, 31580642u, 31350126u, 31122952u, 30899046u, 30678338u, 30460760u, 30246248u, 30034736u, 29826162u, 29620464u, 29417584u, 29217464u, 29020050u, 28825284u, 28633116u, 28443492u, 28256364u, 28071682u, 27889398u, 27709466u, 27531842u, 27356480u, 27183338u, 27012372u, 26843546u, 26676816u, 26512144u, 26349492u, 26188824u, 26030104u, 25873296u, 25718368u, 25565282u, 25414008u, 25264514u, 25116768u, 24970740u, 24826400u, 
24683720u, 24542670u, 24403224u, 24265352u, 24129030u, 23994230u, 23860930u, 23729102u, 23598722u, 23469766u, 23342214u, 23216040u, 23091222u, 22967740u, 22845570u, 22724694u, 22605092u, 22486740u, 22369622u, 22253716u, 22139006u, 22025474u, 21913098u, 21801864u, 21691754u, 21582750u, 21474836u, 21367996u, 21262214u, 21157474u, 21053762u, 20951060u, 20849356u, 20748634u, 20648882u, 20550082u, 20452226u, 20355296u, 20259280u, 20164166u, 20069940u, 19976592u, 19884108u, 19792476u, 19701684u, 19611722u, 19522578u, 19434242u, 19346700u, 19259944u, 19173962u, 19088744u, 19004280u, 18920560u, 18837576u, 18755316u, 18673770u, 18592932u, 18512790u, 18433336u, 18354562u, 18276456u, 18199014u, 18122224u, 18046082u, 17970574u, 17895698u, 17821442u, 17747798u, 17674762u, 17602324u, 17530478u, 17459216u, 17388532u, 17318416u, 17248864u, 17179870u, 17111424u, 17043522u, 16976156u, 16909320u, 16843010u, 16777216u, 16711936u, 16647160u, 16582886u, 16519106u, 16455814u, 16393006u, 16330674u, 16268816u, 16207424u, 16146494u, 16086020u, 16025998u, 15966422u, 15907286u, 15848588u, 15790322u, 15732482u, 15675064u, 15618064u, 15561476u, 15505298u, 15449524u, 15394148u, 15339170u, 15284582u, 15230380u, 15176564u, 15123124u, 15070062u, 15017368u, 14965044u, 14913082u, 14861480u, 14810232u, 14759338u, 14708792u, 14658592u, 14608732u, 14559212u, 14510026u, 14461170u, 14412642u, 14364440u, 14316558u, 14268994u, 14221746u, 14174810u, 14128182u, 14081860u, 14035842u, 13990122u, 13944700u, 13899572u, 13854734u, 13810184u, 13765922u, 13721940u, 13678240u, 13634818u, 13591670u, 13548794u, 13506186u, 13463848u, 13421774u, 13379960u, 13338408u, 13297112u, 13256072u, 13215284u, 13174746u, 13134458u, 13094412u, 13054612u, 13015052u, 12975732u, 12936648u, 12897800u, 12859184u, 12820798u, 12782642u, 12744710u, 12707004u, 12669520u, 12632258u, 12595212u, 12558384u, 12521772u, 12485370u, 12449182u, 12413200u, 12377428u, 12341860u, 12306498u, 12271336u, 12236374u, 12201612u, 12167046u, 12132676u, 
12098500u, 12064516u, 12030722u, 11997116u, 11963698u, 11930466u, 11897416u, 11864552u, 11831866u, 11799362u, 11767034u, 11734884u, 11702908u, 11671108u, 11639478u, 11608020u, 11576732u, 11545612u, 11514658u, 11483870u, 11453246u, 11422786u, 11392486u, 11362348u, 11332368u, 11302546u, 11272880u, 11243370u, 11214014u, 11184812u, 11155760u, 11126858u, 11098108u, 11069504u, 11041048u, 11012738u, 10984572u, 10956550u, 10928670u, 10900932u, 10873336u, 10845878u, 10818558u, 10791376u, 10764330u, 10737418u, 10710642u, 10683998u, 10657488u, 10631108u, 10604858u, 10578738u, 10552746u, 10526882u, 10501144u, 10475530u, 10450042u, 10424678u, 10399438u, 10374318u, 10349320u, 10324442u, 10299682u, 10275042u, 10250520u, 10226114u, 10201824u, 10177648u, 10153588u, 10129640u, 10105806u, 10082084u, 10058472u, 10034970u, 10011580u, 9988296u, 9965122u, 9942054u, 9919094u, 9896238u, 9873488u, 9850842u, 9828300u, 9805862u, 9783526u, 9761290u, 9739156u, 9717122u, 9695186u, 9673350u, 9651612u, 9629972u, 9608428u, 9586982u, 9565630u, 9544372u, 9523210u, 9502140u, 9481164u, 9460280u, 9439490u, 9418788u, 9398178u, 9377658u, 9357228u, 9336886u, 9316632u, 9296466u, 9276388u, 9256396u, 9236490u, 9216668u, 9196932u, 9177282u, 9157714u, 9138228u, 9118828u, 9099508u, 9080270u, 9061112u, 9042036u, 9023042u, 9004124u, 8985288u, 8966530u, 8947850u, 8929246u, 8910722u, 8892272u, 8873900u, 8855604u, 8837382u, 8819236u, 8801162u, 8783164u, 8765240u, 8747388u, 8729608u, 8711902u, 8694266u, 8676702u, 8659208u, 8641786u, 8624432u, 8607150u, 8589936u, 8572790u, 8555712u, 8538702u, 8521762u, 8504886u, 8488078u, 8471336u, 8454660u, 8438050u, 8421506u, 8405024u, 8388608u, 8372256u, 8355968u, 8339743u, 8323580u, 8307481u, 8291443u, 8275467u, 8259553u, 8243699u, 8227907u, 8212175u, 8196503u, 8180890u, 8165337u, 8149843u, 8134408u, 8119031u, 8103712u, 8088451u, 8073247u, 8058100u, 8043010u, 8027977u, 8012999u, 7998077u, 7983211u, 7968400u, 7953643u, 7938942u, 7924294u, 7909701u, 7895161u, 7880674u, 7866241u, 
7851860u, 7837532u, 7823256u, 7809032u, 7794859u, 7780738u, 7766668u, 7752649u, 7738680u, 7724762u, 7710893u, 7697074u, 7683305u, 7669585u, 7655913u, 7642291u, 7628717u, 7615190u, 7601712u, 7588282u, 7574898u, 7561562u, 7548273u, 7535031u, 7521834u, 7508684u, 7495580u, 7482522u, 7469509u, 7456541u, 7443618u, 7430740u, 7417906u, 7405116u, 7392371u, 7379669u, 7367011u, 7354396u, 7341825u, 7329296u, 7316810u, 7304366u, 7291965u, 7279606u, 7267288u, 7255013u, 7242778u, 7230585u, 7218433u, 7206321u, 7194250u, 7182220u, 7170229u, 7158279u, 7146368u, 7134497u, 7122666u, 7110873u, 7099120u, 7087405u, 7075729u, 7064091u, 7052492u, 7040930u, 7029407u, 7017921u, 7006472u, 6995061u, 6983687u, 6972350u, 6961049u, 6949786u, 6938558u, 6927367u, 6916212u, 6905092u, 6894009u, 6882961u, 6871948u, 6860970u, 6850028u, 6839120u, 6828247u, 6817409u, 6806605u, 6795835u, 6785099u, 6774397u, 6763728u, 6753093u, 6742492u, 6731924u, 6721389u, 6710887u, 6700417u, 6689980u, 6679576u, 6669204u, 6658864u, 6648556u, 6638280u, 6628036u, 6617824u, 6607642u, 6597492u, 6587373u, 6577286u, 6567229u, 6557202u, 6547206u, 6537241u, 6527306u, 6517401u, 6507526u, 6497681u, 6487866u, 6478081u, 6468324u, 6458598u, 6448900u, 6439232u, 6429592u, 6419981u, 6410399u, 6400846u, 6391321u, 6381824u, 6372355u, 6362915u, 6353502u, 6344117u, 6334760u, 6325431u, 6316129u, 6306854u, 6297606u, 6288386u, 6279192u, 6270025u, 6260886u, 6251772u, 6242685u, 6233625u, 6224591u, 6215582u, 6206600u, 6197644u, 6188714u, 6179809u, 6170930u, 6162077u, 6153249u, 6144446u, 6135668u, 6126915u, 6118187u, 6109484u, 6100806u, 6092152u, 6083523u, 6074919u, 6066338u, 6057782u, 6049250u, 6040742u, 6032258u, 6023797u, 6015361u, 6006948u, 5998558u, 5990192u, 5981849u, 5973529u, 5965233u, 5956959u, 5948708u, 5940481u, 5932276u, 5924093u, 5915933u, 5907796u, 5899681u, 5891588u, 5883517u, 5875469u, 5867442u, 5859437u, 5851454u, 5843493u, 5835554u, 5827636u, 5819739u, 5811864u, 5804010u, 5796177u, 5788366u, 5780575u, 5772806u, 5765057u, 5757329u, 
5749622u, 5741935u, 5734269u, 5726623u, 5718998u, 5711393u, 5703808u, 5696243u, 5688699u, 5681174u, 5673669u, 5666184u, 5658719u, 5651273u, 5643847u, 5636440u, 5629053u, 5621685u, 5614337u, 5607007u, 5599697u, 5592406u, 5585133u, 5577880u, 5570645u, 5563429u, 5556232u, 5549054u, 5541894u, 5534752u, 5527629u, 5520524u, 5513437u, 5506369u, 5499318u, 5492286u, 5485271u, 5478275u, 5471296u, 5464335u, 5457392u, 5450466u, 5443558u, 5436668u, 5429795u, 5422939u, 5416100u, 5409279u, 5402475u, 5395688u, 5388918u, 5382165u, 5375429u, 5368709u, 5362007u, 5355321u, 5348652u, 5341999u, 5335363u, 5328744u, 5322141u, 5315554u, 5308983u, 5302429u, 5295891u, 5289369u, 5282863u, 5276373u, 5269899u, 5263441u, 5256998u, 5250572u, 5244161u, 5237765u, 5231386u, 5225021u, 5218673u, 5212339u, 5206021u, 5199719u, 5193431u, 5187159u, 5180902u, 5174660u, 5168433u, 5162221u, 5156023u, 5149841u, 5143674u, 5137521u, 5131383u, 5125260u, 5119151u, 5113057u, 5106977u, 5100912u, 5094861u, 5088824u, 5082802u, 5076794u, 5070800u, 5064820u, 5058855u, 5052903u, 5046965u, 5041042u, 5035132u, 5029236u, 5023354u, 5017485u, 5011631u, 5005790u, 4999962u, 4994148u, 4988348u, 4982561u, 4976787u, 4971027u, 4965280u, 4959547u, 4953826u, 4948119u, 4942425u, 4936744u, 4931076u, 4925421u, 4919780u, 4914150u, 4908534u, 4902931u, 4897340u, 4891763u, 4886197u, 4880645u, 4875105u, 4869578u, 4864063u, 4858561u, 4853071u, 4847593u, 4842128u, 4836675u, 4831235u, 4825806u, 4820390u, 4814986u, 4809594u, 4804214u, 4798846u, 4793491u, 4788147u, 4782815u, 4777494u, 4772186u, 4766890u, 4761605u, 4756332u, 4751070u, 4745820u, 4740582u, 4735356u, 4730140u, 4724937u, 4719745u, 4714564u, 4709394u, 4704236u, 4699089u, 4693954u, 4688829u, 4683716u, 4678614u, 4673523u, 4668443u, 4663374u, 4658316u, 4653269u, 4648233u, 4643208u, 4638194u, 4633190u, 4628198u, 4623216u, 4618245u, 4613284u, 4608334u, 4603395u, 4598466u, 4593548u, 4588641u, 4583743u, 4578857u, 4573980u, 4569114u, 4564259u, 4559414u, 4554579u, 4549754u, 4544939u, 4540135u, 
4535341u, 4530556u, 4525782u, 4521018u, 4516264u, 4511521u, 4506786u, 4502062u, 4497348u, 4492644u, 4487949u, 4483265u, 4478590u, 4473925u, 4469269u, 4464623u, 4459987u, 4455361u, 4450744u, 4446136u, 4441538u, 4436950u, 4432371u, 4427802u, 4423242u, 4418691u, 4414150u, 4409618u, 4405095u, 4400581u, 4396077u, 4391582u, 4387097u, 4382620u, 4378152u, 4373694u, 4369245u, 4364804u, 4360373u, 4355951u, 4351538u, 4347133u, 4342738u, 4338351u, 4333973u, 4329604u, 4325244u, 4320893u, 4316550u, 4312216u, 4307891u, 4303575u, 4299267u, 4294968u, 4290677u, 4286395u, 4282121u, 4277856u, 4273600u, 4269351u, 4265112u, 4260881u, 4256658u, 4252443u, 4248237u, 4244039u, 4239850u, 4235668u, 4231495u, 4227330u, 4223174u, 4219025u, 4214885u, 4210753u, 4206628u, 4202512u, 4198404u, 4194304u, 4190212u, 4186128u, 4182052u, 4177984u, 4173924u, 4169871u, 4165827u, 4161790u, 4157761u, 4153740u, 4149727u, 4145721u, 4141724u, 4137734u, 4133751u, 4129776u, 4125809u, 4121850u, 4117898u, 4113953u, 4110017u, 4106087u, 4102166u, 4098251u, 4094345u, 4090445u, 4086553u, 4082669u, 4078791u, 4074922u, 4071059u, 4067204u, 4063356u, 4059516u, 4055682u, 4051856u, 4048037u, 4044225u, 4040421u, 4036624u, 4032833u, 4029050u, 4025274u, 4021505u, 4017743u, 4013988u, 4010240u, 4006499u, 4002766u, 3999039u, 3995319u, 3991605u, 3987899u, 3984200u, 3980507u, 3976822u, 3973143u, 3969471u, 3965806u, 3962147u, 3958495u, 3954850u, 3951212u, 3947580u, 3943955u, 3940337u, 3936725u, 3933120u, 3929522u, 3925930u, 3922345u, 3918766u, 3915194u, 3911628u, 3908069u, 3904516u, 3900970u, 3897430u, 3893896u, 3890369u, 3886848u, 3883334u, 3879826u, 3876324u, 3872829u, 3869340u, 3865857u, 3862381u, 3858911u, 3855447u, 3851989u, 3848537u, 3845092u, 3841652u, 3838219u, 3834792u, 3831371u, 3827957u, 3824548u, 3821145u, 3817749u, 3814358u, 3810974u, 3807595u, 3804223u, 3800856u, 3797496u, 3794141u, 3790792u, 3787449u, 3784112u, 3780781u, 3777456u, 3774137u, 3770823u, 3767515u, 3764213u, 3760917u, 3757627u, 3754342u, 3751063u, 3747790u, 
3744523u, 3741261u, 3738005u, 3734754u, 3731510u, 3728270u, 3725037u, 3721809u, 3718587u, 3715370u, 3712159u, 3708953u, 3705753u, 3702558u, 3699369u, 3696185u, 3693007u, 3689835u, 3686667u, 3683506u, 3680349u, 3677198u, 3674053u, 3670912u, 3667777u, 3664648u, 3661524u, 3658405u, 3655291u, 3652183u, 3649080u, 3645983u, 3642890u, 3639803u, 3636721u, 3633644u, 3630573u, 3627506u, 3624445u, 3621389u, 3618338u, 3615292u, 3612252u, 3609216u, 3606186u, 3603161u, 3600140u, 3597125u, 3594115u, 3591110u, 3588110u, 3585115u, 3582125u, 3579140u, 3576159u, 3573184u, 3570214u, 3567249u, 3564288u, 3561333u, 3558382u, 3555437u, 3552496u, 3549560u, 3546629u, 3543703u, 3540781u, 3537864u, 3534953u, 3532046u, 3529143u, 3526246u, 3523353u, 3520465u, 3517582u, 3514703u, 3511829u, 3508960u, 3506096u, 3503236u, 3500381u, 3497530u, 3494685u, 3491843u, 3489007u, 3486175u, 3483347u, 3480525u, 3477706u, 3474893u, 3472084u, 3469279u, 3466479u, 3463683u, 3460892u, 3458106u, 3455324u, 3452546u, 3449773u, 3447004u, 3444240u, 3441480u, 3438725u, 3435974u, 3433227u, 3430485u, 3427747u, 3425014u, 3422285u, 3419560u, 3416840u, 3414124u, 3411412u, 3408704u, 3406001u, 3403302u, 3400608u, 3397917u, 3395231u, 3392549u, 3389872u, 3387198u, 3384529u, 3381864u, 3379203u, 3376547u, 3373894u, 3371246u, 3368602u, 3365962u, 3363326u, 3360694u, 3358067u, 3355443u, 3352824u, 3350209u, 3347597u, 3344990u, 3342387u, 3339788u, 3337193u, 3334602u, 3332015u, 3329432u, 3326853u, 3324278u, 3321707u, 3319140u, 3316577u, 3314018u, 3311463u, 3308912u, 3306364u, 3303821u, 3301282u, 3298746u, 3296214u, 3293687u, 3291163u, 3288643u, 3286127u, 3283614u, 3281106u, 3278601u, 3276100u, 3273603u, 3271110u, 3268621u, 3266135u, 3263653u, 3261175u, 3258701u, 3256230u, 3253763u, 3251300u, 3248841u, 3246385u, 3243933u, 3241485u, 3239040u, 3236599u, 3234162u, 3231729u, 3229299u, 3226873u, 3224450u, 3222031u, 3219616u, 3217204u, 3214796u, 3212392u, 3209991u, 3207593u, 3205200u, 3202809u, 3200423u, 3198040u, 3195660u, 3193284u, 3190912u, 
3188543u, 3186178u, 3183816u, 3181457u, 3179102u, 3176751u, 3174403u, 3172059u, 3169718u, 3167380u, 3165046u, 3162715u, 3160388u, 3158064u, 3155744u, 3153427u, 3151113u, 3148803u, 3146496u, 3144193u, 3141893u, 3139596u, 3137303u, 3135013u, 3132726u, 3130443u, 3128163u, 3125886u, 3123613u, 3121343u, 3119076u, 3116812u, 3114552u, 3112295u, 3110042u, 3107791u, 3105544u, 3103300u, 3101060u, 3098822u, 3096588u, 3094357u, 3092129u, 3089905u, 3087683u, 3085465u, 3083250u, 3081038u, 3078830u, 3076624u, 3074422u, 3072223u, 3070027u, 3067834u, 3065644u, 3063458u, 3061274u, 3059094u, 3056916u, 3054742u, 3052571u, 3050403u, 3048238u, 3046076u, 3043917u, 3041762u, 3039609u, 3037459u, 3035313u, 3033169u, 3031029u, 3028891u, 3026756u, 3024625u, 3022496u, 3020371u, 3018248u, 3016129u, 3014012u, 3011899u, 3009788u, 3007680u, 3005576u, 3003474u, 3001375u, 2999279u, 2997186u, 2995096u, 2993009u, 2990924u, 2988843u, 2986765u, 2984689u, 2982616u, 2980546u, 2978480u, 2976415u, 2974354u, 2972296u, 2970240u, 2968188u, 2966138u, 2964091u, 2962047u, 2960005u, 2957967u, 2955931u, 2953898u, 2951868u, 2949840u, 2947816u, 2945794u, 2943775u, 2941759u, 2939745u, 2937734u, 2935726u, 2933721u, 2931718u, 2929719u, 2927722u, 2925727u, 2923736u, 2921747u, 2919760u, 2917777u, 2915796u, 2913818u, 2911842u, 2909870u, 2907899u, 2905932u, 2903967u, 2902005u, 2900046u, 2898089u, 2896135u, 2894183u, 2892234u, 2890288u, 2888344u, 2886403u, 2884464u, 2882529u, 2880595u, 2878665u, 2876736u, 2874811u, 2872888u, 2870968u, 2869050u, 2867135u, 2865222u, 2863312u, 2861404u, 2859499u, 2857596u, 2855696u, 2853799u, 2851904u, 2850012u, 2848122u, 2846234u, 2844349u, 2842467u, 2840587u, 2838710u, 2836835u, 2834962u, 2833092u, 2831224u, 2829359u, 2827497u, 2825637u, 2823779u, 2821923u, 2820071u, 2818220u, 2816372u, 2814527u, 2812683u, 2810843u, 2809004u, 2807168u, 2805335u, 2803504u, 2801675u, 2799848u, 2798024u, 2796203u, 2794384u, 2792567u, 2790752u, 2788940u, 2787130u, 2785323u, 2783517u, 2781715u, 2779914u, 2778116u, 
2776320u, 2774527u, 2772736u, 2770947u, 2769160u, 2767376u, 2765594u, 2763814u, 2762037u, 2760262u, 2758489u, 2756719u, 2754950u, 2753184u, 2751421u, 2749659u, 2747900u, 2746143u, 2744388u, 2742636u, 2740885u, 2739137u, 2737392u, 2735648u, 2733907u, 2732168u, 2730431u, 2728696u, 2726963u, 2725233u, 2723505u, 2721779u, 2720055u, 2718334u, 2716614u, 2714897u, 2713182u, 2711469u, 2709759u, 2708050u, 2706344u, 2704639u, 2702937u, 2701237u, 2699540u, 2697844u, 2696150u, 2694459u, 2692770u, 2691082u, 2689397u, 2687714u, 2686033u, 2684355u, 2682678u, 2681003u, 2679331u, 2677661u, 2675992u, 2674326u, 2672662u, 2671000u, 2669340u, 2667682u, 2666026u, 2664372u, 2662720u, 2661070u, 2659423u, 2657777u, 2656133u, 2654492u, 2652852u, 2651215u, 2649579u, 2647945u, 2646314u, 2644684u, 2643057u, 2641431u, 2639808u, 2638186u, 2636567u, 2634949u, 2633334u, 2631720u, 2630109u, 2628499u, 2626891u, 2625286u, 2623682u, 2622080u, 2620480u, 2618883u, 2617287u, 2615693u, 2614101u, 2612511u, 2610922u, 2609336u, 2607752u, 2606170u, 2604589u, 2603011u, 2601434u, 2599859u, 2598286u, 2596716u, 2595147u, 2593579u, 2592014u, 2590451u, 2588889u, 2587330u, 2585772u, 2584216u, 2582662u, 2581110u, 2579560u, 2578012u, 2576465u, 2574921u, 2573378u, 2571837u, 2570298u, 2568760u, 2567225u, 2565691u, 2564160u, 2562630u, 2561102u, 2559575u, 2558051u, 2556528u, 2555007u, 2553488u, 2551971u, 2550456u, 2548942u, 2547430u, 2545920u, 2544412u, 2542906u, 2541401u, 2539898u, 2538397u, 2536898u, 2535400u, 2533904u, 2532410u, 2530918u, 2529427u, 2527938u, 2526451u, 2524966u, 2523483u, 2522001u, 2520521u, 2519043u, 2517566u, 2516091u, 2514618u, 2513147u, 2511677u, 2510209u, 2508743u, 2507278u, 2505815u, 2504354u, 2502895u, 2501437u, 2499981u, 2498527u, 2497074u, 2495623u, 2494174u, 2492726u, 2491280u, 2489836u, 2488394u, 2486953u, 2485514u, 2484076u, 2482640u, 2481206u, 2479773u, 2478342u, 2476913u, 2475486u, 2474060u, 2472635u, 2471213u, 2469792u, 2468372u, 2466954u, 2465538u, 2464124u, 2462711u, 2461299u, 2459890u, 
2458482u, 2457075u, 2455670u, 2454267u, 2452866u, 2451465u, 2450067u, 2448670u, 2447275u, 2445881u, 2444489u, 2443099u, 2441710u, 2440322u, 2438937u, 2437553u, 2436170u, 2434789u, 2433409u, 2432031u, 2430655u, 2429280u, 2427907u, 2426535u, 2425165u, 2423797u, 2422430u, 2421064u, 2419700u, 2418338u, 2416977u, 2415617u, 2414259u, 2412903u, 2411548u, 2410195u, 2408843u, 2407493u, 2406144u, 2404797u, 2403451u, 2402107u, 2400764u, 2399423u, 2398083u, 2396745u, 2395409u, 2394073u, 2392740u, 2391407u, 2390077u, 2388747u, 2387419u, 2386093u, 2384768u, 2383445u, 2382123u, 2380802u, 2379483u, 2378166u, 2376850u, 2375535u, 2374222u, 2372910u, 2371600u, 2370291u, 2368984u, 2367678u, 2366373u, 2365070u, 2363769u, 2362468u, 2361170u, 2359872u, 2358576u, 2357282u, 2355989u, 2354697u, 2353407u, 2352118u, 2350831u, 2349545u, 2348260u, 2346977u, 2345695u, 2344415u, 2343136u, 2341858u, 2340582u, 2339307u, 2338033u, 2336761u, 2335491u, 2334221u, 2332954u, 2331687u, 2330422u, 2329158u, 2327896u, 2326635u, 2325375u, 2324117u, 2322860u, 2321604u, 2320350u, 2319097u, 2317845u, 2316595u, 2315346u, 2314099u, 2312853u, 2311608u, 2310364u, 2309122u, 2307882u, 2306642u, 2305404u, 2304167u, 2302932u, 2301697u, 2300465u, 2299233u, 2298003u, 2296774u, 2295547u, 2294320u, 2293095u, 2291872u, 2290649u, 2289428u, 2288209u, 2286990u, 2285773u, 2284557u, 2283343u, 2282129u, 2280917u, 2279707u, 2278497u, 2277289u, 2276082u, 2274877u, 2273673u, 2272470u, 2271268u, 2270067u, 2268868u, 2267670u, 2266474u, 2265278u, 2264084u, 2262891u, 2261700u, 2260509u, 2259320u, 2258132u, 2256946u, 2255760u, 2254576u, 2253393u, 2252212u, 2251031u, 2249852u, 2248674u, 2247497u, 2246322u, 2245148u, 2243975u, 2242803u, 2241632u, 2240463u, 2239295u, 2238128u, 2236962u, 2235798u, 2234635u, 2233472u, 2232312u, 2231152u, 2229994u, 2228836u, 2227680u, 2226525u, 2225372u, 2224219u, 2223068u, 2221918u, 2220769u, 2219621u, 2218475u, 2217330u, 2216186u, 2215043u, 2213901u, 2212760u, 2211621u, 2210483u, 2209345u, 2208210u, 2207075u, 
2205941u, 2204809u, 2203678u, 2202547u, 2201419u, 2200291u, 2199164u, 2198039u, 2196914u, 2195791u, 2194669u, 2193548u, 2192429u, 2191310u, 2190193u, 2189076u, 2187961u, 2186847u, 2185734u, 2184622u, 2183512u, 2182402u, 2181294u, 2180187u, 2179080u, 2177975u, 2176872u, 2175769u, 2174667u, 2173567u, 2172467u, 2171369u, 2170272u, 2169176u, 2168081u, 2166987u, 2165894u, 2164802u, 2163712u, 2162622u, 2161534u, 2160446u, 2159360u, 2158275u, 2157191u, 2156108u, 2155026u, 2153946u, 2152866u, 2151787u, 2150710u, 2149633u, 2148558u, 2147484u, 2146411u, 2145338u, 2144267u, 2143197u, 2142128u, 2141061u, 2139994u, 2138928u, 2137863u, 2136800u, 2135737u, 2134676u, 2133615u, 2132556u, 2131498u, 2130440u, 2129384u, 2128329u, 2127275u, 2126222u, 2125169u, 2124118u, 2123068u, 2122020u, 2120972u, 2119925u, 2118879u, 2117834u, 2116790u, 2115748u, 2114706u, 2113665u, 2112625u, 2111587u, 2110549u, 2109513u, 2108477u, 2107442u, 2106409u, 2105376u, 2104345u, 2103314u, 2102285u, 2101256u, 2100229u, 2099202u, 2098177u, 2097152u, 2096129u, 2095106u, 2094085u, 2093064u, 2092045u, 2091026u, 2090008u, 2088992u, 2087976u, 2086962u, 2085948u, 2084936u, 2083924u, 2082913u, 2081904u, 2080895u, 2079887u, 2078881u, 2077875u, 2076870u, 2075866u, 2074863u, 2073862u, 2072861u, 2071861u, 2070862u, 2069864u, 2068867u, 2067871u, 2066876u, 2065881u, 2064888u, 2063896u, 2062905u, 2061914u, 2060925u, 2059936u, 2058949u, 2057962u, 2056977u, 2055992u, 2055008u, 2054026u, 2053044u, 2052063u, 2051083u, 2050104u, 2049126u, 2048149u, 2047172u, 2046197u, 2045223u, 2044249u, 2043277u, 2042305u, 2041334u, 2040365u, 2039396u, 2038428u, 2037461u, 2036495u, 2035530u, 2034565u, 2033602u, 2032640u, 2031678u, 2030717u, 2029758u, 2028799u, 2027841u, 2026884u, 2025928u, 2024973u, 2024019u, 2023065u, 2022113u, 2021161u, 2020210u, 2019261u, 2018312u, 2017364u, 2016417u, 2015470u, 2014525u, 2013581u, 2012637u, 2011694u, 2010753u, 2009812u, 2008872u, 2007932u, 2006994u, 2006057u, 2005120u, 2004185u, 2003250u, 2002316u, 2001383u, 
2000451u, 1999519u, 1998589u, 1997659u, 1996731u, 1995803u, 1994876u, 1993950u, 1993024u, 1992100u, 1991176u, 1990254u, 1989332u, 1988411u, 1987491u, 1986571u, 1985653u, 1984735u, 1983819u, 1982903u, 1981988u, 1981074u, 1980160u, 1979248u, 1978336u, 1977425u, 1976515u, 1975606u, 1974698u, 1973790u, 1972884u, 1971978u, 1971073u, 1970169u, 1969265u, 1968363u, 1967461u, 1966560u, 1965660u, 1964761u, 1963863u, 1962965u, 1962068u, 1961172u, 1960277u, 1959383u, 1958489u, 1957597u, 1956705u, 1955814u, 1954924u, 1954034u, 1953146u, 1952258u, 1951371u, 1950485u, 1949599u, 1948715u, 1947831u, 1946948u, 1946066u, 1945185u, 1944304u, 1943424u, 1942545u, 1941667u, 1940790u, 1939913u, 1939037u, 1938162u, 1937288u, 1936415u, 1935542u, 1934670u, 1933799u, 1932929u, 1932059u, 1931190u, 1930322u, 1929455u, 1928589u, 1927723u, 1926858u, 1925994u, 1925131u, 1924269u, 1923407u, 1922546u, 1921686u, 1920826u, 1919968u, 1919110u, 1918253u, 1917396u, 1916541u, 1915686u, 1914832u, 1913978u, 1913126u, 1912274u, 1911423u, 1910573u, 1909723u, 1908874u, 1908026u, 1907179u, 1906333u, 1905487u, 1904642u, 1903798u, 1902954u, 1902111u, 1901269u, 1900428u, 1899588u, 1898748u, 1897909u, 1897070u, 1896233u, 1895396u, 1894560u, 1893725u, 1892890u, 1892056u, 1891223u, 1890391u, 1889559u, 1888728u, 1887898u, 1887068u, 1886240u, 1885412u, 1884584u, 1883758u, 1882932u, 1882107u, 1881282u, 1880459u, 1879636u, 1878813u, 1877992u, 1877171u, 1876351u, 1875532u, 1874713u, 1873895u, 1873078u, 1872261u, 1871446u, 1870630u, 1869816u, 1869002u, 1868189u, 1867377u, 1866566u, 1865755u, 1864945u, 1864135u, 1863326u, 1862518u, 1861711u, 1860904u, 1860099u, 1859293u, 1858489u, 1857685u, 1856882u, 1856079u, 1855278u, 1854476u, 1853676u, 1852876u, 1852077u, 1851279u, 1850481u, 1849685u, 1848888u, 1848093u, 1847298u, 1846504u, 1845710u, 1844917u, 1844125u, 1843334u, 1842543u, 1841753u, 1840963u, 1840175u, 1839386u, 1838599u, 1837812u, 1837026u, 1836241u, 1835456u, 1834672u, 1833889u, 1833106u, 1832324u, 1831543u, 1830762u, 
1829982u, 1829202u, 1828424u, 1827646u, 1826868u, 1826092u, 1825316u, 1824540u, 1823765u, 1822991u, 1822218u, 1821445u, 1820673u, 1819901u, 1819131u, 1818360u, 1817591u, 1816822u, 1816054u, 1815286u, 1814519u, 1813753u, 1812988u, 1812223u, 1811458u, 1810695u, 1809931u, 1809169u, 1808407u, 1807646u, 1806886u, 1806126u, 1805367u, 1804608u, 1803850u, 1803093u, 1802336u, 1801580u, 1800825u, 1800070u, 1799316u, 1798563u, 1797810u, 1797058u, 1796306u, 1795555u, 1794805u, 1794055u, 1793306u, 1792557u, 1791810u, 1791062u, 1790316u, 1789570u, 1788824u, 1788080u, 1787336u, 1786592u, 1785849u, 1785107u, 1784365u, 1783624u, 1782884u, 1782144u, 1781405u, 1780666u, 1779928u, 1779191u, 1778454u, 1777718u, 1776983u, 1776248u, 1775514u, 1774780u, 1774047u, 1773314u, 1772583u, 1771851u, 1771121u, 1770391u, 1769661u, 1768932u, 1768204u, 1767476u, 1766749u, 1766023u, 1765297u, 1764572u, 1763847u, 1763123u, 1762399u, 1761677u, 1760954u, 1760233u, 1759511u, 1758791u, 1758071u, 1757352u, 1756633u, 1755915u, 1755197u, 1754480u, 1753764u, 1753048u, 1752333u, 1751618u, 1750904u, 1750190u, 1749478u, 1748765u, 1748054u, 1747342u, 1746632u, 1745922u, 1745212u, 1744503u, 1743795u, 1743087u, 1742380u, 1741674u, 1740968u, 1740262u, 1739557u, 1738853u, 1738150u, 1737446u, 1736744u, 1736042u, 1735340u, 1734640u, 1733939u, 1733239u, 1732540u, 1731842u, 1731144u, 1730446u, 1729749u, 1729053u, 1728357u, 1727662u, 1726967u, 1726273u, 1725580u, 1724887u, 1724194u, 1723502u, 1722811u, 1722120u, 1721430u, 1720740u, 1720051u, 1719362u, 1718674u, 1717987u, 1717300u, 1716614u, 1715928u, 1715243u, 1714558u, 1713874u, 1713190u, 1712507u, 1711824u, 1711142u, 1710461u, 1709780u, 1709100u, 1708420u, 1707741u, 1707062u, 1706384u, 1705706u, 1705029u, 1704352u, 1703676u, 1703001u, 1702326u, 1701651u, 1700977u, 1700304u, 1699631u, 1698959u, 1698287u, 1697616u, 1696945u, 1696275u, 1695605u, 1694936u, 1694267u, 1693599u, 1692932u, 1692265u, 1691598u, 1690932u, 1690267u, 1689602u, 1688937u, 1688273u, 1687610u, 1686947u, 
1686285u, 1685623u, 1684962u, 1684301u, 1683641u, 1682981u, 1682322u, 1681663u, 1681005u, 1680347u, 1679690u, 1679033u, 1678377u, 1677722u, 1677067u, 1676412u, 1675758u, 1675104u, 1674451u, 1673799u, 1673147u, 1672495u, 1671844u, 1671194u, 1670544u, 1669894u, 1669245u, 1668597u, 1667949u, 1667301u, 1666654u, 1666008u, 1665362u, 1664716u, 1664071u, 1663427u, 1662783u, 1662139u, 1661496u, 1660854u, 1660212u, 1659570u, 1658929u, 1658289u, 1657649u, 1657009u, 1656370u, 1655731u, 1655093u, 1654456u, 1653819u, 1653182u, 1652546u, 1651911u, 1651275u, 1650641u, 1650007u, 1649373u, 1648740u, 1648107u, 1647475u, 1646843u, 1646212u, 1645581u, 1644951u, 1644321u, 1643692u, 1643063u, 1642435u, 1641807u, 1641180u, 1640553u, 1639926u, 1639301u, 1638675u, 1638050u, 1637426u, 1636802u, 1636178u, 1635555u, 1634932u, 1634310u, 1633689u, 1633067u, 1632447u, 1631827u, 1631207u, 1630587u, 1629969u, 1629350u, 1628732u, 1628115u, 1627498u, 1626882u, 1626266u, 1625650u, 1625035u, 1624420u, 1623806u, 1623193u, 1622579u, 1621967u, 1621354u, 1620742u, 1620131u, 1619520u, 1618910u, 1618300u, 1617690u, 1617081u, 1616473u, 1615864u, 1615257u, 1614649u, 1614043u, 1613436u, 1612830u, 1612225u, 1611620u, 1611016u, 1610411u, 1609808u, 1609205u, 1608602u, 1608000u, 1607398u, 1606797u, 1606196u, 1605595u, 1604995u, 1604396u, 1603797u, 1603198u, 1602600u, 1602002u, 1601405u, 1600808u, 1600211u, 1599615u, 1599020u, 1598425u, 1597830u, 1597236u, 1596642u, 1596049u, 1595456u, 1594864u, 1594272u, 1593680u, 1593089u, 1592498u, 1591908u, 1591318u, 1590729u, 1590140u, 1589551u, 1588963u, 1588376u, 1587788u, 1587202u, 1586615u, 1586029u, 1585444u, 1584859u, 1584274u, 1583690u, 1583106u, 1582523u, 1581940u, 1581358u, 1580776u, 1580194u, 1579613u, 1579032u, 1578452u, 1577872u, 1577292u, 1576713u, 1576135u, 1575557u, 1574979u, 1574402u, 1573825u, 1573248u, 1572672u, 1572096u, 1571521u, 1570946u, 1570372u, 1569798u, 1569225u, 1568651u, 1568079u, 1567506u, 1566935u, 1566363u, 1565792u, 1565221u, 1564651u, 1564081u, 
1563512u, 1562943u, 1562374u, 1561806u, 1561239u, 1560671u, 1560104u, 1559538u, 1558972u, 1558406u, 1557841u, 1557276u, 1556712u, 1556148u, 1555584u, 1555021u, 1554458u, 1553896u, 1553334u, 1552772u, 1552211u, 1551650u, 1551090u, 1550530u, 1549970u, 1549411u, 1548852u, 1548294u, 1547736u, 1547178u, 1546621u, 1546065u, 1545508u, 1544952u, 1544397u, 1543842u, 1543287u, 1542733u, 1542179u, 1541625u, 1541072u, 1540519u, 1539967u, 1539415u, 1538863u, 1538312u, 1537761u, 1537211u, 1536661u, 1536111u, 1535562u, 1535013u, 1534465u, 1533917u, 1533369u, 1532822u, 1532275u, 1531729u, 1531183u, 1530637u, 1530092u, 1529547u, 1529002u, 1528458u, 1527914u, 1527371u, 1526828u, 1526286u, 1525743u, 1525202u, 1524660u, 1524119u, 1523578u, 1523038u, 1522498u, 1521959u, 1521420u, 1520881u, 1520342u, 1519804u, 1519267u, 1518730u, 1518193u, 1517656u, 1517120u, 1516585u, 1516049u, 1515514u, 1514980u, 1514446u, 1513912u, 1513378u, 1512845u, 1512312u, 1511780u, 1511248u, 1510717u, 1510185u, 1509655u, 1509124u, 1508594u, 1508064u, 1507535u, 1507006u, 1506478u, 1505949u, 1505421u, 1504894u, 1504367u, 1503840u, 1503314u, 1502788u, 1502262u, 1501737u, 1501212u, 1500687u, 1500163u, 1499639u, 1499116u, 1498593u, 1498070u, 1497548u, 1497026u, 1496504u, 1495983u, 1495462u, 1494942u, 1494422u, 1493902u, 1493382u, 1492863u, 1492345u, 1491826u, 1491308u, 1490791u, 1490273u, 1489756u, 1489240u, 1488724u, 1488208u, 1487692u, 1487177u, 1486662u, 1486148u, 1485634u, 1485120u, 1484607u, 1484094u, 1483581u, 1483069u, 1482557u, 1482045u, 1481534u, 1481023u, 1480513u, 1480003u, 1479493u, 1478983u, 1478474u, 1477965u, 1477457u, 1476949u, 1476441u, 1475934u, 1475427u, 1474920u, 1474414u, 1473908u, 1473402u, 1472897u, 1472392u, 1471887u, 1471383u, 1470879u, 1470376u, 1469873u, 1469370u, 1468867u, 1468365u, 1467863u, 1467362u, 1466860u, 1466360u, 1465859u, 1465359u, 1464859u, 1464360u, 1463861u, 1463362u, 1462864u, 1462366u, 1461868u, 1461370u, 1460873u, 1460377u, 1459880u, 1459384u, 1458888u, 1458393u, 1457898u, 
1457403u, 1456909u, 1456415u, 1455921u, 1455428u, 1454935u, 1454442u, 1453950u, 1453458u, 1452966u, 1452475u, 1451984u, 1451493u, 1451003u, 1450512u, 1450023u, 1449533u, 1449044u, 1448556u, 1448067u, 1447579u, 1447091u, 1446604u, 1446117u, 1445630u, 1445144u, 1444658u, 1444172u, 1443687u, 1443201u, 1442717u, 1442232u, 1441748u, 1441264u, 1440781u, 1440298u, 1439815u, 1439332u, 1438850u, 1438368u, 1437887u, 1437405u, 1436925u, 1436444u, 1435964u, 1435484u, 1435004u, 1434525u, 1434046u, 1433567u, 1433089u, 1432611u, 1432133u, 1431656u, 1431179u, 1430702u, 1430226u, 1429749u, 1429274u, 1428798u, 1428323u, 1427848u, 1427374u, 1426899u, 1426426u, 1425952u, 1425479u, 1425006u, 1424533u, 1424061u, 1423589u, 1423117u, 1422646u, 1422175u, 1421704u, 1421233u, 1420763u, 1420293u, 1419824u, 1419355u, 1418886u, 1418417u, 1417949u, 1417481u, 1417013u, 1416546u, 1416079u, 1415612u, 1415146u, 1414680u, 1414214u, 1413748u, 1413283u, 1412818u, 1412354u, 1411889u, 1411425u, 1410962u, 1410498u, 1410035u, 1409573u, 1409110u, 1408648u, 1408186u, 1407725u, 1407263u, 1406802u, 1406342u, 1405881u, 1405421u, 1404962u, 1404502u, 1404043u, 1403584u, 1403126u, 1402667u, 1402209u, 1401752u, 1401294u, 1400837u, 1400381u, 1399924u, 1399468u, 1399012u, 1398557u, 1398101u, 1397646u, 1397192u, 1396737u, 1396283u, 1395830u, 1395376u, 1394923u, 1394470u, 1394017u, 1393565u, 1393113u, 1392661u, 1392210u, 1391759u, 1391308u, 1390857u, 1390407u, 1389957u, 1389507u, 1389058u, 1388609u, 1388160u, 1387712u, 1387263u, 1386815u, 1386368u, 1385920u, 1385473u, 1385027u, 1384580u, 1384134u, 1383688u, 1383242u, 1382797u, 1382352u, 1381907u, 1381463u, 1381018u, 1380575u, 1380131u, 1379688u, 1379245u, 1378802u, 1378359u, 1377917u, 1377475u, 1377034u, 1376592u, 1376151u, 1375710u, 1375270u, 1374830u, 1374390u, 1373950u, 1373511u, 1373071u, 1372633u, 1372194u, 1371756u, 1371318u, 1370880u, 1370443u, 1370006u, 1369569u, 1369132u, 1368696u, 1368260u, 1367824u, 1367389u, 1366953u, 1366518u, 1366084u, 1365649u, 1365215u, 
1364782u, 1364348u, 1363915u, 1363482u, 1363049u, 1362617u, 1362184u, 1361753u, 1361321u, 1360890u, 1360459u, 1360028u, 1359597u, 1359167u, 1358737u, 1358307u, 1357878u, 1357449u, 1357020u, 1356591u, 1356163u, 1355735u, 1355307u, 1354879u, 1354452u, 1354025u, 1353598u, 1353172u, 1352746u, 1352320u, 1351894u, 1351469u, 1351044u, 1350619u, 1350194u, 1349770u, 1349346u, 1348922u, 1348498u, 1348075u, 1347652u, 1347229u, 1346807u, 1346385u, 1345963u, 1345541u, 1345120u, 1344699u, 1344278u, 1343857u, 1343437u, 1343017u, 1342597u, 1342177u, 1341758u, 1341339u, 1340920u, 1340502u, 1340083u, 1339665u, 1339248u, 1338830u, 1338413u, 1337996u, 1337579u, 1337163u, 1336747u, 1336331u, 1335915u, 1335500u, 1335085u, 1334670u, 1334255u, 1333841u, 1333427u, 1333013u, 1332599u, 1332186u, 1331773u, 1331360u, 1330947u, 1330535u, 1330123u, 1329711u, 1329300u, 1328888u, 1328477u, 1328067u, 1327656u, 1327246u, 1326836u, 1326426u, 1326017u, 1325607u, 1325198u, 1324789u, 1324381u, 1323973u, 1323565u, 1323157u, 1322749u, 1322342u, 1321935u, 1321528u, 1321122u, 1320716u, 1320310u, 1319904u, 1319498u, 1319093u, 1318688u, 1318283u, 1317879u, 1317475u, 1317071u, 1316667u, 1316263u, 1315860u, 1315457u, 1315054u, 1314652u, 1314250u, 1313848u, 1313446u, 1313044u, 1312643u, 1312242u, 1311841u, 1311440u, 1311040u, 1310640u, 1310240u, 1309841u, 1309441u, 1309042u, 1308643u, 1308245u, 1307846u, 1307448u, 1307050u, 1306653u, 1306255u, 1305858u, 1305461u, 1305065u, 1304668u, 1304272u, 1303876u, 1303480u, 1303085u, 1302690u, 1302295u, 1301900u, 1301505u, 1301111u, 1300717u, 1300323u, 1299930u, 1299536u, 1299143u, 1298750u, 1298358u, 1297965u, 1297573u, 1297181u, 1296790u, 1296398u, 1296007u, 1295616u, 1295225u, 1294835u, 1294445u, 1294055u, 1293665u, 1293275u, 1292886u, 1292497u, 1292108u, 1291720u, 1291331u, 1290943u, 1290555u, 1290167u, 1289780u, 1289393u, 1289006u, 1288619u, 1288233u, 1287846u, 1287460u, 1287074u, 1286689u, 1286304u, 1285918u, 1285534u, 1285149u, 1284764u, 1284380u, 1283996u, 1283613u, 
1283229u, 1282846u, 1282463u, 1282080u, 1281697u, 1281315u, 1280933u, 1280551u, 1280169u, 1279788u, 1279406u, 1279025u, 1278645u, 1278264u, 1277884u, 1277504u, 1277124u, 1276744u, 1276365u, 1275986u, 1275607u, 1275228u, 1274849u, 1274471u, 1274093u, 1273715u, 1273338u, 1272960u, 1272583u, 1272206u, 1271829u, 1271453u, 1271077u, 1270700u, 1270325u, 1269949u, 1269574u, 1269198u, 1268823u, 1268449u, 1268074u, 1267700u, 1267326u, 1266952u, 1266578u, 1266205u, 1265832u, 1265459u, 1265086u, 1264714u, 1264341u, 1263969u, 1263597u, 1263226u, 1262854u, 1262483u, 1262112u, 1261741u, 1261371u, 1261000u, 1260630u, 1260260u, 1259891u, 1259521u, 1259152u, 1258783u, 1258414u, 1258046u, 1257677u, 1257309u, 1256941u, 1256573u, 1256206u, 1255838u, 1255471u, 1255104u, 1254738u, 1254371u, 1254005u, 1253639u, 1253273u, 1252908u, 1252542u, 1252177u, 1251812u, 1251447u, 1251083u, 1250719u, 1250354u, 1249991u, 1249627u, 1249263u, 1248900u, 1248537u, 1248174u, 1247812u, 1247449u, 1247087u, 1246725u, 1246363u, 1246002u, 1245640u, 1245279u, 1244918u, 1244557u, 1244197u, 1243837u, 1243476u, 1243117u, 1242757u, 1242397u, 1242038u, 1241679u, 1241320u, 1240961u, 1240603u, 1240245u, 1239887u, 1239529u, 1239171u, 1238814u, 1238457u, 1238100u, 1237743u, 1237386u, 1237030u, 1236674u, 1236318u, 1235962u, 1235606u, 1235251u, 1234896u, 1234541u, 1234186u, 1233832u, 1233477u, 1233123u, 1232769u, 1232415u, 1232062u, 1231708u, 1231355u, 1231002u, 1230650u, 1230297u, 1229945u, 1229593u, 1229241u, 1228889u, 1228538u, 1228186u, 1227835u, 1227484u, 1227134u, 1226783u, 1226433u, 1226083u, 1225733u, 1225383u, 1225034u, 1224684u, 1224335u, 1223986u, 1223637u, 1223289u, 1222941u, 1222593u, 1222245u, 1221897u, 1221549u, 1221202u, 1220855u, 1220508u, 1220161u, 1219815u, 1219468u, 1219122u, 1218776u, 1218431u, 1218085u, 1217740u, 1217394u, 1217049u, 1216705u, 1216360u, 1216016u, 1215672u, 1215328u, 1214984u, 1214640u, 1214297u, 1213954u, 1213610u, 1213268u, 1212925u, 1212583u, 1212240u, 1211898u, 1211556u, 1211215u, 
1210873u, 1210532u, 1210191u, 1209850u, 1209509u, 1209169u, 1208828u, 1208488u, 1208148u, 1207809u, 1207469u, 1207130u, 1206791u, 1206452u, 1206113u, 1205774u, 1205436u, 1205098u, 1204759u, 1204422u, 1204084u, 1203747u, 1203409u, 1203072u, 1202735u, 1202399u, 1202062u, 1201726u, 1201390u, 1201054u, 1200718u, 1200382u, 1200047u, 1199712u, 1199377u, 1199042u, 1198707u, 1198373u, 1198038u, 1197704u, 1197370u, 1197037u, 1196703u, 1196370u, 1196037u, 1195704u, 1195371u, 1195038u, 1194706u, 1194374u, 1194042u, 1193710u, 1193378u, 1193047u, 1192715u, 1192384u, 1192053u, 1191722u, 1191392u, 1191061u, 1190731u, 1190401u, 1190071u, 1189742u, 1189412u, 1189083u, 1188754u, 1188425u, 1188096u, 1187768u, 1187439u, 1187111u, 1186783u, 1186455u, 1186127u, 1185800u, 1185473u, 1185146u, 1184819u, 1184492u, 1184165u, 1183839u, 1183513u, 1183187u, 1182861u, 1182535u, 1182210u, 1181884u, 1181559u, 1181234u, 1180909u, 1180585u, 1180260u, 1179936u, 1179612u, 1179288u, 1178964u, 1178641u, 1178318u, 1177994u, 1177671u, 1177349u, 1177026u, 1176703u, 1176381u, 1176059u, 1175737u, 1175415u, 1175094u, 1174772u, 1174451u, 1174130u, 1173809u, 1173488u, 1173168u, 1172847u, 1172527u, 1172207u, 1171887u, 1171568u, 1171248u, 1170929u, 1170610u, 1170291u, 1169972u, 1169653u, 1169335u, 1169017u, 1168699u, 1168381u, 1168063u, 1167745u, 1167428u, 1167111u, 1166794u, 1166477u, 1166160u, 1165844u, 1165527u, 1165211u, 1164895u, 1164579u, 1164263u, 1163948u, 1163632u, 1163317u, 1163002u, 1162687u, 1162373u, 1162058u, 1161744u, 1161430u, 1161116u, 1160802u, 1160488u, 1160175u, 1159862u, 1159548u, 1159236u, 1158923u, 1158610u, 1158298u, 1157985u, 1157673u, 1157361u, 1157049u, 1156738u, 1156426u, 1156115u, 1155804u, 1155493u, 1155182u, 1154872u, 1154561u, 1154251u, 1153941u, 1153631u, 1153321u, 1153011u, 1152702u, 1152393u, 1152084u, 1151775u, 1151466u, 1151157u, 1150849u, 1150540u, 1150232u, 1149924u, 1149617u, 1149309u, 1149001u, 1148694u, 1148387u, 1148080u, 1147773u, 1147467u, 1147160u, 1146854u, 1146548u, 
1146242u, 1145936u, 1145630u, 1145325u, 1145019u, 1144714u, 1144409u, 1144104u, 1143800u, 1143495u, 1143191u, 1142887u, 1142582u, 1142279u, 1141975u, 1141671u, 1141368u, 1141065u, 1140762u, 1140459u, 1140156u, 1139853u, 1139551u, 1139249u, 1138947u, 1138645u, 1138343u, 1138041u, 1137740u, 1137438u, 1137137u, 1136836u, 1136535u, 1136235u, 1135934u, 1135634u, 1135334u, 1135034u, 1134734u, 1134434u, 1134135u, 1133835u, 1133536u, 1133237u, 1132938u, 1132639u, 1132341u, 1132042u, 1131744u, 1131446u, 1131148u, 1130850u, 1130552u, 1130255u, 1129957u, 1129660u, 1129363u, 1129066u, 1128769u, 1128473u, 1128176u, 1127880u, 1127584u, 1127288u, 1126992u, 1126697u, 1126401u, 1126106u, 1125811u, 1125516u, 1125221u, 1124926u, 1124631u, 1124337u, 1124043u, 1123749u, 1123455u, 1123161u, 1122867u, 1122574u, 1122281u, 1121987u, 1121694u, 1121401u, 1121109u, 1120816u, 1120524u, 1120231u, 1119939u, 1119647u, 1119356u, 1119064u, 1118772u, 1118481u, 1118190u, 1117899u, 1117608u, 1117317u, 1117027u, 1116736u, 1116446u, 1116156u, 1115866u, 1115576u, 1115286u, 1114997u, 1114707u, 1114418u, 1114129u, 1113840u, 1113551u, 1113263u, 1112974u, 1112686u, 1112398u, 1112110u, 1111822u, 1111534u, 1111246u, 1110959u, 1110672u, 1110385u, 1110098u, 1109811u, 1109524u, 1109237u, 1108951u, 1108665u, 1108379u, 1108093u, 1107807u, 1107521u, 1107236u, 1106950u, 1106665u, 1106380u, 1106095u, 1105810u, 1105526u, 1105241u, 1104957u, 1104673u, 1104389u, 1104105u, 1103821u, 1103537u, 1103254u, 1102971u, 1102687u, 1102404u, 1102122u, 1101839u, 1101556u, 1101274u, 1100991u, 1100709u, 1100427u, 1100145u, 1099864u, 1099582u, 1099301u, 1099019u, 1098738u, 1098457u, 1098176u, 1097896u, 1097615u, 1097335u, 1097054u, 1096774u, 1096494u, 1096214u, 1095935u, 1095655u, 1095376u, 1095096u, 1094817u, 1094538u, 1094259u, 1093981u, 1093702u, 1093424u, 1093145u, 1092867u, 1092589u, 1092311u, 1092033u, 1091756u, 1091478u, 1091201u, 1090924u, 1090647u, 1090370u, 1090093u, 1089817u, 1089540u, 1089264u, 1088988u, 1088712u, 1088436u, 
1088160u, 1087884u, 1087609u, 1087334u, 1087058u, 1086783u, 1086508u, 1086234u, 1085959u, 1085684u, 1085410u, 1085136u, 1084862u, 1084588u, 1084314u, 1084040u, 1083767u, 1083493u, 1083220u, 1082947u, 1082674u, 1082401u, 1082128u, 1081856u, 1081583u, 1081311u, 1081039u, 1080767u, 1080495u, 1080223u, 1079952u, 1079680u, 1079409u, 1079138u, 1078867u, 1078596u, 1078325u, 1078054u, 1077784u, 1077513u, 1077243u, 1076973u, 1076703u, 1076433u, 1076163u, 1075894u, 1075624u, 1075355u, 1075086u, 1074817u, 1074548u, 1074279u, 1074010u, 1073742u, 1073474u, 1073205u, 1072937u, 1072669u, 1072401u, 1072134u, 1071866u, 1071599u, 1071331u, 1071064u, 1070797u, 1070530u, 1070264u, 1069997u, 1069730u, 1069464u, 1069198u, 1068932u, 1068666u, 1068400u, 1068134u, 1067869u, 1067603u, 1067338u, 1067073u, 1066808u, 1066543u, 1066278u, 1066013u, 1065749u, 1065484u, 1065220u, 1064956u, 1064692u, 1064428u, 1064164u, 1063901u, 1063637u, 1063374u, 1063111u, 1062848u, 1062585u, 1062322u, 1062059u, 1061797u, 1061534u, 1061272u, 1061010u, 1060748u, 1060486u, 1060224u, 1059962u, 1059701u, 1059439u, 1059178u, 1058917u, 1058656u, 1058395u, 1058134u, 1057874u, 1057613u, 1057353u, 1057093u, 1056833u, 1056573u, 1056313u, 1056053u, 1055793u, 1055534u, 1055275u, 1055015u, 1054756u, 1054497u, 1054238u, 1053980u, 1053721u, 1053463u, 1053204u, 1052946u, 1052688u, 1052430u, 1052172u, 1051915u, 1051657u, 1051400u, 1051142u, 1050885u, 1050628u, 1050371u, 1050114u, 1049858u, 1049601u, 1049345u, 1049088u, 1048832u, 1048576u }; // Divide num by div and return as 16.16 fixed point result. 
int FixedDiv_C(int num, int div) { if (static_cast(div) <= 4097u) { return static_cast((static_cast(num) * kRecipTable[div]) >> 16); } return static_cast((static_cast(num) << 16) / div); } #ifdef LIBYUV_LITTLE_ENDIAN #define WRITEWORD(p, v) *reinterpret_cast(p) = v #else static inline void WRITEWORD(uint8* p, uint32 v) { p[0] = (uint8)(v & 255); p[1] = (uint8)((v >> 8) & 255); p[2] = (uint8)((v >> 16) & 255); p[3] = (uint8)((v >> 24) & 255); } #endif void RGB24ToARGBRow_C(const uint8* src_rgb24, uint8* dst_argb, int width) { for (int x = 0; x < width; ++x) { uint8 b = src_rgb24[0]; uint8 g = src_rgb24[1]; uint8 r = src_rgb24[2]; dst_argb[0] = b; dst_argb[1] = g; dst_argb[2] = r; dst_argb[3] = 255u; dst_argb += 4; src_rgb24 += 3; } } void RAWToARGBRow_C(const uint8* src_raw, uint8* dst_argb, int width) { for (int x = 0; x < width; ++x) { uint8 r = src_raw[0]; uint8 g = src_raw[1]; uint8 b = src_raw[2]; dst_argb[0] = b; dst_argb[1] = g; dst_argb[2] = r; dst_argb[3] = 255u; dst_argb += 4; src_raw += 3; } } void RGB565ToARGBRow_C(const uint8* src_rgb565, uint8* dst_argb, int width) { for (int x = 0; x < width; ++x) { uint8 b = src_rgb565[0] & 0x1f; uint8 g = (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3); uint8 r = src_rgb565[1] >> 3; dst_argb[0] = (b << 3) | (b >> 2); dst_argb[1] = (g << 2) | (g >> 4); dst_argb[2] = (r << 3) | (r >> 2); dst_argb[3] = 255u; dst_argb += 4; src_rgb565 += 2; } } void ARGB1555ToARGBRow_C(const uint8* src_argb1555, uint8* dst_argb, int width) { for (int x = 0; x < width; ++x) { uint8 b = src_argb1555[0] & 0x1f; uint8 g = (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3); uint8 r = (src_argb1555[1] & 0x7c) >> 2; uint8 a = src_argb1555[1] >> 7; dst_argb[0] = (b << 3) | (b >> 2); dst_argb[1] = (g << 3) | (g >> 2); dst_argb[2] = (r << 3) | (r >> 2); dst_argb[3] = -a; dst_argb += 4; src_argb1555 += 2; } } void ARGB4444ToARGBRow_C(const uint8* src_argb4444, uint8* dst_argb, int width) { for (int x = 0; x < width; ++x) { uint8 b = 
src_argb4444[0] & 0x0f; uint8 g = src_argb4444[0] >> 4; uint8 r = src_argb4444[1] & 0x0f; uint8 a = src_argb4444[1] >> 4; dst_argb[0] = (b << 4) | b; dst_argb[1] = (g << 4) | g; dst_argb[2] = (r << 4) | r; dst_argb[3] = (a << 4) | a; dst_argb += 4; src_argb4444 += 2; } } void ARGBToRGB24Row_C(const uint8* src_argb, uint8* dst_rgb, int width) { for (int x = 0; x < width; ++x) { uint8 b = src_argb[0]; uint8 g = src_argb[1]; uint8 r = src_argb[2]; dst_rgb[0] = b; dst_rgb[1] = g; dst_rgb[2] = r; dst_rgb += 3; src_argb += 4; } } void ARGBToRAWRow_C(const uint8* src_argb, uint8* dst_rgb, int width) { for (int x = 0; x < width; ++x) { uint8 b = src_argb[0]; uint8 g = src_argb[1]; uint8 r = src_argb[2]; dst_rgb[0] = r; dst_rgb[1] = g; dst_rgb[2] = b; dst_rgb += 3; src_argb += 4; } } void ARGBToRGB565Row_C(const uint8* src_argb, uint8* dst_rgb, int width) { for (int x = 0; x < width - 1; x += 2) { uint8 b0 = src_argb[0] >> 3; uint8 g0 = src_argb[1] >> 2; uint8 r0 = src_argb[2] >> 3; uint8 b1 = src_argb[4] >> 3; uint8 g1 = src_argb[5] >> 2; uint8 r1 = src_argb[6] >> 3; WRITEWORD(dst_rgb, b0 | (g0 << 5) | (r0 << 11) | (b1 << 16) | (g1 << 21) | (r1 << 27)); dst_rgb += 4; src_argb += 8; } if (width & 1) { uint8 b0 = src_argb[0] >> 3; uint8 g0 = src_argb[1] >> 2; uint8 r0 = src_argb[2] >> 3; *reinterpret_cast(dst_rgb) = b0 | (g0 << 5) | (r0 << 11); } } void ARGBToARGB1555Row_C(const uint8* src_argb, uint8* dst_rgb, int width) { for (int x = 0; x < width - 1; x += 2) { uint8 b0 = src_argb[0] >> 3; uint8 g0 = src_argb[1] >> 3; uint8 r0 = src_argb[2] >> 3; uint8 a0 = src_argb[3] >> 7; uint8 b1 = src_argb[4] >> 3; uint8 g1 = src_argb[5] >> 3; uint8 r1 = src_argb[6] >> 3; uint8 a1 = src_argb[7] >> 7; *reinterpret_cast(dst_rgb) = b0 | (g0 << 5) | (r0 << 10) | (a0 << 15) | (b1 << 16) | (g1 << 21) | (r1 << 26) | (a1 << 31); dst_rgb += 4; src_argb += 8; } if (width & 1) { uint8 b0 = src_argb[0] >> 3; uint8 g0 = src_argb[1] >> 3; uint8 r0 = src_argb[2] >> 3; uint8 a0 = src_argb[3] >> 7; 
*reinterpret_cast(dst_rgb) = b0 | (g0 << 5) | (r0 << 10) | (a0 << 15); } } void ARGBToARGB4444Row_C(const uint8* src_argb, uint8* dst_rgb, int width) { for (int x = 0; x < width - 1; x += 2) { uint8 b0 = src_argb[0] >> 4; uint8 g0 = src_argb[1] >> 4; uint8 r0 = src_argb[2] >> 4; uint8 a0 = src_argb[3] >> 4; uint8 b1 = src_argb[4] >> 4; uint8 g1 = src_argb[5] >> 4; uint8 r1 = src_argb[6] >> 4; uint8 a1 = src_argb[7] >> 4; *reinterpret_cast(dst_rgb) = b0 | (g0 << 4) | (r0 << 8) | (a0 << 12) | (b1 << 16) | (g1 << 20) | (r1 << 24) | (a1 << 28); dst_rgb += 4; src_argb += 8; } if (width & 1) { uint8 b0 = src_argb[0] >> 4; uint8 g0 = src_argb[1] >> 4; uint8 r0 = src_argb[2] >> 4; uint8 a0 = src_argb[3] >> 4; *reinterpret_cast(dst_rgb) = b0 | (g0 << 4) | (r0 << 8) | (a0 << 12); } } static __inline int RGBToY(uint8 r, uint8 g, uint8 b) { return (66 * r + 129 * g + 25 * b + 0x1080) >> 8; } static __inline int RGBToU(uint8 r, uint8 g, uint8 b) { return (112 * b - 74 * g - 38 * r + 0x8080) >> 8; } static __inline int RGBToV(uint8 r, uint8 g, uint8 b) { return (112 * r - 94 * g - 18 * b + 0x8080) >> 8; } #define MAKEROWY(NAME, R, G, B, BPP) \ void NAME ## ToYRow_C(const uint8* src_argb0, uint8* dst_y, int width) { \ for (int x = 0; x < width; ++x) { \ dst_y[0] = RGBToY(src_argb0[R], src_argb0[G], src_argb0[B]); \ src_argb0 += BPP; \ dst_y += 1; \ } \ } \ void NAME ## ToUVRow_C(const uint8* src_rgb0, int src_stride_rgb, \ uint8* dst_u, uint8* dst_v, int width) { \ const uint8* src_rgb1 = src_rgb0 + src_stride_rgb; \ for (int x = 0; x < width - 1; x += 2) { \ uint8 ab = (src_rgb0[B] + src_rgb0[B + BPP] + \ src_rgb1[B] + src_rgb1[B + BPP]) >> 2; \ uint8 ag = (src_rgb0[G] + src_rgb0[G + BPP] + \ src_rgb1[G] + src_rgb1[G + BPP]) >> 2; \ uint8 ar = (src_rgb0[R] + src_rgb0[R + BPP] + \ src_rgb1[R] + src_rgb1[R + BPP]) >> 2; \ dst_u[0] = RGBToU(ar, ag, ab); \ dst_v[0] = RGBToV(ar, ag, ab); \ src_rgb0 += BPP * 2; \ src_rgb1 += BPP * 2; \ dst_u += 1; \ dst_v += 1; \ } \ if (width & 1) { 
\ uint8 ab = (src_rgb0[B] + src_rgb1[B]) >> 1; \ uint8 ag = (src_rgb0[G] + src_rgb1[G]) >> 1; \ uint8 ar = (src_rgb0[R] + src_rgb1[R]) >> 1; \ dst_u[0] = RGBToU(ar, ag, ab); \ dst_v[0] = RGBToV(ar, ag, ab); \ } \ } MAKEROWY(ARGB, 2, 1, 0, 4) MAKEROWY(BGRA, 1, 2, 3, 4) MAKEROWY(ABGR, 0, 1, 2, 4) MAKEROWY(RGBA, 3, 2, 1, 4) MAKEROWY(RGB24, 2, 1, 0, 3) MAKEROWY(RAW, 0, 1, 2, 3) #undef MAKEROWY // JPeg uses a variation on BT.601-1 full range // y = 0.29900 * r + 0.58700 * g + 0.11400 * b // u = -0.16874 * r - 0.33126 * g + 0.50000 * b + center // v = 0.50000 * r - 0.41869 * g - 0.08131 * b + center // BT.601 Mpeg range uses: // b 0.1016 * 255 = 25.908 = 25 // g 0.5078 * 255 = 129.489 = 129 // r 0.2578 * 255 = 65.739 = 66 // JPeg 8 bit Y (not used): // b 0.11400 * 256 = 29.184 = 29 // g 0.58700 * 256 = 150.272 = 150 // r 0.29900 * 256 = 76.544 = 77 // JPeg 7 bit Y: // b 0.11400 * 128 = 14.592 = 15 // g 0.58700 * 128 = 75.136 = 75 // r 0.29900 * 128 = 38.272 = 38 // JPeg 8 bit U: // b 0.50000 * 255 = 127.5 = 127 // g -0.33126 * 255 = -84.4713 = -84 // r -0.16874 * 255 = -43.0287 = -43 // JPeg 8 bit V: // b -0.08131 * 255 = -20.73405 = -20 // g -0.41869 * 255 = -106.76595 = -107 // r 0.50000 * 255 = 127.5 = 127 static __inline int RGBToYJ(uint8 r, uint8 g, uint8 b) { return (38 * r + 75 * g + 15 * b + 64) >> 7; } static __inline int RGBToUJ(uint8 r, uint8 g, uint8 b) { return (127 * b - 84 * g - 43 * r + 0x8080) >> 8; } static __inline int RGBToVJ(uint8 r, uint8 g, uint8 b) { return (127 * r - 107 * g - 20 * b + 0x8080) >> 8; } #define AVGB(a, b) (((a) + (b) + 1) >> 1) #define MAKEROWYJ(NAME, R, G, B, BPP) \ void NAME ## ToYJRow_C(const uint8* src_argb0, uint8* dst_y, int width) { \ for (int x = 0; x < width; ++x) { \ dst_y[0] = RGBToYJ(src_argb0[R], src_argb0[G], src_argb0[B]); \ src_argb0 += BPP; \ dst_y += 1; \ } \ } \ void NAME ## ToUVJRow_C(const uint8* src_rgb0, int src_stride_rgb, \ uint8* dst_u, uint8* dst_v, int width) { \ const uint8* src_rgb1 = src_rgb0 + 
src_stride_rgb; \ for (int x = 0; x < width - 1; x += 2) { \ uint8 ab = AVGB(AVGB(src_rgb0[B], src_rgb1[B]), \ AVGB(src_rgb0[B + BPP], src_rgb1[B + BPP])); \ uint8 ag = AVGB(AVGB(src_rgb0[G], src_rgb1[G]), \ AVGB(src_rgb0[G + BPP], src_rgb1[G + BPP])); \ uint8 ar = AVGB(AVGB(src_rgb0[R], src_rgb1[R]), \ AVGB(src_rgb0[R + BPP], src_rgb1[R + BPP])); \ dst_u[0] = RGBToUJ(ar, ag, ab); \ dst_v[0] = RGBToVJ(ar, ag, ab); \ src_rgb0 += BPP * 2; \ src_rgb1 += BPP * 2; \ dst_u += 1; \ dst_v += 1; \ } \ if (width & 1) { \ uint8 ab = AVGB(src_rgb0[B], src_rgb1[B]); \ uint8 ag = AVGB(src_rgb0[G], src_rgb1[G]); \ uint8 ar = AVGB(src_rgb0[R], src_rgb1[R]); \ dst_u[0] = RGBToUJ(ar, ag, ab); \ dst_v[0] = RGBToVJ(ar, ag, ab); \ } \ } MAKEROWYJ(ARGB, 2, 1, 0, 4) #undef MAKEROWYJ void RGB565ToYRow_C(const uint8* src_rgb565, uint8* dst_y, int width) { for (int x = 0; x < width; ++x) { uint8 b = src_rgb565[0] & 0x1f; uint8 g = (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3); uint8 r = src_rgb565[1] >> 3; b = (b << 3) | (b >> 2); g = (g << 2) | (g >> 4); r = (r << 3) | (r >> 2); dst_y[0] = RGBToY(r, g, b); src_rgb565 += 2; dst_y += 1; } } void ARGB1555ToYRow_C(const uint8* src_argb1555, uint8* dst_y, int width) { for (int x = 0; x < width; ++x) { uint8 b = src_argb1555[0] & 0x1f; uint8 g = (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3); uint8 r = (src_argb1555[1] & 0x7c) >> 2; b = (b << 3) | (b >> 2); g = (g << 3) | (g >> 2); r = (r << 3) | (r >> 2); dst_y[0] = RGBToY(r, g, b); src_argb1555 += 2; dst_y += 1; } } void ARGB4444ToYRow_C(const uint8* src_argb4444, uint8* dst_y, int width) { for (int x = 0; x < width; ++x) { uint8 b = src_argb4444[0] & 0x0f; uint8 g = src_argb4444[0] >> 4; uint8 r = src_argb4444[1] & 0x0f; b = (b << 4) | b; g = (g << 4) | g; r = (r << 4) | r; dst_y[0] = RGBToY(r, g, b); src_argb4444 += 2; dst_y += 1; } } void RGB565ToUVRow_C(const uint8* src_rgb565, int src_stride_rgb565, uint8* dst_u, uint8* dst_v, int width) { const uint8* next_rgb565 = 
src_rgb565 + src_stride_rgb565; for (int x = 0; x < width - 1; x += 2) { uint8 b0 = src_rgb565[0] & 0x1f; uint8 g0 = (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3); uint8 r0 = src_rgb565[1] >> 3; uint8 b1 = src_rgb565[2] & 0x1f; uint8 g1 = (src_rgb565[2] >> 5) | ((src_rgb565[3] & 0x07) << 3); uint8 r1 = src_rgb565[3] >> 3; uint8 b2 = next_rgb565[0] & 0x1f; uint8 g2 = (next_rgb565[0] >> 5) | ((next_rgb565[1] & 0x07) << 3); uint8 r2 = next_rgb565[1] >> 3; uint8 b3 = next_rgb565[2] & 0x1f; uint8 g3 = (next_rgb565[2] >> 5) | ((next_rgb565[3] & 0x07) << 3); uint8 r3 = next_rgb565[3] >> 3; uint8 b = (b0 + b1 + b2 + b3); // 565 * 4 = 787. uint8 g = (g0 + g1 + g2 + g3); uint8 r = (r0 + r1 + r2 + r3); b = (b << 1) | (b >> 6); // 787 -> 888. r = (r << 1) | (r >> 6); dst_u[0] = RGBToU(r, g, b); dst_v[0] = RGBToV(r, g, b); src_rgb565 += 4; next_rgb565 += 4; dst_u += 1; dst_v += 1; } if (width & 1) { uint8 b0 = src_rgb565[0] & 0x1f; uint8 g0 = (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3); uint8 r0 = src_rgb565[1] >> 3; uint8 b2 = next_rgb565[0] & 0x1f; uint8 g2 = (next_rgb565[0] >> 5) | ((next_rgb565[1] & 0x07) << 3); uint8 r2 = next_rgb565[1] >> 3; uint8 b = (b0 + b2); // 565 * 2 = 676. 
uint8 g = (g0 + g2); uint8 r = (r0 + r2); b = (b << 2) | (b >> 4); // 676 -> 888 g = (g << 1) | (g >> 6); r = (r << 2) | (r >> 4); dst_u[0] = RGBToU(r, g, b); dst_v[0] = RGBToV(r, g, b); } } void ARGB1555ToUVRow_C(const uint8* src_argb1555, int src_stride_argb1555, uint8* dst_u, uint8* dst_v, int width) { const uint8* next_argb1555 = src_argb1555 + src_stride_argb1555; for (int x = 0; x < width - 1; x += 2) { uint8 b0 = src_argb1555[0] & 0x1f; uint8 g0 = (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3); uint8 r0 = (src_argb1555[1] & 0x7c) >> 2; uint8 b1 = src_argb1555[2] & 0x1f; uint8 g1 = (src_argb1555[2] >> 5) | ((src_argb1555[3] & 0x03) << 3); uint8 r1 = (src_argb1555[3] & 0x7c) >> 2; uint8 b2 = next_argb1555[0] & 0x1f; uint8 g2 = (next_argb1555[0] >> 5) | ((next_argb1555[1] & 0x03) << 3); uint8 r2 = (next_argb1555[1] & 0x7c) >> 2; uint8 b3 = next_argb1555[2] & 0x1f; uint8 g3 = (next_argb1555[2] >> 5) | ((next_argb1555[3] & 0x03) << 3); uint8 r3 = (next_argb1555[3] & 0x7c) >> 2; uint8 b = (b0 + b1 + b2 + b3); // 555 * 4 = 777. uint8 g = (g0 + g1 + g2 + g3); uint8 r = (r0 + r1 + r2 + r3); b = (b << 1) | (b >> 6); // 777 -> 888. g = (g << 1) | (g >> 6); r = (r << 1) | (r >> 6); dst_u[0] = RGBToU(r, g, b); dst_v[0] = RGBToV(r, g, b); src_argb1555 += 4; next_argb1555 += 4; dst_u += 1; dst_v += 1; } if (width & 1) { uint8 b0 = src_argb1555[0] & 0x1f; uint8 g0 = (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3); uint8 r0 = (src_argb1555[1] & 0x7c) >> 2; uint8 b2 = next_argb1555[0] & 0x1f; uint8 g2 = (next_argb1555[0] >> 5) | ((next_argb1555[1] & 0x03) << 3); uint8 r2 = next_argb1555[1] >> 3; uint8 b = (b0 + b2); // 555 * 2 = 666. uint8 g = (g0 + g2); uint8 r = (r0 + r2); b = (b << 2) | (b >> 4); // 666 -> 888. 
g = (g << 2) | (g >> 4); r = (r << 2) | (r >> 4); dst_u[0] = RGBToU(r, g, b); dst_v[0] = RGBToV(r, g, b); } } void ARGB4444ToUVRow_C(const uint8* src_argb4444, int src_stride_argb4444, uint8* dst_u, uint8* dst_v, int width) { const uint8* next_argb4444 = src_argb4444 + src_stride_argb4444; for (int x = 0; x < width - 1; x += 2) { uint8 b0 = src_argb4444[0] & 0x0f; uint8 g0 = src_argb4444[0] >> 4; uint8 r0 = src_argb4444[1] & 0x0f; uint8 b1 = src_argb4444[2] & 0x0f; uint8 g1 = src_argb4444[2] >> 4; uint8 r1 = src_argb4444[3] & 0x0f; uint8 b2 = next_argb4444[0] & 0x0f; uint8 g2 = next_argb4444[0] >> 4; uint8 r2 = next_argb4444[1] & 0x0f; uint8 b3 = next_argb4444[2] & 0x0f; uint8 g3 = next_argb4444[2] >> 4; uint8 r3 = next_argb4444[3] & 0x0f; uint8 b = (b0 + b1 + b2 + b3); // 444 * 4 = 666. uint8 g = (g0 + g1 + g2 + g3); uint8 r = (r0 + r1 + r2 + r3); b = (b << 2) | (b >> 4); // 666 -> 888. g = (g << 2) | (g >> 4); r = (r << 2) | (r >> 4); dst_u[0] = RGBToU(r, g, b); dst_v[0] = RGBToV(r, g, b); src_argb4444 += 4; next_argb4444 += 4; dst_u += 1; dst_v += 1; } if (width & 1) { uint8 b0 = src_argb4444[0] & 0x0f; uint8 g0 = src_argb4444[0] >> 4; uint8 r0 = src_argb4444[1] & 0x0f; uint8 b2 = next_argb4444[0] & 0x0f; uint8 g2 = next_argb4444[0] >> 4; uint8 r2 = next_argb4444[1] & 0x0f; uint8 b = (b0 + b2); // 444 * 2 = 555. uint8 g = (g0 + g2); uint8 r = (r0 + r2); b = (b << 3) | (b >> 2); // 555 -> 888. 
g = (g << 3) | (g >> 2); r = (r << 3) | (r >> 2); dst_u[0] = RGBToU(r, g, b); dst_v[0] = RGBToV(r, g, b); } } void ARGBToUV444Row_C(const uint8* src_argb, uint8* dst_u, uint8* dst_v, int width) { for (int x = 0; x < width; ++x) { uint8 ab = src_argb[0]; uint8 ag = src_argb[1]; uint8 ar = src_argb[2]; dst_u[0] = RGBToU(ar, ag, ab); dst_v[0] = RGBToV(ar, ag, ab); src_argb += 4; dst_u += 1; dst_v += 1; } } void ARGBToUV422Row_C(const uint8* src_argb, uint8* dst_u, uint8* dst_v, int width) { for (int x = 0; x < width - 1; x += 2) { uint8 ab = (src_argb[0] + src_argb[4]) >> 1; uint8 ag = (src_argb[1] + src_argb[5]) >> 1; uint8 ar = (src_argb[2] + src_argb[6]) >> 1; dst_u[0] = RGBToU(ar, ag, ab); dst_v[0] = RGBToV(ar, ag, ab); src_argb += 8; dst_u += 1; dst_v += 1; } if (width & 1) { uint8 ab = src_argb[0]; uint8 ag = src_argb[1]; uint8 ar = src_argb[2]; dst_u[0] = RGBToU(ar, ag, ab); dst_v[0] = RGBToV(ar, ag, ab); } } void ARGBToUV411Row_C(const uint8* src_argb, uint8* dst_u, uint8* dst_v, int width) { for (int x = 0; x < width - 3; x += 4) { uint8 ab = (src_argb[0] + src_argb[4] + src_argb[8] + src_argb[12]) >> 2; uint8 ag = (src_argb[1] + src_argb[5] + src_argb[9] + src_argb[13]) >> 2; uint8 ar = (src_argb[2] + src_argb[6] + src_argb[10] + src_argb[14]) >> 2; dst_u[0] = RGBToU(ar, ag, ab); dst_v[0] = RGBToV(ar, ag, ab); src_argb += 16; dst_u += 1; dst_v += 1; } if ((width & 3) == 3) { uint8 ab = (src_argb[0] + src_argb[4] + src_argb[8]) / 3; uint8 ag = (src_argb[1] + src_argb[5] + src_argb[9]) / 3; uint8 ar = (src_argb[2] + src_argb[6] + src_argb[10]) / 3; dst_u[0] = RGBToU(ar, ag, ab); dst_v[0] = RGBToV(ar, ag, ab); } else if ((width & 3) == 2) { uint8 ab = (src_argb[0] + src_argb[4]) >> 1; uint8 ag = (src_argb[1] + src_argb[5]) >> 1; uint8 ar = (src_argb[2] + src_argb[6]) >> 1; dst_u[0] = RGBToU(ar, ag, ab); dst_v[0] = RGBToV(ar, ag, ab); } else if ((width & 3) == 1) { uint8 ab = src_argb[0]; uint8 ag = src_argb[1]; uint8 ar = src_argb[2]; dst_u[0] = RGBToU(ar, ag, 
ab); dst_v[0] = RGBToV(ar, ag, ab); } } void ARGBGrayRow_C(const uint8* src_argb, uint8* dst_argb, int width) { for (int x = 0; x < width; ++x) { uint8 y = RGBToYJ(src_argb[2], src_argb[1], src_argb[0]); dst_argb[2] = dst_argb[1] = dst_argb[0] = y; dst_argb[3] = src_argb[3]; dst_argb += 4; src_argb += 4; } } // Convert a row of image to Sepia tone. void ARGBSepiaRow_C(uint8* dst_argb, int width) { for (int x = 0; x < width; ++x) { int b = dst_argb[0]; int g = dst_argb[1]; int r = dst_argb[2]; int sb = (b * 17 + g * 68 + r * 35) >> 7; int sg = (b * 22 + g * 88 + r * 45) >> 7; int sr = (b * 24 + g * 98 + r * 50) >> 7; // b does not over flow. a is preserved from original. dst_argb[0] = sb; dst_argb[1] = clamp255(sg); dst_argb[2] = clamp255(sr); dst_argb += 4; } } // Apply color matrix to a row of image. Matrix is signed. void ARGBColorMatrixRow_C(uint8* dst_argb, const int8* matrix_argb, int width) { for (int x = 0; x < width; ++x) { int b = dst_argb[0]; int g = dst_argb[1]; int r = dst_argb[2]; int a = dst_argb[3]; int sb = (b * matrix_argb[0] + g * matrix_argb[1] + r * matrix_argb[2] + a * matrix_argb[3]) >> 7; int sg = (b * matrix_argb[4] + g * matrix_argb[5] + r * matrix_argb[6] + a * matrix_argb[7]) >> 7; int sr = (b * matrix_argb[8] + g * matrix_argb[9] + r * matrix_argb[10] + a * matrix_argb[11]) >> 7; dst_argb[0] = Clamp(sb); dst_argb[1] = Clamp(sg); dst_argb[2] = Clamp(sr); dst_argb += 4; } } // Apply color table to a row of image. 
void ARGBColorTableRow_C(uint8* dst_argb, const uint8* table_argb, int width) { for (int x = 0; x < width; ++x) { int b = dst_argb[0]; int g = dst_argb[1]; int r = dst_argb[2]; int a = dst_argb[3]; dst_argb[0] = table_argb[b * 4 + 0]; dst_argb[1] = table_argb[g * 4 + 1]; dst_argb[2] = table_argb[r * 4 + 2]; dst_argb[3] = table_argb[a * 4 + 3]; dst_argb += 4; } } void ARGBQuantizeRow_C(uint8* dst_argb, int scale, int interval_size, int interval_offset, int width) { for (int x = 0; x < width; ++x) { int b = dst_argb[0]; int g = dst_argb[1]; int r = dst_argb[2]; dst_argb[0] = (b * scale >> 16) * interval_size + interval_offset; dst_argb[1] = (g * scale >> 16) * interval_size + interval_offset; dst_argb[2] = (r * scale >> 16) * interval_size + interval_offset; dst_argb += 4; } } #define REPEAT8(v) (v) | ((v) << 8) #define SHADE(f, v) v * f >> 24 void ARGBShadeRow_C(const uint8* src_argb, uint8* dst_argb, int width, uint32 value) { const uint32 b_scale = REPEAT8(value & 0xff); const uint32 g_scale = REPEAT8((value >> 8) & 0xff); const uint32 r_scale = REPEAT8((value >> 16) & 0xff); const uint32 a_scale = REPEAT8(value >> 24); for (int i = 0; i < width; ++i) { const uint32 b = REPEAT8(src_argb[0]); const uint32 g = REPEAT8(src_argb[1]); const uint32 r = REPEAT8(src_argb[2]); const uint32 a = REPEAT8(src_argb[3]); dst_argb[0] = SHADE(b, b_scale); dst_argb[1] = SHADE(g, g_scale); dst_argb[2] = SHADE(r, r_scale); dst_argb[3] = SHADE(a, a_scale); src_argb += 4; dst_argb += 4; } } #undef REPEAT8 #undef SHADE #define REPEAT8(v) (v) | ((v) << 8) #define SHADE(f, v) v * f >> 16 void ARGBMultiplyRow_C(const uint8* src_argb0, const uint8* src_argb1, uint8* dst_argb, int width) { for (int i = 0; i < width; ++i) { const uint32 b = REPEAT8(src_argb0[0]); const uint32 g = REPEAT8(src_argb0[1]); const uint32 r = REPEAT8(src_argb0[2]); const uint32 a = REPEAT8(src_argb0[3]); const uint32 b_scale = src_argb1[0]; const uint32 g_scale = src_argb1[1]; const uint32 r_scale = src_argb1[2]; 
const uint32 a_scale = src_argb1[3]; dst_argb[0] = SHADE(b, b_scale); dst_argb[1] = SHADE(g, g_scale); dst_argb[2] = SHADE(r, r_scale); dst_argb[3] = SHADE(a, a_scale); src_argb0 += 4; src_argb1 += 4; dst_argb += 4; } } #undef REPEAT8 #undef SHADE #define SHADE(f, v) clamp255(v + f) void ARGBAddRow_C(const uint8* src_argb0, const uint8* src_argb1, uint8* dst_argb, int width) { for (int i = 0; i < width; ++i) { const int b = src_argb0[0]; const int g = src_argb0[1]; const int r = src_argb0[2]; const int a = src_argb0[3]; const int b_add = src_argb1[0]; const int g_add = src_argb1[1]; const int r_add = src_argb1[2]; const int a_add = src_argb1[3]; dst_argb[0] = SHADE(b, b_add); dst_argb[1] = SHADE(g, g_add); dst_argb[2] = SHADE(r, r_add); dst_argb[3] = SHADE(a, a_add); src_argb0 += 4; src_argb1 += 4; dst_argb += 4; } } #undef SHADE #define SHADE(f, v) clamp0(f - v) void ARGBSubtractRow_C(const uint8* src_argb0, const uint8* src_argb1, uint8* dst_argb, int width) { for (int i = 0; i < width; ++i) { const int b = src_argb0[0]; const int g = src_argb0[1]; const int r = src_argb0[2]; const int a = src_argb0[3]; const int b_sub = src_argb1[0]; const int g_sub = src_argb1[1]; const int r_sub = src_argb1[2]; const int a_sub = src_argb1[3]; dst_argb[0] = SHADE(b, b_sub); dst_argb[1] = SHADE(g, g_sub); dst_argb[2] = SHADE(r, r_sub); dst_argb[3] = SHADE(a, a_sub); src_argb0 += 4; src_argb1 += 4; dst_argb += 4; } } #undef SHADE // Sobel functions which mimics SSSE3. 
void SobelXRow_C(const uint8* src_y0, const uint8* src_y1, const uint8* src_y2, uint8* dst_sobelx, int width) { for (int i = 0; i < width; ++i) { int a = src_y0[i]; int b = src_y1[i]; int c = src_y2[i]; int a_sub = src_y0[i + 2]; int b_sub = src_y1[i + 2]; int c_sub = src_y2[i + 2]; int a_diff = a - a_sub; int b_diff = b - b_sub; int c_diff = c - c_sub; int sobel = Abs(a_diff + b_diff * 2 + c_diff); dst_sobelx[i] = static_cast(clamp255(sobel)); } } void SobelYRow_C(const uint8* src_y0, const uint8* src_y1, uint8* dst_sobely, int width) { for (int i = 0; i < width; ++i) { int a = src_y0[i + 0]; int b = src_y0[i + 1]; int c = src_y0[i + 2]; int a_sub = src_y1[i + 0]; int b_sub = src_y1[i + 1]; int c_sub = src_y1[i + 2]; int a_diff = a - a_sub; int b_diff = b - b_sub; int c_diff = c - c_sub; int sobel = Abs(a_diff + b_diff * 2 + c_diff); dst_sobely[i] = static_cast(clamp255(sobel)); } } void SobelRow_C(const uint8* src_sobelx, const uint8* src_sobely, uint8* dst_argb, int width) { for (int i = 0; i < width; ++i) { int r = src_sobelx[i]; int b = src_sobely[i]; int s = clamp255(r + b); dst_argb[0] = static_cast(s); dst_argb[1] = static_cast(s); dst_argb[2] = static_cast(s); dst_argb[3] = static_cast(255u); dst_argb += 4; } } void SobelXYRow_C(const uint8* src_sobelx, const uint8* src_sobely, uint8* dst_argb, int width) { for (int i = 0; i < width; ++i) { int r = src_sobelx[i]; int b = src_sobely[i]; int g = clamp255(r + b); dst_argb[0] = static_cast(b); dst_argb[1] = static_cast(g); dst_argb[2] = static_cast(r); dst_argb[3] = static_cast(255u); dst_argb += 4; } } void I400ToARGBRow_C(const uint8* src_y, uint8* dst_argb, int width) { // Copy a Y to RGB. for (int x = 0; x < width; ++x) { uint8 y = src_y[0]; dst_argb[2] = dst_argb[1] = dst_argb[0] = y; dst_argb[3] = 255u; dst_argb += 4; ++src_y; } } // C reference code that mimics the YUV assembly. 
#define YG 74 /* static_cast(1.164 * 64 + 0.5) */ #define UB 127 /* min(63,static_cast(2.018 * 64)) */ #define UG -25 /* static_cast(-0.391 * 64 - 0.5) */ #define UR 0 #define VB 0 #define VG -52 /* static_cast(-0.813 * 64 - 0.5) */ #define VR 102 /* static_cast(1.596 * 64 + 0.5) */ // Bias #define BB UB * 128 + VB * 128 #define BG UG * 128 + VG * 128 #define BR UR * 128 + VR * 128 static __inline void YuvPixel(uint8 y, uint8 u, uint8 v, uint8* b, uint8* g, uint8* r) { int32 y1 = (static_cast(y) - 16) * YG; *b = Clamp(static_cast((u * UB + v * VB) - (BB) + y1) >> 6); *g = Clamp(static_cast((u * UG + v * VG) - (BG) + y1) >> 6); *r = Clamp(static_cast((u * UR + v * VR) - (BR) + y1) >> 6); } #if !defined(LIBYUV_DISABLE_NEON) && \ (defined(__ARM_NEON__) || defined(LIBYUV_NEON)) // C mimic assembly. // TODO(fbarchard): Remove subsampling from Neon. void I444ToARGBRow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { uint8 u = (src_u[0] + src_u[1] + 1) >> 1; uint8 v = (src_v[0] + src_v[1] + 1) >> 1; YuvPixel(src_y[0], u, v, rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_y[1], u, v, rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_y += 2; src_u += 2; src_v += 2; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); } } #else void I444ToARGBRow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width; ++x) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; src_y += 1; src_u += 1; src_v += 1; rgb_buf += 4; // Advance 1 pixel. 
} } #endif // Also used for 420 void I422ToARGBRow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_y += 2; src_u += 1; src_v += 1; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; } } void I422ToRGB24Row_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 3, rgb_buf + 4, rgb_buf + 5); src_y += 2; src_u += 1; src_v += 1; rgb_buf += 6; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); } } void I422ToRAWRow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 2, rgb_buf + 1, rgb_buf + 0); YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 5, rgb_buf + 4, rgb_buf + 3); src_y += 2; src_u += 1; src_v += 1; rgb_buf += 6; // Advance 2 pixels. 
} if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 2, rgb_buf + 1, rgb_buf + 0); } } void I422ToARGB4444Row_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* dst_argb4444, int width) { uint8 b0; uint8 g0; uint8 r0; uint8 b1; uint8 g1; uint8 r1; for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], &b0, &g0, &r0); YuvPixel(src_y[1], src_u[0], src_v[0], &b1, &g1, &r1); b0 = b0 >> 4; g0 = g0 >> 4; r0 = r0 >> 4; b1 = b1 >> 4; g1 = g1 >> 4; r1 = r1 >> 4; *reinterpret_cast(dst_argb4444) = b0 | (g0 << 4) | (r0 << 8) | (b1 << 16) | (g1 << 20) | (r1 << 24) | 0xf000f000; src_y += 2; src_u += 1; src_v += 1; dst_argb4444 += 4; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], &b0, &g0, &r0); b0 = b0 >> 4; g0 = g0 >> 4; r0 = r0 >> 4; *reinterpret_cast(dst_argb4444) = b0 | (g0 << 4) | (r0 << 8) | 0xf000; } } void I422ToARGB1555Row_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* dst_argb1555, int width) { uint8 b0; uint8 g0; uint8 r0; uint8 b1; uint8 g1; uint8 r1; for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], &b0, &g0, &r0); YuvPixel(src_y[1], src_u[0], src_v[0], &b1, &g1, &r1); b0 = b0 >> 3; g0 = g0 >> 3; r0 = r0 >> 3; b1 = b1 >> 3; g1 = g1 >> 3; r1 = r1 >> 3; *reinterpret_cast(dst_argb1555) = b0 | (g0 << 5) | (r0 << 10) | (b1 << 16) | (g1 << 21) | (r1 << 26) | 0x80008000; src_y += 2; src_u += 1; src_v += 1; dst_argb1555 += 4; // Advance 2 pixels. 
} if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], &b0, &g0, &r0); b0 = b0 >> 3; g0 = g0 >> 3; r0 = r0 >> 3; *reinterpret_cast(dst_argb1555) = b0 | (g0 << 5) | (r0 << 10) | 0x8000; } } void I422ToRGB565Row_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* dst_rgb565, int width) { uint8 b0; uint8 g0; uint8 r0; uint8 b1; uint8 g1; uint8 r1; for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], &b0, &g0, &r0); YuvPixel(src_y[1], src_u[0], src_v[0], &b1, &g1, &r1); b0 = b0 >> 3; g0 = g0 >> 2; r0 = r0 >> 3; b1 = b1 >> 3; g1 = g1 >> 2; r1 = r1 >> 3; *reinterpret_cast(dst_rgb565) = b0 | (g0 << 5) | (r0 << 11) | (b1 << 16) | (g1 << 21) | (r1 << 27); src_y += 2; src_u += 1; src_v += 1; dst_rgb565 += 4; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], &b0, &g0, &r0); b0 = b0 >> 3; g0 = g0 >> 2; r0 = r0 >> 3; *reinterpret_cast(dst_rgb565) = b0 | (g0 << 5) | (r0 << 11); } } void I411ToARGBRow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 3; x += 4) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; YuvPixel(src_y[2], src_u[0], src_v[0], rgb_buf + 8, rgb_buf + 9, rgb_buf + 10); rgb_buf[11] = 255; YuvPixel(src_y[3], src_u[0], src_v[0], rgb_buf + 12, rgb_buf + 13, rgb_buf + 14); rgb_buf[15] = 255; src_y += 4; src_u += 1; src_v += 1; rgb_buf += 16; // Advance 4 pixels. } if (width & 2) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_y += 2; rgb_buf += 8; // Advance 2 pixels. 
} if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; } } void NV12ToARGBRow_C(const uint8* src_y, const uint8* usrc_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], usrc_v[0], usrc_v[1], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_y[1], usrc_v[0], usrc_v[1], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_y += 2; usrc_v += 2; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], usrc_v[0], usrc_v[1], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; } } void NV21ToARGBRow_C(const uint8* src_y, const uint8* src_vu, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_vu[1], src_vu[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_y[1], src_vu[1], src_vu[0], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_y += 2; src_vu += 2; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_vu[1], src_vu[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; } } void NV12ToRGB565Row_C(const uint8* src_y, const uint8* usrc_v, uint8* dst_rgb565, int width) { uint8 b0; uint8 g0; uint8 r0; uint8 b1; uint8 g1; uint8 r1; for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], usrc_v[0], usrc_v[1], &b0, &g0, &r0); YuvPixel(src_y[1], usrc_v[0], usrc_v[1], &b1, &g1, &r1); b0 = b0 >> 3; g0 = g0 >> 2; r0 = r0 >> 3; b1 = b1 >> 3; g1 = g1 >> 2; r1 = r1 >> 3; *reinterpret_cast(dst_rgb565) = b0 | (g0 << 5) | (r0 << 11) | (b1 << 16) | (g1 << 21) | (r1 << 27); src_y += 2; usrc_v += 2; dst_rgb565 += 4; // Advance 2 pixels. 
} if (width & 1) { YuvPixel(src_y[0], usrc_v[0], usrc_v[1], &b0, &g0, &r0); b0 = b0 >> 3; g0 = g0 >> 2; r0 = r0 >> 3; *reinterpret_cast(dst_rgb565) = b0 | (g0 << 5) | (r0 << 11); } } void NV21ToRGB565Row_C(const uint8* src_y, const uint8* vsrc_u, uint8* dst_rgb565, int width) { uint8 b0; uint8 g0; uint8 r0; uint8 b1; uint8 g1; uint8 r1; for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], vsrc_u[1], vsrc_u[0], &b0, &g0, &r0); YuvPixel(src_y[1], vsrc_u[1], vsrc_u[0], &b1, &g1, &r1); b0 = b0 >> 3; g0 = g0 >> 2; r0 = r0 >> 3; b1 = b1 >> 3; g1 = g1 >> 2; r1 = r1 >> 3; *reinterpret_cast(dst_rgb565) = b0 | (g0 << 5) | (r0 << 11) | (b1 << 16) | (g1 << 21) | (r1 << 27); src_y += 2; vsrc_u += 2; dst_rgb565 += 4; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], vsrc_u[1], vsrc_u[0], &b0, &g0, &r0); b0 = b0 >> 3; g0 = g0 >> 2; r0 = r0 >> 3; *reinterpret_cast(dst_rgb565) = b0 | (g0 << 5) | (r0 << 11); } } void YUY2ToARGBRow_C(const uint8* src_yuy2, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_yuy2[0], src_yuy2[1], src_yuy2[3], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_yuy2[2], src_yuy2[1], src_yuy2[3], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_yuy2 += 4; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_yuy2[0], src_yuy2[1], src_yuy2[3], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; } } void UYVYToARGBRow_C(const uint8* src_uyvy, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_uyvy[1], src_uyvy[0], src_uyvy[2], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_uyvy[3], src_uyvy[0], src_uyvy[2], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_uyvy += 4; rgb_buf += 8; // Advance 2 pixels. 
} if (width & 1) { YuvPixel(src_uyvy[1], src_uyvy[0], src_uyvy[2], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; } } void I422ToBGRARow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 3, rgb_buf + 2, rgb_buf + 1); rgb_buf[0] = 255; YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 7, rgb_buf + 6, rgb_buf + 5); rgb_buf[4] = 255; src_y += 2; src_u += 1; src_v += 1; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 3, rgb_buf + 2, rgb_buf + 1); rgb_buf[0] = 255; } } void I422ToABGRRow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 2, rgb_buf + 1, rgb_buf + 0); rgb_buf[3] = 255; YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 6, rgb_buf + 5, rgb_buf + 4); rgb_buf[7] = 255; src_y += 2; src_u += 1; src_v += 1; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 2, rgb_buf + 1, rgb_buf + 0); rgb_buf[3] = 255; } } void I422ToRGBARow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 1, rgb_buf + 2, rgb_buf + 3); rgb_buf[0] = 255; YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 5, rgb_buf + 6, rgb_buf + 7); rgb_buf[4] = 255; src_y += 2; src_u += 1; src_v += 1; rgb_buf += 8; // Advance 2 pixels. 
} if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 1, rgb_buf + 2, rgb_buf + 3); rgb_buf[0] = 255; } } void YToARGBRow_C(const uint8* src_y, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], 128, 128, rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_y[1], 128, 128, rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_y += 2; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], 128, 128, rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; } } void MirrorRow_C(const uint8* src, uint8* dst, int width) { src += width - 1; for (int x = 0; x < width - 1; x += 2) { dst[x] = src[0]; dst[x + 1] = src[-1]; src -= 2; } if (width & 1) { dst[width - 1] = src[0]; } } void MirrorUVRow_C(const uint8* src_uv, uint8* dst_u, uint8* dst_v, int width) { src_uv += (width - 1) << 1; for (int x = 0; x < width - 1; x += 2) { dst_u[x] = src_uv[0]; dst_u[x + 1] = src_uv[-2]; dst_v[x] = src_uv[1]; dst_v[x + 1] = src_uv[-2 + 1]; src_uv -= 4; } if (width & 1) { dst_u[width - 1] = src_uv[0]; dst_v[width - 1] = src_uv[1]; } } void ARGBMirrorRow_C(const uint8* src, uint8* dst, int width) { const uint32* src32 = reinterpret_cast(src); uint32* dst32 = reinterpret_cast(dst); src32 += width - 1; for (int x = 0; x < width - 1; x += 2) { dst32[x] = src32[0]; dst32[x + 1] = src32[-1]; src32 -= 2; } if (width & 1) { dst32[width - 1] = src32[0]; } } void SplitUVRow_C(const uint8* src_uv, uint8* dst_u, uint8* dst_v, int width) { for (int x = 0; x < width - 1; x += 2) { dst_u[x] = src_uv[0]; dst_u[x + 1] = src_uv[2]; dst_v[x] = src_uv[1]; dst_v[x + 1] = src_uv[3]; src_uv += 4; } if (width & 1) { dst_u[width - 1] = src_uv[0]; dst_v[width - 1] = src_uv[1]; } } void MergeUVRow_C(const uint8* src_u, const uint8* src_v, uint8* dst_uv, int width) { for (int x = 0; x < width - 1; x += 2) { dst_uv[0] = src_u[x]; dst_uv[1] = src_v[x]; dst_uv[2] = src_u[x + 1]; dst_uv[3] = src_v[x + 1]; dst_uv += 4; } if 
(width & 1) { dst_uv[0] = src_u[width - 1]; dst_uv[1] = src_v[width - 1]; } } void CopyRow_C(const uint8* src, uint8* dst, int count) { memcpy(dst, src, count); } void SetRow_C(uint8* dst, uint32 v8, int count) { #ifdef _MSC_VER // VC will generate rep stosb. for (int x = 0; x < count; ++x) { dst[x] = v8; } #else memset(dst, v8, count); #endif } void ARGBSetRows_C(uint8* dst, uint32 v32, int width, int dst_stride, int height) { for (int y = 0; y < height; ++y) { uint32* d = reinterpret_cast(dst); for (int x = 0; x < width; ++x) { d[x] = v32; } dst += dst_stride; } } // Filter 2 rows of YUY2 UV's (422) into U and V (420). void YUY2ToUVRow_C(const uint8* src_yuy2, int src_stride_yuy2, uint8* dst_u, uint8* dst_v, int width) { // Output a row of UV values, filtering 2 rows of YUY2. for (int x = 0; x < width; x += 2) { dst_u[0] = (src_yuy2[1] + src_yuy2[src_stride_yuy2 + 1] + 1) >> 1; dst_v[0] = (src_yuy2[3] + src_yuy2[src_stride_yuy2 + 3] + 1) >> 1; src_yuy2 += 4; dst_u += 1; dst_v += 1; } } // Copy row of YUY2 UV's (422) into U and V (422). void YUY2ToUV422Row_C(const uint8* src_yuy2, uint8* dst_u, uint8* dst_v, int width) { // Output a row of UV values. for (int x = 0; x < width; x += 2) { dst_u[0] = src_yuy2[1]; dst_v[0] = src_yuy2[3]; src_yuy2 += 4; dst_u += 1; dst_v += 1; } } // Copy row of YUY2 Y's (422) into Y (420/422). void YUY2ToYRow_C(const uint8* src_yuy2, uint8* dst_y, int width) { // Output a row of Y values. for (int x = 0; x < width - 1; x += 2) { dst_y[x] = src_yuy2[0]; dst_y[x + 1] = src_yuy2[2]; src_yuy2 += 4; } if (width & 1) { dst_y[width - 1] = src_yuy2[0]; } } // Filter 2 rows of UYVY UV's (422) into U and V (420). void UYVYToUVRow_C(const uint8* src_uyvy, int src_stride_uyvy, uint8* dst_u, uint8* dst_v, int width) { // Output a row of UV values. 
for (int x = 0; x < width; x += 2) { dst_u[0] = (src_uyvy[0] + src_uyvy[src_stride_uyvy + 0] + 1) >> 1; dst_v[0] = (src_uyvy[2] + src_uyvy[src_stride_uyvy + 2] + 1) >> 1; src_uyvy += 4; dst_u += 1; dst_v += 1; } } // Copy row of UYVY UV's (422) into U and V (422). void UYVYToUV422Row_C(const uint8* src_uyvy, uint8* dst_u, uint8* dst_v, int width) { // Output a row of UV values. for (int x = 0; x < width; x += 2) { dst_u[0] = src_uyvy[0]; dst_v[0] = src_uyvy[2]; src_uyvy += 4; dst_u += 1; dst_v += 1; } } // Copy row of UYVY Y's (422) into Y (420/422). void UYVYToYRow_C(const uint8* src_uyvy, uint8* dst_y, int width) { // Output a row of Y values. for (int x = 0; x < width - 1; x += 2) { dst_y[x] = src_uyvy[1]; dst_y[x + 1] = src_uyvy[3]; src_uyvy += 4; } if (width & 1) { dst_y[width - 1] = src_uyvy[1]; } } #define BLEND(f, b, a) (((256 - a) * b) >> 8) + f // Blend src_argb0 over src_argb1 and store to dst_argb. // dst_argb may be src_argb0 or src_argb1. // This code mimics the SSSE3 version for better testability. 
void ARGBBlendRow_C(const uint8* src_argb0, const uint8* src_argb1, uint8* dst_argb, int width) { for (int x = 0; x < width - 1; x += 2) { uint32 fb = src_argb0[0]; uint32 fg = src_argb0[1]; uint32 fr = src_argb0[2]; uint32 a = src_argb0[3]; uint32 bb = src_argb1[0]; uint32 bg = src_argb1[1]; uint32 br = src_argb1[2]; dst_argb[0] = BLEND(fb, bb, a); dst_argb[1] = BLEND(fg, bg, a); dst_argb[2] = BLEND(fr, br, a); dst_argb[3] = 255u; fb = src_argb0[4 + 0]; fg = src_argb0[4 + 1]; fr = src_argb0[4 + 2]; a = src_argb0[4 + 3]; bb = src_argb1[4 + 0]; bg = src_argb1[4 + 1]; br = src_argb1[4 + 2]; dst_argb[4 + 0] = BLEND(fb, bb, a); dst_argb[4 + 1] = BLEND(fg, bg, a); dst_argb[4 + 2] = BLEND(fr, br, a); dst_argb[4 + 3] = 255u; src_argb0 += 8; src_argb1 += 8; dst_argb += 8; } if (width & 1) { uint32 fb = src_argb0[0]; uint32 fg = src_argb0[1]; uint32 fr = src_argb0[2]; uint32 a = src_argb0[3]; uint32 bb = src_argb1[0]; uint32 bg = src_argb1[1]; uint32 br = src_argb1[2]; dst_argb[0] = BLEND(fb, bb, a); dst_argb[1] = BLEND(fg, bg, a); dst_argb[2] = BLEND(fr, br, a); dst_argb[3] = 255u; } } #undef BLEND #define ATTENUATE(f, a) (a | (a << 8)) * (f | (f << 8)) >> 24 // Multiply source RGB by alpha and store to destination. // This code mimics the SSSE3 version for better testability. 
void ARGBAttenuateRow_C(const uint8* src_argb, uint8* dst_argb, int width) { for (int i = 0; i < width - 1; i += 2) { uint32 b = src_argb[0]; uint32 g = src_argb[1]; uint32 r = src_argb[2]; uint32 a = src_argb[3]; dst_argb[0] = ATTENUATE(b, a); dst_argb[1] = ATTENUATE(g, a); dst_argb[2] = ATTENUATE(r, a); dst_argb[3] = a; b = src_argb[4]; g = src_argb[5]; r = src_argb[6]; a = src_argb[7]; dst_argb[4] = ATTENUATE(b, a); dst_argb[5] = ATTENUATE(g, a); dst_argb[6] = ATTENUATE(r, a); dst_argb[7] = a; src_argb += 8; dst_argb += 8; } if (width & 1) { const uint32 b = src_argb[0]; const uint32 g = src_argb[1]; const uint32 r = src_argb[2]; const uint32 a = src_argb[3]; dst_argb[0] = ATTENUATE(b, a); dst_argb[1] = ATTENUATE(g, a); dst_argb[2] = ATTENUATE(r, a); dst_argb[3] = a; } } #undef ATTENUATE // Divide source RGB by alpha and store to destination. // b = (b * 255 + (a / 2)) / a; // g = (g * 255 + (a / 2)) / a; // r = (r * 255 + (a / 2)) / a; // Reciprocal method is off by 1 on some values. ie 125 // 8.8 fixed point inverse table with 1.0 in upper short and 1 / a in lower. 
#define T(a) 0x01000000 + (0x10000 / a) uint32 fixed_invtbl8[256] = { 0x01000000, 0x0100ffff, T(0x02), T(0x03), T(0x04), T(0x05), T(0x06), T(0x07), T(0x08), T(0x09), T(0x0a), T(0x0b), T(0x0c), T(0x0d), T(0x0e), T(0x0f), T(0x10), T(0x11), T(0x12), T(0x13), T(0x14), T(0x15), T(0x16), T(0x17), T(0x18), T(0x19), T(0x1a), T(0x1b), T(0x1c), T(0x1d), T(0x1e), T(0x1f), T(0x20), T(0x21), T(0x22), T(0x23), T(0x24), T(0x25), T(0x26), T(0x27), T(0x28), T(0x29), T(0x2a), T(0x2b), T(0x2c), T(0x2d), T(0x2e), T(0x2f), T(0x30), T(0x31), T(0x32), T(0x33), T(0x34), T(0x35), T(0x36), T(0x37), T(0x38), T(0x39), T(0x3a), T(0x3b), T(0x3c), T(0x3d), T(0x3e), T(0x3f), T(0x40), T(0x41), T(0x42), T(0x43), T(0x44), T(0x45), T(0x46), T(0x47), T(0x48), T(0x49), T(0x4a), T(0x4b), T(0x4c), T(0x4d), T(0x4e), T(0x4f), T(0x50), T(0x51), T(0x52), T(0x53), T(0x54), T(0x55), T(0x56), T(0x57), T(0x58), T(0x59), T(0x5a), T(0x5b), T(0x5c), T(0x5d), T(0x5e), T(0x5f), T(0x60), T(0x61), T(0x62), T(0x63), T(0x64), T(0x65), T(0x66), T(0x67), T(0x68), T(0x69), T(0x6a), T(0x6b), T(0x6c), T(0x6d), T(0x6e), T(0x6f), T(0x70), T(0x71), T(0x72), T(0x73), T(0x74), T(0x75), T(0x76), T(0x77), T(0x78), T(0x79), T(0x7a), T(0x7b), T(0x7c), T(0x7d), T(0x7e), T(0x7f), T(0x80), T(0x81), T(0x82), T(0x83), T(0x84), T(0x85), T(0x86), T(0x87), T(0x88), T(0x89), T(0x8a), T(0x8b), T(0x8c), T(0x8d), T(0x8e), T(0x8f), T(0x90), T(0x91), T(0x92), T(0x93), T(0x94), T(0x95), T(0x96), T(0x97), T(0x98), T(0x99), T(0x9a), T(0x9b), T(0x9c), T(0x9d), T(0x9e), T(0x9f), T(0xa0), T(0xa1), T(0xa2), T(0xa3), T(0xa4), T(0xa5), T(0xa6), T(0xa7), T(0xa8), T(0xa9), T(0xaa), T(0xab), T(0xac), T(0xad), T(0xae), T(0xaf), T(0xb0), T(0xb1), T(0xb2), T(0xb3), T(0xb4), T(0xb5), T(0xb6), T(0xb7), T(0xb8), T(0xb9), T(0xba), T(0xbb), T(0xbc), T(0xbd), T(0xbe), T(0xbf), T(0xc0), T(0xc1), T(0xc2), T(0xc3), T(0xc4), T(0xc5), T(0xc6), T(0xc7), T(0xc8), T(0xc9), T(0xca), T(0xcb), T(0xcc), T(0xcd), T(0xce), T(0xcf), T(0xd0), T(0xd1), T(0xd2), T(0xd3), T(0xd4), 
T(0xd5), T(0xd6), T(0xd7), T(0xd8), T(0xd9), T(0xda), T(0xdb), T(0xdc), T(0xdd), T(0xde), T(0xdf), T(0xe0), T(0xe1), T(0xe2), T(0xe3), T(0xe4), T(0xe5), T(0xe6), T(0xe7), T(0xe8), T(0xe9), T(0xea), T(0xeb), T(0xec), T(0xed), T(0xee), T(0xef), T(0xf0), T(0xf1), T(0xf2), T(0xf3), T(0xf4), T(0xf5), T(0xf6), T(0xf7), T(0xf8), T(0xf9), T(0xfa), T(0xfb), T(0xfc), T(0xfd), T(0xfe), 0x01000100 }; #undef T void ARGBUnattenuateRow_C(const uint8* src_argb, uint8* dst_argb, int width) { for (int i = 0; i < width; ++i) { uint32 b = src_argb[0]; uint32 g = src_argb[1]; uint32 r = src_argb[2]; const uint32 a = src_argb[3]; const uint32 ia = fixed_invtbl8[a] & 0xffff; // 8.8 fixed point b = (b * ia) >> 8; g = (g * ia) >> 8; r = (r * ia) >> 8; // Clamping should not be necessary but is free in assembly. dst_argb[0] = clamp255(b); dst_argb[1] = clamp255(g); dst_argb[2] = clamp255(r); dst_argb[3] = a; src_argb += 4; dst_argb += 4; } } void ComputeCumulativeSumRow_C(const uint8* row, int32* cumsum, const int32* previous_cumsum, int width) { int32 row_sum[4] = {0, 0, 0, 0}; for (int x = 0; x < width; ++x) { row_sum[0] += row[x * 4 + 0]; row_sum[1] += row[x * 4 + 1]; row_sum[2] += row[x * 4 + 2]; row_sum[3] += row[x * 4 + 3]; cumsum[x * 4 + 0] = row_sum[0] + previous_cumsum[x * 4 + 0]; cumsum[x * 4 + 1] = row_sum[1] + previous_cumsum[x * 4 + 1]; cumsum[x * 4 + 2] = row_sum[2] + previous_cumsum[x * 4 + 2]; cumsum[x * 4 + 3] = row_sum[3] + previous_cumsum[x * 4 + 3]; } } void CumulativeSumToAverageRow_C(const int32* tl, const int32* bl, int w, int area, uint8* dst, int count) { float ooa = 1.0f / area; for (int i = 0; i < count; ++i) { dst[0] = static_cast((bl[w + 0] + tl[0] - bl[0] - tl[w + 0]) * ooa); dst[1] = static_cast((bl[w + 1] + tl[1] - bl[1] - tl[w + 1]) * ooa); dst[2] = static_cast((bl[w + 2] + tl[2] - bl[2] - tl[w + 2]) * ooa); dst[3] = static_cast((bl[w + 3] + tl[3] - bl[3] - tl[w + 3]) * ooa); dst += 4; tl += 4; bl += 4; } } // Copy pixels from rotated source to destination 
row with a slope. LIBYUV_API void ARGBAffineRow_C(const uint8* src_argb, int src_argb_stride, uint8* dst_argb, const float* uv_dudv, int width) { // Render a row of pixels from source into a buffer. float uv[2]; uv[0] = uv_dudv[0]; uv[1] = uv_dudv[1]; for (int i = 0; i < width; ++i) { int x = static_cast(uv[0]); int y = static_cast(uv[1]); *reinterpret_cast(dst_argb) = *reinterpret_cast(src_argb + y * src_argb_stride + x * 4); dst_argb += 4; uv[0] += uv_dudv[2]; uv[1] += uv_dudv[3]; } } // C version 2x2 -> 2x1. void InterpolateRow_C(uint8* dst_ptr, const uint8* src_ptr, ptrdiff_t src_stride, int width, int source_y_fraction) { int y1_fraction = source_y_fraction; int y0_fraction = 256 - y1_fraction; const uint8* src_ptr1 = src_ptr + src_stride; for (int x = 0; x < width - 1; x += 2) { dst_ptr[0] = (src_ptr[0] * y0_fraction + src_ptr1[0] * y1_fraction) >> 8; dst_ptr[1] = (src_ptr[1] * y0_fraction + src_ptr1[1] * y1_fraction) >> 8; src_ptr += 2; src_ptr1 += 2; dst_ptr += 2; } if (width & 1) { dst_ptr[0] = (src_ptr[0] * y0_fraction + src_ptr1[0] * y1_fraction) >> 8; } } // Blend 2 rows into 1 for conversions such as I422ToI420. void HalfRow_C(const uint8* src_uv, int src_uv_stride, uint8* dst_uv, int pix) { for (int x = 0; x < pix; ++x) { dst_uv[x] = (src_uv[x] + src_uv[src_uv_stride + x] + 1) >> 1; } } // Select 2 channels from ARGB on alternating pixels. e.g. BGBGBGBG void ARGBToBayerRow_C(const uint8* src_argb, uint8* dst_bayer, uint32 selector, int pix) { int index0 = selector & 0xff; int index1 = (selector >> 8) & 0xff; // Copy a row of Bayer. for (int x = 0; x < pix - 1; x += 2) { dst_bayer[0] = src_argb[index0]; dst_bayer[1] = src_argb[index1]; src_argb += 8; dst_bayer += 2; } if (pix & 1) { dst_bayer[0] = src_argb[index0]; } } // Use first 4 shuffler values to reorder ARGB channels. 
void ARGBShuffleRow_C(const uint8* src_argb, uint8* dst_argb, const uint8* shuffler, int pix) { int index0 = shuffler[0]; int index1 = shuffler[1]; int index2 = shuffler[2]; int index3 = shuffler[3]; // Shuffle a row of ARGB. for (int x = 0; x < pix; ++x) { // To support in-place conversion. uint8 b = src_argb[index0]; uint8 g = src_argb[index1]; uint8 r = src_argb[index2]; uint8 a = src_argb[index3]; dst_argb[0] = b; dst_argb[1] = g; dst_argb[2] = r; dst_argb[3] = a; src_argb += 4; dst_argb += 4; } } void I422ToYUY2Row_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* dst_frame, int width) { for (int x = 0; x < width - 1; x += 2) { dst_frame[0] = src_y[0]; dst_frame[1] = src_u[0]; dst_frame[2] = src_y[1]; dst_frame[3] = src_v[0]; dst_frame += 4; src_y += 2; src_u += 1; src_v += 1; } if (width & 1) { dst_frame[0] = src_y[0]; dst_frame[1] = src_u[0]; dst_frame[2] = src_y[0]; // duplicate last y dst_frame[3] = src_v[0]; } } void I422ToUYVYRow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* dst_frame, int width) { for (int x = 0; x < width - 1; x += 2) { dst_frame[0] = src_u[0]; dst_frame[1] = src_y[0]; dst_frame[2] = src_v[0]; dst_frame[3] = src_y[1]; dst_frame += 4; src_y += 2; src_u += 1; src_v += 1; } if (width & 1) { dst_frame[0] = src_u[0]; dst_frame[1] = src_y[0]; dst_frame[2] = src_v[0]; dst_frame[3] = src_y[0]; // duplicate last y } } #if !defined(LIBYUV_DISABLE_X86) // row_win.cc has asm version, but GCC uses 2 step wrapper. 5% slower. // TODO(fbarchard): Handle width > kMaxStride here instead of calling code. 
// Two-step wrappers: convert to ARGB with the SSSE3 row function into an
// aligned scratch row, then repack to the narrower destination format.
#if defined(__x86_64__) || defined(__i386__)
void I422ToRGB565Row_SSSE3(const uint8* src_y, const uint8* src_u,
                           const uint8* src_v, uint8* rgb_buf, int width) {
  SIMD_ALIGNED(uint8 row[kMaxStride]);
  I422ToARGBRow_SSSE3(src_y, src_u, src_v, row, width);
  ARGBToRGB565Row_SSE2(row, rgb_buf, width);
}
#endif  // defined(__x86_64__) || defined(__i386__)

#if defined(_M_IX86) || defined(__x86_64__) || defined(__i386__)
// 422 -> ARGB1555 via an intermediate ARGB row.
void I422ToARGB1555Row_SSSE3(const uint8* src_y, const uint8* src_u,
                             const uint8* src_v, uint8* rgb_buf, int width) {
  SIMD_ALIGNED(uint8 row[kMaxStride]);
  I422ToARGBRow_SSSE3(src_y, src_u, src_v, row, width);
  ARGBToARGB1555Row_SSE2(row, rgb_buf, width);
}

// 422 -> ARGB4444 via an intermediate ARGB row.
void I422ToARGB4444Row_SSSE3(const uint8* src_y, const uint8* src_u,
                             const uint8* src_v, uint8* rgb_buf, int width) {
  SIMD_ALIGNED(uint8 row[kMaxStride]);
  I422ToARGBRow_SSSE3(src_y, src_u, src_v, row, width);
  ARGBToARGB4444Row_SSE2(row, rgb_buf, width);
}

// NV12 -> RGB565 via an intermediate ARGB row.
void NV12ToRGB565Row_SSSE3(const uint8* src_y, const uint8* src_uv,
                           uint8* dst_rgb565, int width) {
  SIMD_ALIGNED(uint8 row[kMaxStride]);
  NV12ToARGBRow_SSSE3(src_y, src_uv, row, width);
  ARGBToRGB565Row_SSE2(row, dst_rgb565, width);
}

// NV21 -> RGB565 via an intermediate ARGB row.
void NV21ToRGB565Row_SSSE3(const uint8* src_y, const uint8* src_vu,
                           uint8* dst_rgb565, int width) {
  SIMD_ALIGNED(uint8 row[kMaxStride]);
  NV21ToARGBRow_SSSE3(src_y, src_vu, row, width);
  ARGBToRGB565Row_SSE2(row, dst_rgb565, width);
}

// YUY2 -> ARGB: split into planar Y/U/V scratch rows, then convert.
void YUY2ToARGBRow_SSSE3(const uint8* src_yuy2, uint8* dst_argb, int width) {
  SIMD_ALIGNED(uint8 row_y[kMaxStride]);
  SIMD_ALIGNED(uint8 row_u[kMaxStride / 2]);
  SIMD_ALIGNED(uint8 row_v[kMaxStride / 2]);
  YUY2ToUV422Row_SSE2(src_yuy2, row_u, row_v, width);
  YUY2ToYRow_SSE2(src_yuy2, row_y, width);
  I422ToARGBRow_SSSE3(row_y, row_u, row_v, dst_argb, width);
}

// As above for sources without 16-byte alignment.
void YUY2ToARGBRow_Unaligned_SSSE3(const uint8* src_yuy2,
                                   uint8* dst_argb, int width) {
  SIMD_ALIGNED(uint8 row_y[kMaxStride]);
  SIMD_ALIGNED(uint8 row_u[kMaxStride / 2]);
  SIMD_ALIGNED(uint8 row_v[kMaxStride / 2]);
  YUY2ToUV422Row_Unaligned_SSE2(src_yuy2, row_u, row_v, width);
  YUY2ToYRow_Unaligned_SSE2(src_yuy2, row_y, width);
  I422ToARGBRow_Unaligned_SSSE3(row_y, row_u, row_v, dst_argb, width);
}

// UYVY -> ARGB: split into planar Y/U/V scratch rows, then convert.
void UYVYToARGBRow_SSSE3(const uint8* src_uyvy, uint8* dst_argb, int width) {
  SIMD_ALIGNED(uint8 row_y[kMaxStride]);
  SIMD_ALIGNED(uint8 row_u[kMaxStride / 2]);
  SIMD_ALIGNED(uint8 row_v[kMaxStride / 2]);
  UYVYToUV422Row_SSE2(src_uyvy, row_u, row_v, width);
  UYVYToYRow_SSE2(src_uyvy, row_y, width);
  I422ToARGBRow_SSSE3(row_y, row_u, row_v, dst_argb, width);
}

// As above for sources without 16-byte alignment.
void UYVYToARGBRow_Unaligned_SSSE3(const uint8* src_uyvy,
                                   uint8* dst_argb, int width) {
  SIMD_ALIGNED(uint8 row_y[kMaxStride]);
  SIMD_ALIGNED(uint8 row_u[kMaxStride / 2]);
  SIMD_ALIGNED(uint8 row_v[kMaxStride / 2]);
  UYVYToUV422Row_Unaligned_SSE2(src_uyvy, row_u, row_v, width);
  UYVYToYRow_Unaligned_SSE2(src_uyvy, row_y, width);
  I422ToARGBRow_Unaligned_SSSE3(row_y, row_u, row_v, dst_argb, width);
}
#endif  // defined(_M_IX86) || defined(__x86_64__) || defined(__i386__)
#endif  // !defined(LIBYUV_DISABLE_X86)

// NOTE(review): clamp0/clamp255 are functions here, not macros; these
// #undefs are harmless no-ops in the branchless configuration.
#undef clamp0
#undef clamp255

#ifdef __cplusplus
}  // extern "C"
}  // namespace libyuv
#endif