/*
 *  Copyright 2011 The LibYuv Project Authors. All rights reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "libyuv/row.h"

#include <string.h>  // For memcpy and memset.

#include "libyuv/basic_types.h"

#ifdef __cplusplus
namespace libyuv {
extern "C" {
#endif

// llvm x86 is poor at ternary operator, so use branchless min/max.
#define USE_BRANCHLESS 1
#if USE_BRANCHLESS
static __inline int32 clamp0(int32 v) {
  return ((-(v) >> 31) & (v));
}

static __inline int32 clamp255(int32 v) {
  return (((255 - (v)) >> 31) | (v)) & 255;
}

static __inline uint32 Clamp(int32 val) {
  int v = clamp0(val);
  return static_cast<uint32>(clamp255(v));
}

static __inline uint32 Abs(int32 v) {
  int m = v >> 31;
  return (v + m) ^ m;
}
#else  // USE_BRANCHLESS
static __inline int32 clamp0(int32 v) {
  return (v < 0) ? 0 : v;
}

static __inline int32 clamp255(int32 v) {
  return (v > 255) ? 255 : v;
}

static __inline uint32 Clamp(int32 val) {
  int v = clamp0(val);
  return static_cast<uint32>(clamp255(v));
}

static __inline uint32 Abs(int32 v) {
  return (v < 0) ? -v : v;
}
#endif  // USE_BRANCHLESS

// Fixed point 0.32 reciprocal table.
extern const uint32 kRecipTable[4097] = {
  0, 0xffffffff, 2147483648, 1431655808, 1073741824, 858993472, 715827904, 613566784, 536870912, 477218592, 429496736, 390451584, 357913952, 330382112, 306783392, 286331168, 268435456, 252645136, 238609296, 226050912, 214748368, 204522256, 195225792, 186737712, 178956976, 171798688, 165191056, 159072864, 153391696, 148102320, 143165584, 138547328, 134217728, 130150528, 126322568, 122713352, 119304648, 116080200, 113025456, 110127368, 107374184, 104755296, 102261128, 99882960, 97612896, 95443720, 93368856, 91382280, 89478488, 87652392, 85899344, 84215048, 82595528, 81037120, 79536432, 78090312, 76695848, 75350304, 74051160, 72796056, 71582792, 70409296, 69273664, 68174088, 67108864, 66076420, 65075264, 64103988, 63161284, 62245904, 61356676, 60492496, 59652324, 58835168, 58040100, 57266232, 56512728, 55778796, 55063684, 54366676, 53687092, 53024288, 52377648, 51746592, 51130564, 50529028, 49941480, 49367440, 48806448, 48258060, 47721860, 47197444, 46684428, 46182444, 45691140, 45210184, 44739244, 44278012, 43826196, 43383508, 42949672, 42524428, 42107524, 41698712, 41297764, 40904452, 40518560, 40139880, 39768216, 39403368, 39045156, 38693400, 38347924, 38008560, 37675152, 37347540, 37025580, 36709124, 36398028, 36092164, 35791396, 35495596, 35204648, 34918432, 34636832, 34359740, 34087044, 33818640, 33554432, 33294320, 33038210, 32786010, 32537632, 32292988, 32051994, 31814572, 31580642, 31350126, 31122952, 30899046, 30678338, 30460760, 30246248, 30034736, 29826162, 29620464, 29417584, 29217464, 29020050, 28825284, 28633116, 28443492, 28256364, 28071682, 27889398, 27709466, 27531842, 27356480, 27183338, 27012372, 26843546, 26676816, 26512144, 26349492, 26188824, 26030104, 25873296, 25718368, 25565282, 25414008, 25264514, 25116768, 24970740, 24826400, 24683720, 24542670, 24403224, 24265352, 24129030, 23994230, 23860930, 23729102, 23598722, 23469766, 23342214, 23216040, 23091222, 22967740, 22845570, 22724694, 22605092, 22486740, 22369622, 22253716, 22139006, 22025474, 21913098, 21801864, 21691754, 21582750, 21474836, 21367996, 21262214, 21157474, 21053762, 20951060, 20849356, 20748634, 20648882,
20550082, 20452226, 20355296, 20259280, 20164166, 20069940, 19976592, 19884108, 19792476, 19701684, 19611722, 19522578, 19434242, 19346700, 19259944, 19173962, 19088744, 19004280, 18920560, 18837576, 18755316, 18673770, 18592932, 18512790, 18433336, 18354562, 18276456, 18199014, 18122224, 18046082, 17970574, 17895698, 17821442, 17747798, 17674762, 17602324, 17530478, 17459216, 17388532, 17318416, 17248864, 17179870, 17111424, 17043522, 16976156, 16909320, 16843010, 16777216, 16711936, 16647160, 16582886, 16519106, 16455814, 16393006, 16330674, 16268816, 16207424, 16146494, 16086020, 16025998, 15966422, 15907286, 15848588, 15790322, 15732482, 15675064, 15618064, 15561476, 15505298, 15449524, 15394148, 15339170, 15284582, 15230380, 15176564, 15123124, 15070062, 15017368, 14965044, 14913082, 14861480, 14810232, 14759338, 14708792, 14658592, 14608732, 14559212, 14510026, 14461170, 14412642, 14364440, 14316558, 14268994, 14221746, 14174810, 14128182, 14081860, 14035842, 13990122, 13944700, 13899572, 13854734, 13810184, 13765922, 13721940, 13678240, 13634818, 13591670, 13548794, 13506186, 13463848, 13421774, 13379960, 13338408, 13297112, 13256072, 13215284, 13174746, 13134458, 13094412, 13054612, 13015052, 12975732, 12936648, 12897800, 12859184, 12820798, 12782642, 12744710, 12707004, 12669520, 12632258, 12595212, 12558384, 12521772, 12485370, 12449182, 12413200, 12377428, 12341860, 12306498, 12271336, 12236374, 12201612, 12167046, 12132676, 12098500, 12064516, 12030722, 11997116, 11963698, 11930466, 11897416, 11864552, 11831866, 11799362, 11767034, 11734884, 11702908, 11671108, 11639478, 11608020, 11576732, 11545612, 11514658, 11483870, 11453246, 11422786, 11392486, 11362348, 11332368, 11302546, 11272880, 11243370, 11214014, 11184812, 11155760, 11126858, 11098108, 11069504, 11041048, 11012738, 10984572, 10956550, 10928670, 10900932, 10873336, 10845878, 10818558, 10791376, 10764330, 10737418, 10710642, 10683998, 10657488, 10631108, 10604858, 10578738, 10552746, 10526882, 10501144, 10475530, 10450042, 10424678, 10399438, 10374318, 10349320, 10324442, 10299682, 10275042, 10250520, 10226114, 10201824, 10177648, 10153588, 10129640, 10105806, 10082084, 10058472, 10034970, 10011580, 9988296, 9965122, 9942054, 9919094, 9896238, 9873488, 9850842, 9828300, 9805862, 9783526, 9761290, 9739156, 9717122, 9695186, 9673350, 9651612, 9629972, 9608428, 9586982, 9565630, 9544372, 9523210, 9502140, 9481164, 9460280, 9439490, 9418788, 9398178, 9377658, 9357228, 9336886, 9316632, 9296466, 9276388, 9256396, 9236490, 9216668, 9196932, 9177282, 9157714, 9138228, 9118828, 9099508, 9080270, 9061112, 9042036, 9023042, 9004124, 8985288, 8966530, 8947850, 8929246, 8910722, 8892272, 8873900, 8855604, 8837382, 8819236, 8801162, 8783164, 8765240, 8747388, 8729608, 8711902, 8694266, 8676702, 8659208, 8641786, 8624432, 8607150, 8589936, 8572790, 8555712, 8538702, 8521762, 8504886, 8488078, 8471336, 8454660, 8438050, 8421506, 8405024, 8388608, 8372256, 8355968, 8339743, 8323580, 8307481, 8291443, 8275467, 8259553, 8243699, 8227907, 8212175, 8196503, 8180890, 8165337, 8149843, 8134408, 8119031, 8103712, 8088451, 8073247, 8058100, 8043010, 8027977, 8012999, 7998077, 7983211, 7968400, 7953643, 7938942, 7924294, 7909701, 7895161, 7880674, 7866241, 7851860, 7837532, 7823256, 7809032, 7794859, 7780738, 7766668, 7752649, 7738680, 7724762, 7710893, 7697074, 7683305, 7669585, 7655913, 7642291, 7628717, 7615190, 7601712, 7588282, 7574898, 7561562, 7548273, 7535031, 7521834, 7508684, 7495580, 7482522, 7469509, 7456541, 7443618, 7430740, 
7417906, 7405116, 7392371, 7379669, 7367011, 7354396, 7341825, 7329296, 7316810, 7304366, 7291965, 7279606, 7267288, 7255013, 7242778, 7230585, 7218433, 7206321, 7194250, 7182220, 7170229, 7158279, 7146368, 7134497, 7122666, 7110873, 7099120, 7087405, 7075729, 7064091, 7052492, 7040930, 7029407, 7017921, 7006472, 6995061, 6983687, 6972350, 6961049, 6949786, 6938558, 6927367, 6916212, 6905092, 6894009, 6882961, 6871948, 6860970, 6850028, 6839120, 6828247, 6817409, 6806605, 6795835, 6785099, 6774397, 6763728, 6753093, 6742492, 6731924, 6721389, 6710887, 6700417, 6689980, 6679576, 6669204, 6658864, 6648556, 6638280, 6628036, 6617824, 6607642, 6597492, 6587373, 6577286, 6567229, 6557202, 6547206, 6537241, 6527306, 6517401, 6507526, 6497681, 6487866, 6478081, 6468324, 6458598, 6448900, 6439232, 6429592, 6419981, 6410399, 6400846, 6391321, 6381824, 6372355, 6362915, 6353502, 6344117, 6334760, 6325431, 6316129, 6306854, 6297606, 6288386, 6279192, 6270025, 6260886, 6251772, 6242685, 6233625, 6224591, 6215582, 6206600, 6197644, 6188714, 6179809, 6170930, 6162077, 6153249, 6144446, 6135668, 6126915, 6118187, 6109484, 6100806, 6092152, 6083523, 6074919, 6066338, 6057782, 6049250, 6040742, 6032258, 6023797, 6015361, 6006948, 5998558, 5990192, 5981849, 5973529, 5965233, 5956959, 5948708, 5940481, 5932276, 5924093, 5915933, 5907796, 5899681, 5891588, 5883517, 5875469, 5867442, 5859437, 5851454, 5843493, 5835554, 5827636, 5819739, 5811864, 5804010, 5796177, 5788366, 5780575, 5772806, 5765057, 5757329, 5749622, 5741935, 5734269, 5726623, 5718998, 5711393, 5703808, 5696243, 5688699, 5681174, 5673669, 5666184, 5658719, 5651273, 5643847, 5636440, 5629053, 5621685, 5614337, 5607007, 5599697, 5592406, 5585133, 5577880, 5570645, 5563429, 5556232, 5549054, 5541894, 5534752, 5527629, 5520524, 5513437, 5506369, 5499318, 5492286, 5485271, 5478275, 5471296, 5464335, 5457392, 5450466, 5443558, 5436668, 5429795, 5422939, 5416100, 5409279, 5402475, 5395688, 5388918, 5382165, 5375429, 5368709, 5362007, 5355321, 5348652, 5341999, 5335363, 5328744, 5322141, 5315554, 5308983, 5302429, 5295891, 5289369, 5282863, 5276373, 5269899, 5263441, 5256998, 5250572, 5244161, 5237765, 5231386, 5225021, 5218673, 5212339, 5206021, 5199719, 5193431, 5187159, 5180902, 5174660, 5168433, 5162221, 5156023, 5149841, 5143674, 5137521, 5131383, 5125260, 5119151, 5113057, 5106977, 5100912, 5094861, 5088824, 5082802, 5076794, 5070800, 5064820, 5058855, 5052903, 5046965, 5041042, 5035132, 5029236, 5023354, 5017485, 5011631, 5005790, 4999962, 4994148, 4988348, 4982561, 4976787, 4971027, 4965280, 4959547, 4953826, 4948119, 4942425, 4936744, 4931076, 4925421, 4919780, 4914150, 4908534, 4902931, 4897340, 4891763, 4886197, 4880645, 4875105, 4869578, 4864063, 4858561, 4853071, 4847593, 4842128, 4836675, 4831235, 4825806, 4820390, 4814986, 4809594, 4804214, 4798846, 4793491, 4788147, 4782815, 4777494, 4772186, 4766890, 4761605, 4756332, 4751070, 4745820, 4740582, 4735356, 4730140, 4724937, 4719745, 4714564, 4709394, 4704236, 4699089, 4693954, 4688829, 4683716, 4678614, 4673523, 4668443, 4663374, 4658316, 4653269, 4648233, 4643208, 4638194, 4633190, 4628198, 4623216, 4618245, 4613284, 4608334, 4603395, 4598466, 4593548, 4588641, 4583743, 4578857, 4573980, 4569114, 4564259, 4559414, 4554579, 4549754, 4544939, 4540135, 4535341, 4530556, 4525782, 4521018, 4516264, 4511521, 4506786, 4502062, 4497348, 4492644, 4487949, 4483265, 4478590, 4473925, 4469269, 4464623, 4459987, 4455361, 4450744, 4446136, 4441538, 4436950, 4432371, 4427802, 4423242, 4418691, 4414150, 
4409618, 4405095, 4400581, 4396077, 4391582, 4387097, 4382620, 4378152, 4373694, 4369245, 4364804, 4360373, 4355951, 4351538, 4347133, 4342738, 4338351, 4333973, 4329604, 4325244, 4320893, 4316550, 4312216, 4307891, 4303575, 4299267, 4294968, 4290677, 4286395, 4282121, 4277856, 4273600, 4269351, 4265112, 4260881, 4256658, 4252443, 4248237, 4244039, 4239850, 4235668, 4231495, 4227330, 4223174, 4219025, 4214885, 4210753, 4206628, 4202512, 4198404, 4194304, 4190212, 4186128, 4182052, 4177984, 4173924, 4169871, 4165827, 4161790, 4157761, 4153740, 4149727, 4145721, 4141724, 4137734, 4133751, 4129776, 4125809, 4121850, 4117898, 4113953, 4110017, 4106087, 4102166, 4098251, 4094345, 4090445, 4086553, 4082669, 4078791, 4074922, 4071059, 4067204, 4063356, 4059516, 4055682, 4051856, 4048037, 4044225, 4040421, 4036624, 4032833, 4029050, 4025274, 4021505, 4017743, 4013988, 4010240, 4006499, 4002766, 3999039, 3995319, 3991605, 3987899, 3984200, 3980507, 3976822, 3973143, 3969471, 3965806, 3962147, 3958495, 3954850, 3951212, 3947580, 3943955, 3940337, 3936725, 3933120, 3929522, 3925930, 3922345, 3918766, 3915194, 3911628, 3908069, 3904516, 3900970, 3897430, 3893896, 3890369, 3886848, 3883334, 3879826, 3876324, 3872829, 3869340, 3865857, 3862381, 3858911, 3855447, 3851989, 3848537, 3845092, 3841652, 3838219, 3834792, 3831371, 3827957, 3824548, 3821145, 3817749, 3814358, 3810974, 3807595, 3804223, 3800856, 3797496, 3794141, 3790792, 3787449, 3784112, 3780781, 3777456, 3774137, 3770823, 3767515, 3764213, 3760917, 3757627, 3754342, 3751063, 3747790, 3744523, 3741261, 3738005, 3734754, 3731510, 3728270, 3725037, 3721809, 3718587, 3715370, 3712159, 3708953, 3705753, 3702558, 3699369, 3696185, 3693007, 3689835, 3686667, 3683506, 3680349, 3677198, 3674053, 3670912, 3667777, 3664648, 3661524, 3658405, 3655291, 3652183, 3649080, 3645983, 3642890, 3639803, 3636721, 3633644, 3630573, 3627506, 3624445, 3621389, 3618338, 3615292, 3612252, 3609216, 3606186, 3603161, 3600140, 3597125, 3594115, 3591110, 3588110, 3585115, 3582125, 3579140, 3576159, 3573184, 3570214, 3567249, 3564288, 3561333, 3558382, 3555437, 3552496, 3549560, 3546629, 3543703, 3540781, 3537864, 3534953, 3532046, 3529143, 3526246, 3523353, 3520465, 3517582, 3514703, 3511829, 3508960, 3506096, 3503236, 3500381, 3497530, 3494685, 3491843, 3489007, 3486175, 3483347, 3480525, 3477706, 3474893, 3472084, 3469279, 3466479, 3463683, 3460892, 3458106, 3455324, 3452546, 3449773, 3447004, 3444240, 3441480, 3438725, 3435974, 3433227, 3430485, 3427747, 3425014, 3422285, 3419560, 3416840, 3414124, 3411412, 3408704, 3406001, 3403302, 3400608, 3397917, 3395231, 3392549, 3389872, 3387198, 3384529, 3381864, 3379203, 3376547, 3373894, 3371246, 3368602, 3365962, 3363326, 3360694, 3358067, 3355443, 3352824, 3350209, 3347597, 3344990, 3342387, 3339788, 3337193, 3334602, 3332015, 3329432, 3326853, 3324278, 3321707, 3319140, 3316577, 3314018, 3311463, 3308912, 3306364, 3303821, 3301282, 3298746, 3296214, 3293687, 3291163, 3288643, 3286127, 3283614, 3281106, 3278601, 3276100, 3273603, 3271110, 3268621, 3266135, 3263653, 3261175, 3258701, 3256230, 3253763, 3251300, 3248841, 3246385, 3243933, 3241485, 3239040, 3236599, 3234162, 3231729, 3229299, 3226873, 3224450, 3222031, 3219616, 3217204, 3214796, 3212392, 3209991, 3207593, 3205200, 3202809, 3200423, 3198040, 3195660, 3193284, 3190912, 3188543, 3186178, 3183816, 3181457, 3179102, 3176751, 3174403, 3172059, 3169718, 3167380, 3165046, 3162715, 3160388, 3158064, 3155744, 3153427, 3151113, 3148803, 3146496, 3144193, 3141893, 3139596, 
3137303, 3135013, 3132726, 3130443, 3128163, 3125886, 3123613, 3121343, 3119076, 3116812, 3114552, 3112295, 3110042, 3107791, 3105544, 3103300, 3101060, 3098822, 3096588, 3094357, 3092129, 3089905, 3087683, 3085465, 3083250, 3081038, 3078830, 3076624, 3074422, 3072223, 3070027, 3067834, 3065644, 3063458, 3061274, 3059094, 3056916, 3054742, 3052571, 3050403, 3048238, 3046076, 3043917, 3041762, 3039609, 3037459, 3035313, 3033169, 3031029, 3028891, 3026756, 3024625, 3022496, 3020371, 3018248, 3016129, 3014012, 3011899, 3009788, 3007680, 3005576, 3003474, 3001375, 2999279, 2997186, 2995096, 2993009, 2990924, 2988843, 2986765, 2984689, 2982616, 2980546, 2978480, 2976415, 2974354, 2972296, 2970240, 2968188, 2966138, 2964091, 2962047, 2960005, 2957967, 2955931, 2953898, 2951868, 2949840, 2947816, 2945794, 2943775, 2941759, 2939745, 2937734, 2935726, 2933721, 2931718, 2929719, 2927722, 2925727, 2923736, 2921747, 2919760, 2917777, 2915796, 2913818, 2911842, 2909870, 2907899, 2905932, 2903967, 2902005, 2900046, 2898089, 2896135, 2894183, 2892234, 2890288, 2888344, 2886403, 2884464, 2882529, 2880595, 2878665, 2876736, 2874811, 2872888, 2870968, 2869050, 2867135, 2865222, 2863312, 2861404, 2859499, 2857596, 2855696, 2853799, 2851904, 2850012, 2848122, 2846234, 2844349, 2842467, 2840587, 2838710, 2836835, 2834962, 2833092, 2831224, 2829359, 2827497, 2825637, 2823779, 2821923, 2820071, 2818220, 2816372, 2814527, 2812683, 2810843, 2809004, 2807168, 2805335, 2803504, 2801675, 2799848, 2798024, 2796203, 2794384, 2792567, 2790752, 2788940, 2787130, 2785323, 2783517, 2781715, 2779914, 2778116, 2776320, 2774527, 2772736, 2770947, 2769160, 2767376, 2765594, 2763814, 2762037, 2760262, 2758489, 2756719, 2754950, 2753184, 2751421, 2749659, 2747900, 2746143, 2744388, 2742636, 2740885, 2739137, 2737392, 2735648, 2733907, 2732168, 2730431, 2728696, 2726963, 2725233, 2723505, 2721779, 2720055, 2718334, 2716614, 2714897, 2713182, 2711469, 2709759, 2708050, 2706344, 2704639, 2702937, 2701237, 2699540, 2697844, 2696150, 2694459, 2692770, 2691082, 2689397, 2687714, 2686033, 2684355, 2682678, 2681003, 2679331, 2677661, 2675992, 2674326, 2672662, 2671000, 2669340, 2667682, 2666026, 2664372, 2662720, 2661070, 2659423, 2657777, 2656133, 2654492, 2652852, 2651215, 2649579, 2647945, 2646314, 2644684, 2643057, 2641431, 2639808, 2638186, 2636567, 2634949, 2633334, 2631720, 2630109, 2628499, 2626891, 2625286, 2623682, 2622080, 2620480, 2618883, 2617287, 2615693, 2614101, 2612511, 2610922, 2609336, 2607752, 2606170, 2604589, 2603011, 2601434, 2599859, 2598286, 2596716, 2595147, 2593579, 2592014, 2590451, 2588889, 2587330, 2585772, 2584216, 2582662, 2581110, 2579560, 2578012, 2576465, 2574921, 2573378, 2571837, 2570298, 2568760, 2567225, 2565691, 2564160, 2562630, 2561102, 2559575, 2558051, 2556528, 2555007, 2553488, 2551971, 2550456, 2548942, 2547430, 2545920, 2544412, 2542906, 2541401, 2539898, 2538397, 2536898, 2535400, 2533904, 2532410, 2530918, 2529427, 2527938, 2526451, 2524966, 2523483, 2522001, 2520521, 2519043, 2517566, 2516091, 2514618, 2513147, 2511677, 2510209, 2508743, 2507278, 2505815, 2504354, 2502895, 2501437, 2499981, 2498527, 2497074, 2495623, 2494174, 2492726, 2491280, 2489836, 2488394, 2486953, 2485514, 2484076, 2482640, 2481206, 2479773, 2478342, 2476913, 2475486, 2474060, 2472635, 2471213, 2469792, 2468372, 2466954, 2465538, 2464124, 2462711, 2461299, 2459890, 2458482, 2457075, 2455670, 2454267, 2452866, 2451465, 2450067, 2448670, 2447275, 2445881, 2444489, 2443099, 2441710, 2440322, 2438937, 2437553, 2436170, 
2434789, 2433409, 2432031, 2430655, 2429280, 2427907, 2426535, 2425165, 2423797, 2422430, 2421064, 2419700, 2418338, 2416977, 2415617, 2414259, 2412903, 2411548, 2410195, 2408843, 2407493, 2406144, 2404797, 2403451, 2402107, 2400764, 2399423, 2398083, 2396745, 2395409, 2394073, 2392740, 2391407, 2390077, 2388747, 2387419, 2386093, 2384768, 2383445, 2382123, 2380802, 2379483, 2378166, 2376850, 2375535, 2374222, 2372910, 2371600, 2370291, 2368984, 2367678, 2366373, 2365070, 2363769, 2362468, 2361170, 2359872, 2358576, 2357282, 2355989, 2354697, 2353407, 2352118, 2350831, 2349545, 2348260, 2346977, 2345695, 2344415, 2343136, 2341858, 2340582, 2339307, 2338033, 2336761, 2335491, 2334221, 2332954, 2331687, 2330422, 2329158, 2327896, 2326635, 2325375, 2324117, 2322860, 2321604, 2320350, 2319097, 2317845, 2316595, 2315346, 2314099, 2312853, 2311608, 2310364, 2309122, 2307882, 2306642, 2305404, 2304167, 2302932, 2301697, 2300465, 2299233, 2298003, 2296774, 2295547, 2294320, 2293095, 2291872, 2290649, 2289428, 2288209, 2286990, 2285773, 2284557, 2283343, 2282129, 2280917, 2279707, 2278497, 2277289, 2276082, 2274877, 2273673, 2272470, 2271268, 2270067, 2268868, 2267670, 2266474, 2265278, 2264084, 2262891, 2261700, 2260509, 2259320, 2258132, 2256946, 2255760, 2254576, 2253393, 2252212, 2251031, 2249852, 2248674, 2247497, 2246322, 2245148, 2243975, 2242803, 2241632, 2240463, 2239295, 2238128, 2236962, 2235798, 2234635, 2233472, 2232312, 2231152, 2229994, 2228836, 2227680, 2226525, 2225372, 2224219, 2223068, 2221918, 2220769, 2219621, 2218475, 2217330, 2216186, 2215043, 2213901, 2212760, 2211621, 2210483, 2209345, 2208210, 2207075, 2205941, 2204809, 2203678, 2202547, 2201419, 2200291, 2199164, 2198039, 2196914, 2195791, 2194669, 2193548, 2192429, 2191310, 2190193, 2189076, 2187961, 2186847, 2185734, 2184622, 2183512, 2182402, 2181294, 2180187, 2179080, 2177975, 2176872, 2175769, 2174667, 2173567, 2172467, 2171369, 2170272, 2169176, 2168081, 2166987, 2165894, 2164802, 2163712, 2162622, 2161534, 2160446, 2159360, 2158275, 2157191, 2156108, 2155026, 2153946, 2152866, 2151787, 2150710, 2149633, 2148558, 2147484, 2146411, 2145338, 2144267, 2143197, 2142128, 2141061, 2139994, 2138928, 2137863, 2136800, 2135737, 2134676, 2133615, 2132556, 2131498, 2130440, 2129384, 2128329, 2127275, 2126222, 2125169, 2124118, 2123068, 2122020, 2120972, 2119925, 2118879, 2117834, 2116790, 2115748, 2114706, 2113665, 2112625, 2111587, 2110549, 2109513, 2108477, 2107442, 2106409, 2105376, 2104345, 2103314, 2102285, 2101256, 2100229, 2099202, 2098177, 2097152, 2096129, 2095106, 2094085, 2093064, 2092045, 2091026, 2090008, 2088992, 2087976, 2086962, 2085948, 2084936, 2083924, 2082913, 2081904, 2080895, 2079887, 2078881, 2077875, 2076870, 2075866, 2074863, 2073862, 2072861, 2071861, 2070862, 2069864, 2068867, 2067871, 2066876, 2065881, 2064888, 2063896, 2062905, 2061914, 2060925, 2059936, 2058949, 2057962, 2056977, 2055992, 2055008, 2054026, 2053044, 2052063, 2051083, 2050104, 2049126, 2048149, 2047172, 2046197, 2045223, 2044249, 2043277, 2042305, 2041334, 2040365, 2039396, 2038428, 2037461, 2036495, 2035530, 2034565, 2033602, 2032640, 2031678, 2030717, 2029758, 2028799, 2027841, 2026884, 2025928, 2024973, 2024019, 2023065, 2022113, 2021161, 2020210, 2019261, 2018312, 2017364, 2016417, 2015470, 2014525, 2013581, 2012637, 2011694, 2010753, 2009812, 2008872, 2007932, 2006994, 2006057, 2005120, 2004185, 2003250, 2002316, 2001383, 2000451, 1999519, 1998589, 1997659, 1996731, 1995803, 1994876, 1993950, 1993024, 1992100, 1991176, 1990254, 
1989332, 1988411, 1987491, 1986571, 1985653, 1984735, 1983819, 1982903, 1981988, 1981074, 1980160, 1979248, 1978336, 1977425, 1976515, 1975606, 1974698, 1973790, 1972884, 1971978, 1971073, 1970169, 1969265, 1968363, 1967461, 1966560, 1965660, 1964761, 1963863, 1962965, 1962068, 1961172, 1960277, 1959383, 1958489, 1957597, 1956705, 1955814, 1954924, 1954034, 1953146, 1952258, 1951371, 1950485, 1949599, 1948715, 1947831, 1946948, 1946066, 1945185, 1944304, 1943424, 1942545, 1941667, 1940790, 1939913, 1939037, 1938162, 1937288, 1936415, 1935542, 1934670, 1933799, 1932929, 1932059, 1931190, 1930322, 1929455, 1928589, 1927723, 1926858, 1925994, 1925131, 1924269, 1923407, 1922546, 1921686, 1920826, 1919968, 1919110, 1918253, 1917396, 1916541, 1915686, 1914832, 1913978, 1913126, 1912274, 1911423, 1910573, 1909723, 1908874, 1908026, 1907179, 1906333, 1905487, 1904642, 1903798, 1902954, 1902111, 1901269, 1900428, 1899588, 1898748, 1897909, 1897070, 1896233, 1895396, 1894560, 1893725, 1892890, 1892056, 1891223, 1890391, 1889559, 1888728, 1887898, 1887068, 1886240, 1885412, 1884584, 1883758, 1882932, 1882107, 1881282, 1880459, 1879636, 1878813, 1877992, 1877171, 1876351, 1875532, 1874713, 1873895, 1873078, 1872261, 1871446, 1870630, 1869816, 1869002, 1868189, 1867377, 1866566, 1865755, 1864945, 1864135, 1863326, 1862518, 1861711, 1860904, 1860099, 1859293, 1858489, 1857685, 1856882, 1856079, 1855278, 1854476, 1853676, 1852876, 1852077, 1851279, 1850481, 1849685, 1848888, 1848093, 1847298, 1846504, 1845710, 1844917, 1844125, 1843334, 1842543, 1841753, 1840963, 1840175, 1839386, 1838599, 1837812, 1837026, 1836241, 1835456, 1834672, 1833889, 1833106, 1832324, 1831543, 1830762, 1829982, 1829202, 1828424, 1827646, 1826868, 1826092, 1825316, 1824540, 1823765, 1822991, 1822218, 1821445, 1820673, 1819901, 1819131, 1818360, 1817591, 1816822, 1816054, 1815286, 1814519, 1813753, 1812988, 1812223, 1811458, 1810695, 1809931, 1809169, 1808407, 1807646, 1806886, 1806126, 1805367, 1804608, 1803850, 1803093, 1802336, 1801580, 1800825, 1800070, 1799316, 1798563, 1797810, 1797058, 1796306, 1795555, 1794805, 1794055, 1793306, 1792557, 1791810, 1791062, 1790316, 1789570, 1788824, 1788080, 1787336, 1786592, 1785849, 1785107, 1784365, 1783624, 1782884, 1782144, 1781405, 1780666, 1779928, 1779191, 1778454, 1777718, 1776983, 1776248, 1775514, 1774780, 1774047, 1773314, 1772583, 1771851, 1771121, 1770391, 1769661, 1768932, 1768204, 1767476, 1766749, 1766023, 1765297, 1764572, 1763847, 1763123, 1762399, 1761677, 1760954, 1760233, 1759511, 1758791, 1758071, 1757352, 1756633, 1755915, 1755197, 1754480, 1753764, 1753048, 1752333, 1751618, 1750904, 1750190, 1749478, 1748765, 1748054, 1747342, 1746632, 1745922, 1745212, 1744503, 1743795, 1743087, 1742380, 1741674, 1740968, 1740262, 1739557, 1738853, 1738150, 1737446, 1736744, 1736042, 1735340, 1734640, 1733939, 1733239, 1732540, 1731842, 1731144, 1730446, 1729749, 1729053, 1728357, 1727662, 1726967, 1726273, 1725580, 1724887, 1724194, 1723502, 1722811, 1722120, 1721430, 1720740, 1720051, 1719362, 1718674, 1717987, 1717300, 1716614, 1715928, 1715243, 1714558, 1713874, 1713190, 1712507, 1711824, 1711142, 1710461, 1709780, 1709100, 1708420, 1707741, 1707062, 1706384, 1705706, 1705029, 1704352, 1703676, 1703001, 1702326, 1701651, 1700977, 1700304, 1699631, 1698959, 1698287, 1697616, 1696945, 1696275, 1695605, 1694936, 1694267, 1693599, 1692932, 1692265, 1691598, 1690932, 1690267, 1689602, 1688937, 1688273, 1687610, 1686947, 1686285, 1685623, 1684962, 1684301, 1683641, 1682981, 1682322, 
1681663, 1681005, 1680347, 1679690, 1679033, 1678377, 1677722, 1677067, 1676412, 1675758, 1675104, 1674451, 1673799, 1673147, 1672495, 1671844, 1671194, 1670544, 1669894, 1669245, 1668597, 1667949, 1667301, 1666654, 1666008, 1665362, 1664716, 1664071, 1663427, 1662783, 1662139, 1661496, 1660854, 1660212, 1659570, 1658929, 1658289, 1657649, 1657009, 1656370, 1655731, 1655093, 1654456, 1653819, 1653182, 1652546, 1651911, 1651275, 1650641, 1650007, 1649373, 1648740, 1648107, 1647475, 1646843, 1646212, 1645581, 1644951, 1644321, 1643692, 1643063, 1642435, 1641807, 1641180, 1640553, 1639926, 1639301, 1638675, 1638050, 1637426, 1636802, 1636178, 1635555, 1634932, 1634310, 1633689, 1633067, 1632447, 1631827, 1631207, 1630587, 1629969, 1629350, 1628732, 1628115, 1627498, 1626882, 1626266, 1625650, 1625035, 1624420, 1623806, 1623193, 1622579, 1621967, 1621354, 1620742, 1620131, 1619520, 1618910, 1618300, 1617690, 1617081, 1616473, 1615864, 1615257, 1614649, 1614043, 1613436, 1612830, 1612225, 1611620, 1611016, 1610411, 1609808, 1609205, 1608602, 1608000, 1607398, 1606797, 1606196, 1605595, 1604995, 1604396, 1603797, 1603198, 1602600, 1602002, 1601405, 1600808, 1600211, 1599615, 1599020, 1598425, 1597830, 1597236, 1596642, 1596049, 1595456, 1594864, 1594272, 1593680, 1593089, 1592498, 1591908, 1591318, 1590729, 1590140, 1589551, 1588963, 1588376, 1587788, 1587202, 1586615, 1586029, 1585444, 1584859, 1584274, 1583690, 1583106, 1582523, 1581940, 1581358, 1580776, 1580194, 1579613, 1579032, 1578452, 1577872, 1577292, 1576713, 1576135, 1575557, 1574979, 1574402, 1573825, 1573248, 1572672, 1572096, 1571521, 1570946, 1570372, 1569798, 1569225, 1568651, 1568079, 1567506, 1566935, 1566363, 1565792, 1565221, 1564651, 1564081, 1563512, 1562943, 1562374, 1561806, 1561239, 1560671, 1560104, 1559538, 1558972, 1558406, 1557841, 1557276, 1556712, 1556148, 1555584, 1555021, 1554458, 1553896, 1553334, 1552772, 1552211, 1551650, 1551090, 1550530, 1549970, 1549411, 1548852, 1548294, 1547736, 1547178, 1546621, 1546065, 1545508, 1544952, 1544397, 1543842, 1543287, 1542733, 1542179, 1541625, 1541072, 1540519, 1539967, 1539415, 1538863, 1538312, 1537761, 1537211, 1536661, 1536111, 1535562, 1535013, 1534465, 1533917, 1533369, 1532822, 1532275, 1531729, 1531183, 1530637, 1530092, 1529547, 1529002, 1528458, 1527914, 1527371, 1526828, 1526286, 1525743, 1525202, 1524660, 1524119, 1523578, 1523038, 1522498, 1521959, 1521420, 1520881, 1520342, 1519804, 1519267, 1518730, 1518193, 1517656, 1517120, 1516585, 1516049, 1515514, 1514980, 1514446, 1513912, 1513378, 1512845, 1512312, 1511780, 1511248, 1510717, 1510185, 1509655, 1509124, 1508594, 1508064, 1507535, 1507006, 1506478, 1505949, 1505421, 1504894, 1504367, 1503840, 1503314, 1502788, 1502262, 1501737, 1501212, 1500687, 1500163, 1499639, 1499116, 1498593, 1498070, 1497548, 1497026, 1496504, 1495983, 1495462, 1494942, 1494422, 1493902, 1493382, 1492863, 1492345, 1491826, 1491308, 1490791, 1490273, 1489756, 1489240, 1488724, 1488208, 1487692, 1487177, 1486662, 1486148, 1485634, 1485120, 1484607, 1484094, 1483581, 1483069, 1482557, 1482045, 1481534, 1481023, 1480513, 1480003, 1479493, 1478983, 1478474, 1477965, 1477457, 1476949, 1476441, 1475934, 1475427, 1474920, 1474414, 1473908, 1473402, 1472897, 1472392, 1471887, 1471383, 1470879, 1470376, 1469873, 1469370, 1468867, 1468365, 1467863, 1467362, 1466860, 1466360, 1465859, 1465359, 1464859, 1464360, 1463861, 1463362, 1462864, 1462366, 1461868, 1461370, 1460873, 1460377, 1459880, 1459384, 1458888, 1458393, 1457898, 1457403, 1456909, 
1456415, 1455921, 1455428, 1454935, 1454442, 1453950, 1453458, 1452966, 1452475, 1451984, 1451493, 1451003, 1450512, 1450023, 1449533, 1449044, 1448556, 1448067, 1447579, 1447091, 1446604, 1446117, 1445630, 1445144, 1444658, 1444172, 1443687, 1443201, 1442717, 1442232, 1441748, 1441264, 1440781, 1440298, 1439815, 1439332, 1438850, 1438368, 1437887, 1437405, 1436925, 1436444, 1435964, 1435484, 1435004, 1434525, 1434046, 1433567, 1433089, 1432611, 1432133, 1431656, 1431179, 1430702, 1430226, 1429749, 1429274, 1428798, 1428323, 1427848, 1427374, 1426899, 1426426, 1425952, 1425479, 1425006, 1424533, 1424061, 1423589, 1423117, 1422646, 1422175, 1421704, 1421233, 1420763, 1420293, 1419824, 1419355, 1418886, 1418417, 1417949, 1417481, 1417013, 1416546, 1416079, 1415612, 1415146, 1414680, 1414214, 1413748, 1413283, 1412818, 1412354, 1411889, 1411425, 1410962, 1410498, 1410035, 1409573, 1409110, 1408648, 1408186, 1407725, 1407263, 1406802, 1406342, 1405881, 1405421, 1404962, 1404502, 1404043, 1403584, 1403126, 1402667, 1402209, 1401752, 1401294, 1400837, 1400381, 1399924, 1399468, 1399012, 1398557, 1398101, 1397646, 1397192, 1396737, 1396283, 1395830, 1395376, 1394923, 1394470, 1394017, 1393565, 1393113, 1392661, 1392210, 1391759, 1391308, 1390857, 1390407, 1389957, 1389507, 1389058, 1388609, 1388160, 1387712, 1387263, 1386815, 1386368, 1385920, 1385473, 1385027, 1384580, 1384134, 1383688, 1383242, 1382797, 1382352, 1381907, 1381463, 1381018, 1380575, 1380131, 1379688, 1379245, 1378802, 1378359, 1377917, 1377475, 1377034, 1376592, 1376151, 1375710, 1375270, 1374830, 1374390, 1373950, 1373511, 1373071, 1372633, 1372194, 1371756, 1371318, 1370880, 1370443, 1370006, 1369569, 1369132, 1368696, 1368260, 1367824, 1367389, 1366953, 1366518, 1366084, 1365649, 1365215, 1364782, 1364348, 1363915, 1363482, 1363049, 1362617, 1362184, 1361753, 1361321, 1360890, 1360459, 1360028, 1359597, 1359167, 1358737, 1358307, 1357878, 1357449, 1357020, 1356591, 1356163, 1355735, 1355307, 1354879, 1354452, 1354025, 1353598, 1353172, 1352746, 1352320, 1351894, 1351469, 1351044, 1350619, 1350194, 1349770, 1349346, 1348922, 1348498, 1348075, 1347652, 1347229, 1346807, 1346385, 1345963, 1345541, 1345120, 1344699, 1344278, 1343857, 1343437, 1343017, 1342597, 1342177, 1341758, 1341339, 1340920, 1340502, 1340083, 1339665, 1339248, 1338830, 1338413, 1337996, 1337579, 1337163, 1336747, 1336331, 1335915, 1335500, 1335085, 1334670, 1334255, 1333841, 1333427, 1333013, 1332599, 1332186, 1331773, 1331360, 1330947, 1330535, 1330123, 1329711, 1329300, 1328888, 1328477, 1328067, 1327656, 1327246, 1326836, 1326426, 1326017, 1325607, 1325198, 1324789, 1324381, 1323973, 1323565, 1323157, 1322749, 1322342, 1321935, 1321528, 1321122, 1320716, 1320310, 1319904, 1319498, 1319093, 1318688, 1318283, 1317879, 1317475, 1317071, 1316667, 1316263, 1315860, 1315457, 1315054, 1314652, 1314250, 1313848, 1313446, 1313044, 1312643, 1312242, 1311841, 1311440, 1311040, 1310640, 1310240, 1309841, 1309441, 1309042, 1308643, 1308245, 1307846, 1307448, 1307050, 1306653, 1306255, 1305858, 1305461, 1305065, 1304668, 1304272, 1303876, 1303480, 1303085, 1302690, 1302295, 1301900, 1301505, 1301111, 1300717, 1300323, 1299930, 1299536, 1299143, 1298750, 1298358, 1297965, 1297573, 1297181, 1296790, 1296398, 1296007, 1295616, 1295225, 1294835, 1294445, 1294055, 1293665, 1293275, 1292886, 1292497, 1292108, 1291720, 1291331, 1290943, 1290555, 1290167, 1289780, 1289393, 1289006, 1288619, 1288233, 1287846, 1287460, 1287074, 1286689, 1286304, 1285918, 1285534, 1285149, 1284764, 
1284380, 1283996, 1283613, 1283229, 1282846, 1282463, 1282080, 1281697, 1281315, 1280933, 1280551, 1280169, 1279788, 1279406, 1279025, 1278645, 1278264, 1277884, 1277504, 1277124, 1276744, 1276365, 1275986, 1275607, 1275228, 1274849, 1274471, 1274093, 1273715, 1273338, 1272960, 1272583, 1272206, 1271829, 1271453, 1271077, 1270700, 1270325, 1269949, 1269574, 1269198, 1268823, 1268449, 1268074, 1267700, 1267326, 1266952, 1266578, 1266205, 1265832, 1265459, 1265086, 1264714, 1264341, 1263969, 1263597, 1263226, 1262854, 1262483, 1262112, 1261741, 1261371, 1261000, 1260630, 1260260, 1259891, 1259521, 1259152, 1258783, 1258414, 1258046, 1257677, 1257309, 1256941, 1256573, 1256206, 1255838, 1255471, 1255104, 1254738, 1254371, 1254005, 1253639, 1253273, 1252908, 1252542, 1252177, 1251812, 1251447, 1251083, 1250719, 1250354, 1249991, 1249627, 1249263, 1248900, 1248537, 1248174, 1247812, 1247449, 1247087, 1246725, 1246363, 1246002, 1245640, 1245279, 1244918, 1244557, 1244197, 1243837, 1243476, 1243117, 1242757, 1242397, 1242038, 1241679, 1241320, 1240961, 1240603, 1240245, 1239887, 1239529, 1239171, 1238814, 1238457, 1238100, 1237743, 1237386, 1237030, 1236674, 1236318, 1235962, 1235606, 1235251, 1234896, 1234541, 1234186, 1233832, 1233477, 1233123, 1232769, 1232415, 1232062, 1231708, 1231355, 1231002, 1230650, 1230297, 1229945, 1229593, 1229241, 1228889, 1228538, 1228186, 1227835, 1227484, 1227134, 1226783, 1226433, 1226083, 1225733, 1225383, 1225034, 1224684, 1224335, 1223986, 1223637, 1223289, 1222941, 1222593, 1222245, 1221897, 1221549, 1221202, 1220855, 1220508, 1220161, 1219815, 1219468, 1219122, 1218776, 1218431, 1218085, 1217740, 1217394, 1217049, 1216705, 1216360, 1216016, 1215672, 1215328, 1214984, 1214640, 1214297, 1213954, 1213610, 1213268, 1212925, 1212583, 1212240, 1211898, 1211556, 1211215, 1210873, 1210532, 1210191, 1209850, 1209509, 1209169, 1208828, 1208488, 1208148, 1207809, 1207469, 1207130, 1206791, 1206452, 1206113, 1205774, 1205436, 1205098, 1204759, 1204422, 1204084, 1203747, 1203409, 1203072, 1202735, 1202399, 1202062, 1201726, 1201390, 1201054, 1200718, 1200382, 1200047, 1199712, 1199377, 1199042, 1198707, 1198373, 1198038, 1197704, 1197370, 1197037, 1196703, 1196370, 1196037, 1195704, 1195371, 1195038, 1194706, 1194374, 1194042, 1193710, 1193378, 1193047, 1192715, 1192384, 1192053, 1191722, 1191392, 1191061, 1190731, 1190401, 1190071, 1189742, 1189412, 1189083, 1188754, 1188425, 1188096, 1187768, 1187439, 1187111, 1186783, 1186455, 1186127, 1185800, 1185473, 1185146, 1184819, 1184492, 1184165, 1183839, 1183513, 1183187, 1182861, 1182535, 1182210, 1181884, 1181559, 1181234, 1180909, 1180585, 1180260, 1179936, 1179612, 1179288, 1178964, 1178641, 1178318, 1177994, 1177671, 1177349, 1177026, 1176703, 1176381, 1176059, 1175737, 1175415, 1175094, 1174772, 1174451, 1174130, 1173809, 1173488, 1173168, 1172847, 1172527, 1172207, 1171887, 1171568, 1171248, 1170929, 1170610, 1170291, 1169972, 1169653, 1169335, 1169017, 1168699, 1168381, 1168063, 1167745, 1167428, 1167111, 1166794, 1166477, 1166160, 1165844, 1165527, 1165211, 1164895, 1164579, 1164263, 1163948, 1163632, 1163317, 1163002, 1162687, 1162373, 1162058, 1161744, 1161430, 1161116, 1160802, 1160488, 1160175, 1159862, 1159548, 1159236, 1158923, 1158610, 1158298, 1157985, 1157673, 1157361, 1157049, 1156738, 1156426, 1156115, 1155804, 1155493, 1155182, 1154872, 1154561, 1154251, 1153941, 1153631, 1153321, 1153011, 1152702, 1152393, 1152084, 1151775, 1151466, 1151157, 1150849, 1150540, 1150232, 1149924, 1149617, 1149309, 1149001, 
1148694, 1148387, 1148080, 1147773, 1147467, 1147160, 1146854, 1146548, 1146242, 1145936, 1145630, 1145325, 1145019, 1144714, 1144409, 1144104, 1143800, 1143495, 1143191, 1142887, 1142582, 1142279, 1141975, 1141671, 1141368, 1141065, 1140762, 1140459, 1140156, 1139853, 1139551, 1139249, 1138947, 1138645, 1138343, 1138041, 1137740, 1137438, 1137137, 1136836, 1136535, 1136235, 1135934, 1135634, 1135334, 1135034, 1134734, 1134434, 1134135, 1133835, 1133536, 1133237, 1132938, 1132639, 1132341, 1132042, 1131744, 1131446, 1131148, 1130850, 1130552, 1130255, 1129957, 1129660, 1129363, 1129066, 1128769, 1128473, 1128176, 1127880, 1127584, 1127288, 1126992, 1126697, 1126401, 1126106, 1125811, 1125516, 1125221, 1124926, 1124631, 1124337, 1124043, 1123749, 1123455, 1123161, 1122867, 1122574, 1122281, 1121987, 1121694, 1121401, 1121109, 1120816, 1120524, 1120231, 1119939, 1119647, 1119356, 1119064, 1118772, 1118481, 1118190, 1117899, 1117608, 1117317, 1117027, 1116736, 1116446, 1116156, 1115866, 1115576, 1115286, 1114997, 1114707, 1114418, 1114129, 1113840, 1113551, 1113263, 1112974, 1112686, 1112398, 1112110, 1111822, 1111534, 1111246, 1110959, 1110672, 1110385, 1110098, 1109811, 1109524, 1109237, 1108951, 1108665, 1108379, 1108093, 1107807, 1107521, 1107236, 1106950, 1106665, 1106380, 1106095, 1105810, 1105526, 1105241, 1104957, 1104673, 1104389, 1104105, 1103821, 1103537, 1103254, 1102971, 1102687, 1102404, 1102122, 1101839, 1101556, 1101274, 1100991, 1100709, 1100427, 1100145, 1099864, 1099582, 1099301, 1099019, 1098738, 1098457, 1098176, 1097896, 1097615, 1097335, 1097054, 1096774, 1096494, 1096214, 1095935, 1095655, 1095376, 1095096, 1094817, 1094538, 1094259, 1093981, 1093702, 1093424, 1093145, 1092867, 1092589, 1092311, 1092033, 1091756, 1091478, 1091201, 1090924, 1090647, 1090370, 1090093, 1089817, 1089540, 1089264, 1088988, 1088712, 1088436, 1088160, 1087884, 1087609, 1087334, 1087058, 1086783, 1086508, 1086234, 1085959, 1085684, 1085410, 1085136, 1084862, 1084588, 1084314, 1084040, 1083767, 1083493, 1083220, 1082947, 1082674, 1082401, 1082128, 1081856, 1081583, 1081311, 1081039, 1080767, 1080495, 1080223, 1079952, 1079680, 1079409, 1079138, 1078867, 1078596, 1078325, 1078054, 1077784, 1077513, 1077243, 1076973, 1076703, 1076433, 1076163, 1075894, 1075624, 1075355, 1075086, 1074817, 1074548, 1074279, 1074010, 1073742, 1073474, 1073205, 1072937, 1072669, 1072401, 1072134, 1071866, 1071599, 1071331, 1071064, 1070797, 1070530, 1070264, 1069997, 1069730, 1069464, 1069198, 1068932, 1068666, 1068400, 1068134, 1067869, 1067603, 1067338, 1067073, 1066808, 1066543, 1066278, 1066013, 1065749, 1065484, 1065220, 1064956, 1064692, 1064428, 1064164, 1063901, 1063637, 1063374, 1063111, 1062848, 1062585, 1062322, 1062059, 1061797, 1061534, 1061272, 1061010, 1060748, 1060486, 1060224, 1059962, 1059701, 1059439, 1059178, 1058917, 1058656, 1058395, 1058134, 1057874, 1057613, 1057353, 1057093, 1056833, 1056573, 1056313, 1056053, 1055793, 1055534, 1055275, 1055015, 1054756, 1054497, 1054238, 1053980, 1053721, 1053463, 1053204, 1052946, 1052688, 1052430, 1052172, 1051915, 1051657, 1051400, 1051142, 1050885, 1050628, 1050371, 1050114, 1049858, 1049601, 1049345, 1049088, 1048832, 1048576 }; // Divide num by div and return as 16.16 fixed point result. 
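// Each table entry is approximately (1 << 32) / index, i.e. the divisor's
// reciprocal in 0.32 fixed point, so a multiply plus a shift below replaces a
// divide. Illustrative check (not part of the original source):
//   FixedDiv_C(1, 2) -> 1 * kRecipTable[2] >> 16 = 0x00008000  (0.5 in 16.16)
//   FixedDiv_C(3, 2) -> 3 * kRecipTable[2] >> 16 = 0x00018000  (1.5 in 16.16)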
int FixedDiv_C(int num, int div) {
  if (static_cast<uint32>(div) <= 4096u) {  // Table covers divisors 0..4096.
    return static_cast<int>((static_cast<int64>(num) * kRecipTable[div]) >> 16);
  }
  return static_cast<int>((static_cast<int64>(num) << 16) / div);
}

#ifdef LIBYUV_LITTLE_ENDIAN
#define WRITEWORD(p, v) *reinterpret_cast<uint32*>(p) = v
#else
static inline void WRITEWORD(uint8* p, uint32 v) {
  p[0] = (uint8)(v & 255);
  p[1] = (uint8)((v >> 8) & 255);
  p[2] = (uint8)((v >> 16) & 255);
  p[3] = (uint8)((v >> 24) & 255);
}
#endif

void RGB24ToARGBRow_C(const uint8* src_rgb24, uint8* dst_argb, int width) {
  for (int x = 0; x < width; ++x) {
    uint8 b = src_rgb24[0];
    uint8 g = src_rgb24[1];
    uint8 r = src_rgb24[2];
    dst_argb[0] = b;
    dst_argb[1] = g;
    dst_argb[2] = r;
    dst_argb[3] = 255u;
    dst_argb += 4;
    src_rgb24 += 3;
  }
}

void RAWToARGBRow_C(const uint8* src_raw, uint8* dst_argb, int width) {
  for (int x = 0; x < width; ++x) {
    uint8 r = src_raw[0];
    uint8 g = src_raw[1];
    uint8 b = src_raw[2];
    dst_argb[0] = b;
    dst_argb[1] = g;
    dst_argb[2] = r;
    dst_argb[3] = 255u;
    dst_argb += 4;
    src_raw += 3;
  }
}

void RGB565ToARGBRow_C(const uint8* src_rgb565, uint8* dst_argb, int width) {
  for (int x = 0; x < width; ++x) {
    uint8 b = src_rgb565[0] & 0x1f;
    uint8 g = (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3);
    uint8 r = src_rgb565[1] >> 3;
    dst_argb[0] = (b << 3) | (b >> 2);
    dst_argb[1] = (g << 2) | (g >> 4);
    dst_argb[2] = (r << 3) | (r >> 2);
    dst_argb[3] = 255u;
    dst_argb += 4;
    src_rgb565 += 2;
  }
}

void ARGB1555ToARGBRow_C(const uint8* src_argb1555, uint8* dst_argb,
                         int width) {
  for (int x = 0; x < width; ++x) {
    uint8 b = src_argb1555[0] & 0x1f;
    uint8 g = (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3);
    uint8 r = (src_argb1555[1] & 0x7c) >> 2;
    uint8 a = src_argb1555[1] >> 7;
    dst_argb[0] = (b << 3) | (b >> 2);
    dst_argb[1] = (g << 3) | (g >> 2);
    dst_argb[2] = (r << 3) | (r >> 2);
    dst_argb[3] = -a;
    dst_argb += 4;
    src_argb1555 += 2;
  }
}

void ARGB4444ToARGBRow_C(const uint8* src_argb4444, uint8* dst_argb,
                         int width) {
  for (int x = 0; x < width; ++x) {
    uint8 b = src_argb4444[0] & 0x0f;
    uint8 g = src_argb4444[0] >> 4;
    uint8 r = src_argb4444[1] & 0x0f;
    uint8 a = src_argb4444[1] >> 4;
    dst_argb[0] = (b << 4) | b;
    dst_argb[1] = (g << 4) | g;
    dst_argb[2] = (r << 4) | r;
    dst_argb[3] = (a << 4) | a;
    dst_argb += 4;
    src_argb4444 += 2;
  }
}

void ARGBToRGB24Row_C(const uint8* src_argb, uint8* dst_rgb, int width) {
  for (int x = 0; x < width; ++x) {
    uint8 b = src_argb[0];
    uint8 g = src_argb[1];
    uint8 r = src_argb[2];
    dst_rgb[0] = b;
    dst_rgb[1] = g;
    dst_rgb[2] = r;
    dst_rgb += 3;
    src_argb += 4;
  }
}

void ARGBToRAWRow_C(const uint8* src_argb, uint8* dst_rgb, int width) {
  for (int x = 0; x < width; ++x) {
    uint8 b = src_argb[0];
    uint8 g = src_argb[1];
    uint8 r = src_argb[2];
    dst_rgb[0] = r;
    dst_rgb[1] = g;
    dst_rgb[2] = b;
    dst_rgb += 3;
    src_argb += 4;
  }
}

void ARGBToRGB565Row_C(const uint8* src_argb, uint8* dst_rgb, int width) {
  for (int x = 0; x < width - 1; x += 2) {
    uint8 b0 = src_argb[0] >> 3;
    uint8 g0 = src_argb[1] >> 2;
    uint8 r0 = src_argb[2] >> 3;
    uint8 b1 = src_argb[4] >> 3;
    uint8 g1 = src_argb[5] >> 2;
    uint8 r1 = src_argb[6] >> 3;
    WRITEWORD(dst_rgb, b0 | (g0 << 5) | (r0 << 11) |
              (b1 << 16) | (g1 << 21) | (r1 << 27));
    dst_rgb += 4;
    src_argb += 8;
  }
  if (width & 1) {
    uint8 b0 = src_argb[0] >> 3;
    uint8 g0 = src_argb[1] >> 2;
    uint8 r0 = src_argb[2] >> 3;
    *reinterpret_cast<uint16*>(dst_rgb) = b0 | (g0 << 5) | (r0 << 11);
  }
}
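// Layout note (illustrative, not from the original source): each RGB565 pixel
// packs blue into bits 0-4, green into bits 5-10 and red into bits 11-15, and
// ARGBToRGB565Row_C stores two such pixels per 32-bit word with the first
// pixel in the low half. For example, a pure green ARGB pixel (b=0, g=255,
// r=0) becomes (255 >> 2) << 5 = 0x07e0.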
void ARGBToARGB1555Row_C(const uint8* src_argb, uint8* dst_rgb, int width) {
  for (int x = 0; x < width - 1; x += 2) {
    uint8 b0 = src_argb[0] >> 3;
    uint8 g0 = src_argb[1] >> 3;
    uint8 r0 = src_argb[2] >> 3;
    uint8 a0 = src_argb[3] >> 7;
    uint8 b1 = src_argb[4] >> 3;
    uint8 g1 = src_argb[5] >> 3;
    uint8 r1 = src_argb[6] >> 3;
    uint8 a1 = src_argb[7] >> 7;
    *reinterpret_cast<uint32*>(dst_rgb) =
        b0 | (g0 << 5) | (r0 << 10) | (a0 << 15) |
        (b1 << 16) | (g1 << 21) | (r1 << 26) | (a1 << 31);
    dst_rgb += 4;
    src_argb += 8;
  }
  if (width & 1) {
    uint8 b0 = src_argb[0] >> 3;
    uint8 g0 = src_argb[1] >> 3;
    uint8 r0 = src_argb[2] >> 3;
    uint8 a0 = src_argb[3] >> 7;
    *reinterpret_cast<uint16*>(dst_rgb) =
        b0 | (g0 << 5) | (r0 << 10) | (a0 << 15);
  }
}

void ARGBToARGB4444Row_C(const uint8* src_argb, uint8* dst_rgb, int width) {
  for (int x = 0; x < width - 1; x += 2) {
    uint8 b0 = src_argb[0] >> 4;
    uint8 g0 = src_argb[1] >> 4;
    uint8 r0 = src_argb[2] >> 4;
    uint8 a0 = src_argb[3] >> 4;
    uint8 b1 = src_argb[4] >> 4;
    uint8 g1 = src_argb[5] >> 4;
    uint8 r1 = src_argb[6] >> 4;
    uint8 a1 = src_argb[7] >> 4;
    *reinterpret_cast<uint32*>(dst_rgb) =
        b0 | (g0 << 4) | (r0 << 8) | (a0 << 12) |
        (b1 << 16) | (g1 << 20) | (r1 << 24) | (a1 << 28);
    dst_rgb += 4;
    src_argb += 8;
  }
  if (width & 1) {
    uint8 b0 = src_argb[0] >> 4;
    uint8 g0 = src_argb[1] >> 4;
    uint8 r0 = src_argb[2] >> 4;
    uint8 a0 = src_argb[3] >> 4;
    *reinterpret_cast<uint16*>(dst_rgb) =
        b0 | (g0 << 4) | (r0 << 8) | (a0 << 12);
  }
}

static __inline int RGBToY(uint8 r, uint8 g, uint8 b) {
  return (66 * r + 129 * g + 25 * b + 0x1080) >> 8;
}

static __inline int RGBToU(uint8 r, uint8 g, uint8 b) {
  return (112 * b - 74 * g - 38 * r + 0x8080) >> 8;
}

static __inline int RGBToV(uint8 r, uint8 g, uint8 b) {
  return (112 * r - 94 * g - 18 * b + 0x8080) >> 8;
}

#define MAKEROWY(NAME, R, G, B, BPP) \
void NAME ## ToYRow_C(const uint8* src_argb0, uint8* dst_y, int width) { \
  for (int x = 0; x < width; ++x) { \
    dst_y[0] = RGBToY(src_argb0[R], src_argb0[G], src_argb0[B]); \
    src_argb0 += BPP; \
    dst_y += 1; \
  } \
} \
void NAME ## ToUVRow_C(const uint8* src_rgb0, int src_stride_rgb, \
                       uint8* dst_u, uint8* dst_v, int width) { \
  const uint8* src_rgb1 = src_rgb0 + src_stride_rgb; \
  for (int x = 0; x < width - 1; x += 2) { \
    uint8 ab = (src_rgb0[B] + src_rgb0[B + BPP] + \
                src_rgb1[B] + src_rgb1[B + BPP]) >> 2; \
    uint8 ag = (src_rgb0[G] + src_rgb0[G + BPP] + \
                src_rgb1[G] + src_rgb1[G + BPP]) >> 2; \
    uint8 ar = (src_rgb0[R] + src_rgb0[R + BPP] + \
                src_rgb1[R] + src_rgb1[R + BPP]) >> 2; \
    dst_u[0] = RGBToU(ar, ag, ab); \
    dst_v[0] = RGBToV(ar, ag, ab); \
    src_rgb0 += BPP * 2; \
    src_rgb1 += BPP * 2; \
    dst_u += 1; \
    dst_v += 1; \
  } \
  if (width & 1) { \
    uint8 ab = (src_rgb0[B] + src_rgb1[B]) >> 1; \
    uint8 ag = (src_rgb0[G] + src_rgb1[G]) >> 1; \
    uint8 ar = (src_rgb0[R] + src_rgb1[R]) >> 1; \
    dst_u[0] = RGBToU(ar, ag, ab); \
    dst_v[0] = RGBToV(ar, ag, ab); \
  } \
}

MAKEROWY(ARGB, 2, 1, 0, 4)
MAKEROWY(BGRA, 1, 2, 3, 4)
MAKEROWY(ABGR, 0, 1, 2, 4)
MAKEROWY(RGBA, 3, 2, 1, 4)
MAKEROWY(RGB24, 2, 1, 0, 3)
MAKEROWY(RAW, 0, 1, 2, 3)
#undef MAKEROWY
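// Worked check of the fixed-point coefficients above (not in the original
// source): RGBToY(255, 255, 255) = (66*255 + 129*255 + 25*255 + 0x1080) >> 8
// = 235 and RGBToY(0, 0, 0) = 0x1080 >> 8 = 16, i.e. BT.601 studio-swing luma,
// while any grey input gives RGBToU = RGBToV = 0x8080 >> 8 = 128 (neutral
// chroma).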
// JPeg uses a variation on BT.601-1 full range
// y = 0.29900 * r + 0.58700 * g + 0.11400 * b
// u = -0.16874 * r - 0.33126 * g + 0.50000 * b + center
// v = 0.50000 * r - 0.41869 * g - 0.08131 * b + center
// BT.601 Mpeg range uses:
// b 0.1016 * 255 = 25.908 = 25
// g 0.5078 * 255 = 129.489 = 129
// r 0.2578 * 255 = 65.739 = 66
// JPeg 8 bit Y (not used):
// b 0.11400 * 256 = 29.184 = 29
// g 0.58700 * 256 = 150.272 = 150
// r 0.29900 * 256 = 76.544 = 77
// JPeg 7 bit Y:
// b 0.11400 * 128 = 14.592 = 15
// g 0.58700 * 128 = 75.136 = 75
// r 0.29900 * 128 = 38.272 = 38
// JPeg 8 bit U:
// b 0.50000 * 255 = 127.5 = 127
// g -0.33126 * 255 = -84.4713 = -84
// r -0.16874 * 255 = -43.0287 = -43
// JPeg 8 bit V:
// b -0.08131 * 255 = -20.73405 = -20
// g -0.41869 * 255 = -106.76595 = -107
// r 0.50000 * 255 = 127.5 = 127

static __inline int RGBToYJ(uint8 r, uint8 g, uint8 b) {
  return (38 * r + 75 * g + 15 * b + 64) >> 7;
}

static __inline int RGBToUJ(uint8 r, uint8 g, uint8 b) {
  return (127 * b - 84 * g - 43 * r + 0x8080) >> 8;
}

static __inline int RGBToVJ(uint8 r, uint8 g, uint8 b) {
  return (127 * r - 107 * g - 20 * b + 0x8080) >> 8;
}

#define AVGB(a, b) (((a) + (b) + 1) >> 1)

#define MAKEROWYJ(NAME, R, G, B, BPP) \
void NAME ## ToYJRow_C(const uint8* src_argb0, uint8* dst_y, int width) { \
  for (int x = 0; x < width; ++x) { \
    dst_y[0] = RGBToYJ(src_argb0[R], src_argb0[G], src_argb0[B]); \
    src_argb0 += BPP; \
    dst_y += 1; \
  } \
} \
void NAME ## ToUVJRow_C(const uint8* src_rgb0, int src_stride_rgb, \
                        uint8* dst_u, uint8* dst_v, int width) { \
  const uint8* src_rgb1 = src_rgb0 + src_stride_rgb; \
  for (int x = 0; x < width - 1; x += 2) { \
    uint8 ab = AVGB(AVGB(src_rgb0[B], src_rgb1[B]), \
                    AVGB(src_rgb0[B + BPP], src_rgb1[B + BPP])); \
    uint8 ag = AVGB(AVGB(src_rgb0[G], src_rgb1[G]), \
                    AVGB(src_rgb0[G + BPP], src_rgb1[G + BPP])); \
    uint8 ar = AVGB(AVGB(src_rgb0[R], src_rgb1[R]), \
                    AVGB(src_rgb0[R + BPP], src_rgb1[R + BPP])); \
    dst_u[0] = RGBToUJ(ar, ag, ab); \
    dst_v[0] = RGBToVJ(ar, ag, ab); \
    src_rgb0 += BPP * 2; \
    src_rgb1 += BPP * 2; \
    dst_u += 1; \
    dst_v += 1; \
  } \
  if (width & 1) { \
    uint8 ab = AVGB(src_rgb0[B], src_rgb1[B]); \
    uint8 ag = AVGB(src_rgb0[G], src_rgb1[G]); \
    uint8 ar = AVGB(src_rgb0[R], src_rgb1[R]); \
    dst_u[0] = RGBToUJ(ar, ag, ab); \
    dst_v[0] = RGBToVJ(ar, ag, ab); \
  } \
}

MAKEROWYJ(ARGB, 2, 1, 0, 4)
#undef MAKEROWYJ

void RGB565ToYRow_C(const uint8* src_rgb565, uint8* dst_y, int width) {
  for (int x = 0; x < width; ++x) {
    uint8 b = src_rgb565[0] & 0x1f;
    uint8 g = (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3);
    uint8 r = src_rgb565[1] >> 3;
    b = (b << 3) | (b >> 2);
    g = (g << 2) | (g >> 4);
    r = (r << 3) | (r >> 2);
    dst_y[0] = RGBToY(r, g, b);
    src_rgb565 += 2;
    dst_y += 1;
  }
}

void ARGB1555ToYRow_C(const uint8* src_argb1555, uint8* dst_y, int width) {
  for (int x = 0; x < width; ++x) {
    uint8 b = src_argb1555[0] & 0x1f;
    uint8 g = (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3);
    uint8 r = (src_argb1555[1] & 0x7c) >> 2;
    b = (b << 3) | (b >> 2);
    g = (g << 3) | (g >> 2);
    r = (r << 3) | (r >> 2);
    dst_y[0] = RGBToY(r, g, b);
    src_argb1555 += 2;
    dst_y += 1;
  }
}

void ARGB4444ToYRow_C(const uint8* src_argb4444, uint8* dst_y, int width) {
  for (int x = 0; x < width; ++x) {
    uint8 b = src_argb4444[0] & 0x0f;
    uint8 g = src_argb4444[0] >> 4;
    uint8 r = src_argb4444[1] & 0x0f;
    b = (b << 4) | b;
    g = (g << 4) | g;
    r = (r << 4) | r;
    dst_y[0] = RGBToY(r, g, b);
    src_argb4444 += 2;
    dst_y += 1;
  }
}
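// Note on the bit-depth bookkeeping in the UV functions below (illustrative,
// not from the original source): summing a 2x2 block of 5-bit components gives
// at most 4 * 31 = 124, a 7-bit value, and (b << 1) | (b >> 6) then replicates
// the high bit to stretch 7 bits back to 8 (124 -> 249). The 6-bit green of
// RGB565 sums to an 8-bit value directly, which is why it is not expanded.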
void RGB565ToUVRow_C(const uint8* src_rgb565, int src_stride_rgb565,
                     uint8* dst_u, uint8* dst_v, int width) {
  const uint8* next_rgb565 = src_rgb565 + src_stride_rgb565;
  for (int x = 0; x < width - 1; x += 2) {
    uint8 b0 = src_rgb565[0] & 0x1f;
    uint8 g0 = (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3);
    uint8 r0 = src_rgb565[1] >> 3;
    uint8 b1 = src_rgb565[2] & 0x1f;
    uint8 g1 = (src_rgb565[2] >> 5) | ((src_rgb565[3] & 0x07) << 3);
    uint8 r1 = src_rgb565[3] >> 3;
    uint8 b2 = next_rgb565[0] & 0x1f;
    uint8 g2 = (next_rgb565[0] >> 5) | ((next_rgb565[1] & 0x07) << 3);
    uint8 r2 = next_rgb565[1] >> 3;
    uint8 b3 = next_rgb565[2] & 0x1f;
    uint8 g3 = (next_rgb565[2] >> 5) | ((next_rgb565[3] & 0x07) << 3);
    uint8 r3 = next_rgb565[3] >> 3;
    uint8 b = (b0 + b1 + b2 + b3);  // 565 * 4 = 787.
    uint8 g = (g0 + g1 + g2 + g3);
    uint8 r = (r0 + r1 + r2 + r3);
    b = (b << 1) | (b >> 6);  // 787 -> 888.
    r = (r << 1) | (r >> 6);
    dst_u[0] = RGBToU(r, g, b);
    dst_v[0] = RGBToV(r, g, b);
    src_rgb565 += 4;
    next_rgb565 += 4;
    dst_u += 1;
    dst_v += 1;
  }
  if (width & 1) {
    uint8 b0 = src_rgb565[0] & 0x1f;
    uint8 g0 = (src_rgb565[0] >> 5) | ((src_rgb565[1] & 0x07) << 3);
    uint8 r0 = src_rgb565[1] >> 3;
    uint8 b2 = next_rgb565[0] & 0x1f;
    uint8 g2 = (next_rgb565[0] >> 5) | ((next_rgb565[1] & 0x07) << 3);
    uint8 r2 = next_rgb565[1] >> 3;
    uint8 b = (b0 + b2);  // 565 * 2 = 676.
    uint8 g = (g0 + g2);
    uint8 r = (r0 + r2);
    b = (b << 2) | (b >> 4);  // 676 -> 888.
    g = (g << 1) | (g >> 6);
    r = (r << 2) | (r >> 4);
    dst_u[0] = RGBToU(r, g, b);
    dst_v[0] = RGBToV(r, g, b);
  }
}

void ARGB1555ToUVRow_C(const uint8* src_argb1555, int src_stride_argb1555,
                       uint8* dst_u, uint8* dst_v, int width) {
  const uint8* next_argb1555 = src_argb1555 + src_stride_argb1555;
  for (int x = 0; x < width - 1; x += 2) {
    uint8 b0 = src_argb1555[0] & 0x1f;
    uint8 g0 = (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3);
    uint8 r0 = (src_argb1555[1] & 0x7c) >> 2;
    uint8 b1 = src_argb1555[2] & 0x1f;
    uint8 g1 = (src_argb1555[2] >> 5) | ((src_argb1555[3] & 0x03) << 3);
    uint8 r1 = (src_argb1555[3] & 0x7c) >> 2;
    uint8 b2 = next_argb1555[0] & 0x1f;
    uint8 g2 = (next_argb1555[0] >> 5) | ((next_argb1555[1] & 0x03) << 3);
    uint8 r2 = (next_argb1555[1] & 0x7c) >> 2;
    uint8 b3 = next_argb1555[2] & 0x1f;
    uint8 g3 = (next_argb1555[2] >> 5) | ((next_argb1555[3] & 0x03) << 3);
    uint8 r3 = (next_argb1555[3] & 0x7c) >> 2;
    uint8 b = (b0 + b1 + b2 + b3);  // 555 * 4 = 777.
    uint8 g = (g0 + g1 + g2 + g3);
    uint8 r = (r0 + r1 + r2 + r3);
    b = (b << 1) | (b >> 6);  // 777 -> 888.
    g = (g << 1) | (g >> 6);
    r = (r << 1) | (r >> 6);
    dst_u[0] = RGBToU(r, g, b);
    dst_v[0] = RGBToV(r, g, b);
    src_argb1555 += 4;
    next_argb1555 += 4;
    dst_u += 1;
    dst_v += 1;
  }
  if (width & 1) {
    uint8 b0 = src_argb1555[0] & 0x1f;
    uint8 g0 = (src_argb1555[0] >> 5) | ((src_argb1555[1] & 0x03) << 3);
    uint8 r0 = (src_argb1555[1] & 0x7c) >> 2;
    uint8 b2 = next_argb1555[0] & 0x1f;
    uint8 g2 = (next_argb1555[0] >> 5) | ((next_argb1555[1] & 0x03) << 3);
    uint8 r2 = (next_argb1555[1] & 0x7c) >> 2;  // Red is bits 2..6 of byte 1.
    uint8 b = (b0 + b2);  // 555 * 2 = 666.
    uint8 g = (g0 + g2);
    uint8 r = (r0 + r2);
    b = (b << 2) | (b >> 4);  // 666 -> 888.
    g = (g << 2) | (g >> 4);
    r = (r << 2) | (r >> 4);
    dst_u[0] = RGBToU(r, g, b);
    dst_v[0] = RGBToV(r, g, b);
  }
}
void ARGB4444ToUVRow_C(const uint8* src_argb4444, int src_stride_argb4444,
                       uint8* dst_u, uint8* dst_v, int width) {
  const uint8* next_argb4444 = src_argb4444 + src_stride_argb4444;
  for (int x = 0; x < width - 1; x += 2) {
    uint8 b0 = src_argb4444[0] & 0x0f;
    uint8 g0 = src_argb4444[0] >> 4;
    uint8 r0 = src_argb4444[1] & 0x0f;
    uint8 b1 = src_argb4444[2] & 0x0f;
    uint8 g1 = src_argb4444[2] >> 4;
    uint8 r1 = src_argb4444[3] & 0x0f;
    uint8 b2 = next_argb4444[0] & 0x0f;
    uint8 g2 = next_argb4444[0] >> 4;
    uint8 r2 = next_argb4444[1] & 0x0f;
    uint8 b3 = next_argb4444[2] & 0x0f;
    uint8 g3 = next_argb4444[2] >> 4;
    uint8 r3 = next_argb4444[3] & 0x0f;
    uint8 b = (b0 + b1 + b2 + b3);  // 444 * 4 = 666.
    uint8 g = (g0 + g1 + g2 + g3);
    uint8 r = (r0 + r1 + r2 + r3);
    b = (b << 2) | (b >> 4);  // 666 -> 888.
    g = (g << 2) | (g >> 4);
    r = (r << 2) | (r >> 4);
    dst_u[0] = RGBToU(r, g, b);
    dst_v[0] = RGBToV(r, g, b);
    src_argb4444 += 4;
    next_argb4444 += 4;
    dst_u += 1;
    dst_v += 1;
  }
  if (width & 1) {
    uint8 b0 = src_argb4444[0] & 0x0f;
    uint8 g0 = src_argb4444[0] >> 4;
    uint8 r0 = src_argb4444[1] & 0x0f;
    uint8 b2 = next_argb4444[0] & 0x0f;
    uint8 g2 = next_argb4444[0] >> 4;
    uint8 r2 = next_argb4444[1] & 0x0f;
    uint8 b = (b0 + b2);  // 444 * 2 = 555.
    uint8 g = (g0 + g2);
    uint8 r = (r0 + r2);
    b = (b << 3) | (b >> 2);  // 555 -> 888.
    g = (g << 3) | (g >> 2);
    r = (r << 3) | (r >> 2);
    dst_u[0] = RGBToU(r, g, b);
    dst_v[0] = RGBToV(r, g, b);
  }
}

void ARGBToUV444Row_C(const uint8* src_argb,
                      uint8* dst_u, uint8* dst_v, int width) {
  for (int x = 0; x < width; ++x) {
    uint8 ab = src_argb[0];
    uint8 ag = src_argb[1];
    uint8 ar = src_argb[2];
    dst_u[0] = RGBToU(ar, ag, ab);
    dst_v[0] = RGBToV(ar, ag, ab);
    src_argb += 4;
    dst_u += 1;
    dst_v += 1;
  }
}

void ARGBToUV422Row_C(const uint8* src_argb,
                      uint8* dst_u, uint8* dst_v, int width) {
  for (int x = 0; x < width - 1; x += 2) {
    uint8 ab = (src_argb[0] + src_argb[4]) >> 1;
    uint8 ag = (src_argb[1] + src_argb[5]) >> 1;
    uint8 ar = (src_argb[2] + src_argb[6]) >> 1;
    dst_u[0] = RGBToU(ar, ag, ab);
    dst_v[0] = RGBToV(ar, ag, ab);
    src_argb += 8;
    dst_u += 1;
    dst_v += 1;
  }
  if (width & 1) {
    uint8 ab = src_argb[0];
    uint8 ag = src_argb[1];
    uint8 ar = src_argb[2];
    dst_u[0] = RGBToU(ar, ag, ab);
    dst_v[0] = RGBToV(ar, ag, ab);
  }
}

void ARGBToUV411Row_C(const uint8* src_argb,
                      uint8* dst_u, uint8* dst_v, int width) {
  for (int x = 0; x < width - 3; x += 4) {
    uint8 ab = (src_argb[0] + src_argb[4] + src_argb[8] + src_argb[12]) >> 2;
    uint8 ag = (src_argb[1] + src_argb[5] + src_argb[9] + src_argb[13]) >> 2;
    uint8 ar = (src_argb[2] + src_argb[6] + src_argb[10] + src_argb[14]) >> 2;
    dst_u[0] = RGBToU(ar, ag, ab);
    dst_v[0] = RGBToV(ar, ag, ab);
    src_argb += 16;
    dst_u += 1;
    dst_v += 1;
  }
  if ((width & 3) == 3) {
    uint8 ab = (src_argb[0] + src_argb[4] + src_argb[8]) / 3;
    uint8 ag = (src_argb[1] + src_argb[5] + src_argb[9]) / 3;
    uint8 ar = (src_argb[2] + src_argb[6] + src_argb[10]) / 3;
    dst_u[0] = RGBToU(ar, ag, ab);
    dst_v[0] = RGBToV(ar, ag, ab);
  } else if ((width & 3) == 2) {
    uint8 ab = (src_argb[0] + src_argb[4]) >> 1;
    uint8 ag = (src_argb[1] + src_argb[5]) >> 1;
    uint8 ar = (src_argb[2] + src_argb[6]) >> 1;
    dst_u[0] = RGBToU(ar, ag, ab);
    dst_v[0] = RGBToV(ar, ag, ab);
  } else if ((width & 3) == 1) {
    uint8 ab = src_argb[0];
    uint8 ag = src_argb[1];
    uint8 ar = src_argb[2];
    dst_u[0] = RGBToU(ar, ag, ab);
    dst_v[0] = RGBToV(ar, ag, ab);
  }
}

void ARGBGrayRow_C(const uint8* src_argb, uint8* dst_argb, int width) {
  for (int x = 0; x < width; ++x) {
    uint8 y = RGBToYJ(src_argb[2], src_argb[1], src_argb[0]);
    dst_argb[2] = dst_argb[1] = dst_argb[0] = y;
    dst_argb[3] = src_argb[3];
    dst_argb += 4;
    src_argb += 4;
  }
}

// Convert a row of image to Sepia tone.
void ARGBSepiaRow_C(uint8* dst_argb, int width) {
  for (int x = 0; x < width; ++x) {
    int b = dst_argb[0];
    int g = dst_argb[1];
    int r = dst_argb[2];
    int sb = (b * 17 + g * 68 + r * 35) >> 7;
    int sg = (b * 22 + g * 88 + r * 45) >> 7;
    int sr = (b * 24 + g * 98 + r * 50) >> 7;
    // b does not overflow. a is preserved from original.
    dst_argb[0] = sb;
    dst_argb[1] = clamp255(sg);
    dst_argb[2] = clamp255(sr);
    dst_argb += 4;
  }
}

// Apply color matrix to a row of image. Matrix is signed.
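// Illustrative usage (not part of the original source; row_argb and width are
// assumed to come from the caller): a matrix that swaps the R and B channels,
// expressed in signed 1.7 fixed point where 127 is roughly 1.0 (so each output
// channel is attenuated by 1/128):
//   static const int8 kSwapRBMatrix[12] = {
//     0, 0, 127, 0,    // New B = old R.
//     0, 127, 0, 0,    // New G = old G.
//     127, 0, 0, 0,    // New R = old B.
//   };
//   ARGBColorMatrixRow_C(row_argb, kSwapRBMatrix, width);
// Alpha is read as a fourth input term, but the function below only rewrites
// the B, G and R bytes.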
void ARGBColorMatrixRow_C(uint8* dst_argb, const int8* matrix_argb,
                          int width) {
  for (int x = 0; x < width; ++x) {
    int b = dst_argb[0];
    int g = dst_argb[1];
    int r = dst_argb[2];
    int a = dst_argb[3];
    int sb = (b * matrix_argb[0] + g * matrix_argb[1] +
              r * matrix_argb[2] + a * matrix_argb[3]) >> 7;
    int sg = (b * matrix_argb[4] + g * matrix_argb[5] +
              r * matrix_argb[6] + a * matrix_argb[7]) >> 7;
    int sr = (b * matrix_argb[8] + g * matrix_argb[9] +
              r * matrix_argb[10] + a * matrix_argb[11]) >> 7;
    dst_argb[0] = Clamp(sb);
    dst_argb[1] = Clamp(sg);
    dst_argb[2] = Clamp(sr);
    dst_argb += 4;
  }
}

// Apply color table to a row of image.
void ARGBColorTableRow_C(uint8* dst_argb, const uint8* table_argb, int width) {
  for (int x = 0; x < width; ++x) {
    int b = dst_argb[0];
    int g = dst_argb[1];
    int r = dst_argb[2];
    int a = dst_argb[3];
    dst_argb[0] = table_argb[b * 4 + 0];
    dst_argb[1] = table_argb[g * 4 + 1];
    dst_argb[2] = table_argb[r * 4 + 2];
    dst_argb[3] = table_argb[a * 4 + 3];
    dst_argb += 4;
  }
}

void ARGBQuantizeRow_C(uint8* dst_argb, int scale, int interval_size,
                       int interval_offset, int width) {
  for (int x = 0; x < width; ++x) {
    int b = dst_argb[0];
    int g = dst_argb[1];
    int r = dst_argb[2];
    dst_argb[0] = (b * scale >> 16) * interval_size + interval_offset;
    dst_argb[1] = (g * scale >> 16) * interval_size + interval_offset;
    dst_argb[2] = (r * scale >> 16) * interval_size + interval_offset;
    dst_argb += 4;
  }
}

#define REPEAT8(v) (v) | ((v) << 8)
#define SHADE(f, v) v * f >> 24

void ARGBShadeRow_C(const uint8* src_argb, uint8* dst_argb, int width,
                    uint32 value) {
  const uint32 b_scale = REPEAT8(value & 0xff);
  const uint32 g_scale = REPEAT8((value >> 8) & 0xff);
  const uint32 r_scale = REPEAT8((value >> 16) & 0xff);
  const uint32 a_scale = REPEAT8(value >> 24);
  for (int i = 0; i < width; ++i) {
    const uint32 b = REPEAT8(src_argb[0]);
    const uint32 g = REPEAT8(src_argb[1]);
    const uint32 r = REPEAT8(src_argb[2]);
    const uint32 a = REPEAT8(src_argb[3]);
    dst_argb[0] = SHADE(b, b_scale);
    dst_argb[1] = SHADE(g, g_scale);
    dst_argb[2] = SHADE(r, r_scale);
    dst_argb[3] = SHADE(a, a_scale);
    src_argb += 4;
    dst_argb += 4;
  }
}
#undef REPEAT8
#undef SHADE

#define REPEAT8(v) (v) | ((v) << 8)
#define SHADE(f, v) v * f >> 16

void ARGBMultiplyRow_C(const uint8* src_argb0, const uint8* src_argb1,
                       uint8* dst_argb, int width) {
  for (int i = 0; i < width; ++i) {
    const uint32 b = REPEAT8(src_argb0[0]);
    const uint32 g = REPEAT8(src_argb0[1]);
    const uint32 r = REPEAT8(src_argb0[2]);
    const uint32 a = REPEAT8(src_argb0[3]);
    const uint32 b_scale = src_argb1[0];
    const uint32 g_scale = src_argb1[1];
    const uint32 r_scale = src_argb1[2];
    const uint32 a_scale = src_argb1[3];
    dst_argb[0] = SHADE(b, b_scale);
    dst_argb[1] = SHADE(g, g_scale);
    dst_argb[2] = SHADE(r, r_scale);
    dst_argb[3] = SHADE(a, a_scale);
    src_argb0 += 4;
    src_argb1 += 4;
    dst_argb += 4;
  }
}
#undef REPEAT8
#undef SHADE

#define SHADE(f, v) clamp255(v + f)

void ARGBAddRow_C(const uint8* src_argb0, const uint8* src_argb1,
                  uint8* dst_argb, int width) {
  for (int i = 0; i < width; ++i) {
    const int b = src_argb0[0];
    const int g = src_argb0[1];
    const int r = src_argb0[2];
    const int a = src_argb0[3];
    const int b_add = src_argb1[0];
    const int g_add = src_argb1[1];
    const int r_add = src_argb1[2];
    const int a_add = src_argb1[3];
    dst_argb[0] = SHADE(b, b_add);
    dst_argb[1] = SHADE(g, g_add);
    dst_argb[2] = SHADE(r, r_add);
    dst_argb[3] = SHADE(a, a_add);
    src_argb0 += 4;
    src_argb1 += 4;
    dst_argb += 4;
  }
}
#undef SHADE
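// Note on the REPEAT8/SHADE scheme used by ARGBShadeRow_C and
// ARGBMultiplyRow_C above (illustrative, not from the original source):
// REPEAT8(v) is v | (v << 8) = v * 257, which widens an 8-bit channel to a
// 16-bit value so that a plain multiply and shift approximates division by
// 255. For example, shading white (0xff -> 0xffff) by a half-intensity value
// byte (0x80 -> 0x8080) gives (0xffff * 0x8080) >> 24 = 128.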
const uint8* src_argb1, uint8* dst_argb, int width) { for (int i = 0; i < width; ++i) { const int b = src_argb0[0]; const int g = src_argb0[1]; const int r = src_argb0[2]; const int a = src_argb0[3]; const int b_sub = src_argb1[0]; const int g_sub = src_argb1[1]; const int r_sub = src_argb1[2]; const int a_sub = src_argb1[3]; dst_argb[0] = SHADE(b, b_sub); dst_argb[1] = SHADE(g, g_sub); dst_argb[2] = SHADE(r, r_sub); dst_argb[3] = SHADE(a, a_sub); src_argb0 += 4; src_argb1 += 4; dst_argb += 4; } } #undef SHADE // Sobel functions which mimic SSSE3. void SobelXRow_C(const uint8* src_y0, const uint8* src_y1, const uint8* src_y2, uint8* dst_sobelx, int width) { for (int i = 0; i < width; ++i) { int a = src_y0[i]; int b = src_y1[i]; int c = src_y2[i]; int a_sub = src_y0[i + 2]; int b_sub = src_y1[i + 2]; int c_sub = src_y2[i + 2]; int a_diff = a - a_sub; int b_diff = b - b_sub; int c_diff = c - c_sub; int sobel = Abs(a_diff + b_diff * 2 + c_diff); dst_sobelx[i] = static_cast<uint8>(clamp255(sobel)); } } void SobelYRow_C(const uint8* src_y0, const uint8* src_y1, uint8* dst_sobely, int width) { for (int i = 0; i < width; ++i) { int a = src_y0[i + 0]; int b = src_y0[i + 1]; int c = src_y0[i + 2]; int a_sub = src_y1[i + 0]; int b_sub = src_y1[i + 1]; int c_sub = src_y1[i + 2]; int a_diff = a - a_sub; int b_diff = b - b_sub; int c_diff = c - c_sub; int sobel = Abs(a_diff + b_diff * 2 + c_diff); dst_sobely[i] = static_cast<uint8>(clamp255(sobel)); } } void SobelRow_C(const uint8* src_sobelx, const uint8* src_sobely, uint8* dst_argb, int width) { for (int i = 0; i < width; ++i) { int r = src_sobelx[i]; int b = src_sobely[i]; int s = clamp255(r + b); dst_argb[0] = static_cast<uint8>(s); dst_argb[1] = static_cast<uint8>(s); dst_argb[2] = static_cast<uint8>(s); dst_argb[3] = static_cast<uint8>(255u); dst_argb += 4; } } void SobelXYRow_C(const uint8* src_sobelx, const uint8* src_sobely, uint8* dst_argb, int width) { for (int i = 0; i < width; ++i) { int r = src_sobelx[i]; int b = src_sobely[i]; int g = clamp255(r + b); dst_argb[0] = static_cast<uint8>(b); dst_argb[1] = static_cast<uint8>(g); dst_argb[2] = static_cast<uint8>(r); dst_argb[3] = static_cast<uint8>(255u); dst_argb += 4; } } void I400ToARGBRow_C(const uint8* src_y, uint8* dst_argb, int width) { // Copy a Y to RGB. for (int x = 0; x < width; ++x) { uint8 y = src_y[0]; dst_argb[2] = dst_argb[1] = dst_argb[0] = y; dst_argb[3] = 255u; dst_argb += 4; ++src_y; } } // C reference code that mimics the YUV assembly. #define YG 74 /* static_cast<int8>(1.164 * 64 + 0.5) */ #define UB 127 /* min(63,static_cast<int8>(2.018 * 64)) */ #define UG -25 /* static_cast<int8>(-0.391 * 64 - 0.5) */ #define UR 0 #define VB 0 #define VG -52 /* static_cast<int8>(-0.813 * 64 - 0.5) */ #define VR 102 /* static_cast<int8>(1.596 * 64 + 0.5) */ // Bias #define BB UB * 128 + VB * 128 #define BG UG * 128 + VG * 128 #define BR UR * 128 + VR * 128 static __inline void YuvPixel(uint8 y, uint8 u, uint8 v, uint8* b, uint8* g, uint8* r) { int32 y1 = (static_cast<int32>(y) - 16) * YG; *b = Clamp(static_cast<int32>((u * UB + v * VB) - (BB) + y1) >> 6); *g = Clamp(static_cast<int32>((u * UG + v * VG) - (BG) + y1) >> 6); *r = Clamp(static_cast<int32>((u * UR + v * VR) - (BR) + y1) >> 6); } #if !defined(LIBYUV_DISABLE_NEON) && \ (defined(__ARM_NEON__) || defined(LIBYUV_NEON)) // C mimics assembly. // TODO(fbarchard): Remove subsampling from Neon.
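// Worked example of YuvPixel above (the usual BT.601 studio-swing constants in 6 bit fixed point): for a grey pixel with U = V = 128 the chroma terms cancel against the bias, leaving b = g = r = Clamp(((y - 16) * 74) >> 6), so Y = 16 maps to 0 and Y = 235 maps to (219 * 74) >> 6 = 253; the result is not exactly 255 because of the 6 bit approximation.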
void I444ToARGBRow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { uint8 u = (src_u[0] + src_u[1] + 1) >> 1; uint8 v = (src_v[0] + src_v[1] + 1) >> 1; YuvPixel(src_y[0], u, v, rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_y[1], u, v, rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_y += 2; src_u += 2; src_v += 2; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); } } #else void I444ToARGBRow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width; ++x) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; src_y += 1; src_u += 1; src_v += 1; rgb_buf += 4; // Advance 1 pixel. } } #endif // Also used for 420 void I422ToARGBRow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_y += 2; src_u += 1; src_v += 1; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; } } void I422ToRGB24Row_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 3, rgb_buf + 4, rgb_buf + 5); src_y += 2; src_u += 1; src_v += 1; rgb_buf += 6; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); } } void I422ToRAWRow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 2, rgb_buf + 1, rgb_buf + 0); YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 5, rgb_buf + 4, rgb_buf + 3); src_y += 2; src_u += 1; src_v += 1; rgb_buf += 6; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 2, rgb_buf + 1, rgb_buf + 0); } } void I422ToARGB4444Row_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* dst_argb4444, int width) { uint8 b0; uint8 g0; uint8 r0; uint8 b1; uint8 g1; uint8 r1; for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], &b0, &g0, &r0); YuvPixel(src_y[1], src_u[0], src_v[0], &b1, &g1, &r1); b0 = b0 >> 4; g0 = g0 >> 4; r0 = r0 >> 4; b1 = b1 >> 4; g1 = g1 >> 4; r1 = r1 >> 4; *reinterpret_cast<uint32*>(dst_argb4444) = b0 | (g0 << 4) | (r0 << 8) | (b1 << 16) | (g1 << 20) | (r1 << 24) | 0xf000f000; src_y += 2; src_u += 1; src_v += 1; dst_argb4444 += 4; // Advance 2 pixels.
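// The 32-bit store above packs two ARGB4444 pixels little-endian: bits 0-3 hold B0, 4-7 G0, 8-11 R0, 12-15 A0, and bits 16-31 hold the second pixel; the 0xf000f000 constant forces both alpha nibbles to opaque.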
} if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], &b0, &g0, &r0); b0 = b0 >> 4; g0 = g0 >> 4; r0 = r0 >> 4; *reinterpret_cast<uint16*>(dst_argb4444) = b0 | (g0 << 4) | (r0 << 8) | 0xf000; } } void I422ToARGB1555Row_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* dst_argb1555, int width) { uint8 b0; uint8 g0; uint8 r0; uint8 b1; uint8 g1; uint8 r1; for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], &b0, &g0, &r0); YuvPixel(src_y[1], src_u[0], src_v[0], &b1, &g1, &r1); b0 = b0 >> 3; g0 = g0 >> 3; r0 = r0 >> 3; b1 = b1 >> 3; g1 = g1 >> 3; r1 = r1 >> 3; *reinterpret_cast<uint32*>(dst_argb1555) = b0 | (g0 << 5) | (r0 << 10) | (b1 << 16) | (g1 << 21) | (r1 << 26) | 0x80008000; src_y += 2; src_u += 1; src_v += 1; dst_argb1555 += 4; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], &b0, &g0, &r0); b0 = b0 >> 3; g0 = g0 >> 3; r0 = r0 >> 3; *reinterpret_cast<uint16*>(dst_argb1555) = b0 | (g0 << 5) | (r0 << 10) | 0x8000; } } void I422ToRGB565Row_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* dst_rgb565, int width) { uint8 b0; uint8 g0; uint8 r0; uint8 b1; uint8 g1; uint8 r1; for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], &b0, &g0, &r0); YuvPixel(src_y[1], src_u[0], src_v[0], &b1, &g1, &r1); b0 = b0 >> 3; g0 = g0 >> 2; r0 = r0 >> 3; b1 = b1 >> 3; g1 = g1 >> 2; r1 = r1 >> 3; *reinterpret_cast<uint32*>(dst_rgb565) = b0 | (g0 << 5) | (r0 << 11) | (b1 << 16) | (g1 << 21) | (r1 << 27); src_y += 2; src_u += 1; src_v += 1; dst_rgb565 += 4; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], &b0, &g0, &r0); b0 = b0 >> 3; g0 = g0 >> 2; r0 = r0 >> 3; *reinterpret_cast<uint16*>(dst_rgb565) = b0 | (g0 << 5) | (r0 << 11); } } void I411ToARGBRow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 3; x += 4) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; YuvPixel(src_y[2], src_u[0], src_v[0], rgb_buf + 8, rgb_buf + 9, rgb_buf + 10); rgb_buf[11] = 255; YuvPixel(src_y[3], src_u[0], src_v[0], rgb_buf + 12, rgb_buf + 13, rgb_buf + 14); rgb_buf[15] = 255; src_y += 4; src_u += 1; src_v += 1; rgb_buf += 16; // Advance 4 pixels. } if (width & 2) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_y += 2; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; } } void NV12ToARGBRow_C(const uint8* src_y, const uint8* usrc_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], usrc_v[0], usrc_v[1], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_y[1], usrc_v[0], usrc_v[1], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_y += 2; usrc_v += 2; rgb_buf += 8; // Advance 2 pixels.
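// NV12 carries chroma as a single interleaved U/V plane, so usrc_v[0] above is U and usrc_v[1] is V for both pixels of the pair; NV21 below is the same layout with the two chroma bytes swapped.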
} if (width & 1) { YuvPixel(src_y[0], usrc_v[0], usrc_v[1], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; } } void NV21ToARGBRow_C(const uint8* src_y, const uint8* src_vu, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_vu[1], src_vu[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_y[1], src_vu[1], src_vu[0], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_y += 2; src_vu += 2; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_vu[1], src_vu[0], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; } } void NV12ToRGB565Row_C(const uint8* src_y, const uint8* usrc_v, uint8* dst_rgb565, int width) { uint8 b0; uint8 g0; uint8 r0; uint8 b1; uint8 g1; uint8 r1; for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], usrc_v[0], usrc_v[1], &b0, &g0, &r0); YuvPixel(src_y[1], usrc_v[0], usrc_v[1], &b1, &g1, &r1); b0 = b0 >> 3; g0 = g0 >> 2; r0 = r0 >> 3; b1 = b1 >> 3; g1 = g1 >> 2; r1 = r1 >> 3; *reinterpret_cast<uint32*>(dst_rgb565) = b0 | (g0 << 5) | (r0 << 11) | (b1 << 16) | (g1 << 21) | (r1 << 27); src_y += 2; usrc_v += 2; dst_rgb565 += 4; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], usrc_v[0], usrc_v[1], &b0, &g0, &r0); b0 = b0 >> 3; g0 = g0 >> 2; r0 = r0 >> 3; *reinterpret_cast<uint16*>(dst_rgb565) = b0 | (g0 << 5) | (r0 << 11); } } void NV21ToRGB565Row_C(const uint8* src_y, const uint8* vsrc_u, uint8* dst_rgb565, int width) { uint8 b0; uint8 g0; uint8 r0; uint8 b1; uint8 g1; uint8 r1; for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], vsrc_u[1], vsrc_u[0], &b0, &g0, &r0); YuvPixel(src_y[1], vsrc_u[1], vsrc_u[0], &b1, &g1, &r1); b0 = b0 >> 3; g0 = g0 >> 2; r0 = r0 >> 3; b1 = b1 >> 3; g1 = g1 >> 2; r1 = r1 >> 3; *reinterpret_cast<uint32*>(dst_rgb565) = b0 | (g0 << 5) | (r0 << 11) | (b1 << 16) | (g1 << 21) | (r1 << 27); src_y += 2; vsrc_u += 2; dst_rgb565 += 4; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], vsrc_u[1], vsrc_u[0], &b0, &g0, &r0); b0 = b0 >> 3; g0 = g0 >> 2; r0 = r0 >> 3; *reinterpret_cast<uint16*>(dst_rgb565) = b0 | (g0 << 5) | (r0 << 11); } } void YUY2ToARGBRow_C(const uint8* src_yuy2, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_yuy2[0], src_yuy2[1], src_yuy2[3], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_yuy2[2], src_yuy2[1], src_yuy2[3], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_yuy2 += 4; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_yuy2[0], src_yuy2[1], src_yuy2[3], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; } } void UYVYToARGBRow_C(const uint8* src_uyvy, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_uyvy[1], src_uyvy[0], src_uyvy[2], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_uyvy[3], src_uyvy[0], src_uyvy[2], rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_uyvy += 4; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_uyvy[1], src_uyvy[0], src_uyvy[2], rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; } } void I422ToBGRARow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 3, rgb_buf + 2, rgb_buf + 1); rgb_buf[0] = 255; YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 7, rgb_buf + 6, rgb_buf + 5); rgb_buf[4] = 255; src_y += 2; src_u += 1; src_v += 1; rgb_buf += 8; // Advance 2 pixels.
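// Note the destination byte order for this BGRA variant: byte 0 is alpha and bytes 1..3 receive R, G, B, because YuvPixel above is handed the b/g/r pointers in reverse.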
} if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 3, rgb_buf + 2, rgb_buf + 1); rgb_buf[0] = 255; } } void I422ToABGRRow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 2, rgb_buf + 1, rgb_buf + 0); rgb_buf[3] = 255; YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 6, rgb_buf + 5, rgb_buf + 4); rgb_buf[7] = 255; src_y += 2; src_u += 1; src_v += 1; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 2, rgb_buf + 1, rgb_buf + 0); rgb_buf[3] = 255; } } void I422ToRGBARow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 1, rgb_buf + 2, rgb_buf + 3); rgb_buf[0] = 255; YuvPixel(src_y[1], src_u[0], src_v[0], rgb_buf + 5, rgb_buf + 6, rgb_buf + 7); rgb_buf[4] = 255; src_y += 2; src_u += 1; src_v += 1; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], src_u[0], src_v[0], rgb_buf + 1, rgb_buf + 2, rgb_buf + 3); rgb_buf[0] = 255; } } void YToARGBRow_C(const uint8* src_y, uint8* rgb_buf, int width) { for (int x = 0; x < width - 1; x += 2) { YuvPixel(src_y[0], 128, 128, rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; YuvPixel(src_y[1], 128, 128, rgb_buf + 4, rgb_buf + 5, rgb_buf + 6); rgb_buf[7] = 255; src_y += 2; rgb_buf += 8; // Advance 2 pixels. } if (width & 1) { YuvPixel(src_y[0], 128, 128, rgb_buf + 0, rgb_buf + 1, rgb_buf + 2); rgb_buf[3] = 255; } } void MirrorRow_C(const uint8* src, uint8* dst, int width) { src += width - 1; for (int x = 0; x < width - 1; x += 2) { dst[x] = src[0]; dst[x + 1] = src[-1]; src -= 2; } if (width & 1) { dst[width - 1] = src[0]; } } void MirrorUVRow_C(const uint8* src_uv, uint8* dst_u, uint8* dst_v, int width) { src_uv += (width - 1) << 1; for (int x = 0; x < width - 1; x += 2) { dst_u[x] = src_uv[0]; dst_u[x + 1] = src_uv[-2]; dst_v[x] = src_uv[1]; dst_v[x + 1] = src_uv[-2 + 1]; src_uv -= 4; } if (width & 1) { dst_u[width - 1] = src_uv[0]; dst_v[width - 1] = src_uv[1]; } } void ARGBMirrorRow_C(const uint8* src, uint8* dst, int width) { const uint32* src32 = reinterpret_cast<const uint32*>(src); uint32* dst32 = reinterpret_cast<uint32*>(dst); src32 += width - 1; for (int x = 0; x < width - 1; x += 2) { dst32[x] = src32[0]; dst32[x + 1] = src32[-1]; src32 -= 2; } if (width & 1) { dst32[width - 1] = src32[0]; } } void SplitUVRow_C(const uint8* src_uv, uint8* dst_u, uint8* dst_v, int width) { for (int x = 0; x < width - 1; x += 2) { dst_u[x] = src_uv[0]; dst_u[x + 1] = src_uv[2]; dst_v[x] = src_uv[1]; dst_v[x + 1] = src_uv[3]; src_uv += 4; } if (width & 1) { dst_u[width - 1] = src_uv[0]; dst_v[width - 1] = src_uv[1]; } } void MergeUVRow_C(const uint8* src_u, const uint8* src_v, uint8* dst_uv, int width) { for (int x = 0; x < width - 1; x += 2) { dst_uv[0] = src_u[x]; dst_uv[1] = src_v[x]; dst_uv[2] = src_u[x + 1]; dst_uv[3] = src_v[x + 1]; dst_uv += 4; } if (width & 1) { dst_uv[0] = src_u[width - 1]; dst_uv[1] = src_v[width - 1]; } } void CopyRow_C(const uint8* src, uint8* dst, int count) { memcpy(dst, src, count); } void SetRow_C(uint8* dst, uint32 v8, int count) { #ifdef _MSC_VER // VC will generate rep stosb.
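// The plain byte loop below is the pattern MSVC recognizes and emits as rep stosb; other compilers take the memset path in the #else branch instead.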
for (int x = 0; x < count; ++x) { dst[x] = v8; } #else memset(dst, v8, count); #endif } void ARGBSetRows_C(uint8* dst, uint32 v32, int width, int dst_stride, int height) { for (int y = 0; y < height; ++y) { uint32* d = reinterpret_cast<uint32*>(dst); for (int x = 0; x < width; ++x) { d[x] = v32; } dst += dst_stride; } } // Filter 2 rows of YUY2 UV's (422) into U and V (420). void YUY2ToUVRow_C(const uint8* src_yuy2, int src_stride_yuy2, uint8* dst_u, uint8* dst_v, int width) { // Output a row of UV values, filtering 2 rows of YUY2. for (int x = 0; x < width; x += 2) { dst_u[0] = (src_yuy2[1] + src_yuy2[src_stride_yuy2 + 1] + 1) >> 1; dst_v[0] = (src_yuy2[3] + src_yuy2[src_stride_yuy2 + 3] + 1) >> 1; src_yuy2 += 4; dst_u += 1; dst_v += 1; } } // Copy row of YUY2 UV's (422) into U and V (422). void YUY2ToUV422Row_C(const uint8* src_yuy2, uint8* dst_u, uint8* dst_v, int width) { // Output a row of UV values. for (int x = 0; x < width; x += 2) { dst_u[0] = src_yuy2[1]; dst_v[0] = src_yuy2[3]; src_yuy2 += 4; dst_u += 1; dst_v += 1; } } // Copy row of YUY2 Y's (422) into Y (420/422). void YUY2ToYRow_C(const uint8* src_yuy2, uint8* dst_y, int width) { // Output a row of Y values. for (int x = 0; x < width - 1; x += 2) { dst_y[x] = src_yuy2[0]; dst_y[x + 1] = src_yuy2[2]; src_yuy2 += 4; } if (width & 1) { dst_y[width - 1] = src_yuy2[0]; } } // Filter 2 rows of UYVY UV's (422) into U and V (420). void UYVYToUVRow_C(const uint8* src_uyvy, int src_stride_uyvy, uint8* dst_u, uint8* dst_v, int width) { // Output a row of UV values. for (int x = 0; x < width; x += 2) { dst_u[0] = (src_uyvy[0] + src_uyvy[src_stride_uyvy + 0] + 1) >> 1; dst_v[0] = (src_uyvy[2] + src_uyvy[src_stride_uyvy + 2] + 1) >> 1; src_uyvy += 4; dst_u += 1; dst_v += 1; } } // Copy row of UYVY UV's (422) into U and V (422). void UYVYToUV422Row_C(const uint8* src_uyvy, uint8* dst_u, uint8* dst_v, int width) { // Output a row of UV values. for (int x = 0; x < width; x += 2) { dst_u[0] = src_uyvy[0]; dst_v[0] = src_uyvy[2]; src_uyvy += 4; dst_u += 1; dst_v += 1; } } // Copy row of UYVY Y's (422) into Y (420/422). void UYVYToYRow_C(const uint8* src_uyvy, uint8* dst_y, int width) { // Output a row of Y values. for (int x = 0; x < width - 1; x += 2) { dst_y[x] = src_uyvy[1]; dst_y[x + 1] = src_uyvy[3]; src_uyvy += 4; } if (width & 1) { dst_y[width - 1] = src_uyvy[1]; } } #define BLEND(f, b, a) (((256 - a) * b) >> 8) + f // Blend src_argb0 over src_argb1 and store to dst_argb. // dst_argb may be src_argb0 or src_argb1. // This code mimics the SSSE3 version for better testability.
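// BLEND is an "over" composite for foregrounds that already carry premultiplied (attenuated) alpha: dst = fg + bg * (256 - a) / 256. Spot checks of the macro: with a = 255 the background term is (1 * b) >> 8 = 0 for any b < 256, so dst = fg; with a = 0 it is (256 * b) >> 8 = b, so dst = bg + fg (and fg should be 0 for a fully transparent premultiplied pixel).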
void ARGBBlendRow_C(const uint8* src_argb0, const uint8* src_argb1, uint8* dst_argb, int width) { for (int x = 0; x < width - 1; x += 2) { uint32 fb = src_argb0[0]; uint32 fg = src_argb0[1]; uint32 fr = src_argb0[2]; uint32 a = src_argb0[3]; uint32 bb = src_argb1[0]; uint32 bg = src_argb1[1]; uint32 br = src_argb1[2]; dst_argb[0] = BLEND(fb, bb, a); dst_argb[1] = BLEND(fg, bg, a); dst_argb[2] = BLEND(fr, br, a); dst_argb[3] = 255u; fb = src_argb0[4 + 0]; fg = src_argb0[4 + 1]; fr = src_argb0[4 + 2]; a = src_argb0[4 + 3]; bb = src_argb1[4 + 0]; bg = src_argb1[4 + 1]; br = src_argb1[4 + 2]; dst_argb[4 + 0] = BLEND(fb, bb, a); dst_argb[4 + 1] = BLEND(fg, bg, a); dst_argb[4 + 2] = BLEND(fr, br, a); dst_argb[4 + 3] = 255u; src_argb0 += 8; src_argb1 += 8; dst_argb += 8; } if (width & 1) { uint32 fb = src_argb0[0]; uint32 fg = src_argb0[1]; uint32 fr = src_argb0[2]; uint32 a = src_argb0[3]; uint32 bb = src_argb1[0]; uint32 bg = src_argb1[1]; uint32 br = src_argb1[2]; dst_argb[0] = BLEND(fb, bb, a); dst_argb[1] = BLEND(fg, bg, a); dst_argb[2] = BLEND(fr, br, a); dst_argb[3] = 255u; } } #undef BLEND #define ATTENUATE(f, a) (a | (a << 8)) * (f | (f << 8)) >> 24 // Multiply source RGB by alpha and store to destination. // This code mimics the SSSE3 version for better testability. void ARGBAttenuateRow_C(const uint8* src_argb, uint8* dst_argb, int width) { for (int i = 0; i < width - 1; i += 2) { uint32 b = src_argb[0]; uint32 g = src_argb[1]; uint32 r = src_argb[2]; uint32 a = src_argb[3]; dst_argb[0] = ATTENUATE(b, a); dst_argb[1] = ATTENUATE(g, a); dst_argb[2] = ATTENUATE(r, a); dst_argb[3] = a; b = src_argb[4]; g = src_argb[5]; r = src_argb[6]; a = src_argb[7]; dst_argb[4] = ATTENUATE(b, a); dst_argb[5] = ATTENUATE(g, a); dst_argb[6] = ATTENUATE(r, a); dst_argb[7] = a; src_argb += 8; dst_argb += 8; } if (width & 1) { const uint32 b = src_argb[0]; const uint32 g = src_argb[1]; const uint32 r = src_argb[2]; const uint32 a = src_argb[3]; dst_argb[0] = ATTENUATE(b, a); dst_argb[1] = ATTENUATE(g, a); dst_argb[2] = ATTENUATE(r, a); dst_argb[3] = a; } } #undef ATTENUATE // Divide source RGB by alpha and store to destination. // b = (b * 255 + (a / 2)) / a; // g = (g * 255 + (a / 2)) / a; // r = (r * 255 + (a / 2)) / a; // Reciprocal method is off by 1 on some values. ie 125 // 8.8 fixed point inverse table with 1.0 in upper short and 1 / a in lower. 
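// Each table entry below packs 1.0 in the upper 16 bits and a truncated 8.8 fixed point reciprocal of the alpha value in the lower 16 bits. Worked example for a = 0x80 (128): T(0x80) = 0x01000000 + 0x10000 / 128 = 0x01000200, so ia = 0x200 = 512 and each channel becomes (v * 512) >> 8 = v * 2, i.e. dividing out an alpha of roughly one half.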
#define T(a) 0x01000000 + (0x10000 / a) uint32 fixed_invtbl8[256] = { 0x01000000, 0x0100ffff, T(0x02), T(0x03), T(0x04), T(0x05), T(0x06), T(0x07), T(0x08), T(0x09), T(0x0a), T(0x0b), T(0x0c), T(0x0d), T(0x0e), T(0x0f), T(0x10), T(0x11), T(0x12), T(0x13), T(0x14), T(0x15), T(0x16), T(0x17), T(0x18), T(0x19), T(0x1a), T(0x1b), T(0x1c), T(0x1d), T(0x1e), T(0x1f), T(0x20), T(0x21), T(0x22), T(0x23), T(0x24), T(0x25), T(0x26), T(0x27), T(0x28), T(0x29), T(0x2a), T(0x2b), T(0x2c), T(0x2d), T(0x2e), T(0x2f), T(0x30), T(0x31), T(0x32), T(0x33), T(0x34), T(0x35), T(0x36), T(0x37), T(0x38), T(0x39), T(0x3a), T(0x3b), T(0x3c), T(0x3d), T(0x3e), T(0x3f), T(0x40), T(0x41), T(0x42), T(0x43), T(0x44), T(0x45), T(0x46), T(0x47), T(0x48), T(0x49), T(0x4a), T(0x4b), T(0x4c), T(0x4d), T(0x4e), T(0x4f), T(0x50), T(0x51), T(0x52), T(0x53), T(0x54), T(0x55), T(0x56), T(0x57), T(0x58), T(0x59), T(0x5a), T(0x5b), T(0x5c), T(0x5d), T(0x5e), T(0x5f), T(0x60), T(0x61), T(0x62), T(0x63), T(0x64), T(0x65), T(0x66), T(0x67), T(0x68), T(0x69), T(0x6a), T(0x6b), T(0x6c), T(0x6d), T(0x6e), T(0x6f), T(0x70), T(0x71), T(0x72), T(0x73), T(0x74), T(0x75), T(0x76), T(0x77), T(0x78), T(0x79), T(0x7a), T(0x7b), T(0x7c), T(0x7d), T(0x7e), T(0x7f), T(0x80), T(0x81), T(0x82), T(0x83), T(0x84), T(0x85), T(0x86), T(0x87), T(0x88), T(0x89), T(0x8a), T(0x8b), T(0x8c), T(0x8d), T(0x8e), T(0x8f), T(0x90), T(0x91), T(0x92), T(0x93), T(0x94), T(0x95), T(0x96), T(0x97), T(0x98), T(0x99), T(0x9a), T(0x9b), T(0x9c), T(0x9d), T(0x9e), T(0x9f), T(0xa0), T(0xa1), T(0xa2), T(0xa3), T(0xa4), T(0xa5), T(0xa6), T(0xa7), T(0xa8), T(0xa9), T(0xaa), T(0xab), T(0xac), T(0xad), T(0xae), T(0xaf), T(0xb0), T(0xb1), T(0xb2), T(0xb3), T(0xb4), T(0xb5), T(0xb6), T(0xb7), T(0xb8), T(0xb9), T(0xba), T(0xbb), T(0xbc), T(0xbd), T(0xbe), T(0xbf), T(0xc0), T(0xc1), T(0xc2), T(0xc3), T(0xc4), T(0xc5), T(0xc6), T(0xc7), T(0xc8), T(0xc9), T(0xca), T(0xcb), T(0xcc), T(0xcd), T(0xce), T(0xcf), T(0xd0), T(0xd1), T(0xd2), T(0xd3), T(0xd4), T(0xd5), T(0xd6), T(0xd7), T(0xd8), T(0xd9), T(0xda), T(0xdb), T(0xdc), T(0xdd), T(0xde), T(0xdf), T(0xe0), T(0xe1), T(0xe2), T(0xe3), T(0xe4), T(0xe5), T(0xe6), T(0xe7), T(0xe8), T(0xe9), T(0xea), T(0xeb), T(0xec), T(0xed), T(0xee), T(0xef), T(0xf0), T(0xf1), T(0xf2), T(0xf3), T(0xf4), T(0xf5), T(0xf6), T(0xf7), T(0xf8), T(0xf9), T(0xfa), T(0xfb), T(0xfc), T(0xfd), T(0xfe), 0x01000100 }; #undef T void ARGBUnattenuateRow_C(const uint8* src_argb, uint8* dst_argb, int width) { for (int i = 0; i < width; ++i) { uint32 b = src_argb[0]; uint32 g = src_argb[1]; uint32 r = src_argb[2]; const uint32 a = src_argb[3]; const uint32 ia = fixed_invtbl8[a] & 0xffff; // 8.8 fixed point b = (b * ia) >> 8; g = (g * ia) >> 8; r = (r * ia) >> 8; // Clamping should not be necessary but is free in assembly. 
dst_argb[0] = clamp255(b); dst_argb[1] = clamp255(g); dst_argb[2] = clamp255(r); dst_argb[3] = a; src_argb += 4; dst_argb += 4; } } void ComputeCumulativeSumRow_C(const uint8* row, int32* cumsum, const int32* previous_cumsum, int width) { int32 row_sum[4] = {0, 0, 0, 0}; for (int x = 0; x < width; ++x) { row_sum[0] += row[x * 4 + 0]; row_sum[1] += row[x * 4 + 1]; row_sum[2] += row[x * 4 + 2]; row_sum[3] += row[x * 4 + 3]; cumsum[x * 4 + 0] = row_sum[0] + previous_cumsum[x * 4 + 0]; cumsum[x * 4 + 1] = row_sum[1] + previous_cumsum[x * 4 + 1]; cumsum[x * 4 + 2] = row_sum[2] + previous_cumsum[x * 4 + 2]; cumsum[x * 4 + 3] = row_sum[3] + previous_cumsum[x * 4 + 3]; } } void CumulativeSumToAverageRow_C(const int32* tl, const int32* bl, int w, int area, uint8* dst, int count) { float ooa = 1.0f / area; for (int i = 0; i < count; ++i) { dst[0] = static_cast<uint8>((bl[w + 0] + tl[0] - bl[0] - tl[w + 0]) * ooa); dst[1] = static_cast<uint8>((bl[w + 1] + tl[1] - bl[1] - tl[w + 1]) * ooa); dst[2] = static_cast<uint8>((bl[w + 2] + tl[2] - bl[2] - tl[w + 2]) * ooa); dst[3] = static_cast<uint8>((bl[w + 3] + tl[3] - bl[3] - tl[w + 3]) * ooa); dst += 4; tl += 4; bl += 4; } } // Copy pixels from rotated source to destination row with a slope. LIBYUV_API void ARGBAffineRow_C(const uint8* src_argb, int src_argb_stride, uint8* dst_argb, const float* uv_dudv, int width) { // Render a row of pixels from source into a buffer. float uv[2]; uv[0] = uv_dudv[0]; uv[1] = uv_dudv[1]; for (int i = 0; i < width; ++i) { int x = static_cast<int>(uv[0]); int y = static_cast<int>(uv[1]); *reinterpret_cast<uint32*>(dst_argb) = *reinterpret_cast<const uint32*>(src_argb + y * src_argb_stride + x * 4); dst_argb += 4; uv[0] += uv_dudv[2]; uv[1] += uv_dudv[3]; } } // C version 2x2 -> 2x1. void InterpolateRow_C(uint8* dst_ptr, const uint8* src_ptr, ptrdiff_t src_stride, int width, int source_y_fraction) { int y1_fraction = source_y_fraction; int y0_fraction = 256 - y1_fraction; const uint8* src_ptr1 = src_ptr + src_stride; for (int x = 0; x < width - 1; x += 2) { dst_ptr[0] = (src_ptr[0] * y0_fraction + src_ptr1[0] * y1_fraction) >> 8; dst_ptr[1] = (src_ptr[1] * y0_fraction + src_ptr1[1] * y1_fraction) >> 8; src_ptr += 2; src_ptr1 += 2; dst_ptr += 2; } if (width & 1) { dst_ptr[0] = (src_ptr[0] * y0_fraction + src_ptr1[0] * y1_fraction) >> 8; } } // Blend 2 rows into 1 for conversions such as I422ToI420. void HalfRow_C(const uint8* src_uv, int src_uv_stride, uint8* dst_uv, int pix) { for (int x = 0; x < pix; ++x) { dst_uv[x] = (src_uv[x] + src_uv[src_uv_stride + x] + 1) >> 1; } } // Select 2 channels from ARGB on alternating pixels. e.g. BGBGBGBG void ARGBToBayerRow_C(const uint8* src_argb, uint8* dst_bayer, uint32 selector, int pix) { int index0 = selector & 0xff; int index1 = (selector >> 8) & 0xff; // Copy a row of Bayer. for (int x = 0; x < pix - 1; x += 2) { dst_bayer[0] = src_argb[index0]; dst_bayer[1] = src_argb[index1]; src_argb += 8; dst_bayer += 2; } if (pix & 1) { dst_bayer[0] = src_argb[index0]; } } // Use first 4 shuffler values to reorder ARGB channels. void ARGBShuffleRow_C(const uint8* src_argb, uint8* dst_argb, const uint8* shuffler, int pix) { int index0 = shuffler[0]; int index1 = shuffler[1]; int index2 = shuffler[2]; int index3 = shuffler[3]; // Shuffle a row of ARGB. for (int x = 0; x < pix; ++x) { // To support in-place conversion.
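// Example shuffler values: {2, 1, 0, 3} swaps the B and R channels (ARGB <-> ABGR in memory), while {3, 2, 1, 0} reverses all four bytes. Reading the source pixel into locals first is what makes dst_argb == src_argb safe.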
uint8 b = src_argb[index0]; uint8 g = src_argb[index1]; uint8 r = src_argb[index2]; uint8 a = src_argb[index3]; dst_argb[0] = b; dst_argb[1] = g; dst_argb[2] = r; dst_argb[3] = a; src_argb += 4; dst_argb += 4; } } void I422ToYUY2Row_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* dst_frame, int width) { for (int x = 0; x < width - 1; x += 2) { dst_frame[0] = src_y[0]; dst_frame[1] = src_u[0]; dst_frame[2] = src_y[1]; dst_frame[3] = src_v[0]; dst_frame += 4; src_y += 2; src_u += 1; src_v += 1; } if (width & 1) { dst_frame[0] = src_y[0]; dst_frame[1] = src_u[0]; dst_frame[2] = src_y[0]; // duplicate last y dst_frame[3] = src_v[0]; } } void I422ToUYVYRow_C(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* dst_frame, int width) { for (int x = 0; x < width - 1; x += 2) { dst_frame[0] = src_u[0]; dst_frame[1] = src_y[0]; dst_frame[2] = src_v[0]; dst_frame[3] = src_y[1]; dst_frame += 4; src_y += 2; src_u += 1; src_v += 1; } if (width & 1) { dst_frame[0] = src_u[0]; dst_frame[1] = src_y[0]; dst_frame[2] = src_v[0]; dst_frame[3] = src_y[0]; // duplicate last y } } #if !defined(LIBYUV_DISABLE_X86) // row_win.cc has asm version, but GCC uses 2 step wrapper. 5% slower. // TODO(fbarchard): Handle width > kMaxStride here instead of calling code. #if defined(__x86_64__) || defined(__i386__) void I422ToRGB565Row_SSSE3(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { SIMD_ALIGNED(uint8 row[kMaxStride]); I422ToARGBRow_SSSE3(src_y, src_u, src_v, row, width); ARGBToRGB565Row_SSE2(row, rgb_buf, width); } #endif // defined(__x86_64__) || defined(__i386__) #if defined(_M_IX86) || defined(__x86_64__) || defined(__i386__) void I422ToARGB1555Row_SSSE3(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { SIMD_ALIGNED(uint8 row[kMaxStride]); I422ToARGBRow_SSSE3(src_y, src_u, src_v, row, width); ARGBToARGB1555Row_SSE2(row, rgb_buf, width); } void I422ToARGB4444Row_SSSE3(const uint8* src_y, const uint8* src_u, const uint8* src_v, uint8* rgb_buf, int width) { SIMD_ALIGNED(uint8 row[kMaxStride]); I422ToARGBRow_SSSE3(src_y, src_u, src_v, row, width); ARGBToARGB4444Row_SSE2(row, rgb_buf, width); } void NV12ToRGB565Row_SSSE3(const uint8* src_y, const uint8* src_uv, uint8* dst_rgb565, int width) { SIMD_ALIGNED(uint8 row[kMaxStride]); NV12ToARGBRow_SSSE3(src_y, src_uv, row, width); ARGBToRGB565Row_SSE2(row, dst_rgb565, width); } void NV21ToRGB565Row_SSSE3(const uint8* src_y, const uint8* src_vu, uint8* dst_rgb565, int width) { SIMD_ALIGNED(uint8 row[kMaxStride]); NV21ToARGBRow_SSSE3(src_y, src_vu, row, width); ARGBToRGB565Row_SSE2(row, dst_rgb565, width); } void YUY2ToARGBRow_SSSE3(const uint8* src_yuy2, uint8* dst_argb, int width) { SIMD_ALIGNED(uint8 row_y[kMaxStride]); SIMD_ALIGNED(uint8 row_u[kMaxStride / 2]); SIMD_ALIGNED(uint8 row_v[kMaxStride / 2]); YUY2ToUV422Row_SSE2(src_yuy2, row_u, row_v, width); YUY2ToYRow_SSE2(src_yuy2, row_y, width); I422ToARGBRow_SSSE3(row_y, row_u, row_v, dst_argb, width); } void YUY2ToARGBRow_Unaligned_SSSE3(const uint8* src_yuy2, uint8* dst_argb, int width) { SIMD_ALIGNED(uint8 row_y[kMaxStride]); SIMD_ALIGNED(uint8 row_u[kMaxStride / 2]); SIMD_ALIGNED(uint8 row_v[kMaxStride / 2]); YUY2ToUV422Row_Unaligned_SSE2(src_yuy2, row_u, row_v, width); YUY2ToYRow_Unaligned_SSE2(src_yuy2, row_y, width); I422ToARGBRow_Unaligned_SSSE3(row_y, row_u, row_v, dst_argb, width); } void UYVYToARGBRow_SSSE3(const uint8* src_uyvy, uint8* dst_argb, int width) { SIMD_ALIGNED(uint8 row_y[kMaxStride]); 
SIMD_ALIGNED(uint8 row_u[kMaxStride / 2]); SIMD_ALIGNED(uint8 row_v[kMaxStride / 2]); UYVYToUV422Row_SSE2(src_uyvy, row_u, row_v, width); UYVYToYRow_SSE2(src_uyvy, row_y, width); I422ToARGBRow_SSSE3(row_y, row_u, row_v, dst_argb, width); } void UYVYToARGBRow_Unaligned_SSSE3(const uint8* src_uyvy, uint8* dst_argb, int width) { SIMD_ALIGNED(uint8 row_y[kMaxStride]); SIMD_ALIGNED(uint8 row_u[kMaxStride / 2]); SIMD_ALIGNED(uint8 row_v[kMaxStride / 2]); UYVYToUV422Row_Unaligned_SSE2(src_uyvy, row_u, row_v, width); UYVYToYRow_Unaligned_SSE2(src_uyvy, row_y, width); I422ToARGBRow_Unaligned_SSSE3(row_y, row_u, row_v, dst_argb, width); } #endif // defined(_M_IX86) || defined(__x86_64__) || defined(__i386__) #endif // !defined(LIBYUV_DISABLE_X86) #undef clamp0 #undef clamp255 #ifdef __cplusplus } // extern "C" } // namespace libyuv #endif