/* SPDX-License-Identifier: Apache-2.0 OR MIT */
/*
 * Copyright (c) 2015 Rockchip Electronics Co., Ltd.
 */

#include "mpp_common.h"
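
/*
 * log2_tab[n] = floor(log2(n)) for n in [1, 255]; entry 0 is kept at 0 so
 * the lookup functions below return 0 for a zero input.
 */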
static const RK_U8 log2_tab[256] = {
    0, 0, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5,
    6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
    6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7,
    7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7
};
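
/*
 * mpp_log2 - integer base-2 logarithm.
 *
 * Returns floor(log2(v)) for v >= 1 and 0 for v == 0.
 */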
RK_S32 mpp_log2(RK_U32 v)
{
    RK_S32 n = 0;

    if (v & 0xffff0000) {
        v >>= 16;
        n += 16;
    }
    if (v & 0xff00) {
        v >>= 8;
        n += 8;
    }
    n += log2_tab[v];

    return n;
}
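
/*
 * mpp_log2_16bit - integer base-2 logarithm for values below 0x10000.
 *
 * Same as mpp_log2() but skips the upper 16-bit test; the caller must
 * guarantee v < 0x10000.
 */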
RK_S32 mpp_log2_16bit(RK_U32 v)
{
    RK_S32 n = 0;

    if (v & 0xff00) {
        v >>= 8;
        n += 8;
    }
    n += log2_tab[v];

    return n;
}
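
/*
 * axb_div_c - compute a * b / c while avoiding 32-bit overflow.
 *
 * If a * b fits in 32 bits, the exact quotient a * b / c is returned.
 * Otherwise an approximation is computed by shifting the operands down
 * by a power of two before the divide, trading precision for range.
 * 0x7FFFFFFF (with the combined sign of the inputs) is returned when
 * c is 0 or when no usable shift exists.
 */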
RK_S32 axb_div_c(RK_S32 a, RK_S32 b, RK_S32 c)
{
    RK_U32 left = 32;
    RK_U32 right = 0;
    RK_U32 shift;
    RK_S32 sign = 1;
    RK_S32 tmp;

    if (a == 0 || b == 0)
        return 0;
    else if ((a * b / b) == a && c != 0)
        return (a * b / c);

    if (a < 0) {
        sign = -1;
        a = -a;
    }
    if (b < 0) {
        sign *= -1;
        b = -b;
    }
    if (c < 0) {
        sign *= -1;
        c = -c;
    }

    if (c == 0)
        return 0x7FFFFFFF * sign;

    if (b > a) {
        tmp = b;
        b = a;
        a = tmp;
    }

    for (--left; (((RK_U32)a << left) >> left) != (RK_U32)a; --left)
        ;

    left--;

    while (((RK_U32)b >> right) > (RK_U32)c)
        right++;

    if (right > left) {
        return 0x7FFFFFFF * sign;
    } else {
        shift = left - right;
        return (RK_S32)((((RK_U32)a << shift) /
                         (RK_U32)c * (RK_U32)b) >> shift) * sign;
    }
}
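
/*
 * Thin wrappers around MPP_ALIGN() for common buffer stride/size alignments.
 */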
RK_U32 mpp_align_16(RK_U32 val)
{
    return MPP_ALIGN(val, 16);
}

RK_U32 mpp_align_64(RK_U32 val)
{
    return MPP_ALIGN(val, 64);
}

RK_U32 mpp_align_128(RK_U32 val)
{
    return MPP_ALIGN(val, 128);
}
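
/*
 * Align val up to 256 bytes, then force bit 8 so the result is an odd
 * multiple of 256.
 */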
RK_U32 mpp_align_256_odd(RK_U32 val)
{
    return MPP_ALIGN(val, 256) | 256;
}
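
/*
 * Round val up to the next offset of the form (odd multiple of 128) + 64,
 * i.e. a value v with (v - 64) % 256 == 128.
 */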
RK_U32 mpp_align_128_odd_plus_64(RK_U32 val)
{
    val = MPP_ALIGN(val, 64);
    if (((val - 64) % 256) == 128)
        return val;
    else
        return ((MPP_ALIGN(val, 128) | 128) + 64);
}