xref: /OK3568_Linux_fs/kernel/arch/sh/math-emu/sfp-util.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun /* SPDX-License-Identifier: GPL-2.0 */
2*4882a593Smuzhiyun /*
3*4882a593Smuzhiyun  * These are copied from glibc/stdlib/longlong.h
4*4882a593Smuzhiyun  */
5*4882a593Smuzhiyun 
/*
 * add_ssaaaa(sh, sl, ah, al, bh, bl): two-word unsigned addition.
 * Adds the double-word value (ah,al) to (bh,bl); the high result word
 * goes to sh, the low result word to sl.  Carry out of the low-word
 * add is detected by the unsigned wrap-around test (__sum < (al)).
 * sh is written before sl so the macro stays correct when sl aliases
 * al (the common accumulate-in-place call pattern).
 */
#define add_ssaaaa(sh, sl, ah, al, bh, bl) \
  do {                                                                  \
    UWtype __sum = (al) + (bl);                                         \
    (sh) = (ah) + (bh) + (__sum < (al));                                \
    (sl) = __sum;                                                       \
  } while (0)
13*4882a593Smuzhiyun 
/*
 * sub_ddmmss(sh, sl, ah, al, bh, bl): two-word unsigned subtraction.
 * Subtracts the double-word value (bh,bl) from (ah,al); the high
 * difference word goes to sh, the low word to sl.  A borrow out of the
 * low-word subtract shows up as unsigned wrap-around (__diff > (al)).
 * sh is written before sl so the macro stays correct when sl aliases
 * al (subtract-in-place call pattern).
 */
#define sub_ddmmss(sh, sl, ah, al, bh, bl) \
  do {                                                                  \
    UWtype __diff = (al) - (bl);                                        \
    (sh) = (ah) - (bh) - (__diff > (al));                               \
    (sl) = __diff;                                                      \
  } while (0)
21*4882a593Smuzhiyun 
/*
 * umul_ppmm(w1, w0, u, v): 32x32 -> 64 unsigned multiply.
 * Uses the SH "dmulu.l" instruction, which leaves the 64-bit product
 * in the MACH:MACL register pair; the two "sts" instructions copy
 * MACL to the low word (w0) and MACH to the high word (w1).  MACL and
 * MACH are listed as clobbers since the asm overwrites them.
 *
 * NOTE(review): the (u32) casts on the *output* operands rely on the
 * old GCC cast-as-lvalue extension -- confirm the compilers used for
 * arch/sh still accept this form.
 */
#define umul_ppmm(w1, w0, u, v) \
  __asm__ ("dmulu.l %2,%3\n\tsts    macl,%1\n\tsts  mach,%0"	\
	: "=r" ((u32)(w1)), "=r" ((u32)(w0))	\
	:  "r" ((u32)(u)),   "r" ((u32)(v))	\
	: "macl", "mach")
27*4882a593Smuzhiyun 
/*
 * Half-word decomposition helpers.  __ll_B is the half-word base,
 * i.e. 2^(W_TYPE_SIZE/2); __ll_lowpart()/__ll_highpart() extract the
 * low and high halves of a full word as UWtype values.  Remainder and
 * division by the (power-of-two) base are exactly the mask and shift
 * of the original formulation.
 */
#define __ll_B ((UWtype) 1 << (W_TYPE_SIZE / 2))
#define __ll_lowpart(t) ((UWtype) (t) % __ll_B)
#define __ll_highpart(t) ((UWtype) (t) / __ll_B)
31*4882a593Smuzhiyun 
/*
 * udiv_qrnnd(q, r, n1, n0, d): divide the two-word unsigned value
 * (n1,n0) by the one-word divisor d, leaving the quotient in q and the
 * remainder in r.  This is glibc's generic __udiv_qrnnd_c fallback:
 * it works a half-word at a time, first computing the high quotient
 * half (__q1), then the low half (__q0).
 *
 * Preconditions (per glibc/stdlib/longlong.h): n1 < d, and d is
 * expected to be normalized (most significant bit set) so that
 * __d1 != 0 and each half-word quotient estimate is at most 2 too
 * large -- hence the at-most-two correction steps below.
 */
#define udiv_qrnnd(q, r, n1, n0, d) \
  do {									\
    UWtype __d1, __d0, __q1, __q0;					\
    UWtype __r1, __r0, __m;						\
    __d1 = __ll_highpart (d);						\
    __d0 = __ll_lowpart (d);						\
									\
    /* Estimate the high quotient half from the high halves, then	\
       correct the estimate (at most twice) against the full divisor. */\
    __r1 = (n1) % __d1;							\
    __q1 = (n1) / __d1;							\
    __m = (UWtype) __q1 * __d0;						\
    __r1 = __r1 * __ll_B | __ll_highpart (n0);				\
    if (__r1 < __m)							\
      {									\
	__q1--, __r1 += (d);						\
	if (__r1 >= (d)) /* i.e. we didn't get carry when adding to __r1 */\
	  if (__r1 < __m)						\
	    __q1--, __r1 += (d);					\
      }									\
    __r1 -= __m;							\
									\
    /* Same procedure for the low quotient half, starting from the	\
       remainder left over by the first step. */			\
    __r0 = __r1 % __d1;							\
    __q0 = __r1 / __d1;							\
    __m = (UWtype) __q0 * __d0;						\
    __r0 = __r0 * __ll_B | __ll_lowpart (n0);				\
    if (__r0 < __m)							\
      {									\
	__q0--, __r0 += (d);						\
	if (__r0 >= (d))						\
	  if (__r0 < __m)						\
	    __q0--, __r0 += (d);					\
      }									\
    __r0 -= __m;							\
									\
    /* Glue the two quotient halves together; remainder is __r0. */	\
    (q) = (UWtype) __q1 * __ll_B | __q0;				\
    (r) = __r0;								\
  } while (0)
68*4882a593Smuzhiyun 
/*
 * The soft-fp code copied from glibc calls abort() on internal errors;
 * in the kernel there is no userspace abort(), so just bail out of the
 * current operation instead.  NOTE(review): the expansion is a bare
 * "return 0", so this assumes every function that can hit abort()
 * returns an int-compatible value -- verify against the math-emu
 * callers.
 */
#define abort()	return 0
70*4882a593Smuzhiyun 
/*
 * Byte order advertised to the soft-fp machinery.
 * NOTE(review): hard-coded little-endian, but SH can also be built
 * big-endian -- confirm this is intentional for the math emulator.
 */
#define __BYTE_ORDER __LITTLE_ENDIAN
72*4882a593Smuzhiyun 
73*4882a593Smuzhiyun 
74