1*819833afSPeter Tyser #ifndef _MICROBLAZE_BITOPS_H 2*819833afSPeter Tyser #define _MICROBLAZE_BITOPS_H 3*819833afSPeter Tyser 4*819833afSPeter Tyser /* 5*819833afSPeter Tyser * Copyright 1992, Linus Torvalds. 6*819833afSPeter Tyser */ 7*819833afSPeter Tyser 8*819833afSPeter Tyser #include <linux/config.h> 9*819833afSPeter Tyser #include <asm/byteorder.h> /* swab32 */ 10*819833afSPeter Tyser #include <asm/system.h> /* save_flags */ 11*819833afSPeter Tyser 12*819833afSPeter Tyser #ifdef __KERNEL__ 13*819833afSPeter Tyser /* 14*819833afSPeter Tyser * Function prototypes to keep gcc -Wall happy 15*819833afSPeter Tyser */ 16*819833afSPeter Tyser 17*819833afSPeter Tyser /* 18*819833afSPeter Tyser * The __ functions are not atomic 19*819833afSPeter Tyser */ 20*819833afSPeter Tyser 21*819833afSPeter Tyser extern void set_bit(int nr, volatile void * addr); 22*819833afSPeter Tyser extern void __set_bit(int nr, volatile void * addr); 23*819833afSPeter Tyser 24*819833afSPeter Tyser extern void clear_bit(int nr, volatile void * addr); 25*819833afSPeter Tyser #define __clear_bit(nr, addr) clear_bit(nr, addr) 26*819833afSPeter Tyser #define PLATFORM__CLEAR_BIT 27*819833afSPeter Tyser 28*819833afSPeter Tyser extern void change_bit(int nr, volatile void * addr); 29*819833afSPeter Tyser extern void __change_bit(int nr, volatile void * addr); 30*819833afSPeter Tyser extern int test_and_set_bit(int nr, volatile void * addr); 31*819833afSPeter Tyser extern int __test_and_set_bit(int nr, volatile void * addr); 32*819833afSPeter Tyser extern int test_and_clear_bit(int nr, volatile void * addr); 33*819833afSPeter Tyser extern int __test_and_clear_bit(int nr, volatile void * addr); 34*819833afSPeter Tyser extern int test_and_change_bit(int nr, volatile void * addr); 35*819833afSPeter Tyser extern int __test_and_change_bit(int nr, volatile void * addr); 36*819833afSPeter Tyser extern int __constant_test_bit(int nr, const volatile void * addr); 37*819833afSPeter Tyser extern int 
__test_bit(int nr, volatile void * addr); 38*819833afSPeter Tyser extern int find_first_zero_bit(void * addr, unsigned size); 39*819833afSPeter Tyser extern int find_next_zero_bit (void * addr, int size, int offset); 40*819833afSPeter Tyser 41*819833afSPeter Tyser /* 42*819833afSPeter Tyser * ffz = Find First Zero in word. Undefined if no zero exists, 43*819833afSPeter Tyser * so code should check against ~0UL first.. 44*819833afSPeter Tyser */ 45*819833afSPeter Tyser extern __inline__ unsigned long ffz(unsigned long word) 46*819833afSPeter Tyser { 47*819833afSPeter Tyser unsigned long result = 0; 48*819833afSPeter Tyser 49*819833afSPeter Tyser while(word & 1) { 50*819833afSPeter Tyser result++; 51*819833afSPeter Tyser word >>= 1; 52*819833afSPeter Tyser } 53*819833afSPeter Tyser return result; 54*819833afSPeter Tyser } 55*819833afSPeter Tyser 56*819833afSPeter Tyser 57*819833afSPeter Tyser extern __inline__ void set_bit(int nr, volatile void * addr) 58*819833afSPeter Tyser { 59*819833afSPeter Tyser int * a = (int *) addr; 60*819833afSPeter Tyser int mask; 61*819833afSPeter Tyser unsigned long flags; 62*819833afSPeter Tyser 63*819833afSPeter Tyser a += nr >> 5; 64*819833afSPeter Tyser mask = 1 << (nr & 0x1f); 65*819833afSPeter Tyser save_flags_cli(flags); 66*819833afSPeter Tyser *a |= mask; 67*819833afSPeter Tyser restore_flags(flags); 68*819833afSPeter Tyser } 69*819833afSPeter Tyser 70*819833afSPeter Tyser extern __inline__ void __set_bit(int nr, volatile void * addr) 71*819833afSPeter Tyser { 72*819833afSPeter Tyser int * a = (int *) addr; 73*819833afSPeter Tyser int mask; 74*819833afSPeter Tyser 75*819833afSPeter Tyser a += nr >> 5; 76*819833afSPeter Tyser mask = 1 << (nr & 0x1f); 77*819833afSPeter Tyser *a |= mask; 78*819833afSPeter Tyser } 79*819833afSPeter Tyser #define PLATFORM__SET_BIT 80*819833afSPeter Tyser 81*819833afSPeter Tyser /* 82*819833afSPeter Tyser * clear_bit() doesn't provide any barrier for the compiler. 
/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()	barrier()
#define smp_mb__after_clear_bit()	barrier()

/* Atomically clear bit nr (interrupts disabled around the RMW). */
extern __inline__ void clear_bit(int nr, volatile void *addr)
{
	int *a = (int *)addr;
	int mask;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	save_flags_cli(flags);
	*a &= ~mask;
	restore_flags(flags);
}

/* Atomically toggle bit nr (interrupts disabled around the RMW). */
extern __inline__ void change_bit(int nr, volatile void *addr)
{
	int mask;
	unsigned long flags;
	unsigned long *ADDR = (unsigned long *)addr;

	ADDR += nr >> 5;
	mask = 1 << (nr & 31);
	save_flags_cli(flags);
	*ADDR ^= mask;
	restore_flags(flags);
}

/* Non-atomic variant of change_bit(). */
extern __inline__ void __change_bit(int nr, volatile void *addr)
{
	int mask;
	unsigned long *ADDR = (unsigned long *)addr;

	ADDR += nr >> 5;
	mask = 1 << (nr & 31);
	*ADDR ^= mask;
}

/* Atomically set bit nr and return its previous value (0 or 1). */
extern __inline__ int test_and_set_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = (volatile unsigned int *)addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	save_flags_cli(flags);
	retval = (mask & *a) != 0;
	*a |= mask;
	restore_flags(flags);

	return retval;
}

/* Non-atomic variant of test_and_set_bit(). */
extern __inline__ int __test_and_set_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = (volatile unsigned int *)addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a |= mask;
	return retval;
}

/* Atomically clear bit nr and return its previous value (0 or 1). */
extern __inline__ int test_and_clear_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = (volatile unsigned int *)addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	save_flags_cli(flags);
	retval = (mask & *a) != 0;
	*a &= ~mask;
	restore_flags(flags);

	return retval;
}

/* Non-atomic variant of test_and_clear_bit(). */
extern __inline__ int __test_and_clear_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = (volatile unsigned int *)addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a &= ~mask;
	return retval;
}

/* Atomically toggle bit nr and return its previous value (0 or 1). */
extern __inline__ int test_and_change_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = (volatile unsigned int *)addr;
	unsigned long flags;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	save_flags_cli(flags);
	retval = (mask & *a) != 0;
	*a ^= mask;
	restore_flags(flags);

	return retval;
}

/* Non-atomic variant of test_and_change_bit(). */
extern __inline__ int __test_and_change_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = (volatile unsigned int *)addr;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	retval = (mask & *a) != 0;
	*a ^= mask;
	return retval;
}
/*
 * This routine doesn't need to be atomic.
 */
extern __inline__ int __constant_test_bit(int nr, const volatile void *addr)
{
	return ((1UL << (nr & 31)) &
		(((const volatile unsigned int *)addr)[nr >> 5])) != 0;
}

/* Non-atomic test of bit nr in the 32-bit-word bitmap at addr. */
extern __inline__ int __test_bit(int nr, volatile void *addr)
{
	int *a = (int *)addr;
	int mask;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);
	return ((mask & *a) != 0);
}

/* Use the constant-folding variant when nr is a compile-time constant. */
#define test_bit(nr, addr) \
	(__builtin_constant_p(nr) ? \
	 __constant_test_bit((nr), (addr)) : \
	 __test_bit((nr), (addr)))

#define find_first_zero_bit(addr, size) \
	find_next_zero_bit((addr), (size), 0)

/*
 * Find the first zero bit at or after bit `offset' in the bitmap of
 * `size' bits at addr; returns `size' if there is none.
 */
extern __inline__ int find_next_zero_bit(void *addr, int size, int offset)
{
	unsigned long *p = ((unsigned long *)addr) + (offset >> 5);
	unsigned long result = offset & ~31UL;
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset &= 31UL;
	if (offset) {
		tmp = *(p++);
		/* Pretend the bits below `offset' are set so ffz skips them. */
		tmp |= ~0UL >> (32 - offset);
		if (size < 32)
			goto found_first;
		if (~tmp)
			goto found_middle;
		size -= 32;
		result += 32;
	}
	/* Scan whole words. */
	while (size & ~31UL) {
		if (~(tmp = *(p++)))
			goto found_middle;
		result += 32;
		size -= 32;
	}
	if (!size)
		return result;
	tmp = *p;

found_first:
	/* Pretend the bits past the end are set so ffz ignores them. */
	tmp |= ~0UL >> size;
found_middle:
	return result + ffz(tmp);
}

/*
 * hweightN: returns the hamming weight (i.e. the number
 * of bits set) of a N-bit word
 */
#define hweight32(x) generic_hweight32(x)
#define hweight16(x) generic_hweight16(x)
#define hweight8(x) generic_hweight8(x)

/*
 * ext2 bitmaps are little-endian byte arrays, so nr indexes bytes;
 * atomicity is obtained by disabling interrupts around the RMW.
 * Returns the bit's previous value (0 or 1).
 */
extern __inline__ int ext2_set_bit(int nr, volatile void *addr)
{
	int mask, retval;
	unsigned long flags;
	volatile unsigned char *ADDR = (unsigned char *)addr;

	ADDR += nr >> 3;		/* byte holding bit nr */
	mask = 1 << (nr & 0x07);	/* bit within that byte */
	save_flags_cli(flags);
	retval = (mask & *ADDR) != 0;
	*ADDR |= mask;
	restore_flags(flags);
	return retval;
}

/* Atomically clear ext2 bit nr; returns its previous value (0 or 1). */
extern __inline__ int ext2_clear_bit(int nr, volatile void *addr)
{
	int mask, retval;
	unsigned long flags;
	volatile unsigned char *ADDR = (unsigned char *)addr;

	ADDR += nr >> 3;
	mask = 1 << (nr & 0x07);
	save_flags_cli(flags);
	retval = (mask & *ADDR) != 0;
	*ADDR &= ~mask;
	restore_flags(flags);
	return retval;
}

/* Non-atomic test of ext2 (byte-indexed) bit nr. */
extern __inline__ int ext2_test_bit(int nr, const volatile void *addr)
{
	int mask;
	const volatile unsigned char *ADDR = (const unsigned char *)addr;

	ADDR += nr >> 3;
	mask = 1 << (nr & 0x07);
	return ((mask & *ADDR) != 0);
}

#define ext2_find_first_zero_bit(addr, size) \
	ext2_find_next_zero_bit((addr), (size), 0)

/*
 * Little-endian variant of find_next_zero_bit for ext2 bitmaps.
 */
extern __inline__ unsigned long ext2_find_next_zero_bit(void *addr,
				unsigned long size, unsigned long offset)
{
	unsigned long *p = ((unsigned long *)addr) + (offset >> 5);
	unsigned long result = offset & ~31UL;
	unsigned long tmp;

	if (offset >= size)
		return size;
	size -= result;
	offset &= 31UL;
	if (offset) {
		/* We hold the little endian value in tmp, but then the
		 * shift is illegal. So we could keep a big endian value
		 * in tmp, like this:
		 *
		 * tmp = __swab32(*(p++));
		 * tmp |= ~0UL >> (32-offset);
		 *
		 * but this would decrease performance, so we change the
		 * shift:
		 */
		tmp = *(p++);
		tmp |= __swab32(~0UL >> (32 - offset));
		if (size < 32)
			goto found_first;
		if (~tmp)
			goto found_middle;
		size -= 32;
		result += 32;
	}
	while (size & ~31UL) {
		if (~(tmp = *(p++)))
			goto found_middle;
		result += 32;
		size -= 32;
	}
	if (!size)
		return result;
	tmp = *p;

found_first:
	/* tmp is little endian, so we would have to swab the shift,
	 * see above. But then we have to swab tmp below for ffz, so
	 * we might as well do this here.
	 */
	return result + ffz(__swab32(tmp) | (~0UL << size));
found_middle:
	return result + ffz(__swab32(tmp));
}
*/ 373*819833afSPeter Tyser #define minix_test_and_set_bit(nr,addr) test_and_set_bit(nr,addr) 374*819833afSPeter Tyser #define minix_set_bit(nr,addr) set_bit(nr,addr) 375*819833afSPeter Tyser #define minix_test_and_clear_bit(nr,addr) test_and_clear_bit(nr,addr) 376*819833afSPeter Tyser #define minix_test_bit(nr,addr) test_bit(nr,addr) 377*819833afSPeter Tyser #define minix_find_first_zero_bit(addr,size) find_first_zero_bit(addr,size) 378*819833afSPeter Tyser 379*819833afSPeter Tyser /** 380*819833afSPeter Tyser * hweightN - returns the hamming weight of a N-bit word 381*819833afSPeter Tyser * @x: the word to weigh 382*819833afSPeter Tyser * 383*819833afSPeter Tyser * The Hamming Weight of a number is the total number of bits set in it. 384*819833afSPeter Tyser */ 385*819833afSPeter Tyser 386*819833afSPeter Tyser #define hweight32(x) generic_hweight32(x) 387*819833afSPeter Tyser #define hweight16(x) generic_hweight16(x) 388*819833afSPeter Tyser #define hweight8(x) generic_hweight8(x) 389*819833afSPeter Tyser 390*819833afSPeter Tyser #endif /* __KERNEL__ */ 391*819833afSPeter Tyser 392*819833afSPeter Tyser #endif /* _MICROBLAZE_BITOPS_H */ 393