bitops.h

#ifndef _LINUX_BITOPS_H
#define _LINUX_BITOPS_H
#include <asm/types.h>

/*
 * Include this here because some architectures need generic_ffs/fls in
 * scope
 */
#include <asm/bitops.h>

static __inline__ int get_bitmask_order(unsigned int count)
{
	int order;

	order = fls(count);
	return order;	/* We could be slightly more clever with -1 here... */
}

static __inline__ int get_count_order(unsigned int count)
{
	int order;

	order = fls(count) - 1;
	if (count & (count - 1))
		order++;
	return order;
}
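
/*
 * Worked example (illustrative, not part of the original source): with the
 * usual Linux fls() semantics where fls(0) == 0 and fls(1) == 1,
 * get_bitmask_order(5) == 3 (the number of bits needed to hold 5), while
 * get_count_order(16) == 4 and get_count_order(17) == 5, i.e.
 * get_count_order() is log2(count) rounded up to the next integer.
 */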

static inline unsigned long hweight_long(unsigned long w)
{
	return sizeof(w) == 4 ? hweight32(w) : hweight64(w);
}
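
/*
 * Illustrative note: hweight32()/hweight64() return the population count
 * (number of set bits), so for example hweight_long(0xff) == 8 on any
 * architecture.  The sizeof() test is a compile-time constant, so the
 * compiler selects the 32-bit or 64-bit helper with no runtime branch.
 */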

/**
 * rol32 - rotate a 32-bit value left
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 rol32(__u32 word, unsigned int shift)
{
	return (word << shift) | (word >> (32 - shift));
}

/**
 * ror32 - rotate a 32-bit value right
 * @word: value to rotate
 * @shift: bits to roll
 */
static inline __u32 ror32(__u32 word, unsigned int shift)
{
	return (word >> shift) | (word << (32 - shift));
}
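
/*
 * Worked example (illustrative): rol32(0x80000001, 1) == 0x00000003 and
 * ror32(0x00000003, 1) == 0x80000001.  Note that a shift of 0 would
 * evaluate word >> 32 (or word << 32), which is undefined behaviour in C
 * for a 32-bit type, so these helpers appear to assume shifts of 1..31.
 */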

static inline unsigned fls_long(unsigned long l)
{
	if (sizeof(l) == 4)
		return fls(l);
	return fls64(l);
}
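
/*
 * Illustrative example: on a 64-bit architecture fls_long(0) == 0 and
 * fls_long(1UL << 40) == 41 via fls64(); on a 32-bit architecture long is
 * 4 bytes and the plain fls() path is taken.  As with hweight_long(), the
 * sizeof() comparison folds away at compile time.
 */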

#endif