/* arch_hweight.h */
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Based on arch/x86/include/asm/arch_hweight.h
 */

#ifndef _ASM_RISCV_HWEIGHT_H
#define _ASM_RISCV_HWEIGHT_H

#include <asm/alternative-macros.h>
#include <asm/hwcap.h>

#if (BITS_PER_LONG == 64)
#define CPOPW	"cpopw "
#elif (BITS_PER_LONG == 32)
#define CPOPW	"cpop "
#else
#error "Unexpected BITS_PER_LONG"
#endif
  16. static __always_inline unsigned int __arch_hweight32(unsigned int w)
  17. {
  18. #ifdef CONFIG_RISCV_ISA_ZBB
  19. asm goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
  20. RISCV_ISA_EXT_ZBB, 1)
  21. : : : : legacy);
  22. asm (".option push\n"
  23. ".option arch,+zbb\n"
  24. CPOPW "%0, %1\n"
  25. ".option pop\n"
  26. : "=r" (w) : "r" (w) :);
  27. return w;
  28. legacy:
  29. #endif
  30. return __sw_hweight32(w);
  31. }
  32. static inline unsigned int __arch_hweight16(unsigned int w)
  33. {
  34. return __arch_hweight32(w & 0xffff);
  35. }
  36. static inline unsigned int __arch_hweight8(unsigned int w)
  37. {
  38. return __arch_hweight32(w & 0xff);
  39. }
  40. #if BITS_PER_LONG == 64
  41. static __always_inline unsigned long __arch_hweight64(__u64 w)
  42. {
  43. # ifdef CONFIG_RISCV_ISA_ZBB
  44. asm goto(ALTERNATIVE("j %l[legacy]", "nop", 0,
  45. RISCV_ISA_EXT_ZBB, 1)
  46. : : : : legacy);
  47. asm (".option push\n"
  48. ".option arch,+zbb\n"
  49. "cpop %0, %1\n"
  50. ".option pop\n"
  51. : "=r" (w) : "r" (w) :);
  52. return w;
  53. legacy:
  54. # endif
  55. return __sw_hweight64(w);
  56. }
  57. #else /* BITS_PER_LONG == 64 */
  58. static inline unsigned long __arch_hweight64(__u64 w)
  59. {
  60. return __arch_hweight32((u32)w) +
  61. __arch_hweight32((u32)(w >> 32));
  62. }
  63. #endif /* !(BITS_PER_LONG == 64) */
#endif /* _ASM_RISCV_HWEIGHT_H */