  1. // SPDX-License-Identifier: GPL-2.0
  2. /*
  3. * bitops.c: atomic operations which got too long to be inlined all over
  4. * the place.
  5. *
  6. * Copyright 1999 Philipp Rumpf (prumpf@tux.org)
  7. * Copyright 2000 Grant Grundler (grundler@cup.hp.com)
  8. */
  9. #include <linux/kernel.h>
  10. #include <linux/spinlock.h>
  11. #include <linux/atomic.h>
  12. #ifdef CONFIG_SMP
  13. arch_spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] __lock_aligned = {
  14. [0 ... (ATOMIC_HASH_SIZE-1)] = __ARCH_SPIN_LOCK_UNLOCKED
  15. };
  16. #endif
  17. #ifdef CONFIG_64BIT
  18. unsigned long notrace __xchg64(unsigned long x, volatile unsigned long *ptr)
  19. {
  20. unsigned long temp, flags;
  21. _atomic_spin_lock_irqsave(ptr, flags);
  22. temp = *ptr;
  23. *ptr = x;
  24. _atomic_spin_unlock_irqrestore(ptr, flags);
  25. return temp;
  26. }
  27. #endif
  28. unsigned long notrace __xchg32(int x, volatile int *ptr)
  29. {
  30. unsigned long flags;
  31. long temp;
  32. _atomic_spin_lock_irqsave(ptr, flags);
  33. temp = (long) *ptr; /* XXX - sign extension wanted? */
  34. *ptr = x;
  35. _atomic_spin_unlock_irqrestore(ptr, flags);
  36. return (unsigned long)temp;
  37. }
  38. unsigned long notrace __xchg8(char x, volatile char *ptr)
  39. {
  40. unsigned long flags;
  41. long temp;
  42. _atomic_spin_lock_irqsave(ptr, flags);
  43. temp = (long) *ptr; /* XXX - sign extension wanted? */
  44. *ptr = x;
  45. _atomic_spin_unlock_irqrestore(ptr, flags);
  46. return (unsigned long)temp;
  47. }
  48. #define CMPXCHG(T) \
  49. T notrace __cmpxchg_##T(volatile T *ptr, T old, T new) \
  50. { \
  51. unsigned long flags; \
  52. T prev; \
  53. \
  54. _atomic_spin_lock_irqsave(ptr, flags); \
  55. if ((prev = *ptr) == old) \
  56. *ptr = new; \
  57. _atomic_spin_unlock_irqrestore(ptr, flags); \
  58. return prev; \
  59. }
  60. CMPXCHG(u64)
  61. CMPXCHG(u32)
  62. CMPXCHG(u16)
  63. CMPXCHG(u8)