/* MN10300 Atomic counter operations
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <asm/irqflags.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#ifndef CONFIG_SMP
#include <asm-generic/atomic.h>
#else

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */

#define ATOMIC_INIT(i)	{ (i) }

#ifdef __KERNEL__
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_read(v)	(ACCESS_ONCE((v)->counter))
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.  Note that the guaranteed
 * useful range of an atomic_t is only 24 bits.
 */
#define atomic_set(v, i)	(((v)->counter) = (i))
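
/*
 * The ATOMIC_OP/ATOMIC_OP_RETURN templates below generate the arithmetic
 * atomics using the MN10300 atomic operations unit: the target address is
 * latched into _AAR, the current value is fetched through _ADR, modified,
 * and written back through _ADR; _ASR is then read and, if it comes back
 * non-zero (presumably indicating the update was disturbed), the whole
 * sequence is retried from label 1.
 */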
#define ATOMIC_OP(op) \
static inline void atomic_##op(int i, atomic_t *v) \
{ \
	int retval, status; \
	\
	asm volatile( \
		"1: mov %4,(_AAR,%3) \n" \
		" mov (_ADR,%3),%1 \n" \
		" " #op " %5,%1 \n" \
		" mov %1,(_ADR,%3) \n" \
		" mov (_ADR,%3),%0 \n" /* flush */ \
		" mov (_ASR,%3),%0 \n" \
		" or %0,%0 \n" \
		" bne 1b \n" \
		: "=&r"(status), "=&r"(retval), "=m"(v->counter) \
		: "a"(ATOMIC_OPS_BASE_ADDR), "r"(&v->counter), "r"(i) \
		: "memory", "cc"); \
}

#define ATOMIC_OP_RETURN(op) \
static inline int atomic_##op##_return(int i, atomic_t *v) \
{ \
	int retval, status; \
	\
	asm volatile( \
		"1: mov %4,(_AAR,%3) \n" \
		" mov (_ADR,%3),%1 \n" \
		" " #op " %5,%1 \n" \
		" mov %1,(_ADR,%3) \n" \
		" mov (_ADR,%3),%0 \n" /* flush */ \
		" mov (_ASR,%3),%0 \n" \
		" or %0,%0 \n" \
		" bne 1b \n" \
		: "=&r"(status), "=&r"(retval), "=m"(v->counter) \
		: "a"(ATOMIC_OPS_BASE_ADDR), "r"(&v->counter), "r"(i) \
		: "memory", "cc"); \
	return retval; \
}

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
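
/*
 * The two expansions above provide atomic_add(), atomic_sub(),
 * atomic_add_return() and atomic_sub_return(); the inc/dec and test
 * helpers below are built on those and on cmpxchg()/xchg().
 */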

static inline int atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

static inline void atomic_inc(atomic_t *v)
{
	atomic_add_return(1, v);
}

static inline void atomic_dec(atomic_t *v)
{
	atomic_sub_return(1, v);
}

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v)		(atomic_add_return(1, (v)) == 0)

#define __atomic_add_unless(v, a, u) \
({ \
	int c, old; \
	c = atomic_read(v); \
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old; \
	c; \
})
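
/*
 * __atomic_add_unless() adds @a to @v unless @v already holds @u, and
 * returns the value @v held beforehand; e.g. __atomic_add_unless(v, 1, 0)
 * only increments a counter that is not already zero.
 */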

#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))
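
/*
 * atomic_xchg() and atomic_cmpxchg() are thin wrappers around the plain
 * xchg()/cmpxchg() primitives (see <asm/cmpxchg.h>), applied to the
 * ->counter member of the atomic_t.
 */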

/**
 * atomic_clear_mask - Atomically clear bits in memory
 * @mask: Mask of the bits to be cleared
 * @addr: pointer to word in memory
 *
 * Atomically clears the bits set in @mask from the memory word specified.
 */
static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
{
#ifdef CONFIG_SMP
	int status;

	asm volatile(
		"1: mov %3,(_AAR,%2) \n"
		" mov (_ADR,%2),%0 \n"
		" and %4,%0 \n"
		" mov %0,(_ADR,%2) \n"
		" mov (_ADR,%2),%0 \n" /* flush */
		" mov (_ASR,%2),%0 \n"
		" or %0,%0 \n"
		" bne 1b \n"
		: "=&r"(status), "=m"(*addr)
		: "a"(ATOMIC_OPS_BASE_ADDR), "r"(addr), "r"(~mask)
		: "memory", "cc");
#else
	unsigned long flags;

	mask = ~mask;
	flags = arch_local_cli_save();
	*addr &= mask;
	arch_local_irq_restore(flags);
#endif
}

/**
 * atomic_set_mask - Atomically set bits in memory
 * @mask: Mask of the bits to be set
 * @addr: pointer to word in memory
 *
 * Atomically sets the bits set in @mask in the memory word specified.
 */
static inline void atomic_set_mask(unsigned long mask, unsigned long *addr)
{
#ifdef CONFIG_SMP
	int status;

	asm volatile(
		"1: mov %3,(_AAR,%2) \n"
		" mov (_ADR,%2),%0 \n"
		" or %4,%0 \n"
		" mov %0,(_ADR,%2) \n"
		" mov (_ADR,%2),%0 \n" /* flush */
		" mov (_ASR,%2),%0 \n"
		" or %0,%0 \n"
		" bne 1b \n"
		: "=&r"(status), "=m"(*addr)
		: "a"(ATOMIC_OPS_BASE_ADDR), "r"(addr), "r"(mask)
		: "memory", "cc");
#else
	unsigned long flags;

	flags = arch_local_cli_save();
	*addr |= mask;
	arch_local_irq_restore(flags);
#endif
}
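
/*
 * For example, atomic_set_mask(0x4UL, &flags) and
 * atomic_clear_mask(0x4UL, &flags) set and clear bit 2 of a hypothetical
 * word "flags" without taking a separate lock.
 */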

#endif /* __KERNEL__ */
#endif /* CONFIG_SMP */
#endif /* _ASM_ATOMIC_H */