atomic-irq.h

#ifndef __ASM_SH_ATOMIC_IRQ_H
#define __ASM_SH_ATOMIC_IRQ_H

#include <linux/irqflags.h>

/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */
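
/*
 * ATOMIC_OP(op, c_op) generates a void atomic_<op>() helper that applies
 * "v->counter c_op i" with local interrupts disabled.  Masking IRQs makes
 * the plain read-modify-write atomic only with respect to the local CPU,
 * which is what this interrupt-based fallback variant relies on.
 */
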
#define ATOMIC_OP(op, c_op) \
static inline void atomic_##op(int i, atomic_t *v) \
{ \
        unsigned long flags; \
 \
        raw_local_irq_save(flags); \
        v->counter c_op i; \
        raw_local_irq_restore(flags); \
}
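
/*
 * ATOMIC_OP_RETURN(op, c_op) generates atomic_<op>_return(), which performs
 * the same interrupt-protected read-modify-write but also returns the
 * updated counter value.
 */
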
#define ATOMIC_OP_RETURN(op, c_op) \
static inline int atomic_##op##_return(int i, atomic_t *v) \
{ \
        unsigned long temp, flags; \
 \
        raw_local_irq_save(flags); \
        temp = v->counter; \
        temp c_op i; \
        v->counter = temp; \
        raw_local_irq_restore(flags); \
 \
        return temp; \
}
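
/*
 * ATOMIC_OPS(op, c_op) instantiates both variants at once; the two
 * invocations below therefore define atomic_add(), atomic_add_return(),
 * atomic_sub() and atomic_sub_return().
 */
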
#define ATOMIC_OPS(op, c_op) ATOMIC_OP(op, c_op) ATOMIC_OP_RETURN(op, c_op)

ATOMIC_OPS(add, +=)
ATOMIC_OPS(sub, -=)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
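
/*
 * The mask helpers below clear or set bits in v->counter, again relying on
 * raw_local_irq_save()/raw_local_irq_restore() to keep the update atomic
 * with respect to the local CPU.
 */
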
static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
        unsigned long flags;

        raw_local_irq_save(flags);
        v->counter &= ~mask;
        raw_local_irq_restore(flags);
}

static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
{
        unsigned long flags;

        raw_local_irq_save(flags);
        v->counter |= mask;
        raw_local_irq_restore(flags);
}
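
/*
 * Example usage of the generated helpers (illustrative sketch, not part of
 * the original header; ATOMIC_INIT() is defined elsewhere in the atomic
 * headers, and release_object() is a hypothetical cleanup function):
 *
 *      atomic_t refcnt = ATOMIC_INIT(1);
 *
 *      atomic_add(2, &refcnt);                   counter is now 3
 *      atomic_clear_mask(0x1, &refcnt);          counter is now 2
 *      if (atomic_sub_return(2, &refcnt) == 0)
 *              release_object();                 last reference dropped
 */
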
#endif /* __ASM_SH_ATOMIC_IRQ_H */