123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182 |
- /*
- * atomic32.c: 32-bit atomic_t implementation
- *
- * Copyright (C) 2004 Keith M Wesolowski
- * Copyright (C) 2007 Kyle McMartin
- *
- * Based on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf
- */
- #include <linux/atomic.h>
- #include <linux/spinlock.h>
- #include <linux/module.h>
#ifdef CONFIG_SMP
/* Number of buckets in the lock hash; must stay a power of two. */
#define ATOMIC_HASH_SIZE 4
/*
 * Map an object's address to one of the hashed spinlocks.  Bits [9:8]
 * of the address select the bucket, so two objects that hash to the
 * same bucket simply serialize against each other.
 */
#define ATOMIC_HASH(a) (&__atomic_hash[(((unsigned long)a)>>8) & (ATOMIC_HASH_SIZE-1)])

/* The lock table itself; every lock starts out unlocked. */
spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __SPIN_LOCK_UNLOCKED(__atomic_hash)
};

#else /* SMP */

/*
 * On UP a single dummy lock suffices: spin_lock_irqsave() degenerates
 * to an IRQ-disable, which is all the atomicity we need.
 */
static DEFINE_SPINLOCK(dummy);
#define ATOMIC_HASH_SIZE 1
#define ATOMIC_HASH(a) (&dummy)

#endif /* SMP */
/*
 * ATOMIC_OP_RETURN(op, c_op) expands to atomic_<op>_return():
 * apply "v->counter c_op i" under the hashed spinlock and return
 * the resulting (new) counter value.
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
int atomic_##op##_return(int i, atomic_t *v)				\
{									\
	int ret;							\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
									\
	ret = (v->counter c_op i);					\
									\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
	return ret;							\
}									\
EXPORT_SYMBOL(atomic_##op##_return);
/*
 * ATOMIC_OP(op, c_op) expands to atomic_<op>(): apply
 * "v->counter c_op i" under the hashed spinlock, discarding the result.
 */
#define ATOMIC_OP(op, c_op)						\
void atomic_##op(int i, atomic_t *v)					\
{									\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
									\
	v->counter c_op i;						\
									\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
}									\
EXPORT_SYMBOL(atomic_##op);
/* Instantiate the generators: add returns a value, the bitops do not. */
ATOMIC_OP_RETURN(add, +=)
ATOMIC_OP(and, &=)
ATOMIC_OP(or, |=)
ATOMIC_OP(xor, ^=)

#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
- int atomic_xchg(atomic_t *v, int new)
- {
- int ret;
- unsigned long flags;
- spin_lock_irqsave(ATOMIC_HASH(v), flags);
- ret = v->counter;
- v->counter = new;
- spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
- return ret;
- }
- EXPORT_SYMBOL(atomic_xchg);
- int atomic_cmpxchg(atomic_t *v, int old, int new)
- {
- int ret;
- unsigned long flags;
- spin_lock_irqsave(ATOMIC_HASH(v), flags);
- ret = v->counter;
- if (likely(ret == old))
- v->counter = new;
- spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
- return ret;
- }
- EXPORT_SYMBOL(atomic_cmpxchg);
- int __atomic_add_unless(atomic_t *v, int a, int u)
- {
- int ret;
- unsigned long flags;
- spin_lock_irqsave(ATOMIC_HASH(v), flags);
- ret = v->counter;
- if (ret != u)
- v->counter += a;
- spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
- return ret;
- }
- EXPORT_SYMBOL(__atomic_add_unless);
- /* Atomic operations are already serializing */
- void atomic_set(atomic_t *v, int i)
- {
- unsigned long flags;
- spin_lock_irqsave(ATOMIC_HASH(v), flags);
- v->counter = i;
- spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
- }
- EXPORT_SYMBOL(atomic_set);
/*
 * Atomically OR @mask into *addr.  Returns the old value masked with
 * @mask, i.e. non-zero iff the bit(s) were already set.
 */
unsigned long ___set_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long prev, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	prev = *addr;
	*addr = prev | mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return prev & mask;
}
EXPORT_SYMBOL(___set_bit);
/*
 * Atomically clear the @mask bits in *addr.  Returns the old value
 * masked with @mask, i.e. non-zero iff the bit(s) were previously set.
 */
unsigned long ___clear_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long prev, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	prev = *addr;
	*addr = prev & ~mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return prev & mask;
}
EXPORT_SYMBOL(___clear_bit);
/*
 * Atomically XOR @mask into *addr (toggle the bits).  Returns the old
 * value masked with @mask, i.e. non-zero iff the bit(s) were set
 * before the toggle.
 */
unsigned long ___change_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long prev, flags;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	prev = *addr;
	*addr = prev ^ mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return prev & mask;
}
EXPORT_SYMBOL(___change_bit);
- unsigned long __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new)
- {
- unsigned long flags;
- u32 prev;
- spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
- if ((prev = *ptr) == old)
- *ptr = new;
- spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);
- return (unsigned long)prev;
- }
- EXPORT_SYMBOL(__cmpxchg_u32);
- unsigned long __xchg_u32(volatile u32 *ptr, u32 new)
- {
- unsigned long flags;
- u32 prev;
- spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
- prev = *ptr;
- *ptr = new;
- spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);
- return (unsigned long)prev;
- }
- EXPORT_SYMBOL(__xchg_u32);
|