// SPDX-License-Identifier: GPL-2.0
/*
 * atomic32.c: 32-bit atomic_t implementation
 *
 * Copyright (C) 2004 Keith M Wesolowski
 * Copyright (C) 2007 Kyle McMartin
 *
 * Based on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf
 */

#include <linux/atomic.h>
#include <linux/spinlock.h>
#include <linux/module.h>

#ifdef CONFIG_SMP
/*
 * SMP: emulate atomics with a small table of spinlocks, selected by
 * hashing the target address (dropping the low 8 bits so neighbouring
 * words tend to share a lock).
 */
#define ATOMIC_HASH_SIZE	4
#define ATOMIC_HASH(a)	(&__atomic_hash[(((unsigned long)a)>>8) & (ATOMIC_HASH_SIZE-1)])

spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __SPIN_LOCK_UNLOCKED(__atomic_hash)
};

#else /* SMP */

/* UP: IRQ protection is all that is needed, so one dummy lock suffices. */
static DEFINE_SPINLOCK(dummy);
#define ATOMIC_HASH_SIZE	1
#define ATOMIC_HASH(a)		(&dummy)

#endif /* SMP */

/*
 * Generate arch_atomic_fetch_<op>(): apply "counter c_op i" under the
 * hashed spinlock and return the value the counter held beforehand.
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
int arch_atomic_fetch_##op(int i, atomic_t *v)				\
{									\
	unsigned long flags;						\
	int old;							\
									\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
	old = v->counter;						\
	v->counter c_op i;						\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
									\
	return old;							\
}									\
EXPORT_SYMBOL(arch_atomic_fetch_##op);

/*
 * Generate arch_atomic_<op>_return(): apply "counter c_op i" under the
 * hashed spinlock and return the updated counter value.
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
int arch_atomic_##op##_return(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
	int val;							\
									\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
	val = (v->counter c_op i);					\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
									\
	return val;							\
}									\
EXPORT_SYMBOL(arch_atomic_##op##_return);

/* Instantiate the handful of ops this port needs, then drop the templates. */
ATOMIC_OP_RETURN(add, +=)

ATOMIC_FETCH_OP(add, +=)
ATOMIC_FETCH_OP(and, &=)
ATOMIC_FETCH_OP(or, |=)
ATOMIC_FETCH_OP(xor, ^=)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN

70
int arch_atomic_xchg(atomic_t *v, int new)
71 72 73 74 75 76 77 78 79 80
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	v->counter = new;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
	return ret;
}
81
EXPORT_SYMBOL(arch_atomic_xchg);
82

83
int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
Linus Torvalds's avatar
Linus Torvalds committed
84
{
Nick Piggin's avatar
Nick Piggin committed
85
	int ret;
Linus Torvalds's avatar
Linus Torvalds committed
86 87
	unsigned long flags;

Nick Piggin's avatar
Nick Piggin committed
88 89 90 91
	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	if (likely(ret == old))
		v->counter = new;
Linus Torvalds's avatar
Linus Torvalds committed
92 93

	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
Nick Piggin's avatar
Nick Piggin committed
94
	return ret;
Linus Torvalds's avatar
Linus Torvalds committed
95
}
96
EXPORT_SYMBOL(arch_atomic_cmpxchg);
Linus Torvalds's avatar
Linus Torvalds committed
97

98
int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
Nick Piggin's avatar
Nick Piggin committed
99 100 101 102 103 104 105 106 107
{
	int ret;
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	ret = v->counter;
	if (ret != u)
		v->counter += a;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
108
	return ret;
Nick Piggin's avatar
Nick Piggin committed
109
}
110
EXPORT_SYMBOL(arch_atomic_fetch_add_unless);
Nick Piggin's avatar
Nick Piggin committed
111 112

/* Atomic operations are already serializing */
113
void arch_atomic_set(atomic_t *v, int i)
Nick Piggin's avatar
Nick Piggin committed
114 115
{
	unsigned long flags;
Linus Torvalds's avatar
Linus Torvalds committed
116

Nick Piggin's avatar
Nick Piggin committed
117 118 119 120
	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	v->counter = i;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
}
121
EXPORT_SYMBOL(arch_atomic_set);
122

/*
 * OR @mask into *addr under the hashed lock; return the mask bits that
 * were already set beforehand (non-zero if any were).
 */
unsigned long sp32___set_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long flags, prev;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	prev = *addr;
	*addr = prev | mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return prev & mask;
}
EXPORT_SYMBOL(sp32___set_bit);

/*
 * Clear the @mask bits in *addr under the hashed lock; return the mask
 * bits that were set beforehand.
 */
unsigned long sp32___clear_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long flags, prev;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	prev = *addr;
	*addr = prev & ~mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return prev & mask;
}
EXPORT_SYMBOL(sp32___clear_bit);

/*
 * Toggle the @mask bits in *addr under the hashed lock; return the mask
 * bits that were set beforehand.
 */
unsigned long sp32___change_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long flags, prev;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	prev = *addr;
	*addr = prev ^ mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return prev & mask;
}
EXPORT_SYMBOL(sp32___change_bit);

/*
 * Generate __cmpxchg_<T>() for a plain memory word of type T: store
 * @new only when *ptr equals @old; return the value that was observed.
 */
#define CMPXCHG(T)						\
	T __cmpxchg_##T(volatile T *ptr, T old, T new)		\
	{							\
		unsigned long flags;				\
		T seen;						\
								\
		spin_lock_irqsave(ATOMIC_HASH(ptr), flags);	\
		seen = *ptr;					\
		if (seen == old)				\
			*ptr = new;				\
		spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);\
								\
		return seen;					\
	}

176 177
CMPXCHG(u8)
CMPXCHG(u16)
Al Viro's avatar
Al Viro committed
178 179
CMPXCHG(u32)
CMPXCHG(u64)
180 181
EXPORT_SYMBOL(__cmpxchg_u8);
EXPORT_SYMBOL(__cmpxchg_u16);
182
EXPORT_SYMBOL(__cmpxchg_u32);
183 184
EXPORT_SYMBOL(__cmpxchg_u64);

185 186 187 188 189 190 191 192 193 194 195 196 197
unsigned long __xchg_u32(volatile u32 *ptr, u32 new)
{
	unsigned long flags;
	u32 prev;

	spin_lock_irqsave(ATOMIC_HASH(ptr), flags);
	prev = *ptr;
	*ptr = new;
	spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags);

	return (unsigned long)prev;
}
EXPORT_SYMBOL(__xchg_u32);