Commit e746c994 authored by Linus Torvalds

Make "atomic_dec_and_lock()" a macro.

We rename the actual architecture-specific low-level implementation
to have a prepended underscore: "_atomic_dec_and_lock()".

This extra macro indirection is there so that the macro can do the
lock context counting. That will be the next patch.
parent 9056b559
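
For orientation, the pattern this commit introduces boils down to a few lines.
The sketch below is assembled from the hunks that follow; it is illustrative
only, not a literal excerpt of any one file:

#include <linux/spinlock.h>
#include <asm/atomic.h>

/* The architecture-specific implementation keeps its old semantics,
 * now under the underscored name. */
extern int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock);

/* Callers keep using the old name; for now the macro is a plain alias. */
#define atomic_dec_and_lock(atomic,lock) _atomic_dec_and_lock(atomic,lock)

/* The point of the indirection: a later patch can redefine the macro to
 * wrap _atomic_dec_and_lock() with lock context counting without touching
 * the callers or the per-architecture implementations. */
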
@@ -185,7 +185,7 @@ EXPORT_SYMBOL(cpu_data);
 EXPORT_SYMBOL(smp_num_cpus);
 EXPORT_SYMBOL(smp_call_function);
 EXPORT_SYMBOL(smp_call_function_on_cpu);
-EXPORT_SYMBOL(atomic_dec_and_lock);
+EXPORT_SYMBOL(_atomic_dec_and_lock);
 #ifdef CONFIG_DEBUG_SPINLOCK
 EXPORT_SYMBOL(_raw_spin_unlock);
 EXPORT_SYMBOL(debug_spin_lock);

@@ -9,10 +9,10 @@
 #include <asm/atomic.h>
 asm (".text \n\
-.global atomic_dec_and_lock \n\
-.ent atomic_dec_and_lock \n\
+.global _atomic_dec_and_lock \n\
+.ent _atomic_dec_and_lock \n\
 .align 4 \n\
-atomic_dec_and_lock: \n\
+_atomic_dec_and_lock: \n\
 .prologue 0 \n\
 1: ldl_l $1, 0($16) \n\
 subl $1, 1, $1 \n\
@@ -28,7 +28,7 @@ atomic_dec_and_lock: \n\
 .subsection 2 \n\
 4: br 1b \n\
 .previous \n\
-.end atomic_dec_and_lock");
+.end _atomic_dec_and_lock");
 static int __attribute_used__
 atomic_dec_and_lock_1(atomic_t *atomic, spinlock_t *lock)

@@ -174,7 +174,7 @@ EXPORT_SYMBOL(memcmp);
 EXPORT_SYMBOL(register_die_notifier);
 #ifdef CONFIG_HAVE_DEC_LOCK
-EXPORT_SYMBOL(atomic_dec_and_lock);
+EXPORT_SYMBOL(_atomic_dec_and_lock);
 #endif
 EXPORT_SYMBOL(__PAGE_KERNEL);

@@ -10,7 +10,7 @@
 #include <linux/spinlock.h>
 #include <asm/atomic.h>
-int atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
+int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
 {
 int counter;
 int newcount;

@@ -19,7 +19,7 @@
 * acquiring the spinlock first.
 */
 int
-atomic_dec_and_lock (atomic_t *refcount, spinlock_t *lock)
+_atomic_dec_and_lock (atomic_t *refcount, spinlock_t *lock)
 {
 int old, new;
@@ -39,4 +39,4 @@ atomic_dec_and_lock (atomic_t *refcount, spinlock_t *lock)
 return 0;
 }
-EXPORT_SYMBOL(atomic_dec_and_lock);
+EXPORT_SYMBOL(_atomic_dec_and_lock);

@@ -28,7 +28,7 @@
 */
 #ifndef ATOMIC_DEC_AND_LOCK
-int atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
+int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
 {
 int counter;
 int newcount;
@@ -51,5 +51,5 @@ int atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
 return 0;
 }
-EXPORT_SYMBOL(atomic_dec_and_lock);
+EXPORT_SYMBOL(_atomic_dec_and_lock);
 #endif /* ATOMIC_DEC_AND_LOCK */

@@ -19,7 +19,7 @@
 */
 #ifndef ATOMIC_DEC_AND_LOCK
-int atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
+int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
 {
 int counter;
 int newcount;
@@ -42,5 +42,5 @@ int atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
 return 0;
 }
-EXPORT_SYMBOL(atomic_dec_and_lock);
+EXPORT_SYMBOL(_atomic_dec_and_lock);
 #endif /* ATOMIC_DEC_AND_LOCK */

@@ -28,7 +28,7 @@
 */
 #ifndef ATOMIC_DEC_AND_LOCK
-int atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
+int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
 {
 int counter;
 int newcount;
@@ -51,5 +51,5 @@ int atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
 return 0;
 }
-EXPORT_SYMBOL(atomic_dec_and_lock);
+EXPORT_SYMBOL(_atomic_dec_and_lock);
 #endif /* ATOMIC_DEC_AND_LOCK */

@@ -177,7 +177,7 @@ EXPORT_SYMBOL(__atomic_sub);
 EXPORT_SYMBOL(__atomic64_add);
 EXPORT_SYMBOL(__atomic64_sub);
 #ifdef CONFIG_SMP
-EXPORT_SYMBOL(atomic_dec_and_lock);
+EXPORT_SYMBOL(_atomic_dec_and_lock);
 #endif
 /* Atomic bit operations. */

@@ -25,8 +25,8 @@
 * }
 */
-.globl atomic_dec_and_lock
-atomic_dec_and_lock: /* %o0 = counter, %o1 = lock */
+.globl _atomic_dec_and_lock
+_atomic_dec_and_lock: /* %o0 = counter, %o1 = lock */
 loop1: lduw [%o0], %g5
 subcc %g5, 1, %g7
 be,pn %icc, start_to_zero

@@ -193,7 +193,7 @@ EXPORT_SYMBOL(rwsem_down_write_failed_thunk);
 EXPORT_SYMBOL(empty_zero_page);
 #ifdef CONFIG_HAVE_DEC_LOCK
-EXPORT_SYMBOL(atomic_dec_and_lock);
+EXPORT_SYMBOL(_atomic_dec_and_lock);
 #endif
 EXPORT_SYMBOL(die_chain);

@@ -10,7 +10,7 @@
 #include <linux/spinlock.h>
 #include <asm/atomic.h>
-int atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
+int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
 {
 int counter;
 int newcount;

@@ -80,7 +80,7 @@ int in_lock_functions(unsigned long addr);
 #define in_lock_functions(ADDR) 0
 #if !defined(CONFIG_PREEMPT) && !defined(CONFIG_DEBUG_SPINLOCK)
-# define atomic_dec_and_lock(atomic,lock) atomic_dec_and_test(atomic)
+# define _atomic_dec_and_lock(atomic,lock) atomic_dec_and_test(atomic)
 # define ATOMIC_DEC_AND_LOCK
 #endif
@@ -464,9 +464,11 @@ extern int _metered_write_trylock(rwlock_t *lock);
 /* "lock on reference count zero" */
 #ifndef ATOMIC_DEC_AND_LOCK
 #include <asm/atomic.h>
-extern int atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock);
+extern int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock);
 #endif
+#define atomic_dec_and_lock(atomic,lock) _atomic_dec_and_lock(atomic,lock)
 /*
 * bit-based spin_lock()
 *

@@ -27,7 +27,7 @@
 */
 #ifndef ATOMIC_DEC_AND_LOCK
-int atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
+int _atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
 {
 spin_lock(lock);
 if (atomic_dec_and_test(atomic))
@@ -36,5 +36,5 @@ int atomic_dec_and_lock(atomic_t *atomic, spinlock_t *lock)
 return 0;
 }
-EXPORT_SYMBOL(atomic_dec_and_lock);
+EXPORT_SYMBOL(_atomic_dec_and_lock);
 #endif
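
As a usage note (unchanged by this patch): callers still invoke
atomic_dec_and_lock(), which decrements the count and returns nonzero with
the spinlock held only when the count reaches zero. A minimal sketch of the
typical caller pattern, with hypothetical names (obj, refcount, guard,
free_obj):

	if (atomic_dec_and_lock(&obj->refcount, &obj->guard)) {
		/* refcount hit zero: obj->guard is held here */
		list_del(&obj->node);      /* hypothetical list membership */
		spin_unlock(&obj->guard);
		free_obj(obj);             /* hypothetical destructor */
	}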