Commit 844bdf1b authored by Tobias Klauser, committed by David S. Miller

sparc64: Fix old style declaration GCC warnings

Fix [-Wold-style-declaration] GCC warnings by moving the inline keyword
before the return type.
Signed-off-by: Tobias Klauser <tklauser@distanz.ch>
Signed-off-by: David S. Miller <davem@davemloft.net>
parent d624716b
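
For context, a minimal standalone sketch of the declaration-order issue this patch addresses (not part of the patch; the file and function names here are made up for illustration). GCC's -Wold-style-declaration, enabled by -Wextra, warns when a specifier such as inline appears after the return type; placing it before the type, as the diff below does throughout, silences the warning.

/* old_style_inline.c - hypothetical example, not from the kernel tree.
 * Build with: gcc -Wextra -c old_style_inline.c
 */

/* Warned about: 'inline' follows the return type [-Wold-style-declaration] */
static void inline old_style(void)
{
}

/* Clean: 'static' and 'inline' precede the return type */
static inline void new_style(void)
{
}

int main(void)
{
	old_style();
	new_style();
	return 0;
}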
@@ -96,7 +96,7 @@ static inline void arch_spin_lock_flags(arch_spinlock_t *lock, unsigned long fla
 /* Multi-reader locks, these are much saner than the 32-bit Sparc ones... */
-static void inline arch_read_lock(arch_rwlock_t *lock)
+static inline void arch_read_lock(arch_rwlock_t *lock)
 {
 	unsigned long tmp1, tmp2;
@@ -119,7 +119,7 @@ static void inline arch_read_lock(arch_rwlock_t *lock)
 	: "memory");
 }
-static int inline arch_read_trylock(arch_rwlock_t *lock)
+static inline int arch_read_trylock(arch_rwlock_t *lock)
 {
 	int tmp1, tmp2;
@@ -140,7 +140,7 @@ static int inline arch_read_trylock(arch_rwlock_t *lock)
 	return tmp1;
 }
-static void inline arch_read_unlock(arch_rwlock_t *lock)
+static inline void arch_read_unlock(arch_rwlock_t *lock)
 {
 	unsigned long tmp1, tmp2;
@@ -156,7 +156,7 @@ static void inline arch_read_unlock(arch_rwlock_t *lock)
 	: "memory");
 }
-static void inline arch_write_lock(arch_rwlock_t *lock)
+static inline void arch_write_lock(arch_rwlock_t *lock)
 {
 	unsigned long mask, tmp1, tmp2;
@@ -181,7 +181,7 @@ static void inline arch_write_lock(arch_rwlock_t *lock)
 	: "memory");
 }
-static void inline arch_write_unlock(arch_rwlock_t *lock)
+static inline void arch_write_unlock(arch_rwlock_t *lock)
 {
 	__asm__ __volatile__(
 "	stw		%%g0, [%0]"
@@ -190,7 +190,7 @@ static void inline arch_write_unlock(arch_rwlock_t *lock)
 	: "memory");
 }
-static int inline arch_write_trylock(arch_rwlock_t *lock)
+static inline int arch_write_trylock(arch_rwlock_t *lock)
 {
 	unsigned long mask, tmp1, tmp2, result;
...