Commit ff72b7a6, authored Jun 07, 2007 by Ralf Baechle

[MIPS] Fix smp barriers in test_and_{change,clear,set}_bit

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>

Parent: e10e0cc8
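The same structural fix is applied to all three functions below: res is hoisted to function scope, the per-branch early returns are dropped, and every path now falls through to a single smp_mb() before returning, so no branch can skip the SMP barrier. A condensed sketch of the resulting shape (paraphrased from the diff that follows, with the inline asm bodies elided, not the verbatim post-patch source):

	static inline int test_and_set_bit(unsigned long nr,
		volatile unsigned long *addr)
	{
		unsigned short bit = nr & SZLONG_MASK;
		unsigned long res;	/* hoisted out of the branches */

		if (cpu_has_llsc && R10000_LLSC_WAR) {
			/* ll/sc loop (R10000 branch-likely workaround);
			 * leaves the old bit value in res */
		} else if (cpu_has_llsc) {
			/* plain ll/sc loop; leaves the old bit value in res */
		} else {
			/* fallback under raw_local_irq_save/restore:
			 * res = (mask & *a); */
		}

		smp_mb();	/* reached on every path; as the diff shows, the
				 * old code returned from each branch with no
				 * barrier at all */

		return res != 0;
	}

Funneling every branch through one exit also makes it impossible for a future branch to forget the barrier.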
Showing 1 changed file with 19 additions and 32 deletions:

  include/asm-mips/bitops.h  (+19, -32)
--- a/include/asm-mips/bitops.h
+++ b/include/asm-mips/bitops.h
@@ -238,10 +238,11 @@ static inline int test_and_set_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
 	unsigned short bit = nr & SZLONG_MASK;
+	unsigned long res;
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -254,11 +255,9 @@ static inline int test_and_set_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else if (cpu_has_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	push					\n"
@@ -277,25 +276,22 @@ static inline int test_and_set_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else {
 		volatile unsigned long *a = addr;
 		unsigned long mask;
-		int retval;
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << bit;
 		raw_local_irq_save(flags);
-		retval = (mask & *a) != 0;
+		res = (mask & *a);
 		*a |= mask;
 		raw_local_irq_restore(flags);
-
-		return retval;
 	}
 
+	smp_mb();
+
+	return res != 0;
 }
 
 /*
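Why the trailing barrier matters: the kernel's atomic_ops documentation of this era requires atomic bit operations that return a value to act as full memory barriers. A hypothetical illustration, not from the commit (shared_data, owner and producer are made-up names):

	#include <linux/bitops.h>

	/* All identifiers below are illustrative, not from the patch. */
	static int shared_data;		/* protected by bit 0 of owner */
	static unsigned long owner;

	static void producer(void)
	{
		if (!test_and_set_bit(0, &owner)) {	/* acquire-like */
			/* The smp_mb() this patch adds keeps the store below
			 * from being reordered ahead of the successful bit
			 * update on SMP. */
			shared_data = 42;
			smp_mb__before_clear_bit();
			clear_bit(0, &owner);		/* release-like */
		}
	}

Before the patch, the ll/sc paths above returned with no barrier at all, so nothing stopped such a store from leaking outside the bit-protected region.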
@@ -310,6 +306,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
 	unsigned short bit = nr & SZLONG_MASK;
+	unsigned long res;
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
@@ -327,12 +324,10 @@ static inline int test_and_clear_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 #ifdef CONFIG_CPU_MIPSR2
 	} else if (__builtin_constant_p(nr)) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"1: " __LL	"%0, %1		# test_and_clear_bit	\n"
@@ -346,12 +341,10 @@ static inline int test_and_clear_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "ri" (bit), "m" (*m)
 		: "memory");
-
-		return res;
 #endif
 	} else if (cpu_has_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	push					\n"
@@ -371,25 +364,22 @@ static inline int test_and_clear_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else {
 		volatile unsigned long *a = addr;
 		unsigned long mask;
-		int retval;
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << bit;
 		raw_local_irq_save(flags);
-		retval = (mask & *a) != 0;
+		res = (mask & *a);
 		*a &= ~mask;
 		raw_local_irq_restore(flags);
-
-		return retval;
 	}
 
+	smp_mb();
+
+	return res != 0;
 }
 
 /*
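A typical caller that depends on test_and_clear_bit() being a full barrier is a pending-work flag. A hypothetical sketch, not from the commit (pending, handle_work and consume_pending are made-up names):

	#include <linux/bitops.h>

	/* Illustrative only; all names here are invented. */
	extern void handle_work(void);
	static unsigned long pending;

	static void consume_pending(void)
	{
		if (test_and_clear_bit(0, &pending)) {
			/* The barrier now implied on every path orders the
			 * atomic clear against the reads done inside
			 * handle_work(). */
			handle_work();
		}
	}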
@@ -404,10 +394,11 @@ static inline int test_and_change_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
 	unsigned short bit = nr & SZLONG_MASK;
+	unsigned long res;
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	mips3					\n"
@@ -420,11 +411,9 @@ static inline int test_and_change_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else if (cpu_has_llsc) {
 		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp, res;
+		unsigned long temp;
 
 		__asm__ __volatile__(
 		"	.set	push					\n"
@@ -443,24 +432,22 @@ static inline int test_and_change_bit(unsigned long nr,
 		: "=&r" (temp), "=m" (*m), "=&r" (res)
 		: "r" (1UL << bit), "m" (*m)
 		: "memory");
-
-		return res != 0;
 	} else {
 		volatile unsigned long *a = addr;
-		unsigned long mask, retval;
+		unsigned long mask;
 		unsigned long flags;
 
 		a += nr >> SZLONG_LOG;
 		mask = 1UL << bit;
 		raw_local_irq_save(flags);
-		retval = (mask & *a) != 0;
+		res = (mask & *a);
 		*a ^= mask;
 		raw_local_irq_restore(flags);
-
-		return retval;
 	}
 
+	smp_mb();
+
+	return res != 0;
 }
 
 #include <asm-generic/bitops/non-atomic.h>
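The trailing context line is worth noting: the double-underscore variants (__test_and_set_bit() and friends) come from asm-generic/bitops/non-atomic.h and carry no atomicity or barrier guarantees, which is presumably why this fix only touches the atomic versions. An illustrative contrast, not from the patch (map and mark_bits are made-up names):

	#include <linux/bitops.h>

	static unsigned long map;

	/* Assumes the caller of the non-atomic form already holds
	 * whatever lock serializes access to 'map'. */
	static void mark_bits(void)
	{
		/* Atomic: SMP-safe and, with this patch, a full barrier
		 * on every path. */
		if (test_and_set_bit(5, &map))
			return;

		/* Non-atomic __ variant from asm-generic/bitops/non-atomic.h:
		 * no ll/sc loop and no smp_mb(); the caller provides
		 * exclusion. */
		__test_and_set_bit(6, &map);
	}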