Commit 459b6e62 authored by David S. Miller

[SPARC64]: Fix some SUN4V TLB miss bugs.

Code patching did not sign extend negative branch
offsets correctly.

Kernel TLB miss path needs patching and %g4 register
preservation in order to handle SUN4V correctly.
Signed-off-by: David S. Miller <davem@davemloft.net>
parent fd05068d
...@@ -48,7 +48,7 @@ kvmap_itlb_tsb_miss: ...@@ -48,7 +48,7 @@ kvmap_itlb_tsb_miss:
kvmap_itlb_vmalloc_addr: kvmap_itlb_vmalloc_addr:
KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath) KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)
KTSB_LOCK_TAG(%g1, %g2, %g4) KTSB_LOCK_TAG(%g1, %g2, %g7)
/* Load and check PTE. */ /* Load and check PTE. */
ldxa [%g5] ASI_PHYS_USE_EC, %g5 ldxa [%g5] ASI_PHYS_USE_EC, %g5
...@@ -60,8 +60,29 @@ kvmap_itlb_vmalloc_addr: ...@@ -60,8 +60,29 @@ kvmap_itlb_vmalloc_addr:
/* fallthrough to TLB load */ /* fallthrough to TLB load */
kvmap_itlb_load: kvmap_itlb_load:
stxa %g5, [%g0] ASI_ITLB_DATA_IN ! Reload TLB
661: stxa %g5, [%g0] ASI_ITLB_DATA_IN
retry retry
.section .sun4v_2insn_patch, "ax"
.word 661b
nop
nop
.previous
/* For sun4v the ASI_ITLB_DATA_IN store and the retry
* instruction get nop'd out and we get here to branch
* to the sun4v tlb load code. The registers are setup
* as follows:
*
* %g4: vaddr
* %g5: PTE
* %g6: TAG
*
* The sun4v TLB load wants the PTE in %g3 so we fix that
* up here.
*/
ba,pt %xcc, sun4v_itlb_load
mov %g5, %g3
kvmap_itlb_longpath: kvmap_itlb_longpath:
...@@ -80,7 +101,7 @@ kvmap_itlb_longpath: ...@@ -80,7 +101,7 @@ kvmap_itlb_longpath:
kvmap_itlb_obp: kvmap_itlb_obp:
OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath) OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath)
KTSB_LOCK_TAG(%g1, %g2, %g4) KTSB_LOCK_TAG(%g1, %g2, %g7)
KTSB_WRITE(%g1, %g5, %g6) KTSB_WRITE(%g1, %g5, %g6)
...@@ -90,7 +111,7 @@ kvmap_itlb_obp: ...@@ -90,7 +111,7 @@ kvmap_itlb_obp:
kvmap_dtlb_obp: kvmap_dtlb_obp:
OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath) OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath)
KTSB_LOCK_TAG(%g1, %g2, %g4) KTSB_LOCK_TAG(%g1, %g2, %g7)
KTSB_WRITE(%g1, %g5, %g6) KTSB_WRITE(%g1, %g5, %g6)
...@@ -129,7 +150,7 @@ kvmap_linear_patch: ...@@ -129,7 +150,7 @@ kvmap_linear_patch:
kvmap_dtlb_vmalloc_addr: kvmap_dtlb_vmalloc_addr:
KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath) KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)
KTSB_LOCK_TAG(%g1, %g2, %g4) KTSB_LOCK_TAG(%g1, %g2, %g7)
/* Load and check PTE. */ /* Load and check PTE. */
ldxa [%g5] ASI_PHYS_USE_EC, %g5 ldxa [%g5] ASI_PHYS_USE_EC, %g5
...@@ -141,8 +162,29 @@ kvmap_dtlb_vmalloc_addr: ...@@ -141,8 +162,29 @@ kvmap_dtlb_vmalloc_addr:
/* fallthrough to TLB load */ /* fallthrough to TLB load */
kvmap_dtlb_load: kvmap_dtlb_load:
stxa %g5, [%g0] ASI_DTLB_DATA_IN ! Reload TLB
661: stxa %g5, [%g0] ASI_DTLB_DATA_IN ! Reload TLB
retry retry
.section .sun4v_2insn_patch, "ax"
.word 661b
nop
nop
.previous
/* For sun4v the ASI_DTLB_DATA_IN store and the retry
* instruction get nop'd out and we get here to branch
* to the sun4v tlb load code. The registers are setup
* as follows:
*
* %g4: vaddr
* %g5: PTE
* %g6: TAG
*
* The sun4v TLB load wants the PTE in %g3 so we fix that
* up here.
*/
ba,pt %xcc, sun4v_dtlb_load
mov %g5, %g3
kvmap_dtlb_nonlinear: kvmap_dtlb_nonlinear:
/* Catch kernel NULL pointer derefs. */ /* Catch kernel NULL pointer derefs. */
...@@ -185,10 +227,17 @@ kvmap_dtlb_longpath: ...@@ -185,10 +227,17 @@ kvmap_dtlb_longpath:
nop nop
.previous .previous
rdpr %tl, %g4 rdpr %tl, %g3
cmp %g4, 1 cmp %g3, 1
mov TLB_TAG_ACCESS, %g4
661: mov TLB_TAG_ACCESS, %g4
ldxa [%g4] ASI_DMMU, %g5 ldxa [%g4] ASI_DMMU, %g5
.section .sun4v_2insn_patch, "ax"
.word 661b
mov %g4, %g5
nop
.previous
be,pt %xcc, sparc64_realfault_common be,pt %xcc, sparc64_realfault_common
mov FAULT_CODE_DTLB, %g4 mov FAULT_CODE_DTLB, %g4
ba,pt %xcc, winfix_trampoline ba,pt %xcc, winfix_trampoline
......
...@@ -96,7 +96,7 @@ sun4v_dtlb_miss: ...@@ -96,7 +96,7 @@ sun4v_dtlb_miss:
/* Load UTSB reg into %g1. */ /* Load UTSB reg into %g1. */
mov SCRATCHPAD_UTSBREG1, %g1 mov SCRATCHPAD_UTSBREG1, %g1
ldxa [%g1 + %g1] ASI_SCRATCHPAD, %g1 ldxa [%g1] ASI_SCRATCHPAD, %g1
LOAD_DTLB_INFO(%g2, %g4, %g5) LOAD_DTLB_INFO(%g2, %g4, %g5)
COMPUTE_TAG_TARGET(%g6, %g4, %g5, %g3, kvmap_dtlb_4v) COMPUTE_TAG_TARGET(%g6, %g4, %g5, %g3, kvmap_dtlb_4v)
...@@ -149,13 +149,18 @@ sun4v_dtlb_prot: ...@@ -149,13 +149,18 @@ sun4v_dtlb_prot:
* SCRATCHPAD_MMU_MISS contents in %g2. * SCRATCHPAD_MMU_MISS contents in %g2.
*/ */
sun4v_itsb_miss: sun4v_itsb_miss:
ba,pt %xcc, sun4v_tsb_miss_common mov SCRATCHPAD_UTSBREG1, %g1
ldxa [%g1] ASI_SCRATCHPAD, %g1
brz,pn %g5, kvmap_itlb_4v
mov FAULT_CODE_ITLB, %g3 mov FAULT_CODE_ITLB, %g3
/* Called from trap table with TAG TARGET placed into /* Called from trap table with TAG TARGET placed into
* %g6 and SCRATCHPAD_UTSBREG1 contents in %g1. * %g6 and SCRATCHPAD_UTSBREG1 contents in %g1.
*/ */
sun4v_dtsb_miss: sun4v_dtsb_miss:
mov SCRATCHPAD_UTSBREG1, %g1
ldxa [%g1] ASI_SCRATCHPAD, %g1
brz,pn %g5, kvmap_dtlb_4v
mov FAULT_CODE_DTLB, %g3 mov FAULT_CODE_DTLB, %g3
/* Create TSB pointer into %g1. This is something like: /* Create TSB pointer into %g1. This is something like:
...@@ -312,7 +317,8 @@ sun4v_stdfmna: ...@@ -312,7 +317,8 @@ sun4v_stdfmna:
or %g2, %lo(OLD), %g2; \ or %g2, %lo(OLD), %g2; \
sub %g1, %g2, %g1; \ sub %g1, %g2, %g1; \
sethi %hi(BRANCH_ALWAYS), %g3; \ sethi %hi(BRANCH_ALWAYS), %g3; \
srl %g1, 2, %g1; \ sll %g1, 11, %g1; \
srl %g1, 11 + 2, %g1; \
or %g3, %lo(BRANCH_ALWAYS), %g3; \ or %g3, %lo(BRANCH_ALWAYS), %g3; \
or %g3, %g1, %g3; \ or %g3, %g1, %g3; \
stw %g3, [%g2]; \ stw %g3, [%g2]; \
......
...@@ -186,9 +186,9 @@ ...@@ -186,9 +186,9 @@
ldx [%g2 + HV_FAULT_I_CTX_OFFSET], %g5; \ ldx [%g2 + HV_FAULT_I_CTX_OFFSET], %g5; \
srlx %g4, 22, %g7; \ srlx %g4, 22, %g7; \
sllx %g5, 48, %g6; \ sllx %g5, 48, %g6; \
brz,pn %g5, kvmap_itlb_4v; \ ba,pt %xcc, sun4v_itsb_miss; \
or %g6, %g7, %g6; \ or %g6, %g7, %g6; \
ba,a,pt %xcc, sun4v_itsb_miss; nop;
#define SUN4V_DTSB_MISS \ #define SUN4V_DTSB_MISS \
ldxa [%g0] ASI_SCRATCHPAD, %g2; \ ldxa [%g0] ASI_SCRATCHPAD, %g2; \
...@@ -196,9 +196,9 @@ ...@@ -196,9 +196,9 @@
ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5; \ ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5; \
srlx %g4, 22, %g7; \ srlx %g4, 22, %g7; \
sllx %g5, 48, %g6; \ sllx %g5, 48, %g6; \
brz,pn %g5, kvmap_dtlb_4v; \ ba,pt %xcc, sun4v_dtsb_miss; \
or %g6, %g7, %g6; \ or %g6, %g7, %g6; \
ba,a,pt %xcc, sun4v_dtsb_miss; nop;
/* Before touching these macros, you owe it to yourself to go and /* Before touching these macros, you owe it to yourself to go and
* see how arch/sparc64/kernel/winfixup.S works... -DaveM * see how arch/sparc64/kernel/winfixup.S works... -DaveM
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment