path: root/kernel/arch/powerpc/mm/tlb_low_64e.S
Diffstat (limited to 'kernel/arch/powerpc/mm/tlb_low_64e.S')
-rw-r--r--	kernel/arch/powerpc/mm/tlb_low_64e.S	86
1 file changed, 70 insertions(+), 16 deletions(-)
diff --git a/kernel/arch/powerpc/mm/tlb_low_64e.S b/kernel/arch/powerpc/mm/tlb_low_64e.S
index 89bf95bd6..29d6987c3 100644
--- a/kernel/arch/powerpc/mm/tlb_low_64e.S
+++ b/kernel/arch/powerpc/mm/tlb_low_64e.S
@@ -68,11 +68,17 @@ END_FTR_SECTION_IFSET(CPU_FTR_EMB_HV)
ld r14,PACAPGD(r13)
std r15,EX_TLB_R15(r12)
std r10,EX_TLB_CR(r12)
+#ifdef CONFIG_PPC_FSL_BOOK3E
+ std r7,EX_TLB_R7(r12)
+#endif
TLB_MISS_PROLOG_STATS
.endm
.macro tlb_epilog_bolted
ld r14,EX_TLB_CR(r12)
+#ifdef CONFIG_PPC_FSL_BOOK3E
+ ld r7,EX_TLB_R7(r12)
+#endif
ld r10,EX_TLB_R10(r12)
ld r11,EX_TLB_R11(r12)
ld r13,EX_TLB_R13(r12)
@@ -297,6 +303,7 @@ itlb_miss_fault_bolted:
* r13 = PACA
* r11 = tlb_per_core ptr
* r10 = crap (free to use)
+ * r7 = esel_next
*/
tlb_miss_common_e6500:
crmove cr2*4+2,cr0*4+2 /* cr2.eq != 0 if kernel address */
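(The TCD_* offsets used throughout this handler index the per-core TLB bookkeeping structure that r11 points at, struct tlb_core_data from asm/paca.h. As a rough reference for the fields involved here; the layout is approximate and should be checked against the tree this patch applies to:)

	struct tlb_core_data {
		u8 lock;	/* per-core lock taken by the lbarx/stbcx. sequence below */
		u8 esel_next;	/* TCD_ESEL_NEXT: next TLB1 victim, now cached in r7 */
		u8 esel_max;	/* TCD_ESEL_MAX: wrap when esel_next reaches this value */
		u8 esel_first;	/* TCD_ESEL_FIRST: entry to wrap back to */
	};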
@@ -308,11 +315,11 @@ BEGIN_FTR_SECTION /* CPU_FTR_SMT */
*
* MAS6:IND should be already set based on MAS4
*/
-1: lbarx r15,0,r11
lhz r10,PACAPACAINDEX(r13)
- cmpdi r15,0
- cmpdi cr1,r15,1 /* set cr1.eq = 0 for non-recursive */
addi r10,r10,1
+ crclr cr1*4+eq /* set cr1.eq = 0 for non-recursive */
+1: lbarx r15,0,r11
+ cmpdi r15,0
bne 2f
stbcx. r10,0,r11
bne 1b
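(Note that the lhz/addi/crclr setup is hoisted above the 1: label, so only the cmpdi and the conditional store sit between lbarx and stbcx., keeping the reservation window short. In rough C terms, an illustrative sketch rather than code from the patch, with __atomic_compare_exchange_n standing in for the lbarx/stbcx. pair and tcd for the structure r11 points at:)

	/* tag = paca_index + 1: non-zero while held, and identifies the owner */
	static void tcd_lock(struct tlb_core_data *tcd, u8 tag)
	{
		for (;;) {
			u8 old = 0;
			/* succeeds only if the lock byte is still 0 at the store */
			if (__atomic_compare_exchange_n(&tcd->lock, &old, tag, false,
							__ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
				return;		/* lock taken */
			if (old == tag)
				return;		/* recursive entry (mcheck/crit): see 2: in the next hunk */
			while (__atomic_load_n(&tcd->lock, __ATOMIC_RELAXED) != 0)
				;		/* 10: spin on plain loads, then retry */
		}
	}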
@@ -320,12 +327,16 @@ BEGIN_FTR_SECTION /* CPU_FTR_SMT */
.subsection 1
2: cmpd cr1,r15,r10 /* recursive lock due to mcheck/crit/etc? */
beq cr1,3b /* unlock will happen if cr1.eq = 0 */
- lbz r15,0(r11)
+10: lbz r15,0(r11)
cmpdi r15,0
- bne 2b
+ bne 10b
b 1b
.previous
+END_FTR_SECTION_IFSET(CPU_FTR_SMT)
+
+ lbz r7,TCD_ESEL_NEXT(r11)
+BEGIN_FTR_SECTION /* CPU_FTR_SMT */
/*
* Erratum A-008139 says that we can't use tlbwe to change
* an indirect entry in any way (including replacing or
@@ -334,8 +345,7 @@ BEGIN_FTR_SECTION /* CPU_FTR_SMT */
* with tlbilx before overwriting.
*/
- lbz r15,TCD_ESEL_NEXT(r11)
- rlwinm r10,r15,16,0xff0000
+ rlwinm r10,r7,16,0xff0000
oris r10,r10,MAS0_TLBSEL(1)@h
mtspr SPRN_MAS0,r10
isync
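(With esel_next now live in r7, loaded once right after the lock is taken and preserved across the handler by the EX_TLB_R7 save/restore added in the first hunk, the rlwinm/oris pair builds the MAS0 used for the erratum read-back. Sketched in C, illustrative only:)

	mas0 = MAS0_TLBSEL(1) | ((u32)esel_next << 16);	/* entry select in bits 16..23 */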
@@ -398,18 +408,18 @@ ALT_FTR_SECTION_END_IFSET(CPU_FTR_SMT)
rldicl r15,r16,64-PUD_SHIFT+3,64-PUD_INDEX_SIZE-3
clrrdi r15,r15,3
cmpdi cr0,r14,0
- bge tlb_miss_fault_e6500 /* Bad pgd entry or hugepage; bail */
+ bge tlb_miss_huge_e6500 /* Bad pgd entry or hugepage; bail */
ldx r14,r14,r15 /* grab pud entry */
rldicl r15,r16,64-PMD_SHIFT+3,64-PMD_INDEX_SIZE-3
clrrdi r15,r15,3
cmpdi cr0,r14,0
- bge tlb_miss_fault_e6500
+ bge tlb_miss_huge_e6500
ldx r14,r14,r15 /* Grab pmd entry */
mfspr r10,SPRN_MAS0
cmpdi cr0,r14,0
- bge tlb_miss_fault_e6500
+ bge tlb_miss_huge_e6500
/* Now we build the MAS for a 2M indirect page:
*
@@ -428,15 +438,15 @@ ALT_FTR_SECTION_END_IFSET(CPU_FTR_SMT)
clrrdi r15,r16,21 /* make EA 2M-aligned */
mtspr SPRN_MAS2,r15
- lbz r15,TCD_ESEL_NEXT(r11)
+tlb_miss_huge_done_e6500:
lbz r16,TCD_ESEL_MAX(r11)
lbz r14,TCD_ESEL_FIRST(r11)
- rlwimi r10,r15,16,0x00ff0000 /* insert esel_next into MAS0 */
- addi r15,r15,1 /* increment esel_next */
+ rlwimi r10,r7,16,0x00ff0000 /* insert esel_next into MAS0 */
+ addi r7,r7,1 /* increment esel_next */
mtspr SPRN_MAS0,r10
- cmpw r15,r16
- iseleq r15,r14,r15 /* if next == last use first */
- stb r15,TCD_ESEL_NEXT(r11)
+ cmpw r7,r16
+ iseleq r7,r14,r7 /* if next == last use first */
+ stb r7,TCD_ESEL_NEXT(r11)
tlbwe
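(Because the victim selection now works on r7, this round-robin update can be shared with the huge-page path via the new tlb_miss_huge_done_e6500 label; that path arrives here with MAS0 already in r10 and esel_next still in r7, which is why the old lbz of TCD_ESEL_NEXT could be dropped. A rough C sketch of the update, names illustrative:)

	static u32 tcd_pick_esel(struct tlb_core_data *tcd, u32 mas0)
	{
		u8 esel = tcd->esel_next;

		mas0 = (mas0 & ~0x00ff0000) | ((u32)esel << 16);	/* rlwimi: insert esel_next */
		esel++;							/* addi */
		if (esel == tcd->esel_max)				/* cmpw / iseleq */
			esel = tcd->esel_first;				/* wrap back to the first entry */
		tcd->esel_next = esel;					/* stb */
		return mas0;
	}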
@@ -456,6 +466,50 @@ END_FTR_SECTION_IFSET(CPU_FTR_SMT)
tlb_epilog_bolted
rfi
+tlb_miss_huge_e6500:
+ beq tlb_miss_fault_e6500
+ li r10,1
+ andi. r15,r14,HUGEPD_SHIFT_MASK@l /* r15 = psize */
+ rldimi r14,r10,63,0 /* Set PD_HUGE */
+ xor r14,r14,r15 /* Clear size bits */
+ ldx r14,0,r14
+
+ /*
+ * Now we build the MAS for a huge page.
+ *
+ * MAS 0 : ESEL needs to be filled by software round-robin
+ * - can be handled by indirect code
+ * MAS 1 : Need to clear IND and set TSIZE
+ * MAS 2,3+7: Needs to be redone similar to non-tablewalk handler
+ */
+
+ subi r15,r15,10 /* Convert psize to tsize */
+ mfspr r10,SPRN_MAS1
+ rlwinm r10,r10,0,~MAS1_IND
+ rlwimi r10,r15,MAS1_TSIZE_SHIFT,MAS1_TSIZE_MASK
+ mtspr SPRN_MAS1,r10
+
+ li r10,-0x400
+ sld r15,r10,r15 /* Generate mask based on size */
+ and r10,r16,r15
+ rldicr r15,r14,64-(PTE_RPN_SHIFT-PAGE_SHIFT),63-PAGE_SHIFT
+ rlwimi r10,r14,32-19,27,31 /* Insert WIMGE */
+ clrldi r15,r15,PAGE_SHIFT /* Clear crap at the top */
+ rlwimi r15,r14,32-8,22,25 /* Move in U bits */
+ mtspr SPRN_MAS2,r10
+ andi. r10,r14,_PAGE_DIRTY
+ rlwimi r15,r14,32-2,26,31 /* Move in BAP bits */
+
+ /* Mask out SW and UW if !DIRTY (XXX optimize this !) */
+ bne 1f
+ li r10,MAS3_SW|MAS3_UW
+ andc r15,r15,r10
+1:
+ mtspr SPRN_MAS7_MAS3,r15
+
+ mfspr r10,SPRN_MAS0
+ b tlb_miss_huge_done_e6500
+
tlb_miss_kernel_e6500:
ld r14,PACA_KERNELPGD(r13)
cmpldi cr1,r15,8 /* Check for vmalloc region */
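(For reference, the new tlb_miss_huge_e6500 path above, condensed into C. This is an illustrative sketch only: the exact WIMGE/U/BAP bit placements are those encoded by the rlwimi forms in the assembly, PD_HUGE and HUGEPD_SHIFT_MASK come from the usual headers, and wimge, rpn, u_bits and bap_bits are placeholders for fields extracted from the PTE:)

	if (!entry)					/* beq: empty entry, take the fault */
		goto fault;

	shift = entry & HUGEPD_SHIFT_MASK;		/* page shift kept in the low bits */
	pte   = *(u64 *)((entry | PD_HUGE) ^ shift);	/* rldimi/xor/ldx: fetch the huge PTE */

	tsize = shift - 10;				/* e.g. a 4 MiB page: shift 22 -> tsize 12 */
	mas1  = (mas1 & ~MAS1_IND) | (tsize << MAS1_TSIZE_SHIFT);

	mask  = ~((1UL << shift) - 1);			/* li -0x400; sld */
	mas2  = (ea & mask) | wimge;			/* EA aligned to the huge page, plus WIMGE */
	mas7_3 = rpn | u_bits | bap_bits;		/* physical page number and permissions */
	if (!(pte & _PAGE_DIRTY))
		mas7_3 &= ~(MAS3_SW | MAS3_UW);		/* no supervisor/user write until dirtied */

(The path then reloads MAS0 and branches back to tlb_miss_huge_done_e6500, which performs the esel round-robin shown earlier and issues the tlbwe.)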