linux-cvs-patches
[Top] [All Lists]

CVS Update@linux-mips.org: linux

To: linux-cvs-patches@linux-mips.org
Subject: CVS Update@linux-mips.org: linux
From: ths@linux-mips.org
Date: Mon, 06 Jun 2005 15:46:28 +0100
Reply-to: linux-mips@linux-mips.org
Sender: linux-cvs-patches-bounce@linux-mips.org
CVSROOT:        /home/cvs
Module name:    linux
Changes by:     ths@ftp.linux-mips.org  05/06/06 15:46:22

Modified files:
        include/asm-mips: Tag: linux_2_4 hazards.h 
        include/asm-mips64: Tag: linux_2_4 hazards.h 
        arch/mips/mm   : Tag: linux_2_4 tlbex-r4k.S 
        arch/mips64/mm : Tag: linux_2_4 tlbex-r4k.S 

Log message:
        TLB micro-optimization.

diff -urN linux/include/asm-mips/hazards.h linux/include/asm-mips/hazards.h
--- linux/include/asm-mips/hazards.h    2005/06/03 02:21:07     1.1.2.3
+++ linux/include/asm-mips/hazards.h    2005/06/06 14:46:22     1.1.2.4
@@ -45,7 +45,8 @@
  */
 #define mtc0_tlbw_hazard                                               \
        b       . + 8
-#define tlbw_eret_hazard
+#define tlbw_eret_hazard                                               \
+       nop
 #endif
 
 /*
diff -urN linux/include/asm-mips64/hazards.h linux/include/asm-mips64/hazards.h
--- linux/include/asm-mips64/Attic/hazards.h    2005/06/03 02:21:07     1.1.2.3
+++ linux/include/asm-mips64/Attic/hazards.h    2005/06/06 14:46:22     1.1.2.4
@@ -45,7 +45,8 @@
  */
 #define mtc0_tlbw_hazard                                               \
        b       . + 8
-#define tlbw_eret_hazard
+#define tlbw_eret_hazard                                               \
+       nop
 #endif
 
 /*
diff -urN linux/arch/mips/mm/tlbex-r4k.S linux/arch/mips/mm/tlbex-r4k.S
--- linux/arch/mips/mm/Attic/tlbex-r4k.S        2005/06/03 02:21:06     1.2.2.20
+++ linux/arch/mips/mm/Attic/tlbex-r4k.S        2005/06/06 14:46:22     1.2.2.21
@@ -186,7 +186,6 @@
        P_MTC0  k1, CP0_ENTRYLO1                # load it
        mtc0_tlbw_hazard
        tlbwr                                   # write random tlb entry
-       nop
        tlbw_eret_hazard
        eret
        END(except_vec0_r4000)
@@ -468,7 +467,6 @@
        PTE_RELOAD(k1, k0)
        mtc0_tlbw_hazard
        tlbwi
-       nop
        tlbw_eret_hazard
        .set    mips3
        eret
@@ -493,7 +491,6 @@
        PTE_RELOAD(k1, k0)
        mtc0_tlbw_hazard
        tlbwi
-       nop
        tlbw_eret_hazard
        .set    mips3
        eret
@@ -523,7 +520,6 @@
        PTE_RELOAD(k1, k0)
        mtc0_tlbw_hazard
        tlbwi
-       nop
        tlbw_eret_hazard
        .set    mips3
        eret
diff -urN linux/arch/mips64/mm/tlbex-r4k.S linux/arch/mips64/mm/tlbex-r4k.S
--- linux/arch/mips64/mm/Attic/tlbex-r4k.S      2005/06/03 02:21:07     1.1.2.20
+++ linux/arch/mips64/mm/Attic/tlbex-r4k.S      2005/06/06 14:46:22     1.1.2.21
@@ -125,6 +125,33 @@
         nop
 END(except_vec1_r4k)
 
+       __FINIT
+
+       .align  5
+LEAF(handle_vec1_r4k)
+       .set    noat
+       LOAD_PTE2 k1 k0 9f
+       ld      k0, 0(k1)                       # get even pte
+       ld      k1, 8(k1)                       # get odd pte
+       PTE_RELOAD k0 k1
+       mtc0_tlbw_hazard
+       tlbwr
+       tlbw_eret_hazard
+       eret
+
+9:                                             # handle the vmalloc range
+       LOAD_KPTE2 k1 k0 invalid_vmalloc_address
+       ld      k0, 0(k1)                       # get even pte
+       ld      k1, 8(k1)                       # get odd pte
+       PTE_RELOAD k0 k1
+       mtc0_tlbw_hazard
+       tlbwr
+       tlbw_eret_hazard
+       eret
+END(handle_vec1_r4k)
+
+       __INIT
+
 LEAF(except_vec1_sb1)
 #if BCM1250_M3_WAR
        dmfc0   k0, CP0_BADVADDR
@@ -134,18 +161,17 @@
        bnez    k0, 1f
 #endif
        .set    noat
-       dla     k0, handle_vec1_r4k
+       dla     k0, handle_vec1_sb1
        jr      k0
         nop
 
 1:     eret
-       nop
 END(except_vec1_sb1)
 
        __FINIT
 
        .align  5
-LEAF(handle_vec1_r4k)
+LEAF(handle_vec1_sb1)
        .set    noat
        LOAD_PTE2 k1 k0 9f
        ld      k0, 0(k1)                       # get even pte
@@ -153,7 +179,6 @@
        PTE_RELOAD k0 k1
        mtc0_tlbw_hazard
        tlbwr
-1:     tlbw_eret_hazard
        eret
 
 9:                                             # handle the vmalloc range
@@ -163,10 +188,8 @@
        PTE_RELOAD k0 k1
        mtc0_tlbw_hazard
        tlbwr
-       nop
-1:     tlbw_eret_hazard
        eret
-END(handle_vec1_r4k)
+END(handle_vec1_sb1)
 
 
        __INIT
@@ -194,7 +217,6 @@
        PTE_RELOAD k0 k1
        mtc0_tlbw_hazard
        tlbwr
-       tlbw_eret_hazard
        eret
 
 9:                                             # handle the vmalloc range
@@ -204,7 +226,6 @@
        PTE_RELOAD k0 k1
        mtc0_tlbw_hazard
        tlbwr
-       tlbw_eret_hazard
        eret
 END(handle_vec1_r10k)
 

[Prev in Thread] [Current Thread] [Next in Thread]