linux-cvs-patches
[Top] [All Lists]

CVS Update@linux-mips.org: linux

To: linux-cvs-patches@linux-mips.org
Subject: CVS Update@linux-mips.org: linux
From: ths@linux-mips.org
Date: Thu, 01 Sep 2005 10:00:02 +0100
Reply-to: linux-mips@linux-mips.org
Sender: linux-cvs-patches-bounce@linux-mips.org
CVSROOT:        /home/cvs
Module name:    linux
Changes by:     ths@ftp.linux-mips.org  05/09/01 09:59:55

Modified files:
        include/asm-mips: r4kcache.h 

Log message:
        More .set push/pop encapsulation, more eye-friendly code formatting.

diff -urN linux/include/asm-mips/r4kcache.h linux/include/asm-mips/r4kcache.h
--- linux/include/asm-mips/r4kcache.h   2005/07/11 10:03:32     1.25
+++ linux/include/asm-mips/r4kcache.h   2005/09/01 08:59:55     1.26
@@ -21,7 +21,7 @@
  *
  *  - The MIPS32 and MIPS64 specs permit an implementation to directly derive
  *    the index bits from the virtual address.  This breaks with tradition
- *    set by the R4000.  To keep unpleassant surprises from happening we pick
+ *    set by the R4000.  To keep unpleasant surprises from happening we pick
  *    an address in KSEG0 / CKSEG0.
  *  - We need a properly sign extended address for 64-bit code.  To get away
  *    without ifdefs we let the compiler do it by a type cast.
@@ -30,11 +30,11 @@
 
 #define cache_op(op,addr)                                              \
        __asm__ __volatile__(                                           \
+       "       .set    push                                    \n"     \
        "       .set    noreorder                               \n"     \
        "       .set    mips3\n\t                               \n"     \
        "       cache   %0, %1                                  \n"     \
-       "       .set    mips0                                   \n"     \
-       "       .set    reorder"                                        \
+       "       .set    pop                                     \n"     \
        :                                                               \
        : "i" (op), "m" (*(unsigned char *)(addr)))
 
@@ -84,14 +84,14 @@
 static inline void protected_flush_icache_line(unsigned long addr)
 {
        __asm__ __volatile__(
-               ".set noreorder\n\t"
-               ".set mips3\n"
-               "1:\tcache %0,(%1)\n"
-               "2:\t.set mips0\n\t"
-               ".set reorder\n\t"
-               ".section\t__ex_table,\"a\"\n\t"
-               STR(PTR)"\t1b,2b\n\t"
-               ".previous"
+               "       .set    push                    \n"
+               "       .set    noreorder               \n"
+               "       .set    mips3                   \n"
+               "1:     cache   %0, (%1)                \n"
+               "2:     .set    pop                     \n"
+               "       .section __ex_table,\"a\"       \n"
+               "       "STR(PTR)" 1b, 2b               \n"
+               "       .previous"
                :
                : "i" (Hit_Invalidate_I), "r" (addr));
 }
@@ -100,19 +100,19 @@
  * R10000 / R12000 hazard - these processors don't support the Hit_Writeback_D
  * cacheop so we use Hit_Writeback_Inv_D which is supported by all R4000-style
  * caches.  We're talking about one cacheline unnecessarily getting invalidated
- * here so the penaltiy isn't overly hard.
+ * here so the penalty isn't overly hard.
  */
 static inline void protected_writeback_dcache_line(unsigned long addr)
 {
        __asm__ __volatile__(
-               ".set noreorder\n\t"
-               ".set mips3\n"
-               "1:\tcache %0,(%1)\n"
-               "2:\t.set mips0\n\t"
-               ".set reorder\n\t"
-               ".section\t__ex_table,\"a\"\n\t"
-               STR(PTR)"\t1b,2b\n\t"
-               ".previous"
+               "       .set    push                    \n"
+               "       .set    noreorder               \n"
+               "       .set    mips3                   \n"
+               "1:     cache   %0, (%1)                \n"
+               "2:     .set    pop                     \n"
+               "       .section __ex_table,\"a\"       \n"
+               "       "STR(PTR)" 1b, 2b               \n"
+               "       .previous"
                :
                : "i" (Hit_Writeback_Inv_D), "r" (addr));
 }
@@ -120,14 +120,14 @@
 static inline void protected_writeback_scache_line(unsigned long addr)
 {
        __asm__ __volatile__(
-               ".set noreorder\n\t"
-               ".set mips3\n"
-               "1:\tcache %0,(%1)\n"
-               "2:\t.set mips0\n\t"
-               ".set reorder\n\t"
-               ".section\t__ex_table,\"a\"\n\t"
-               STR(PTR)"\t1b,2b\n\t"
-               ".previous"
+               "       .set    push                    \n"
+               "       .set    noreorder               \n"
+               "       .set    mips3                   \n"
+               "1:     cache   %0, (%1)                \n"
+               "2:     .set    pop                     \n"
+               "       .section __ex_table,\"a\"       \n"
+               "       "STR(PTR)" 1b, 2b               \n"
+               "       .previous"
                :
                : "i" (Hit_Writeback_Inv_SD), "r" (addr));
 }
@@ -142,6 +142,7 @@
 
 #define cache16_unroll32(base,op)                                      \
        __asm__ __volatile__(                                           \
+       "       .set push                                       \n"     \
        "       .set noreorder                                  \n"     \
        "       .set mips3                                      \n"     \
        "       cache %1, 0x000(%0); cache %1, 0x010(%0)        \n"     \
@@ -160,8 +161,7 @@
        "       cache %1, 0x1a0(%0); cache %1, 0x1b0(%0)        \n"     \
        "       cache %1, 0x1c0(%0); cache %1, 0x1d0(%0)        \n"     \
        "       cache %1, 0x1e0(%0); cache %1, 0x1f0(%0)        \n"     \
-       "       .set mips0                                      \n"     \
-       "       .set reorder                                    \n"     \
+       "       .set pop                                        \n"     \
                :                                                       \
                : "r" (base),                                           \
                  "i" (op));
@@ -285,6 +285,7 @@
 
 #define cache32_unroll32(base,op)                                      \
        __asm__ __volatile__(                                           \
+       "       .set push                                       \n"     \
        "       .set noreorder                                  \n"     \
        "       .set mips3                                      \n"     \
        "       cache %1, 0x000(%0); cache %1, 0x020(%0)        \n"     \
@@ -303,8 +304,7 @@
        "       cache %1, 0x340(%0); cache %1, 0x360(%0)        \n"     \
        "       cache %1, 0x380(%0); cache %1, 0x3a0(%0)        \n"     \
        "       cache %1, 0x3c0(%0); cache %1, 0x3e0(%0)        \n"     \
-       "       .set mips0                                      \n"     \
-       "       .set reorder                                    \n"     \
+       "       .set pop                                        \n"     \
                :                                                       \
                : "r" (base),                                           \
                  "i" (op));
@@ -428,6 +428,7 @@
 
 #define cache64_unroll32(base,op)                                      \
        __asm__ __volatile__(                                           \
+       "       .set push                                       \n"     \
        "       .set noreorder                                  \n"     \
        "       .set mips3                                      \n"     \
        "       cache %1, 0x000(%0); cache %1, 0x040(%0)        \n"     \
@@ -446,8 +447,7 @@
        "       cache %1, 0x680(%0); cache %1, 0x6c0(%0)        \n"     \
        "       cache %1, 0x700(%0); cache %1, 0x740(%0)        \n"     \
        "       cache %1, 0x780(%0); cache %1, 0x7c0(%0)        \n"     \
-       "       .set mips0                                      \n"     \
-       "       .set reorder                                    \n"     \
+       "       .set pop                                        \n"     \
                :                                                       \
                : "r" (base),                                           \
                  "i" (op));
@@ -532,6 +532,7 @@
 
 #define cache128_unroll32(base,op)                                     \
        __asm__ __volatile__(                                           \
+       "       .set push                                       \n"     \
        "       .set noreorder                                  \n"     \
        "       .set mips3                                      \n"     \
        "       cache %1, 0x000(%0); cache %1, 0x080(%0)        \n"     \
@@ -550,8 +551,7 @@
        "       cache %1, 0xd00(%0); cache %1, 0xd80(%0)        \n"     \
        "       cache %1, 0xe00(%0); cache %1, 0xe80(%0)        \n"     \
        "       cache %1, 0xf00(%0); cache %1, 0xf80(%0)        \n"     \
-       "       .set mips0                                      \n"     \
-       "       .set reorder                                    \n"     \
+       "       .set pop                                        \n"     \
                :                                                       \
                : "r" (base),                                           \
                  "i" (op));

[Prev in Thread] Current Thread [Next in Thread]