patch-2.3.99-pre4 linux/include/asm-sh/system.h
- Lines: 127
- Date: Mon Mar 27 10:26:15 2000
- Orig file: v2.3.99-pre3/linux/include/asm-sh/system.h
- Orig date: Tue Mar 7 14:32:26 2000
diff -u --recursive --new-file v2.3.99-pre3/linux/include/asm-sh/system.h linux/include/asm-sh/system.h
@@ -2,7 +2,7 @@
#define __ASM_SH_SYSTEM_H
/*
- * Copyright (C) 1999, 2000 Niibe Yutaka
+ * Copyright (C) 1999, 2000 Niibe Yutaka & Kaz Kojima
*/
/*
@@ -93,13 +93,15 @@
/* Interrupt Control */
extern __inline__ void __sti(void)
{
- unsigned long __dummy;
+ unsigned long __dummy0, __dummy1;
__asm__ __volatile__("stc $sr, %0\n\t"
"and %1, %0\n\t"
+ "stc $r5_bank, %1\n\t"
+ "or %1, %0\n\t"
"ldc %0, $sr"
- : "=&r" (__dummy)
- : "r" (0xefffffff)
+ : "=&r" (__dummy0), "=r" (__dummy1)
+ : "1" (~0xf0)
: "memory");
}
@@ -107,10 +109,10 @@
{
unsigned long __dummy;
__asm__ __volatile__("stc $sr, %0\n\t"
- "or %1, %0\n\t"
+ "or #0xf0, %0\n\t"
"ldc %0, $sr"
- : "=&r" (__dummy)
- : "r" (0x10000000)
+ : "=&z" (__dummy)
+ : /* no inputs */
: "memory");
}
@@ -118,33 +120,70 @@
x = (__extension__ ({ unsigned long __sr; \
__asm__ __volatile__( \
"stc $sr, %0" \
- : "=r" (__sr) \
+ : "=&r" (__sr) \
: /* no inputs */ \
: "memory"); \
- (__sr & 0xffff7f0f);}))
+ (__sr & 0x000000f0);}))
#define __save_and_cli(x) \
x = (__extension__ ({ unsigned long __dummy,__sr; \
__asm__ __volatile__( \
"stc $sr, %1\n\t" \
- "or %0, %1\n\t" \
- "stc $sr, %0\n\t" \
- "ldc %1, $sr" \
- : "=r" (__sr), "=&r" (__dummy) \
- : "0" (0x10000000) \
- : "memory"); (__sr & 0xffff7f0f); }))
+ "mov %1, %0\n\t" \
+ "or #0xf0, %0\n\t" \
+ "ldc %0, $sr" \
+ : "=&z" (__dummy), "=&r" (__sr) \
+ : /* no inputs */ \
+ : "memory"); (__sr & 0x000000f0); }))
#define __restore_flags(x) do { \
- unsigned long __dummy; \
- __asm__ __volatile__( \
+ unsigned long __dummy0, __dummy1; \
+ if (x != 0xf0) /* not CLI-ed? */ \
+ __asm__ __volatile__( \
"stc $sr, %0\n\t" \
"and %1, %0\n\t" \
- "or %2, %0\n\t" \
+ "stc $r5_bank, %1\n\t" \
+ "or %1, %0\n\t" \
"ldc %0, $sr" \
- : "=&r" (__dummy) \
- : "r" (0x000080f0), /* IMASK+FD */ \
- "r" (x) \
+ : "=&r" (__dummy0), "=r" (__dummy1) \
+ : "1" (0xffffff0f) \
: "memory"); \
+} while (0)
+
+/*
+ * Jump to P2 area.
+ * When handling TLB or caches, we need to do it from P2 area.
+ */
+#define jump_to_P2() \
+do { \
+ unsigned long __dummy; \
+ __asm__ __volatile__( \
+ "mov.l 1f, %0\n\t" \
+ "or %1, %0\n\t" \
+ "jmp @%0\n\t" \
+ " nop\n\t" \
+ ".balign 4\n" \
+ "1: .long 2f\n" \
+ "2:" \
+ : "=&r" (__dummy) \
+ : "r" (0x20000000)); \
+} while (0)
+
+/*
+ * Back to P1 area.
+ */
+#define back_to_P1() \
+do { \
+ unsigned long __dummy; \
+ __asm__ __volatile__( \
+ "nop;nop;nop;nop;nop;nop\n\t" \
+ "mov.l 1f, %0\n\t" \
+ "jmp @%0\n\t" \
+ " nop\n\t" \
+ ".balign 4\n" \
+ "1: .long 2f\n" \
+ "2:" \
+ : "=&r" (__dummy)); \
} while (0)
/* For spinlocks etc */
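
A minimal usage sketch, not part of the patch: it shows the calling pattern the macros above are written for, assuming this tree's <asm/system.h>. The names my_driver_bump, my_driver_count and my_cache_touch are hypothetical and used only for illustration.

#include <asm/system.h>

static int my_driver_count;		/* hypothetical shared state */

static void my_driver_bump(void)
{
	unsigned long flags;

	__save_and_cli(flags);		/* save old IMASK bits in flags, mask all IRQs */
	my_driver_count++;		/* short critical section */
	__restore_flags(flags);		/* put the saved IMASK back */
}

static void my_cache_touch(void)
{
	jump_to_P2();			/* cache/TLB handling must run from the P2 area */
	/* ... write cache or TLB control registers here ... */
	back_to_P1();			/* return to the normal P1 area */
}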