/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2005 Thiemo Seufer
 * Copyright (C) 2005 MIPS Technologies, Inc. All rights reserved.
 * Author: Maciej W. Rozycki <macro@mips.com>
 */

#include <linux/init.h>

#include <asm/addrspace.h>
#include <asm/bug.h>
#include <asm/cacheflush.h>

#ifndef CKSEG2
#define CKSEG2 CKSSEG
#endif
#ifndef TO_PHYS_MASK
#define TO_PHYS_MASK -1
#endif

/*
 * FUNC is executed in one of the uncached segments, depending on its
 * original address as follows:
 *
 * 1. If the original address is in CKSEG0 or CKSEG1, then the uncached
 *    segment used is CKSEG1.
 * 2. If the original address is in XKPHYS, then the uncached segment
 *    used is XKPHYS(2).
 * 3. Otherwise it's a bug.
 *
 * The same remapping is done with the stack pointer.  Stack handling
 * works because we don't handle stack arguments or more complex return
 * values, so we can avoid sharing the same stack area between cached
 * and uncached mode.
 */
unsigned long __cpuinit run_uncached(void *func)
{
	register long sp __asm__("$sp");
	register long ret __asm__("$2");
	long lfunc = (long)func, ufunc;
	long usp;

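	/*
	 * Relocate the stack pointer into the matching uncached segment:
	 * CKSEG1 for a CKSEG0/CKSEG1 stack, the uncached XKPHYS region
	 * for an XKPHYS stack (64-bit kernels only).
	 */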
	if (sp >= (long)CKSEG0 && sp < (long)CKSEG2)
		usp = CKSEG1ADDR(sp);
#ifdef CONFIG_64BIT
	else if ((long long)sp >= (long long)PHYS_TO_XKPHYS(0, 0) &&
		 (long long)sp < (long long)PHYS_TO_XKPHYS(8, 0))
		usp = PHYS_TO_XKPHYS(K_CALG_UNCACHED,
				     XKPHYS_TO_PHYS((long long)sp));
#endif
	else {
		BUG();
		usp = sp;
	}
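	/* Do the same remapping for the address of the function itself. */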
	if (lfunc >= (long)CKSEG0 && lfunc < (long)CKSEG2)
		ufunc = CKSEG1ADDR(lfunc);
#ifdef CONFIG_64BIT
	else if ((long long)lfunc >= (long long)PHYS_TO_XKPHYS(0, 0) &&
		 (long long)lfunc < (long long)PHYS_TO_XKPHYS(8, 0))
		ufunc = PHYS_TO_XKPHYS(K_CALG_UNCACHED,
				       XKPHYS_TO_PHYS((long long)lfunc));
#endif
	else {
		BUG();
		ufunc = lfunc;
	}

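	/*
	 * Save the cached stack pointer in $16 ($s0, callee-saved), switch
	 * to the uncached stack, call the relocated function, then restore
	 * the original stack pointer.  jalr clobbers $31 ($ra); the callee's
	 * return value is picked up from $2 ($v0) via the "ret" register
	 * variable.
	 */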
	__asm__ __volatile__ (
		"	move	$16, $sp\n"
		"	move	$sp, %1\n"
		"	jalr	%2\n"
		"	move	$sp, $16"
		: "=r" (ret)
		: "r" (usp), "r" (ufunc)
		: "$16", "$31");

	return ret;
}
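
/*
 * Illustrative usage sketch, not compiled and not part of this file's
 * interface; the routine names below are hypothetical.  It only shows
 * the calling convention: the callee executes and stacks uncached, and
 * whatever it returns in $2 ($v0) becomes run_uncached()'s own return
 * value.
 */
#if 0
static unsigned long __cpuinit example_uncached_probe(void)
{
	/* Runs entirely from an uncached segment with an uncached stack. */
	return 42;
}

static void __cpuinit example_caller(void)
{
	unsigned long val;

	val = run_uncached(example_uncached_probe);
}
#endif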