xref: /DragonStub/lib/x86_64/efi_stub.S (revision e7db4418b1fe6ea0220974b77c3b10918ab9b7a0)
/*
 * Function calling ABI conversion from Linux to EFI for x86_64
 *
 * Copyright (C) 2007 Intel Corp
 *	Bibo Mao <bibo.mao@intel.com>
 *	Huang Ying <ying.huang@intel.com>
 * Copyright (C) 2012 Felipe Contreras <felipe.contreras@gmail.com>
 */

#ifndef HAVE_USE_MS_ABI
/*
 * EFI calling conventions are documented at:
 *   http://msdn.microsoft.com/en-us/library/ms235286%28v=vs.80%29.aspx
 * ELF calling conventions are documented at:
 *   http://www.x86-64.org/documentation/abi.pdf
 *
 * Basically here are the conversion rules:
 * a) our function pointer is in %rdi
 * b) rsi through r8 (elf) aka rcx through r9 (ms) require stack space
 *    on the MS side even though it's not getting used at all.
 * c) 8(%rsp) is always aligned to 16 in ELF, so %rsp is shifted 8 bytes extra
 * d) arguments are as follows: (elf -> ms)
 *   1) rdi -> rcx (32 saved)
 *   2) rsi -> rdx (32 saved)
 *   3) rdx -> r8 (32 saved)
 *   4) rcx -> r9 (32 saved)
 *   5) r8 -> 32(%rsp) (32 saved)
 *   6) r9 -> 40(%rsp) (48 saved)
 *   7) 8(%rsp) -> 48(%rsp) (48 saved)
 *   8) 16(%rsp) -> 56(%rsp) (64 saved)
 *   9) 24(%rsp) -> 64(%rsp) (64 saved)
 *  10) 32(%rsp) -> 72(%rsp) (80 saved)
 * e) because the first argument we receive in a thunk is actually the
 *    function to be called, arguments are offset as such:
 *   0) rdi -> caller
 *   1) rsi -> rcx (32 saved)
 *   2) rdx -> rdx (32 saved)
 *   3) rcx -> r8 (32 saved)
 *   4) r8 -> r9 (32 saved)
 *   5) r9 -> 32(%rsp) (32 saved)
 *   6) 8(%rsp) -> 40(%rsp) (48 saved)
 *   7) 16(%rsp) -> 48(%rsp) (48 saved)
 *   8) 24(%rsp) -> 56(%rsp) (64 saved)
 *   9) 32(%rsp) -> 64(%rsp) (64 saved)
 *  10) 40(%rsp) -> 72(%rsp) (80 saved)
 * f) arguments need to be moved in opposite order to avoid clobbering
 */

/*
 * ENTRY(name) — export `name` and open its definition.  Kept minimal on
 * purpose: no .type/.size annotations are emitted, matching the original.
 */
#define ENTRY(name)	\
	.globl name;	\
	name:

/*
 * efi_call0(fp) — invoke MS-ABI fp() with no arguments.
 * In:  %rdi = target function.
 * 40 = 32-byte MS shadow space + 8 pad, so %rsp is 16-byte aligned at
 * the call (on SysV entry, %rsp % 16 == 8).
 */
ENTRY(efi_call0)
	subq	$40, %rsp		/* shadow space + alignment pad */
	callq	*%rdi
	addq	$40, %rsp
	retq

/*
 * efi_call1(fp, a1) — invoke MS-ABI fp(a1).
 * In:  %rdi = target, %rsi = a1.
 */
ENTRY(efi_call1)
	subq	$40, %rsp		/* shadow space + alignment pad */
	movq	%rsi, %rcx		/* a1: ELF slot 2 -> MS slot 1 */
	callq	*%rdi
	addq	$40, %rsp
	retq

/*
 * efi_call2(fp, a1, a2) — invoke MS-ABI fp(a1, a2).
 * In:  %rdi = target, %rsi = a1, %rdx = a2.
 */
ENTRY(efi_call2)
	subq	$40, %rsp		/* shadow space + alignment pad */
	/* a2 already sits in %rdx for both ABIs */
	movq	%rsi, %rcx		/* a1 */
	callq	*%rdi
	addq	$40, %rsp
	retq

/*
 * efi_call3(fp, a1, a2, a3) — invoke MS-ABI fp(a1, a2, a3).
 * Shuffle runs last-arg-first so no source register is overwritten
 * before it is read.
 */
ENTRY(efi_call3)
	subq	$40, %rsp		/* shadow space + alignment pad */
	movq	%rcx, %r8		/* a3 */
	/* a2 already sits in %rdx */
	movq	%rsi, %rcx		/* a1 */
	callq	*%rdi
	addq	$40, %rsp
	retq

/*
 * efi_call4(fp, a1..a4) — invoke MS-ABI fp(a1, a2, a3, a4).
 * Shuffle runs last-arg-first to avoid clobbering.
 */
ENTRY(efi_call4)
	subq	$40, %rsp		/* shadow space + alignment pad */
	movq	%r8, %r9		/* a4 */
	movq	%rcx, %r8		/* a3 */
	/* a2 already sits in %rdx */
	movq	%rsi, %rcx		/* a1 */
	callq	*%rdi
	addq	$40, %rsp
	retq

/*
 * efi_call5(fp, a1..a5) — invoke MS-ABI fp(a1..a5).
 * a5 is the first MS stack argument, stored just above the 32-byte
 * shadow area at 32(%rsp).
 */
ENTRY(efi_call5)
	subq	$40, %rsp		/* shadow space + alignment pad */
	movq	%r9, 32(%rsp)		/* a5 -> first MS stack slot */
	movq	%r8, %r9		/* a4 */
	movq	%rcx, %r8		/* a3 */
	/* a2 already sits in %rdx */
	movq	%rsi, %rcx		/* a1 */
	callq	*%rdi
	addq	$40, %rsp
	retq

/*
 * efi_call6(fp, a1..a6) — invoke MS-ABI fp(a1..a6).
 * a6 arrives on our own stack at 8(%rsp) on entry, i.e. 56+8(%rsp)
 * after the frame is built; it is copied to MS slot 40(%rsp).
 * Frame: 32 shadow + 16 for slots 32/40 + 8 alignment pad = 56.
 */
ENTRY(efi_call6)
	subq	$56, %rsp
	movq	56+8(%rsp), %rax	/* a6 from caller's stack */
	movq	%rax, 40(%rsp)
	movq	%r9, 32(%rsp)		/* a5 */
	movq	%r8, %r9		/* a4 */
	movq	%rcx, %r8		/* a3 */
	/* a2 already sits in %rdx */
	movq	%rsi, %rcx		/* a1 */
	callq	*%rdi
	addq	$56, %rsp
	retq

/*
 * efi_call7(fp, a1..a7) — invoke MS-ABI fp(a1..a7).
 * a6/a7 come from our caller's stack (8/16(%rsp) on entry) and are
 * copied, highest first, into MS slots 40/48(%rsp).
 */
ENTRY(efi_call7)
	subq	$56, %rsp
	movq	56+16(%rsp), %rax	/* a7 */
	movq	%rax, 48(%rsp)
	movq	56+8(%rsp), %rax	/* a6 */
	movq	%rax, 40(%rsp)
	movq	%r9, 32(%rsp)		/* a5 */
	movq	%r8, %r9		/* a4 */
	movq	%rcx, %r8		/* a3 */
	/* a2 already sits in %rdx */
	movq	%rsi, %rcx		/* a1 */
	callq	*%rdi
	addq	$56, %rsp
	retq

/*
 * efi_call8(fp, a1..a8) — invoke MS-ABI fp(a1..a8).
 * Frame: 32 shadow + 32 for slots 32..56 + 8 alignment pad = 72.
 */
ENTRY(efi_call8)
	subq	$72, %rsp
	movq	72+24(%rsp), %rax	/* a8 */
	movq	%rax, 56(%rsp)
	movq	72+16(%rsp), %rax	/* a7 */
	movq	%rax, 48(%rsp)
	movq	72+8(%rsp), %rax	/* a6 */
	movq	%rax, 40(%rsp)
	movq	%r9, 32(%rsp)		/* a5 */
	movq	%r8, %r9		/* a4 */
	movq	%rcx, %r8		/* a3 */
	/* a2 already sits in %rdx */
	movq	%rsi, %rcx		/* a1 */
	callq	*%rdi
	addq	$72, %rsp
	retq

/*
 * efi_call9(fp, a1..a9) — invoke MS-ABI fp(a1..a9).
 * Stack args a6..a9 are copied highest-first into MS slots 40..64(%rsp).
 */
ENTRY(efi_call9)
	subq	$72, %rsp
	movq	72+32(%rsp), %rax	/* a9 */
	movq	%rax, 64(%rsp)
	movq	72+24(%rsp), %rax	/* a8 */
	movq	%rax, 56(%rsp)
	movq	72+16(%rsp), %rax	/* a7 */
	movq	%rax, 48(%rsp)
	movq	72+8(%rsp), %rax	/* a6 */
	movq	%rax, 40(%rsp)
	movq	%r9, 32(%rsp)		/* a5 */
	movq	%r8, %r9		/* a4 */
	movq	%rcx, %r8		/* a3 */
	/* a2 already sits in %rdx */
	movq	%rsi, %rcx		/* a1 */
	callq	*%rdi
	addq	$72, %rsp
	retq

/*
 * efi_call10(fp, a1..a10) — invoke MS-ABI fp(a1..a10).
 * Frame: 32 shadow + 48 for slots 32..72 + 8 alignment pad = 88.
 * Stack args a6..a10 are copied highest-first into slots 40..72(%rsp).
 */
ENTRY(efi_call10)
	subq	$88, %rsp
	movq	88+40(%rsp), %rax	/* a10 */
	movq	%rax, 72(%rsp)
	movq	88+32(%rsp), %rax	/* a9 */
	movq	%rax, 64(%rsp)
	movq	88+24(%rsp), %rax	/* a8 */
	movq	%rax, 56(%rsp)
	movq	88+16(%rsp), %rax	/* a7 */
	movq	%rax, 48(%rsp)
	movq	88+8(%rsp), %rax	/* a6 */
	movq	%rax, 40(%rsp)
	movq	%r9, 32(%rsp)		/* a5 */
	movq	%r8, %r9		/* a4 */
	movq	%rcx, %r8		/* a3 */
	/* a2 already sits in %rdx */
	movq	%rsi, %rcx		/* a1 */
	callq	*%rdi
	addq	$88, %rsp
	retq

#endif /* !HAVE_USE_MS_ABI */