/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2007 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

#pragma ident	"%Z%%M%	%I%	%E% SMI"

#if defined(__lint)

int silence_lint = 0;

#else

#include <sys/segments.h>
#include <sys/controlregs.h>

/*
 * Do a call into BIOS.  This goes down to 16 bit real mode and back again.
 */

/*
 * DATASZ is the operand size override prefix (0x66). Prefixing an
 * instruction with it flips that one instruction's operand size
 * between 16 bit and 32 bit.
 */
#define DATASZ	.byte 0x66;

#if defined(__amd64)
#define	MOVCR(x, y)	movq  x,%rax; movq  %rax, y
#define LOAD_XAX(sym)	leaq	sym, %rax
#elif defined(__i386)
#define	MOVCR(x, y)	movl  x,%eax; movl  %eax, y
#define LOAD_XAX(sym)	leal	sym, %eax
#endif

	.globl	_start
_start:

#if defined(__i386)

	/*
	 * Save caller registers
	 */
	movl	%ebp, save_ebp
	movl	%esp, save_esp
	movl	%ebx, save_ebx
	movl	%esi, save_esi
	movl	%edi, save_edi

	/* get registers argument into esi */
	movl	8(%esp), %esi

	/* put interrupt number in %bl */
	movl	4(%esp), %ebx

	/* Switch to a low memory stack */
	movl	$_start, %esp
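	/*
	 * Note: the stack now grows downward from _start, i.e. into the
	 * memory just below wherever this code was copied, which is
	 * presumably free low memory set aside by the caller.
	 */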

	/* allocate space for args on stack */
	subl	$18, %esp
	movl	%esp, %edi

#elif defined(__amd64)

	/*
	 * Save caller registers
	 */
	movq	%rbp, save_rbp
	movq	%rsp, save_rsp
	movq	%rbx, save_rbx
	movq	%rsi, save_rsi
	movq	%r12, save_r12
	movq	%r13, save_r13
	movq	%r14, save_r14
	movq	%r15, save_r15

	/* Switch to a low memory stack */
	movq	$_start, %rsp

	/* put interrupt number in %bl */
	movq	%rdi, %rbx

	/* allocate space for args on stack */
	subq	$18, %rsp
	movq	%rsp, %rdi

#endif

	/* copy args from high memory to stack in low memory */
	cld
	movl	$18, %ecx
	rep
	movsb
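	/*
	 * The 18 byte argument block holds nine 16 bit values; judging
	 * from the pops before the int instruction below, these are the
	 * real mode values for %ax, %bx, %cx, %dx, %si, %di, %bp, %es
	 * and %ds.
	 */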

	/*
	 * Save system registers
	 */
	sidt	save_idt
	sgdt	save_gdt
	str	save_tr
	movw	%cs, save_cs
	movw	%ds, save_ds
	movw	%ss, save_ss
	movw	%es, save_es
	movw	%fs, save_fs
	movw	%gs, save_gs
	MOVCR(	%cr4, save_cr4)
	MOVCR(	%cr3, save_cr3)
	MOVCR(	%cr0, save_cr0)

#if defined(__amd64)
	/*
	 * save/clear the extension parts of the fs/gs base registers and cr8
	 */
	movl	$MSR_AMD_FSBASE, %ecx
	rdmsr
	movl	%eax, save_fsbase
	movl	%edx, save_fsbase + 4
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr

	movl	$MSR_AMD_GSBASE, %ecx
	rdmsr
	movl	%eax, save_gsbase
	movl	%edx, save_gsbase + 4
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr

	movl	$MSR_AMD_KGSBASE, %ecx
	rdmsr
	movl	%eax, save_kgsbase
	movl	%edx, save_kgsbase + 4
	xorl	%eax, %eax
	xorl	%edx, %edx
	wrmsr

	movq	%cr8, %rax
	movq	%rax, save_cr8
#endif

	/*
	 * set offsets in 16 bit ljmp instructions below
	 */
	LOAD_XAX(enter_real)
	movw	%ax, enter_real_ljmp

	LOAD_XAX(enter_protected)
	movw	%ax, enter_protected_ljmp

	LOAD_XAX(gdt_info)
	movw	%ax, gdt_info_load
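
	/*
	 * The 16 bit ljmp and lgdt instructions further down are hand
	 * assembled and need absolute 16 bit addresses in their operand
	 * fields. Those addresses are only known at run time (this blob
	 * is presumably copied to low memory by its caller), so they
	 * were just patched in above.
	 */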

	/*
	 * insert BIOS interrupt number into later instruction
	 */
	movb    %bl, int_instr+1
	jmp     1f
1:
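	/*
	 * The jump above presumably serializes the instruction stream so
	 * the CPU does not execute a stale, prefetched copy of the
	 * just-patched int instruction.
	 */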

	/*
	 * zero out all the registers to make sure they're 16 bit clean
	 */
#if defined(__amd64)
	xorq	%r8, %r8
	xorq	%r9, %r9
	xorq	%r10, %r10
	xorq	%r11, %r11
	xorq	%r12, %r12
	xorq	%r13, %r13
	xorq	%r14, %r14
	xorq	%r15, %r15
#endif
	xorl	%eax, %eax
	xorl	%ebx, %ebx
	xorl	%ecx, %ecx
	xorl	%edx, %edx
	xorl	%ebp, %ebp
	xorl	%esi, %esi
	xorl	%edi, %edi

	/*
	 * Load our own GDT/IDT
	 */
	lgdt	gdt_info
	lidt	idt_info

#if defined(__amd64)
	/*
	 * Shut down 64 bit mode. First get into compatibility mode.
	 */
	movq	%rsp, %rax
	pushq	$B32DATA_SEL
	pushq	%rax
	pushf
	pushq	$B32CODE_SEL
	pushq	$1f
	iretq
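	/*
	 * iretq pops %rip, %cs, %rflags, %rsp and %ss, so the five pushes
	 * above land us at label 1 below with a 32 bit code selector and
	 * the same stack pointer.
	 */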
1:
	.code32

	/*
	 * disable long mode by:
	 * - shutting down paging (bit 31 of cr0)
	 * - flushing the TLB
	 * - disabling LME (long mode enable) in EFER (extended feature reg)
	 */
	movl	%cr0, %eax
	btcl	$31, %eax		/* disable paging */
	movl	%eax, %cr0
	ljmp	$B32CODE_SEL, $1f
1:

	xorl	%eax, %eax
	movl	%eax, %cr3		/* flushes TLB */

	movl	$MSR_AMD_EFER, %ecx	/* Extended Feature Enable */
	rdmsr
	btcl	$8, %eax		/* bit 8 Long Mode Enable bit */
	wrmsr
#endif

	/*
	 * OK, now enter 16 bit mode, so we can shut down protected mode.
	 *
	 * We'll have to act like we're still in a 32 bit section.
	 * So the code from this point has DATASZ in front of it to get 32 bit
	 * operands. If DATASZ is missing the operands will be 16 bit.
	 *
	 * Now shut down paging and protected (i.e. segmentation) modes.
	 */
	ljmp	$B16CODE_SEL, $enter_16_bit
enter_16_bit:

	/*
	 * Make sure hidden parts of segment registers are 16 bit clean
	 */
	DATASZ	movl	$B16DATA_SEL, %eax
		movw    %ax, %ss
		movw    %ax, %ds
		movw    %ax, %es
		movw    %ax, %fs
		movw    %ax, %gs


	DATASZ	movl	$0x0, %eax	/* put us in real mode */
	DATASZ	movl	%eax, %cr0
	.byte	0xea			/* ljmp */
enter_real_ljmp:
	.value	0			/* addr (16 bit) */
	.value	0x0			/* value for %cs */
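	/*
	 * The ljmp above is hand encoded (opcode 0xea, then a 16 bit
	 * offset and a 16 bit segment) because its offset, stored at
	 * enter_real_ljmp, is only known at run time and was patched in
	 * earlier.
	 */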
enter_real:

	/*
	 * zero out the remaining segment registers
	 */
	DATASZ	xorl	%eax, %eax
		movw    %ax, %ss
		movw    %ax, %ds
		movw    %ax, %es
		movw    %ax, %fs
		movw    %ax, %gs

	/*
	 * load the arguments to the BIOS call from the stack
	 */
	popl	%eax	/* really executes a 16 bit pop */
	popl	%ebx
	popl	%ecx
	popl	%edx
	popl	%esi
	popl	%edi
	popl	%ebp
	pop	%es
	pop	%ds

	/*
	 * do the actual BIOS call
	 */
	sti
int_instr:
	int	$0x10		/* this int number is overwritten */
	cli			/* ensure interrupts remain disabled */

	/*
	 * save results of the BIOS call
	 */
	pushf
	push	%ds
	push	%es
	pushl	%ebp		/* still executes as 16 bit */
	pushl	%edi
	pushl	%esi
	pushl	%edx
	pushl	%ecx
	pushl	%ebx
	pushl	%eax

	/*
	 * Restore protected mode and 32 bit execution
	 */
	push	$0			/* make sure %ds is zero before lgdt */
	pop	%ds
	.byte	0x0f, 0x01, 0x16	/* lgdt */
gdt_info_load:
	.value	0	/* temp GDT in currently addressable mem */
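	/*
	 * The lgdt above is hand encoded: 0x0f 0x01 is the lgdt/lidt
	 * opcode group and modrm byte 0x16 selects lgdt with a 16 bit
	 * absolute address operand, namely the .value patched in at
	 * gdt_info_load.
	 */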

	DATASZ	movl	$0x1, %eax
	DATASZ	movl	%eax, %cr0

	.byte	0xea			/* ljmp */
enter_protected_ljmp:
	.value	0			/* addr (still in 16 bit) */
	.value	B32CODE_SEL		/* %cs value */
enter_protected:

	/*
	 * We are now back in a 32 bit code section, fix data/stack segments
	 */
	.code32
	movw	$B32DATA_SEL, %ax
	movw	%ax, %ds
	movw	%ax, %ss

	/*
	 * Re-enable paging. Note we only use 32 bit mov's to restore these
	 * control registers. That's OK as the upper 32 bits are always zero.
	 */
	movl	save_cr4, %eax
	movl	%eax, %cr4
	movl	save_cr3, %eax
	movl	%eax, %cr3

#if defined(__amd64)
	/*
	 * re-enable long mode
	 */
	movl	$MSR_AMD_EFER, %ecx
	rdmsr
	btsl	$8, %eax
	wrmsr
#endif

	movl	save_cr0, %eax
	movl	%eax, %cr0
	jmp	enter_paging
enter_paging:
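	/*
	 * The jump above presumably serves as the serializing branch
	 * wanted right after paging is switched back on, so no stale
	 * prefetched instructions get executed.
	 */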


#if defined(__amd64)
	/*
	 * transition back to 64 bit mode
	 */
	pushl	$B64CODE_SEL
	pushl	$longmode
	lret
longmode:
	.code64
#endif
	/*
	 * restore the caller's GDT, IDT, task and segment registers
	 */
	lgdt	save_gdt
	lidt	save_idt

	/*
	 * Before loading the task register we need to reset the busy bit
	 * in its corresponding GDT descriptor. The busy bit is the 2nd bit in
	 * the 5th byte of the descriptor.
	 */
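	/*
	 * save_gdt holds the sgdt image: a 16 bit limit followed at
	 * offset 2 by the linear base address, so save_gdt+2 plus the
	 * saved TR selector gives the address of the TSS descriptor.
	 */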
#if defined(__i386)
	movzwl	save_tr, %eax
	addl	save_gdt+2, %eax
	btcl	$1, 5(%eax)
#elif defined(__amd64)
	movzwq	save_tr, %rax
	addq	save_gdt+2, %rax
	btcl	$1, 5(%rax)
#endif
	ltr	save_tr
	movw	save_ds, %ds
	movw	save_ss, %ss
	movw	save_es, %es
	movw	save_fs, %fs
	movw	save_gs, %gs

#if defined(__i386)
	pushl	save_cs
	pushl	$.newcs
	lret
#elif defined(__amd64)
	pushq	save_cs
	pushq	$.newcs
	lretq
#endif
.newcs:

#if defined(__amd64)
	/*
	 * restore the hidden kernel segment base register values
	 */
	movl	save_fsbase, %eax
	movl	save_fsbase + 4, %edx
	movl	$MSR_AMD_FSBASE, %ecx
	wrmsr

	movl	save_gsbase, %eax
	movl	save_gsbase + 4, %edx
	movl	$MSR_AMD_GSBASE, %ecx
	wrmsr

	movl	save_kgsbase, %eax
	movl	save_kgsbase + 4, %edx
	movl	$MSR_AMD_KGSBASE, %ecx
	wrmsr

	movq	save_cr8, %rax
	cmpq	$0, %rax
	je	1f
	movq	%rax, %cr8
1:
#endif

	/*
	 * copy results to caller's location, then restore remaining registers
	 */
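	/*
	 * Note that the value returned in %eax/%rax appears to be the
	 * 16 bit flags image that was pushed right after the int
	 * instruction, so the caller can inspect the BIOS carry flag.
	 */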
#if defined(__i386)
	movl    save_esp, %edi
	movl	8(%edi), %edi
	movl	%esp, %esi
	movl	$18, %ecx
	rep
	movsb
	movw	18(%esp), %ax
	andl	$0xffff, %eax
	movl    save_ebx, %ebx
	movl    save_esi, %esi
	movl    save_edi, %edi
	movl    save_esp, %esp
	movl    save_ebp, %ebp
	movl    save_esp, %esp
	ret

#elif defined(__amd64)
	movq    save_rsi, %rdi
	movq	%rsp, %rsi
	movq	$18, %rcx
	rep
	movsb
	movw	18(%rsp), %ax
	andq	$0xffff, %rax
	movq    save_r12, %r12
	movq    save_r13, %r13
	movq    save_r14, %r14
	movq    save_r15, %r15
	movq    save_rbx, %rbx
	movq    save_rbp, %rbp
	movq    save_rsp, %rsp
	ret

#endif


/*
 * Caller's registers to restore
 */
	.align 4
save_esi:
	.long	0
save_edi:
	.long	0
save_ebx:
	.long	0
save_ebp:
	.long	0
save_esp:
	.long	0

	.align 8
#if defined(__amd64)
save_rsi:
	.quad	0
save_rbx:
	.quad	0
save_rbp:
	.quad	0
save_rsp:
	.quad	0
save_r12:
	.quad	0
save_r13:
	.quad	0
save_r14:
	.quad	0
save_r15:
	.quad	0
save_kgsbase:
	.quad	0
save_gsbase:
	.quad	0
save_fsbase:
	.quad	0
save_cr8:
	.quad	0
#endif	/* __amd64 */

save_idt:
	.quad	0
	.quad	0

save_gdt:
	.quad	0
	.quad	0

save_cr0:
	.quad	0
save_cr3:
	.quad	0
save_cr4:
	.quad	0
save_cs:
	.quad	0
save_ss:
	.value	0
save_ds:
	.value	0
save_es:
	.value	0
save_fs:
	.value	0
save_gs:
	.value	0
save_tr:
	.value	0

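/*
 * idt_info below describes the real mode interrupt vector table:
 * limit 0x3ff (256 four byte vectors) with a base of 0.
 */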
idt_info:
	.value 0x3ff
	.quad 0


/*
 * We need to trampoline thru a gdt we have in low memory.
 */
#include "../boot/boot_gdt.s"
#endif /* __lint */