blob: 790ee51ab4171f01646df708f7ed4786ac50e35e [file] [log] [blame]
# TITLE CpuAsm.asm:
#------------------------------------------------------------------------------
#*
#* Copyright 2008 - 2011, Intel Corporation
#* All rights reserved. This program and the accompanying materials
#* are licensed and made available under the terms and conditions of the BSD License
#* which accompanies this distribution. The full text of the license may be found at
#* http://opensource.org/licenses/bsd-license.php
#*
#* THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
#* WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#*
#* CpuAsm.S
#*
#* Abstract:
#*
#------------------------------------------------------------------------------
#text SEGMENT
#EXTRN ASM_PFX(mErrorCodeFlag):DWORD # Error code flags for exceptions
#
# point to the external interrupt vector table
#
# 8-byte storage slot for the pointer to the external interrupt vector
# table (an array of handler function pointers, indexed by vector number).
# Written by InitializeExternalVectorTablePtr; read by CommonInterruptEntry
# to dispatch registered handlers.
# NOTE(review): this mutable slot lives in the code section (see the
# "#text SEGMENT" marker above) — presumably acceptable in this firmware
# environment where .text is writable; confirm W^X expectations.
ExternalVectorTablePtr:
.byte 0, 0, 0, 0, 0, 0, 0, 0
.intel_syntax
#------------------------------------------------------------------------------
# VOID
# InitializeExternalVectorTablePtr (
#   VOID  **VectorTable   // RCX (first argument, Microsoft x64 / EFIAPI)
#   );
#
# Records the caller-supplied vector table pointer in the module-local
# ExternalVectorTablePtr slot for later use by CommonInterruptEntry.
# Clobbers: RAX (volatile under the x64 calling convention).
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(InitializeExternalVectorTablePtr)
ASM_PFX(InitializeExternalVectorTablePtr):
lea %rax, [%rip+ExternalVectorTablePtr] # RIP-relative address of the storage slot
mov [%rax], %rcx                        # store the vector table pointer (not a vector number)
ret
#------------------------------------------------------------------------------
# VOID
# SetCodeSelector (
# UINT16 Selector
# );
#------------------------------------------------------------------------------
.intel_syntax
#------------------------------------------------------------------------------
# VOID
# SetCodeSelector (
#   UINT16 Selector   // CX (first argument, Microsoft x64 / EFIAPI)
#   );
#
# Reloads CS with the given selector by building an m16:32 far pointer on
# the stack and performing a far jump through it.  The far jump is the only
# way to change CS outside of an interrupt return.
#
# Stack trick: the full 8-byte RAX (target RIP) is stored at [rsp], then
# the 2-byte selector is written at [rsp+4], overwriting bytes 4-5.  The
# resulting layout is a 32-bit offset at [rsp] followed by the selector at
# [rsp+4], exactly the fword (m16:32) operand `jmp fword ptr` expects.
# NOTE(review): this assumes the jump target (this code) resides below
# 4 GB, since only the low 32 bits of the address survive — TODO confirm
# the firmware guarantees that placement.
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(SetCodeSelector)
ASM_PFX(SetCodeSelector):
sub %rsp, 0x10                          # scratch space for the far pointer
lea %rax, [%rip+setCodeSelectorLongJump] # continuation address after CS reload
mov [%rsp], %rax                        # 32-bit offset (low half of RAX)
mov [%rsp+4], %cx                       # new CS selector, overlaying bytes 4-5
jmp fword ptr [%rsp]                    # far jump: loads CS:EIP from [rsp]
setCodeSelectorLongJump:
add %rsp, 0x10                          # release scratch space
ret
#------------------------------------------------------------------------------
# VOID
# SetDataSelectors (
# UINT16 Selector
# );
#------------------------------------------------------------------------------
.intel_syntax
#------------------------------------------------------------------------------
# VOID
# SetDataSelectors (
#   UINT16 Selector   // CX (first argument, Microsoft x64 / EFIAPI)
#   );
#
# Loads the same data-segment selector into SS, DS, ES, FS and GS.
# SS is loaded first: a MOV to SS inhibits interrupts for the following
# instruction, so the SS:RSP pair cannot be observed half-updated.
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(SetDataSelectors)
ASM_PFX(SetDataSelectors):
mov %ss, %cx
mov %ds, %cx
mov %es, %cx
mov %fs, %cx
mov %gs, %cx
ret
#---------------------------------------;
# CommonInterruptEntry ;
#---------------------------------------;
# The follow algorithm is used for the common interrupt routine.
.intel_syntax
#---------------------------------------;
# CommonInterruptEntry                  ;
#---------------------------------------;
# Common landing point for every interrupt/exception vector stub.  The
# per-vector stub does a near CALL here, leaving the address of its vector
# number on the stack.  This routine normalizes the stack (pushing a dummy
# error code for vectors that have none), builds a full
# EFI_SYSTEM_CONTEXT_X64 on the stack, dispatches to the handler registered
# in the external vector table (if any), then restores context and IRETQs.
#
# BUGFIX(review): the original code loaded the vector table pointer with
# `mov %eax, [%eax]`, which both truncates the RIP-relative address to
# 32 bits and fetches only the low 32 bits of the stored 64-bit pointer.
# Corrected to the 64-bit form `mov %rax, [%rax]`.
ASM_GLOBAL ASM_PFX(CommonInterruptEntry)
ASM_PFX(CommonInterruptEntry):
cli
#
# All interrupt handlers are invoked through interrupt gates, so
# IF flag automatically cleared at the entry point
#
#
# Calculate vector number
#
xchg %rcx, [%rsp] # get the return address of call, actually, it is the address of vector number.
movzx %ecx, word ptr [%rcx]
cmp %ecx, 32 # Intel reserved vector for exceptions?
jae NoErrorCode
push %rax
lea %rax, [%rip+ASM_PFX(mErrorCodeFlag)]
bt dword ptr [%rax], %ecx       # bit N set => vector N pushes a real error code
pop %rax
jc CommonInterruptEntry_al_0000
NoErrorCode:
#
# Push a dummy error code on the stack
# to maintain coherent stack map
#
push [%rsp]                     # duplicate the saved RIP downward...
mov qword ptr [%rsp + 8], 0     # ...and zero the vacated slot as the dummy error code
CommonInterruptEntry_al_0000:
push %rbp
mov %rbp, %rsp
#
# Stack:
# +---------------------+ <-- 16-byte aligned ensured by processor
# + Old SS             +
# +---------------------+
# + Old RSP            +
# +---------------------+
# + RFlags             +
# +---------------------+
# + CS                 +
# +---------------------+
# + RIP                +
# +---------------------+
# + Error Code         +
# +---------------------+
# + RCX / Vector Number+
# +---------------------+
# + RBP                +
# +---------------------+ <-- RBP, 16-byte aligned
#
#
# Since here the stack pointer is 16-byte aligned, so
# EFI_FX_SAVE_STATE_X64 of EFI_SYSTEM_CONTEXT_x64
# is 16-byte aligned
#
#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
push %r15
push %r14
push %r13
push %r12
push %r11
push %r10
push %r9
push %r8
push %rax
push qword ptr [%rbp + 8]   # RCX (original value, saved by the xchg at entry)
push %rdx
push %rbx
push qword ptr [%rbp + 48]  # RSP (interrupted context's stack pointer)
push qword ptr [%rbp]       # RBP (interrupted context's frame pointer)
push %rsi
push %rdi
#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;  insure high 16 bits of each is zero
movzx %rax, word ptr [%rbp + 56]
push %rax                      # for ss
movzx %rax, word ptr [%rbp + 32]
push %rax                      # for cs
mov %rax, %ds
push %rax
mov %rax, %es
push %rax
mov %rax, %fs
push %rax
mov %rax, %gs
push %rax
mov [%rbp + 8], %rcx           # save vector number over the now-captured RCX slot
#; UINT64  Rip;
push qword ptr [%rbp + 24]
#; UINT64  Gdtr[2], Idtr[2];
# sidt/sgdt store 10 bytes (2-byte limit + 8-byte base); the xchg triple
# repacks them into two naturally-aligned UINT64 fields.
xor %rax, %rax
push %rax
push %rax
sidt [%rsp]
xchg %rax, [%rsp + 2]
xchg %rax, [%rsp]
xchg %rax, [%rsp + 8]
xor %rax, %rax
push %rax
push %rax
sgdt [%rsp]
xchg %rax, [%rsp + 2]
xchg %rax, [%rsp]
xchg %rax, [%rsp + 8]
#; UINT64  Ldtr, Tr;
xor %rax, %rax
str %ax
push %rax
sldt %ax
push %rax
#; UINT64  RFlags;
push qword ptr [%rbp + 40]
#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
mov %rax, %cr8
push %rax
mov %rax, %cr4
or %rax, 0x208                 # ensure CR4.OSFXSR|OSXMMEXCPT so fxsave below is valid
mov %cr4, %rax
push %rax
mov %rax, %cr3
push %rax
mov %rax, %cr2
push %rax
xor %rax, %rax                 # CR1 does not exist; record 0
push %rax
mov %rax, %cr0
push %rax
#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
mov %rax, %dr7
push %rax
#; clear Dr7 while executing debugger itself
xor %rax, %rax
mov %dr7, %rax
mov %rax, %dr6
push %rax
#; insure all status bits in dr6 are clear...
xor %rax, %rax
mov %dr6, %rax
mov %rax, %dr3
push %rax
mov %rax, %dr2
push %rax
mov %rax, %dr1
push %rax
mov %rax, %dr0
push %rax
#; FX_SAVE_STATE_X64 FxSaveState;
sub %rsp, 512
mov %rdi, %rsp
.byte 0x0f, 0x0ae, 0x07 #fxsave [rdi]
#; UEFI calling convention for x64 requires that Direction flag in EFLAGs is clear
cld
#; UINT32  ExceptionData;
push qword ptr [%rbp + 16]
#; call into exception handler
mov %rcx, [%rbp + 8]           # RCX = vector number (first argument)
lea %rax, [%rip+ExternalVectorTablePtr]
mov %rax, [%rax]               # load the 64-bit table base pointer (was a 32-bit mov: bug)
mov %rax, [%rax + %rcx * 8]    # fetch this vector's registered handler
or %rax, %rax                  # NULL?
je nonNullValue#
#; Prepare parameter and call
#  mov  rcx, [rbp + 8]
mov %rdx, %rsp                 # RDX = &SystemContext (second argument)
#
# Per X64 calling convention, allocate maximum parameter stack space
# and make sure RSP is 16-byte aligned
#
sub %rsp, 4 * 8 + 8
call %rax
add %rsp, 4 * 8 + 8
nonNullValue:
cli
#; UINT64  ExceptionData;
add %rsp, 8
#; FX_SAVE_STATE_X64 FxSaveState;
mov %rsi, %rsp
.byte 0x0f, 0x0ae, 0x0E # fxrstor [rsi]
add %rsp, 512
#; UINT64  Dr0, Dr1, Dr2, Dr3, Dr6, Dr7;
pop %rax
mov %dr0, %rax
pop %rax
mov %dr1, %rax
pop %rax
mov %dr2, %rax
pop %rax
mov %dr3, %rax
#; skip restore of dr6.  We cleared dr6 during the context save.
add %rsp, 8
pop %rax
mov %dr7, %rax
#; UINT64  Cr0, Cr1, Cr2, Cr3, Cr4, Cr8;
pop %rax
mov %cr0, %rax
add %rsp, 8   # not for Cr1
pop %rax
mov %cr2, %rax
pop %rax
mov %cr3, %rax
pop %rax
mov %cr4, %rax
pop %rax
mov %cr8, %rax
#; UINT64  RFlags;
pop qword ptr [%rbp + 40]      # write back into the iretq frame so handler edits take effect
#; UINT64  Ldtr, Tr;
#; UINT64  Gdtr[2], Idtr[2];
#; Best not let anyone mess with these particular registers...
add %rsp, 48
#; UINT64  Rip;
pop qword ptr [%rbp + 24]      # write back into the iretq frame
#; UINT64  Gs, Fs, Es, Ds, Cs, Ss;
pop %rax
# mov gs, rax ; not for gs
pop %rax
# mov fs, rax ; not for fs
# (X64 will not use fs and gs, so we do not restore it)
pop %rax
mov %es, %rax
pop %rax
mov %ds, %rax
pop qword ptr [%rbp + 32]  # for cs
pop qword ptr [%rbp + 56]  # for ss
#; UINT64  Rdi, Rsi, Rbp, Rsp, Rbx, Rdx, Rcx, Rax;
#; UINT64  R8, R9, R10, R11, R12, R13, R14, R15;
pop %rdi
pop %rsi
add %rsp, 8                # not for rbp
pop qword ptr [%rbp + 48]  # for rsp
pop %rbx
pop %rdx
pop %rcx
pop %rax
pop %r8
pop %r9
pop %r10
pop %r11
pop %r12
pop %r13
pop %r14
pop %r15
mov %rsp, %rbp
pop %rbp
add %rsp, 16               # discard RCX/vector-number slot and error code
iretq
#text ENDS
#END