.code64

.globl bios.regs
bios.regs:
reax:	.int 0x0
rebx:	.int 0x0
recx:	.int 0x0
redx:	.int 0x0
redi:	.int 0x0
resi:	.int 0x0
res:	.short 0x0
prev_idt:	.quad 0x0
prev_gdt:	.quad 0x0
		.short 0x0	# :sgdt/sidt in 64-bit mode store 2+8 = 10 bytes, hence the extra word

# :bios.call
# This function is intended to be called from long mode
# and calls another function in real mode. Note that precautions have
# to be taken. Recall that real mode can only access memory below 0xFFFFF.
# So the target function, and any objects its arguments point to, cannot
# be located above that. A reasonable scheme is to put the result
# in a low buffer and then, once back in long mode, copy its contents
# higher up.
# The procedure to go from long mode back to real mode is explained in much
# detail in the AMD64 Architecture Programmer's Manual Vol. 2, in particular
# Figure 1-6 in Section 1.3.
.globl bios.call
bios.call:
	xchg %bx, %bx			# Bochs magic breakpoint
	push %rbx
	push %r12
	push %r13
	push %r14
	push %r15
	pushf

	# :Save the code segment (and a second push to prepare for the far return)
	mov %cs, %ax
	push %ax
	push %ax

	# :Save the data segment
	mov %ds, %ax
	push %ax

	# :Save the interrupt number
	push %di

	cli
	#sidt (prev_idt)
	sgdt (prev_gdt)
	lgdt (gdtr32)
	pushq $(gdt32_code - gdt32)
	pushq $real_call_to_pmode_down
	retfq

.code32
real_call_to_pmode_down:
	# :Here we are in compatibility mode (32-bit)

	# :Disable paging
	mov %cr0, %eax
	and $~(1 << 31), %eax
	mov %eax, %cr0

	# :Disable long mode in the EFER MSR (bit 8 = LME)
	mov $0xc0000080, %ecx
	rdmsr
	and $~(1 << 8), %eax
	wrmsr

	# :Here we are in true protected mode (32-bit).
	# Let's continue our descent to real mode
	# by switching our GDT to a 16-bit one.
	lgdt gdtr16
	ljmp $(gdt16_code - gdt16), $real_call_to_16bits_pmode_down

.code16
real_call_to_16bits_pmode_down:
	# :Disable protected mode
	mov %cr0, %eax
	and $~1, %eax
	mov %eax, %cr0
	ljmp $0, $real_call_to_16bits_rmode_down

real_call_to_16bits_rmode_down:
	mov $0, %ax
	mov %ax, %ds
	mov %ax, %es
	mov %ax, %ss
	mov %ax, %gs
	mov %ax, %fs
	lidt bios_idtr			# Real-mode IVT descriptor (defined elsewhere)

	# :Self-modifying code to call an arbitrary interrupt:
	# patch the imm8 operand of the int instruction below
	pop %ax
	mov $real_call_int, %bx
	mov %al, 1(%bx)

	# :Load registers
	mov (reax), %eax
	mov (rebx), %ebx
	mov (recx), %ecx
	mov (redx), %edx
	mov (redi), %edi
	mov (resi), %esi
	mov (res), %es

	sti
real_call_int:
	int $0x0
	cli
	xchg %bx, %bx			# Bochs magic breakpoint
	mov %es, (res)
	mov %eax, (reax)
	mov %ebx, (rebx)
	mov %ecx, (recx)
	mov %edx, (redx)
	mov %edi, (redi)
	mov %esi, (resi)

	# :Restore protected mode
	mov %cr0, %eax
	or $1, %eax
	mov %eax, %cr0
	lgdt gdtr32
	ljmp $(gdt32_code - gdt32), $real_call_to_pmode_up

.code32
real_call_to_pmode_up:
	# :Restore PAE (probably unneeded)
	mov %cr4, %eax
	or $(1 << 5), %eax
	mov %eax, %cr4

	# :Restore long mode (set LME again in EFER)
	mov $0xc0000080, %ecx
	rdmsr
	or $(1 << 8), %eax
	wrmsr

	# :Restore paging
	mov %cr0, %eax
	or $(1 << 31), %eax
	mov %eax, %cr0

	# :At this point the saved %ds is on top of the stack
	pop %ax

	# :The saved %cs is next on the stack:
	# push the target and do a far return to it
	push $real_call_to_longmode_up
	lgdt (prev_gdt)
	retf

.code64
real_call_to_longmode_up:
	mov %ax, %fs
	mov %ax, %gs
	mov %ax, %ss
	mov %ax, %es
	mov %ax, %ds

	# :Avoid doing this until the bootloader loads a 64-bit IDT
	# XXX
	# lidt (prev_idt)
real_call_end:
	popf
	pop %r15
	pop %r14
	pop %r13
	pop %r12
	pop %rbx
	cli
	ret
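
# :Usage sketch (hypothetical, added for illustration; bios.call_example is not
# part of the original interface). Assuming the caller, its stack and this file
# all live below 0xFFFFF as required above, printing 'A' via INT 0x10 / AH=0x0E
# (BIOS teletype output) could look roughly like this:
bios.call_example:
	movl	$0x0e41, (reax)		# AH = 0x0E (teletype output), AL = 'A'
	movl	$0x0007, (rebx)		# BH = page 0, BL = attribute 7
	mov	$0x10, %di		# interrupt vector number, consumed by bios.call
	call	bios.call
	ret				# results can be read back from bios.regs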