Improvements for Apple?

This commit is contained in:
Vladimir 'phcoder' Serbinenko 2009-08-05 19:58:05 +02:00
parent 5c29f4d962
commit b131c45455
4 changed files with 87 additions and 66 deletions

View file

@ -0,0 +1,30 @@
/* memory.h - describe the memory map */
/*
* GRUB -- GRand Unified Bootloader
* Copyright (C) 2002,2007,2008 Free Software Foundation, Inc.
*
* GRUB is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* GRUB is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with GRUB. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef GRUB_MEMORY_CPU_HEADER
#define GRUB_MEMORY_CPU_HEADER 1
/* The flag for protected mode: CR0.PE, bit 0. */
#define GRUB_MEMORY_CPU_CR0_PE_ON 0x1
/* Physical Address Extension: CR4.PAE is bit 5 (0x20), not 0x40
(0x40 is CR4.MCE, Machine Check Enable). */
#define GRUB_MEMORY_CPU_CR4_PAE_ON 0x00000020
/* Paging enable: CR0.PG, bit 31. */
#define GRUB_MEMORY_CPU_CR0_PAGING_ON 0x80000000
/* The EFER (Extended Feature Enable Register) MSR number. */
#define GRUB_MEMORY_CPU_AMD64_MSR 0xc0000080
/* EFER.LME (Long Mode Enable), bit 8. */
#define GRUB_MEMORY_CPU_AMD64_MSR_ON 0x00000100
#endif /* ! GRUB_MEMORY_CPU_HEADER */

View file

@ -65,6 +65,18 @@
/* The address where another boot loader is loaded. */
#define GRUB_MEMORY_MACHINE_BOOT_LOADER_ADDR 0x7c00
/* The code segment of the protected mode. */
#define GRUB_MEMORY_MACHINE_PROT_MODE_CSEG 0x8
/* The data segment of the protected mode. */
#define GRUB_MEMORY_MACHINE_PROT_MODE_DSEG 0x10
/* The code segment of the pseudo real mode. */
#define GRUB_MEMORY_MACHINE_PSEUDO_REAL_CSEG 0x18
/* The data segment of the pseudo real mode. */
#define GRUB_MEMORY_MACHINE_PSEUDO_REAL_DSEG 0x20
#ifndef ASM_FILE
struct grub_machine_mmap_entry

View file

@ -127,7 +127,7 @@ real_to_prot:
/* turn on protected mode */
movl %cr0, %eax
orl $GRUB_MEMORY_MACHINE_CR0_PE_ON, %eax
orl $GRUB_MEMORY_CPU_CR0_PE_ON, %eax
movl %eax, %cr0
/* jump to relocation, flush prefetch queue, and reload %cs */
@ -196,7 +196,7 @@ tmpcseg:
/* clear the PE bit of CR0 */
movl %cr0, %eax
andl $(~GRUB_MEMORY_MACHINE_CR0_PE_ON), %eax
andl $(~GRUB_MEMORY_CPU_CR0_PE_ON), %eax
movl %eax, %cr0
/* flush prefetch queue, reload %cs */

View file

@ -27,30 +27,28 @@
#ifdef __x86_64__
#define RAX %rax
#define RCX %rcx
#define RDX %rdx
#define RDI %rdi
#define RSI %rdi
#else
#define RAX %eax
#define RCX %ecx
#define RDX %edx
#define RDI %edi
#define RSI %esi
#endif
/* Apple's linker has a problem with 64-bit relocations. */
#if defined (__apple__) || ! defined (__x86_64__)
#define RSIA %esi
#define RAXA %eax
#else
#define RSIA %rsi
#define RAXA %rax
#endif
/* The code segment of the protected mode. */
#define CODE_SEGMENT 0x10
/* The data segment of the protected mode. */
#define DATA_SEGMENT 0x18
.p2align 4 /* force 16-byte alignment */
RELOCATOR_VARIABLE(start)
#ifdef BACKWARD
base:
L_base:
#endif
cli
@ -105,14 +103,14 @@ RELOCATOR_VARIABLE(size)
#ifndef BACKWARD
add RCX, RAX
#endif
addq $0x3, RCX
shrq $2, RCX
add $0x3, RCX
shr $2, RCX
#ifdef BACKWARD
/* Backward movsl is implicitly off-by-four. compensate that. */
subq $4, RSI
subq $4, RDI
sub $4, RSI
sub $4, RDI
/* Backward copy. */
std
@ -129,27 +127,27 @@ RELOCATOR_VARIABLE(size)
/* %rax contains now our new 'base'. */
mov RAX, RSI
add $(cont0 - base), RAXA
add $(L_cont0 - L_base), RAX
jmp *RAX
cont0:
lea (cont1 - base) (RSIA, 1), RAXA
movl %eax, (jump_vector - base) (RSIA, 1)
L_cont0:
lea (L_cont1 - L_base) (RSI, 1), RAX
movl %eax, (L_jump_vector - L_base) (RSI, 1)
lea (gdt - base) (RSIA, 1), RAXA
mov RAXA, (gdt_addr - base) (RSIA, 1)
lea (L_gdt - L_base) (RSI, 1), RAX
mov RAX, (L_gdt_addr - L_base) (RSI, 1)
/* Switch to compatibility mode. */
lgdt (gdtdesc - base) (RSIA, 1)
lgdt (L_gdtdesc - L_base) (RSI, 1)
/* Update %cs. Thanks to David Miller for pointing this mistake out. */
ljmp *(jump_vector - base) (RSIA, 1)
ljmp *(L_jump_vector - L_base) (RSI, 1)
cont1:
L_cont1:
.code32
/* Update other registers. */
movl $GRUB_MEMORY_MACHINE_PROT_MODE_DSEG, %eax
movl $DATA_SEGMENT, %eax
movl %eax, %ds
movl %eax, %es
movl %eax, %fs
@ -158,22 +156,22 @@ cont1:
/* Disable paging. */
movl %cr0, %eax
andl $0x7fffffff, %eax
andl $(~GRUB_MEMORY_CPU_CR0_PAGING_ON), %eax
movl %eax, %cr0
/* Disable amd64. */
movl $0xc0000080, %ecx
movl $GRUB_MEMORY_CPU_AMD64_MSR, %ecx
rdmsr
andl $0xfffffeff, %eax
andl $(~GRUB_MEMORY_CPU_AMD64_MSR_ON), %eax
wrmsr
/* Turn off PAE. */
movl %cr4, %eax
andl $0xffffffcf, %eax
andl $GRUB_MEMORY_CPU_CR4_PAE_ON, %eax
movl %eax, %cr4
jmp cont2
cont2:
jmp L_cont2
L_cont2:
.code32
/* mov imm32, %eax */
@ -212,44 +210,25 @@ RELOCATOR_VARIABLE (eip)
.long 0
.word 0x08
/* GDT. The same as is used in 32-bit GRUB. */
/* GDT. Copied from loader/i386/linux.c. */
.p2align 4
gdt:
.word 0, 0
.byte 0, 0, 0, 0
L_gdt:
/* NULL. */
.byte 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
/* -- code segment --
* base = 0x00000000, limit = 0xFFFFF (4 KiB Granularity), present
* type = 32bit code execute/read, DPL = 0
*/
.word 0xFFFF, 0
.byte 0, 0x9A, 0xCF, 0
/* Reserved. */
.byte 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
/* -- data segment --
* base = 0x00000000, limit 0xFFFFF (4 KiB Granularity), present
* type = 32 bit data read/write, DPL = 0
*/
.word 0xFFFF, 0
.byte 0, 0x92, 0xCF, 0
/* Code segment. */
.byte 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x9A, 0xCF, 0x00
/* -- 16 bit real mode CS --
* base = 0x00000000, limit 0x0FFFF (1 B Granularity), present
* type = 16 bit code execute/read only/conforming, DPL = 0
*/
.word 0xFFFF, 0
.byte 0, 0x9E, 0, 0
/* -- 16 bit real mode DS --
* base = 0x00000000, limit 0x0FFFF (1 B Granularity), present
* type = 16 bit data read/write, DPL = 0
*/
.word 0xFFFF, 0
.byte 0, 0x92, 0, 0
/* Data segment. */
.byte 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x92, 0xCF, 0x00
.p2align 4
gdtdesc:
L_gdtdesc:
.word 0x27
gdt_addr:
L_gdt_addr:
#ifdef __x86_64__
/* Filled by the code. */
.quad 0
@ -259,13 +238,13 @@ gdt_addr:
#endif
.p2align 4
jump_vector:
L_jump_vector:
/* Jump location. Is filled by the code */
.long 0
.long GRUB_MEMORY_MACHINE_PROT_MODE_CSEG
.long CODE_SEGMENT
#ifndef BACKWARD
base:
L_base:
#endif
RELOCATOR_VARIABLE(end)