.code32
.text
#define _pa(x) ((x) - __START_KERNEL_map)
#define rva(x) ((x) - pvh_start_xen)
#include <linux/elfnote.h>
#include <linux/init.h>
#include <linux/linkage.h>
#include <asm/desc_defs.h>
#include <asm/segment.h>
#include <asm/asm.h>
#include <asm/boot.h>
#include <asm/pgtable.h>
#include <asm/processor-flags.h>
#include <asm/msr.h>
#include <asm/nospec-branch.h>
#include <xen/interface/elfnote.h>
__HEAD
#define PVH_GDT_ENTRY_CS 1
#define PVH_GDT_ENTRY_DS 2
#define PVH_CS_SEL (PVH_GDT_ENTRY_CS * 8)
#define PVH_DS_SEL (PVH_GDT_ENTRY_DS * 8)
/*
 * Entry point for PVH guests, started by the hypervisor/loader in 32-bit
 * protected mode with paging disabled (see XEN_ELFNOTE_PHYS32_ENTRY below).
 *
 * Register state at entry:
 *   %ebx = physical address of the hvm start_info structure.
 * There is no stack, and the kernel may have been loaded at a physical
 * address other than the one it was linked for, so all addresses are
 * computed position-independently via rva()/%ebp.
 */
SYM_CODE_START_LOCAL(pvh_start_xen)
UNWIND_HINT_END_OF_STACK
cld
/*
 * We need a call/pop pair to discover our own load address, but we have
 * no stack yet.  Borrow the first word of start_info as a one-slot
 * stack: save it in %eax, point %esp just past it, and restore it once
 * the return address has been popped.
 */
mov (%ebx), %eax /* save the start_info word we are about to clobber */
leal 4(%ebx), %esp /* one-slot stack on top of start_info */
ANNOTATE_INTRA_FUNCTION_CALL
call 1f
1: popl %ebp /* %ebp = runtime physical address of label 1 */
mov %eax, (%ebx) /* restore the clobbered start_info word */
subl $rva(1b), %ebp /* %ebp = physical load base of pvh_start_xen */
movl $0, %esp /* no valid stack again until early_stack below */
/*
 * Load our own GDT.  The descriptor's base field was assembled as a
 * link-time physical address; patch in the runtime address first.
 */
leal rva(gdt)(%ebp), %eax
leal rva(gdt_start)(%ebp), %ecx
movl %ecx, 2(%eax) /* fix up the GDT base with the load offset */
lgdt (%eax)
/* Switch all data segments to our flat data selector. */
mov $PVH_DS_SEL,%eax
mov %eax,%ds
mov %eax,%es
mov %eax,%ss
/* Copy start_info into the kernel image so C code can reach it later. */
leal rva(pvh_start_info)(%ebp), %edi
mov %ebx, %esi
movl rva(pvh_start_info_sz)(%ebp), %ecx
shr $2,%ecx /* convert byte count to 32-bit word count */
rep
movsl
/* Switch to a real stack. */
leal rva(early_stack_end)(%ebp), %esp
/* Enable PAE mode (prerequisite for long mode). */
mov %cr4, %eax
orl $X86_CR4_PAE, %eax
mov %eax, %cr4
#ifdef CONFIG_X86_64
/* Enable Long mode. */
mov $MSR_EFER, %ecx
rdmsr
btsl $_EFER_LME, %eax
wrmsr
/*
 * %ebx = load delta: the difference between where we were loaded and
 * where we were linked to run (_pa(pvh_start_xen)).  If zero, the
 * pre-built page tables below are already correct.
 */
mov %ebp, %ebx
subl $_pa(pvh_start_xen), %ebx
jz .Lpagetable_done
/* Relocate every present top-level (PGD) entry by the load delta. */
leal rva(pvh_init_top_pgt)(%ebp), %edi
movl $PTRS_PER_PGD, %ecx
2:
testl $_PAGE_PRESENT, 0x00(%edi)
jz 1f
addl %ebx, 0x00(%edi) /* entry holds a physical address: shift it */
1:
addl $8, %edi
decl %ecx
jnz 2b
/* Relocate the single present entry of the identity-map PUD. */
leal rva(pvh_level3_ident_pgt)(%ebp), %edi
addl %ebx, 0x00(%edi)
/* Relocate the two top (kernel-map) entries of the kernel PUD. */
leal rva(pvh_level3_kernel_pgt)(%ebp), %edi
addl %ebx, (PAGE_SIZE - 16)(%edi)
addl %ebx, (PAGE_SIZE - 8)(%edi)
/* Relocate every present entry of the kernel PMD. */
leal rva(pvh_level2_kernel_pgt)(%ebp), %edi
movl $PTRS_PER_PMD, %ecx
2:
testl $_PAGE_PRESENT, 0x00(%edi)
jz 1f
addl %ebx, 0x00(%edi)
1:
addl $8, %edi
decl %ecx
jnz 2b
.Lpagetable_done:
/* Enable the pre-constructed (and now relocated) page tables. */
leal rva(pvh_init_top_pgt)(%ebp), %eax
mov %eax, %cr3
mov $(X86_CR0_PG | X86_CR0_PE), %eax
mov %eax, %cr0
/* Far return into 64-bit mode through the 64-bit code selector. */
pushl $PVH_CS_SEL
leal rva(1f)(%ebp), %eax
pushl %eax
lretl
/* 64-bit code follows. */
.code64
1:
UNWIND_HINT_END_OF_STACK
/*
 * Point MSR_GS_BASE at the local scratch area named "canary" before
 * calling C code (presumably for the stack-protector canary slot —
 * NOTE(review): confirm against the kernel's stackprotector layout).
 */
mov $MSR_GS_BASE,%ecx
leal canary(%rip), %eax
xor %edx, %edx
wrmsr
/*
 * Publish the load delta in phys_base so the C code called next can
 * translate virtual addresses to physical ones.
 */
movq %rbp, %rbx
subq $_pa(pvh_start_xen), %rbx
movq %rbx, phys_base(%rip)
call xen_prepare_pvh
/*
 * Clear phys_base again: the generic startup_64 path adds to whatever
 * value it finds there, so it must start from zero.
 */
xor %rbx, %rbx
movq %rbx, phys_base(%rip)
/* startup_64 expects a boot_params pointer in %rsi. */
lea pvh_bootparams(%rip), %rsi
jmp startup_64
#else /* !CONFIG_X86_64 */
/* 32-bit kernel: build the early page tables, then enable paging. */
call mk_early_pgtbl_32
mov $_pa(initial_page_table), %eax
mov %eax, %cr3
mov %cr0, %eax
or $(X86_CR0_PG | X86_CR0_PE), %eax
mov %eax, %cr0
/* Reload %cs with our own code selector. */
ljmp $PVH_CS_SEL, $1f
1:
call xen_prepare_pvh
mov $_pa(pvh_bootparams), %esi
/* startup_32 doesn't expect paging and PAE to be on, so undo both. */
ljmp $PVH_CS_SEL, $_pa(2f)
2:
mov %cr0, %eax
and $~X86_CR0_PG, %eax /* paging off */
mov %eax, %cr0
mov %cr4, %eax
and $~X86_CR4_PAE, %eax /* PAE off */
mov %eax, %cr4
ljmp $PVH_CS_SEL, $_pa(startup_32)
#endif
SYM_CODE_END(pvh_start_xen)
.section ".init.data","aw"
.balign 8
/*
 * GDT pseudo-descriptor (16-bit limit + 32-bit base) for lgdt.
 * The base is assembled as the link-time physical address of gdt_start;
 * pvh_start_xen patches it with the runtime address before lgdt.
 */
SYM_DATA_START_LOCAL(gdt)
.word gdt_end - gdt_start /* limit */
.long _pa(gdt_start) /* base — overwritten at runtime if relocated */
.word 0 /* padding */
SYM_DATA_END(gdt)
/*
 * Minimal flat GDT: null descriptor, one code segment (64-bit or
 * 32-bit depending on the kernel), one 32-bit data segment.  Entry
 * indices must match PVH_GDT_ENTRY_CS / PVH_GDT_ENTRY_DS above.
 */
SYM_DATA_START_LOCAL(gdt_start)
.quad 0x0000000000000000 /* NULL descriptor */
#ifdef CONFIG_X86_64
.quad GDT_ENTRY(DESC_CODE64, 0, 0xfffff) /* PVH_CS_SEL */
#else
.quad GDT_ENTRY(DESC_CODE32, 0, 0xfffff) /* PVH_CS_SEL */
#endif
.quad GDT_ENTRY(DESC_DATA32, 0, 0xfffff) /* PVH_DS_SEL */
SYM_DATA_END_LABEL(gdt_start, SYM_L_LOCAL, gdt_end)
.balign 16
/*
 * Scratch area whose address is loaded into MSR_GS_BASE by
 * pvh_start_xen before calling C code (presumably backing the
 * stack-protector canary — NOTE(review): confirm the 48-byte size
 * against the expected %gs layout).
 */
SYM_DATA_LOCAL(canary, .fill 48, 1, 0)
/* Temporary boot stack, used from pvh_start_xen until the generic entry path takes over. */
SYM_DATA_START_LOCAL(early_stack)
.fill BOOT_STACK_SIZE, 1, 0
SYM_DATA_END_LABEL(early_stack, SYM_L_LOCAL, early_stack_end)
#ifdef CONFIG_X86_64
/*
 * Pre-built early page tables (64-bit only).  All entries store
 * link-time physical addresses (symbol - __START_KERNEL_map); if the
 * kernel was loaded elsewhere, pvh_start_xen adds the load delta to
 * every present entry before enabling paging.
 *
 * Top level (PGD): identity map at VA 0, the same identity map again
 * at the direct-map slot, and the kernel mapping in the top slot.
 */
SYM_DATA_START_PAGE_ALIGNED(pvh_init_top_pgt)
.quad pvh_level3_ident_pgt - __START_KERNEL_map + _KERNPG_TABLE_NOENC
.org pvh_init_top_pgt + L4_PAGE_OFFSET * 8, 0
.quad pvh_level3_ident_pgt - __START_KERNEL_map + _KERNPG_TABLE_NOENC
.org pvh_init_top_pgt + L4_START_KERNEL * 8, 0
.quad pvh_level3_kernel_pgt - __START_KERNEL_map + _PAGE_TABLE_NOENC
SYM_DATA_END(pvh_init_top_pgt)
/* Identity-map PUD: only the first entry is populated. */
SYM_DATA_START_PAGE_ALIGNED(pvh_level3_ident_pgt)
.quad pvh_level2_ident_pgt - __START_KERNEL_map + _KERNPG_TABLE_NOENC
.fill 511, 8, 0 /* remaining 511 entries empty */
SYM_DATA_END(pvh_level3_ident_pgt)
/* Identity-map PMD: PTRS_PER_PMD large-page entries starting at physical 0. */
SYM_DATA_START_PAGE_ALIGNED(pvh_level2_ident_pgt)
PMDS(0, __PAGE_KERNEL_IDENT_LARGE_EXEC, PTRS_PER_PMD)
SYM_DATA_END(pvh_level2_ident_pgt)
/*
 * Kernel-map PUD: a single populated entry at index L3_START_KERNEL
 * (the kernel text mapping), followed by one empty slot.  Both of the
 * top two 8-byte slots are relocated by pvh_start_xen
 * (PAGE_SIZE - 16 and PAGE_SIZE - 8 offsets).
 */
SYM_DATA_START_PAGE_ALIGNED(pvh_level3_kernel_pgt)
.fill L3_START_KERNEL, 8, 0
.quad pvh_level2_kernel_pgt - __START_KERNEL_map + _KERNPG_TABLE_NOENC
.quad 0
SYM_DATA_END(pvh_level3_kernel_pgt)
/* Kernel-map PMD: large-page entries covering KERNEL_IMAGE_SIZE from physical 0. */
SYM_DATA_START_PAGE_ALIGNED(pvh_level2_kernel_pgt)
PMDS(0, __PAGE_KERNEL_LARGE_EXEC, KERNEL_IMAGE_SIZE / PMD_SIZE)
SYM_DATA_END(pvh_level2_kernel_pgt)
/*
 * Tell the PVH loader the kernel is relocatable: required physical
 * alignment, preferred load address, and highest usable address.
 */
ELFNOTE(Xen, XEN_ELFNOTE_PHYS32_RELOC,
.long CONFIG_PHYSICAL_ALIGN;
.long LOAD_PHYSICAL_ADDR;
.long KERNEL_IMAGE_SIZE - 1)
#endif
/* Advertise pvh_start_xen's physical address as the 32-bit PVH entry point. */
ELFNOTE(Xen, XEN_ELFNOTE_PHYS32_ENTRY,
_ASM_PTR (pvh_start_xen - __START_KERNEL_map))