x86: fix section mismatch in head_64.S:initial_code
diff --git a/arch/x86/kernel/head_64.S b/arch/x86/kernel/head_64.S
index b6167fe3330e22aec72abc9f534de07d19fcb614..eb415043a9297d742f3df5d9a1de10fa35d0b69b 100644
--- a/arch/x86/kernel/head_64.S
+++ b/arch/x86/kernel/head_64.S
 #include <asm/msr.h>
 #include <asm/cache.h>
 
+#ifdef CONFIG_PARAVIRT
+#include <asm/asm-offsets.h>
+#include <asm/paravirt.h>
+#else
+#define GET_CR2_INTO_RCX movq %cr2, %rcx
+#endif
+
 /* we are not able to switch in one step to the final KERNEL ADDRESS SPACE
  * because we need identity-mapped pages.
  *
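The GET_CR2_INTO_RCX indirection follows the usual paravirt pattern: a raw register read on bare metal, a hook when a hypervisor has to intercept the access. A minimal C sketch of the same compile-time dispatch (pv_read_cr2() and get_cr2() are illustrative names, not the kernel's API):

    /* Sketch of the dispatch behind GET_CR2_INTO_RCX; only the #else
     * branch mirrors the raw "movq %cr2, %rcx" above. */
    #ifdef CONFIG_PARAVIRT
    unsigned long pv_read_cr2(void);        /* hypervisor-aware hook */
    # define get_cr2() pv_read_cr2()
    #else
    static inline unsigned long get_cr2(void)
    {
            unsigned long val;
            asm volatile("movq %%cr2, %0" : "=r" (val));  /* direct read */
            return val;
    }
    #endif
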
@@ -56,7 +63,7 @@ startup_64:
 
        /* Is the address not 2M aligned? */
        movq    %rbp, %rax
-       andl    $~LARGE_PAGE_MASK, %eax
+       andl    $~PMD_PAGE_MASK, %eax
        testl   %eax, %eax
        jnz     bad_address
 
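The check rejects a load address that is not 2 MiB aligned: ~PMD_PAGE_MASK keeps only the low 21 bits, so any nonzero result means misalignment. A standalone sketch of the arithmetic, assuming the usual 2 MiB PMD page size:

    #include <stdint.h>

    #define PMD_PAGE_SIZE  (1UL << 21)             /* 2 MiB large page */
    #define PMD_PAGE_MASK  (~(PMD_PAGE_SIZE - 1))

    /* Mirrors "andl $~PMD_PAGE_MASK, %eax; testl %eax, %eax". */
    static inline int misaligned_2m(uint64_t load_addr)
    {
            return (load_addr & ~PMD_PAGE_MASK) != 0;
    }
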
@@ -81,7 +88,7 @@ startup_64:
 
        /* Add an Identity mapping if I am above 1G */
        leaq    _text(%rip), %rdi
-       andq    $LARGE_PAGE_MASK, %rdi
+       andq    $PMD_PAGE_MASK, %rdi
 
        movq    %rdi, %rax
        shrq    $PUD_SHIFT, %rax
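
Masking with PMD_PAGE_MASK rounds _text down to its 2 MiB frame; shifting by PUD_SHIFT then selects the 1 GiB slot the identity mapping must cover. A sketch of the index math, assuming the standard 4-level paging constants (the helper name shadows the kernel's for readability):

    #include <stdint.h>

    #define PUD_SHIFT     30        /* each PUD entry maps 1 GiB */
    #define PTRS_PER_PUD  512

    /* Mirrors "shrq $PUD_SHIFT, %rax" plus the usual index masking. */
    static inline unsigned int pud_index(uint64_t addr)
    {
            return (addr >> PUD_SHIFT) & (PTRS_PER_PUD - 1);
    }
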
@@ -100,8 +107,13 @@ startup_64:
        movq    %rdx, 0(%rbx, %rax, 8)
 ident_complete:
 
-       /* Fixup the kernel text+data virtual addresses
+       /*
+        * Fix up the kernel text+data virtual addresses. Note that
+        * we might write invalid pmds when the kernel is relocated;
+        * cleanup_highmap() fixes this up, along with the mappings
+        * beyond _end.
         */
+
        leaq    level2_kernel_pgt(%rip), %rdi
        leaq    4096(%rdi), %r8
        /* See if it is a valid page table entry */
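
The loop starting here walks the 4096-byte level2_kernel_pgt (512 8-byte pmds) and rebases each valid entry by the relocation offset; any invalid pmds it leaves behind are, per the comment above, cleaned up later by cleanup_highmap(). A C rendering of that walk (the function name and delta parameter are illustrative):

    #include <stdint.h>

    #define PTRS_PER_PMD   512
    #define _PAGE_PRESENT  0x1UL

    /* Rebase every present PMD entry by the physical load offset. */
    static void fixup_kernel_pmds(uint64_t pmd[PTRS_PER_PMD], uint64_t delta)
    {
            for (int i = 0; i < PTRS_PER_PMD; i++)
                    if (pmd[i] & _PAGE_PRESENT)
                            pmd[i] += delta;
    }
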
@@ -243,31 +255,55 @@ ENTRY(secondary_startup_64)
        lretq
 
        /* SMP bootup changes these two */
-#ifndef CONFIG_HOTPLUG_CPU
-       .pushsection .init.data
-#endif
+       __REFDATA
        .align  8
-       .globl  initial_code
-initial_code:
+       ENTRY(initial_code)
        .quad   x86_64_start_kernel
-#ifndef CONFIG_HOTPLUG_CPU
-       .popsection
-#endif
-       .globl init_rsp
-init_rsp:
+       __FINITDATA
+
+       ENTRY(init_rsp)
        .quad  init_thread_union+THREAD_SIZE-8
 
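This is the section-mismatch fix itself: initial_code points at the __init function x86_64_start_kernel, so the __REFDATA/__FINITDATA bracket moves it into .ref.data, which modpost does not warn about. A hedged userspace sketch of the same idea, with the kernel's __init/__refdata helpers stubbed out as plain section attributes and hypothetical names:

    /* Illustrative demo, not the kernel's code. */
    #define __init    __attribute__((__section__(".init.text")))
    #define __refdata __attribute__((__section__(".ref.data")))

    static void __init start_kernel_stub(void) { }

    /* Placing the pointer in .ref.data marks the reference from
     * non-init data to an .init.text symbol as intentional. */
    static void (*initial_code_ref)(void) __refdata = start_kernel_stub;

    int main(void) { initial_code_ref(); return 0; }
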
 bad_address:
        jmp bad_address
 
+#ifdef CONFIG_EARLY_PRINTK
+.macro early_idt_tramp first, last
+       .ifgt \last-\first
+       early_idt_tramp \first, \last-1
+       .endif
+       movl $\last,%esi
+       jmp early_idt_handler
+.endm
+
+       .globl early_idt_handlers
+early_idt_handlers:
+       early_idt_tramp 0, 63
+       early_idt_tramp 64, 127
+       early_idt_tramp 128, 191
+       early_idt_tramp 192, 255
+#endif
+
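Each trampoline just loads its vector number and jumps to the common handler; the recursive .macro unrolls one stub per vector, and the four invocations cap each recursion at 64 levels, presumably to stay clear of assembler nesting limits. A C loop that prints the equivalent expansion:

    #include <stdio.h>

    /* Print the 256 two-instruction stubs the .macro expands to. */
    int main(void)
    {
            for (int vec = 0; vec <= 255; vec++)
                    printf("\tmovl $%d, %%esi\n"
                           "\tjmp early_idt_handler\n", vec);
            return 0;
    }
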
 ENTRY(early_idt_handler)
+#ifdef CONFIG_EARLY_PRINTK
        cmpl $2,early_recursion_flag(%rip)
        jz  1f
        incl early_recursion_flag(%rip)
+       GET_CR2_INTO_RCX
+       movq %rcx,%r9
+       xorl %r8d,%r8d          # zero for error code
+       movl %esi,%ecx          # get vector number
+       # Test %ecx against mask of vectors that push error code.
+       cmpl $31,%ecx
+       ja 0f
+       movl $1,%eax
+       salq %cl,%rax
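+       # 0x27d00 = bits 8,10-14,17 (#DF,#TS,#NP,#SS,#GP,#PF,#AC)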
+       testl $0x27d00,%eax
+       je 0f
+       popq %r8                # get error code
+0:     movq 0(%rsp),%rcx       # get ip
+       movq 8(%rsp),%rdx       # get cs
        xorl %eax,%eax
-       movq 8(%rsp),%rsi       # get rip
-       movq (%rsp),%rdx
-       movq %cr2,%rcx
        leaq early_idt_msg(%rip),%rdi
        call early_printk
        cmpl $2,early_recursion_flag(%rip)
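
The shift-and-test implements a 32-entry bitmap: vectors 8, 10-14, and 17 (#DF, #TS, #NP, #SS, #GP, #PF, #AC) push a hardware error code, and their bits sum to 0x27d00. A standalone check of that arithmetic:

    #include <stdio.h>

    #define ERROR_CODE_MASK 0x27d00u   /* vectors that push an error code */

    /* Mirrors "cmpl $31 / ja" plus "salq %cl / testl $0x27d00". */
    static int pushes_error_code(unsigned int vector)
    {
            return vector <= 31 && ((1u << vector) & ERROR_CODE_MASK);
    }

    int main(void)
    {
            for (unsigned int v = 0; v <= 31; v++)
                    if (pushes_error_code(v))
                            printf("vector %u pushes an error code\n", v);
            return 0;   /* prints 8, 10, 11, 12, 13, 14, 17 */
    }
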
@@ -278,15 +314,19 @@ ENTRY(early_idt_handler)
        movq 8(%rsp),%rsi       # get rip again
        call __print_symbol
 #endif
+#endif /* CONFIG_EARLY_PRINTK */
 1:     hlt
        jmp 1b
+
+#ifdef CONFIG_EARLY_PRINTK
 early_recursion_flag:
        .long 0
 
 early_idt_msg:
-       .asciz "PANIC: early exception rip %lx error %lx cr2 %lx\n"
+       .asciz "PANIC: early exception %02lx rip %lx:%lx error %lx cr2 %lx\n"
 early_idt_ripmsg:
        .asciz "RIP %s\n"
+#endif /* CONFIG_EARLY_PRINTK */
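
For reference, the register setup in early_idt_handler lines up with the new format string via the SysV AMD64 call convention (rdi=format, rsi=vector, rdx=cs, rcx=ip, r8=error, r9=cr2; xorl %eax,%eax zeroes al for the variadic call). A printf stand-in with illustrative values:

    #include <stdio.h>

    int main(void)
    {
            unsigned long vector = 0x0e;                    /* %rsi: #PF   */
            unsigned long cs = 0x10, ip = 0x1000000UL;      /* %rdx, %rcx  */
            unsigned long error = 0x2, cr2 = 0xdead000UL;   /* %r8,  %r9   */

            printf("PANIC: early exception %02lx rip %lx:%lx error %lx cr2 %lx\n",
                   vector, cs, ip, error, cr2);
            return 0;
    }
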
 
 .balign PAGE_SIZE