Merge branch 'perf-core-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git...
[sfrench/cifs-2.6.git] / arch / x86 / include / asm / jump_label.h
index 8c0de4282659b52079dff43b68affcbed665109f..a5fb34fe56a4bb31f78023ff3d258132ff93ee16 100644
@@ -2,19 +2,6 @@
 #ifndef _ASM_X86_JUMP_LABEL_H
 #define _ASM_X86_JUMP_LABEL_H
 
-#ifndef HAVE_JUMP_LABEL
-/*
- * For better or for worse, if jump labels (the gcc extension) are missing,
- * then the entire static branch patching infrastructure is compiled out.
- * If that happens, the code in here will malfunction.  Raise a compiler
- * error instead.
- *
- * In theory, jump labels and the static branch patching infrastructure
- * could be decoupled to fix this.
- */
-#error asm/jump_label.h included on a non-jump-label kernel
-#endif
-
 #define JUMP_LABEL_NOP_SIZE 5
 
 #ifdef CONFIG_X86_64
 
 static __always_inline bool arch_static_branch(struct static_key *key, bool branch)
 {
-       asm_volatile_goto("1:"
-               ".byte " __stringify(STATIC_KEY_INIT_NOP) "\n\t"
-               ".pushsection __jump_table,  \"aw\" \n\t"
-               _ASM_ALIGN "\n\t"
-               _ASM_PTR "1b, %l[l_yes], %c0 + %c1 \n\t"
-               ".popsection \n\t"
-               : :  "i" (key), "i" (branch) : : l_yes);
-
+       asm_volatile_goto("STATIC_BRANCH_NOP l_yes=\"%l[l_yes]\" key=\"%c0\" "
+                         "branch=\"%c1\""
+                       : :  "i" (key), "i" (branch) : : l_yes);
        return false;
 l_yes:
        return true;
@@ -48,13 +30,8 @@ l_yes:
 
 static __always_inline bool arch_static_branch_jump(struct static_key *key, bool branch)
 {
-       asm_volatile_goto("1:"
-               ".byte 0xe9\n\t .long %l[l_yes] - 2f\n\t"
-               "2:\n\t"
-               ".pushsection __jump_table,  \"aw\" \n\t"
-               _ASM_ALIGN "\n\t"
-               _ASM_PTR "1b, %l[l_yes], %c0 + %c1 \n\t"
-               ".popsection \n\t"
+       asm_volatile_goto("STATIC_BRANCH_JMP l_yes=\"%l[l_yes]\" key=\"%c0\" "
+                         "branch=\"%c1\""
                : :  "i" (key), "i" (branch) : : l_yes);
 
        return false;
@@ -62,49 +39,28 @@ l_yes:
        return true;
 }
 
-#ifdef CONFIG_X86_64
-typedef u64 jump_label_t;
-#else
-typedef u32 jump_label_t;
-#endif
-
-struct jump_entry {
-       jump_label_t code;
-       jump_label_t target;
-       jump_label_t key;
-};
-
 #else  /* __ASSEMBLY__ */
 
-.macro STATIC_JUMP_IF_TRUE target, key, def
-.Lstatic_jump_\@:
-       .if \def
-       /* Equivalent to "jmp.d32 \target" */
-       .byte           0xe9
-       .long           \target - .Lstatic_jump_after_\@
-.Lstatic_jump_after_\@:
-       .else
-       .byte           STATIC_KEY_INIT_NOP
-       .endif
+.macro STATIC_BRANCH_NOP l_yes:req key:req branch:req
+.Lstatic_branch_nop_\@:
+       .byte STATIC_KEY_INIT_NOP
+.Lstatic_branch_no_after_\@:
        .pushsection __jump_table, "aw"
        _ASM_ALIGN
-       _ASM_PTR        .Lstatic_jump_\@, \target, \key
+       .long           .Lstatic_branch_nop_\@ - ., \l_yes - .
+       _ASM_PTR        \key + \branch - .
        .popsection
 .endm
 
-.macro STATIC_JUMP_IF_FALSE target, key, def
-.Lstatic_jump_\@:
-       .if \def
-       .byte           STATIC_KEY_INIT_NOP
-       .else
-       /* Equivalent to "jmp.d32 \target" */
-       .byte           0xe9
-       .long           \target - .Lstatic_jump_after_\@
-.Lstatic_jump_after_\@:
-       .endif
+.macro STATIC_BRANCH_JMP l_yes:req key:req branch:req
+.Lstatic_branch_jmp_\@:
+       .byte 0xe9
+       .long \l_yes - .Lstatic_branch_jmp_after_\@
+.Lstatic_branch_jmp_after_\@:
        .pushsection __jump_table, "aw"
        _ASM_ALIGN
-       _ASM_PTR        .Lstatic_jump_\@, \target, \key + 1
+       .long           .Lstatic_branch_jmp_\@ - ., \l_yes - .
+       _ASM_PTR        \key + \branch - .
        .popsection
 .endm
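
The new macros store every field of a __jump_table entry as an offset relative to the field's own location (".long ... - ." for the patch site and the l_yes target, "_ASM_PTR \key + \branch - ." for the static_key reference with the branch flag folded into bit 0), so the table needs no runtime relocation and the code/target fields shrink to 32 bits on 64-bit kernels. Below is a minimal C sketch of how such a relative entry could be decoded back into absolute addresses; the struct layout and helper names are illustrative assumptions derived only from what the macros above emit, not the kernel's generic jump_label definitions:

#include <stdint.h>

/* Illustrative layout matching what STATIC_BRANCH_NOP/STATIC_BRANCH_JMP emit:
 * two 32-bit self-relative offsets followed by a pointer-sized
 * self-relative key reference carrying the branch flag in bit 0.
 */
struct rel_jump_entry {
	int32_t  code;   /* patch site (NOP or JMP), relative to &code */
	int32_t  target; /* l_yes label, relative to &target           */
	intptr_t key;    /* &static_key + branch, relative to &key     */
};

static inline uintptr_t entry_code(const struct rel_jump_entry *e)
{
	return (uintptr_t)&e->code + e->code;
}

static inline uintptr_t entry_target(const struct rel_jump_entry *e)
{
	return (uintptr_t)&e->target + e->target;
}

static inline uintptr_t entry_key(const struct rel_jump_entry *e)
{
	/* the static_key is word-aligned, so bit 0 only holds the branch flag */
	return ((uintptr_t)&e->key + e->key) & ~(uintptr_t)1;
}

static inline int entry_branch(const struct rel_jump_entry *e)
{
	return (int)(((uintptr_t)&e->key + e->key) & 1);
}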