Revert "x86/objtool: Use asm macros to work around GCC inlining bugs"

This reverts commit c06c4d8090.

See this commit for details about the revert:

  e769742d35 ("Revert "x86/jump-labels: Macrofy inline assembly code to work around GCC inlining bugs"")

Reported-by: Masahiro Yamada <yamada.masahiro@socionext.com>
Reviewed-by: Borislav Petkov <bp@alien8.de>
Reviewed-by: Thomas Gleixner <tglx@linutronix.de>
Cc: Juergen Gross <jgross@suse.com>
Cc: Richard Biener <rguenther@suse.de>
Cc: Kees Cook <keescook@chromium.org>
Cc: Segher Boessenkool <segher@kernel.crashing.org>
Cc: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Josh Poimboeuf <jpoimboe@redhat.com>
Cc: Nadav Amit <namit@vmware.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Ingo Molnar  2018-12-19 11:23:27 +01:00
Parent: ac180540b0
Commit: 96af6cd02a
2 changed files, 13 additions and 45 deletions

--- a/arch/x86/kernel/macros.S
+++ b/arch/x86/kernel/macros.S
@@ -5,5 +5,3 @@
  * commonly used. The macros are precompiled into assmebly file which is later
  * assembled together with each compiled file.
  */
-
-#include <linux/compiler.h>

--- a/include/linux/compiler.h
+++ b/include/linux/compiler.h
@@ -99,13 +99,22 @@ void ftrace_likely_update(struct ftrace_likely_data *f, int val,
  * unique, to convince GCC not to merge duplicate inline asm statements.
  */
 #define annotate_reachable() ({					\
-	asm volatile("ANNOTATE_REACHABLE counter=%c0"			\
-		     : : "i" (__COUNTER__));				\
+	asm volatile("%c0:\n\t"						\
+		     ".pushsection .discard.reachable\n\t"		\
+		     ".long %c0b - .\n\t"				\
+		     ".popsection\n\t" : : "i" (__COUNTER__));		\
 })
 #define annotate_unreachable() ({					\
-	asm volatile("ANNOTATE_UNREACHABLE counter=%c0"			\
-		     : : "i" (__COUNTER__));				\
+	asm volatile("%c0:\n\t"						\
+		     ".pushsection .discard.unreachable\n\t"		\
+		     ".long %c0b - .\n\t"				\
+		     ".popsection\n\t" : : "i" (__COUNTER__));		\
 })
+#define ASM_UNREACHABLE							\
+	"999:\n\t"							\
+	".pushsection .discard.unreachable\n\t"				\
+	".long 999b - .\n\t"						\
+	".popsection\n\t"
 #else
 #define annotate_reachable()
 #define annotate_unreachable()
@@ -293,45 +302,6 @@ static inline void *offset_to_ptr(const int *off)
 	return (void *)((unsigned long)off + *off);
 }
 
-#else /* __ASSEMBLY__ */
-
-#ifdef __KERNEL__
-#ifndef LINKER_SCRIPT
-
-#ifdef CONFIG_STACK_VALIDATION
-.macro ANNOTATE_UNREACHABLE counter:req
-\counter:
-	.pushsection .discard.unreachable
-	.long \counter\()b -.
-	.popsection
-.endm
-
-.macro ANNOTATE_REACHABLE counter:req
-\counter:
-	.pushsection .discard.reachable
-	.long \counter\()b -.
-	.popsection
-.endm
-
-.macro ASM_UNREACHABLE
-999:
-	.pushsection .discard.unreachable
-	.long 999b - .
-	.popsection
-.endm
-#else /* CONFIG_STACK_VALIDATION */
-.macro ANNOTATE_UNREACHABLE counter:req
-.endm
-
-.macro ANNOTATE_REACHABLE counter:req
-.endm
-
-.macro ASM_UNREACHABLE
-.endm
-#endif /* CONFIG_STACK_VALIDATION */
-#endif /* LINKER_SCRIPT */
-#endif /* __KERNEL__ */
-
 #endif /* __ASSEMBLY__ */
 
 /* Compile time object size, -1 for unknown */
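
For readers who want to see the restored annotation in isolation: below is a minimal standalone sketch, not kernel code, assuming GCC or Clang targeting x86 and a hypothetical noreturn helper die(). It mirrors the open-coded annotate_unreachable() that this revert brings back and shows the kind of call site it is meant to mark; objtool later walks the .discard.unreachable section to find these offsets.

/*
 * Minimal standalone sketch (not kernel code): the open-coded form of
 * annotate_unreachable() restored by this revert.  Each expansion drops
 * a PC-relative entry into .discard.unreachable; the "i" (__COUNTER__)
 * operand makes every label unique so GCC cannot merge otherwise
 * identical asm statements.
 */
#define annotate_unreachable() ({					\
	asm volatile("%c0:\n\t"						\
		     ".pushsection .discard.unreachable\n\t"		\
		     ".long %c0b - .\n\t"				\
		     ".popsection\n\t" : : "i" (__COUNTER__));		\
})

/* Hypothetical noreturn helper, used only for illustration. */
__attribute__((noreturn)) extern void die(void);

int demo(int x)
{
	if (x < 0) {
		die();
		annotate_unreachable();	/* code past die() is intentionally unreachable */
	}
	return x;
}

ASM_UNREACHABLE carries the same sequence as a plain string literal (with a fixed 999 local label), so it can be pasted into larger inline-asm templates where a C-level statement cannot be placed.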