x86/retpoline,kprobes: Fix position of thunk sections with CONFIG_LTO_CLANG
The linker script arch/x86/kernel/vmlinux.lds.S matches the thunk
sections ".text.__x86.*" from arch/x86/lib/retpoline.S as follows:
  .text {
    [...]
    TEXT_TEXT
    [...]
    __indirect_thunk_start = .;
    *(.text.__x86.*)
    __indirect_thunk_end = .;
    [...]
  }

Macro TEXT_TEXT references TEXT_MAIN which normally expands to only
".text". However, with CONFIG_LTO_CLANG, TEXT_MAIN becomes
".text .text.[0-9a-zA-Z_]*" which wrongly matches also the thunk
sections. The output layout is then different than expected. For
instance, the currently defined range [__indirect_thunk_start,
__indirect_thunk_end] becomes empty.
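
For context, TEXT_MAIN comes from include/asm-generic/vmlinux.lds.h and is defined roughly as follows (a simplified sketch; the exact conditionals and comments may differ in a given tree):

  #if defined(CONFIG_LD_DEAD_CODE_DATA_ELIMINATION) || defined(CONFIG_LTO_CLANG)
  /*
   * Built with -ffunction-sections: every function lands in its own
   * .text.<name> input section and has to be collected with a wildcard.
   */
  #define TEXT_MAIN .text .text.[0-9a-zA-Z_]*
  #else
  #define TEXT_MAIN .text
  #endif

Since '_' is inside the [0-9a-zA-Z_] class, the glob also matches
".text.__x86.indirect_thunk" and the other thunk sections, so TEXT_TEXT
claims them before the explicit *(.text.__x86.*) pattern further down is
ever considered.
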
Prevent the problem by using ".." as the first separator, for example,
".text..__x86.indirect_thunk". This pattern is utilized by other
explicit section names which start with one of the standard prefixes,
such as ".text" or ".data", and that need to be individually selected in
the linker script.
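
As a sketch of the resulting naming contract (abbreviated excerpts under
the new names, not the complete files):

  /* arch/x86/lib/retpoline.S: place the thunks in the renamed section */
  	.section .text..__x86.indirect_thunk

  /* arch/x86/kernel/vmlinux.lds.S: the second '.' is outside the
   * [0-9a-zA-Z_] class used by TEXT_MAIN under CONFIG_LTO_CLANG, so
   * only this explicit pattern can claim the thunk sections.
   */
  	__indirect_thunk_start = .;
  	*(.text..__x86.indirect_thunk)
  	*(.text..__x86.return_thunk)
  	__indirect_thunk_end = .;

Anything that matches the thunk sections by name prefix has to follow
suit, which is why the objtool check in the diff below compares against
".text..__x86." (13 characters) instead of ".text.__x86." (12).
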
[ nathan: Fix conflicts with SRSO and fold in a fix for an issue brought
  up by Andrew Cooper in post-review:
  https://lore.kernel.org/20230803230323.1478869-1-andrew.cooper3@citrix.com ]
Fixes: dc5723b02e ("kbuild: add support for Clang LTO")
Signed-off-by: Petr Pavlu <petr.pavlu@suse.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Nathan Chancellor <nathan@kernel.org>
Signed-off-by: Borislav Petkov (AMD) <bp@alien8.de>
Link: https://lore.kernel.org/r/20230711091952.27944-2-petr.pavlu@suse.com

This commit is contained in:
Parent: e9fbc47b81
Commit: 79cd2a1122

--- a/arch/x86/kernel/vmlinux.lds.S
+++ b/arch/x86/kernel/vmlinux.lds.S
@@ -134,15 +134,15 @@ SECTIONS
 		SOFTIRQENTRY_TEXT
 #ifdef CONFIG_RETPOLINE
 		__indirect_thunk_start = .;
-		*(.text.__x86.indirect_thunk)
-		*(.text.__x86.return_thunk)
+		*(.text..__x86.indirect_thunk)
+		*(.text..__x86.return_thunk)
 		__indirect_thunk_end = .;
 #endif
 		STATIC_CALL_TEXT
 
 		ALIGN_ENTRY_TEXT_BEGIN
 #ifdef CONFIG_CPU_SRSO
-		*(.text.__x86.rethunk_untrain)
+		*(.text..__x86.rethunk_untrain)
 #endif
 
 		ENTRY_TEXT
@@ -153,7 +153,7 @@ SECTIONS
 		 * definition.
 		 */
 		. = srso_untrain_ret_alias | (1 << 2) | (1 << 8) | (1 << 14) | (1 << 20);
-		*(.text.__x86.rethunk_safe)
+		*(.text..__x86.rethunk_safe)
 #endif
 		ALIGN_ENTRY_TEXT_END
 		*(.gnu.warning)

--- a/arch/x86/lib/retpoline.S
+++ b/arch/x86/lib/retpoline.S
@@ -13,7 +13,7 @@
 #include <asm/frame.h>
 #include <asm/nops.h>
 
-	.section .text.__x86.indirect_thunk
+	.section .text..__x86.indirect_thunk
 
 
 .macro POLINE reg
@@ -148,7 +148,7 @@ SYM_CODE_END(__x86_indirect_jump_thunk_array)
  * As a result, srso_safe_ret_alias() becomes a safe return.
  */
 #ifdef CONFIG_CPU_SRSO
-	.section .text.__x86.rethunk_untrain
+	.section .text..__x86.rethunk_untrain
 
 SYM_START(srso_untrain_ret_alias, SYM_L_GLOBAL, SYM_A_NONE)
 	ANNOTATE_NOENDBR
@@ -158,7 +158,7 @@ SYM_START(srso_untrain_ret_alias, SYM_L_GLOBAL, SYM_A_NONE)
 SYM_FUNC_END(srso_untrain_ret_alias)
 __EXPORT_THUNK(srso_untrain_ret_alias)
 
-	.section .text.__x86.rethunk_safe
+	.section .text..__x86.rethunk_safe
 #endif
 
 /* Needs a definition for the __x86_return_thunk alternative below. */
@@ -172,7 +172,7 @@ SYM_START(srso_safe_ret_alias, SYM_L_GLOBAL, SYM_A_NONE)
 	int3
 SYM_FUNC_END(srso_safe_ret_alias)
 
-	.section .text.__x86.return_thunk
+	.section .text..__x86.return_thunk
 
 /*
  * Safety details here pertain to the AMD Zen{1,2} microarchitecture:

--- a/tools/objtool/check.c
+++ b/tools/objtool/check.c
@@ -389,7 +389,7 @@ static int decode_instructions(struct objtool_file *file)
 		if (!strcmp(sec->name, ".noinstr.text") ||
 		    !strcmp(sec->name, ".entry.text") ||
 		    !strcmp(sec->name, ".cpuidle.text") ||
-		    !strncmp(sec->name, ".text.__x86.", 12))
+		    !strncmp(sec->name, ".text..__x86.", 13))
 			sec->noinstr = true;
 
 		/*