Merge branch 'x86-urgent-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip
Pull x86 fixes from Thomas Gleixner:
 "A collection of assorted fixes:

   - Fix for the pinned cr0/4 fallout which escaped all testing efforts
     because the kvm-intel module was never loaded when the kernel was
     compiled with CONFIG_PARAVIRT=n. The cr0/4 accessors are moved out
     of line and the static key is now solely used in the core code and
     therefore can stay in the RO after init section. So the kvm-intel
     and other modules no longer reference the (read only) static key
     which the module loader tried to update.

   - Prevent an infinite loop in arch_stack_walk_user() by breaking out
     of the loop once the return address is detected to be 0.

   - Prevent the int3_emulate_call() selftest from corrupting the stack
     when KASAN is enabled. KASAN clobbers more registers than covered
     by the emulated call implementation. Convert the int3_magic()
     selftest to an ASM function so the compiler cannot KASANify it.

   - Unbreak the build with old GCC versions and with the Gold linker
     by reverting the move of _etext to the actual end of .text. In
     both cases the build fails with 'Invalid absolute R_X86_64_32S
     relocation: _etext'.

   - Initialize the context lock for init_mm, which was never an issue
     until the alternatives code started to use a temporary mm for
     patching.

   - Fix a build warning vs. the LOWMEM_PAGES constant where clang
     rightfully complains about a signed integer overflow in the shift
     operation by converting the operand to an ULL.

   - Adjust the misnamed ENDPROC() of common_spurious in the 32bit
     entry code"

* 'x86-urgent-for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  x86/stacktrace: Prevent infinite loop in arch_stack_walk_user()
  x86/asm: Move native_write_cr0/4() out of line
  x86/pgtable/32: Fix LOWMEM_PAGES constant
  x86/alternatives: Fix int3_emulate_call() selftest stack corruption
  x86/entry/32: Fix ENDPROC of common_spurious
  Revert "x86/build: Move _etext to actual end of .text"
  x86/ldt: Initialize the context lock for init_mm
commit 753c8d9b7d
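The first fix hinges on how RO static keys interact with modules. Below is a minimal kernel-style sketch of the pattern the patch adopts — it is illustrative, not part of the diff: a key defined with DEFINE_STATIC_KEY_FALSE_RO lives in the ro_after_init section, so every jump-label site referencing it must be built-in code; once the accessors are out of line, modules call an exported function instead of embedding a branch on the key. The names feature_key/feature_active are made up for this sketch.

/* Illustrative sketch, not from this merge: an RO-after-init static key
 * may only be referenced by built-in code. A module branching on it
 * would make the module loader patch read-only memory at load time. */
#include <linux/jump_label.h>
#include <linux/export.h>
#include <linux/init.h>

static DEFINE_STATIC_KEY_FALSE_RO(feature_key);	/* sealed after init */

/* Out-of-line accessor: modules link against this symbol instead of
 * embedding a jump-label site of their own. */
bool feature_active(void)
{
	return static_branch_likely(&feature_key);
}
EXPORT_SYMBOL(feature_active);

static int __init feature_setup(void)
{
	/* Must happen before mark_rodata_ro() seals the key. */
	static_branch_enable(&feature_key);
	return 0;
}
early_initcall(feature_setup);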
arch/x86/entry/entry_32.S

@@ -1189,7 +1189,7 @@ common_spurious:
 	movl	%esp, %eax
 	call	smp_spurious_interrupt
 	jmp	ret_from_intr
-ENDPROC(common_interrupt)
+ENDPROC(common_spurious)
 #endif
 
 /*
arch/x86/include/asm/mmu.h

@@ -59,6 +59,7 @@ typedef struct {
 #define INIT_MM_CONTEXT(mm)						\
 	.context = {							\
 		.ctx_id = 1,						\
+		.lock = __MUTEX_INITIALIZER(mm.context.lock),		\
 	}
 
 void leave_mm(int cpu);
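The mmu.h hunk works because kernel mutexes must be initialized before first use; an all-zero mutex is not a valid unlocked mutex and trips lockdep. A hedged stand-alone analogy of the same build-time initialization pattern (struct and names are made up for illustration):

/* Illustrative sketch, not from this merge: a statically allocated object
 * whose embedded mutex is set up at build time with __MUTEX_INITIALIZER,
 * the same trick INIT_MM_CONTEXT() now applies to init_mm's
 * context.lock. */
#include <linux/mutex.h>

struct patch_ctx {
	int		users;
	struct mutex	lock;
};

static struct patch_ctx boot_ctx = {
	.users	= 0,
	.lock	= __MUTEX_INITIALIZER(boot_ctx.lock),
};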
arch/x86/include/asm/pgtable_32.h

@@ -106,6 +106,6 @@ do {						\
  * with only a host target support using a 32-bit type for internal
  * representation.
  */
-#define LOWMEM_PAGES ((((2<<31) - __PAGE_OFFSET) >> PAGE_SHIFT))
+#define LOWMEM_PAGES ((((_ULL(2)<<31) - __PAGE_OFFSET) >> PAGE_SHIFT))
 
 #endif /* _ASM_X86_PGTABLE_32_H */
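The LOWMEM_PAGES change is easiest to see in isolation. In a minimal userspace sketch (assuming a 32-bit int, as on all x86 targets), 2<<31 shifts past the sign bit of an int — undefined behaviour and the source of clang's warning — while widening the left operand to unsigned long long yields the intended 4GiB value:

/* Minimal userspace demo of the warning fixed above; build with
 * clang -Wshift-overflow to see the complaint on the bad variant. */
#include <stdio.h>

int main(void)
{
	/* int bad = 2 << 31;  -- undefined: shift overflows signed int */
	unsigned long long good = 2ULL << 31;	/* kernel spells it _ULL(2) */

	printf("2ULL << 31 = %#llx\n", good);	/* 0x100000000 */
	return 0;
}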
arch/x86/include/asm/processor.h

@@ -741,6 +741,7 @@ extern void load_direct_gdt(int);
 extern void load_fixmap_gdt(int);
 extern void load_percpu_segment(int);
 extern void cpu_init(void);
+extern void cr4_init(void);
 
 static inline unsigned long get_debugctlmsr(void)
 {
arch/x86/include/asm/special_insns.h

@@ -18,9 +18,7 @@
  */
 extern unsigned long __force_order;
 
-/* Starts false and gets enabled once CPU feature detection is done. */
-DECLARE_STATIC_KEY_FALSE(cr_pinning);
-extern unsigned long cr4_pinned_bits;
+void native_write_cr0(unsigned long val);
 
 static inline unsigned long native_read_cr0(void)
 {
@@ -29,24 +27,6 @@ static inline unsigned long native_read_cr0(void)
 	return val;
 }
 
-static inline void native_write_cr0(unsigned long val)
-{
-	unsigned long bits_missing = 0;
-
-set_register:
-	asm volatile("mov %0,%%cr0": "+r" (val), "+m" (__force_order));
-
-	if (static_branch_likely(&cr_pinning)) {
-		if (unlikely((val & X86_CR0_WP) != X86_CR0_WP)) {
-			bits_missing = X86_CR0_WP;
-			val |= bits_missing;
-			goto set_register;
-		}
-		/* Warn after we've set the missing bits. */
-		WARN_ONCE(bits_missing, "CR0 WP bit went missing!?\n");
-	}
-}
-
 static inline unsigned long native_read_cr2(void)
 {
 	unsigned long val;
@@ -91,24 +71,7 @@ static inline unsigned long native_read_cr4(void)
 	return val;
 }
 
-static inline void native_write_cr4(unsigned long val)
-{
-	unsigned long bits_missing = 0;
-
-set_register:
-	asm volatile("mov %0,%%cr4": "+r" (val), "+m" (cr4_pinned_bits));
-
-	if (static_branch_likely(&cr_pinning)) {
-		if (unlikely((val & cr4_pinned_bits) != cr4_pinned_bits)) {
-			bits_missing = ~val & cr4_pinned_bits;
-			val |= bits_missing;
-			goto set_register;
-		}
-		/* Warn after we've set the missing bits. */
-		WARN_ONCE(bits_missing, "CR4 bits went missing: %lx!?\n",
-			  bits_missing);
-	}
-}
+void native_write_cr4(unsigned long val);
 
 #ifdef CONFIG_X86_64
 static inline unsigned long native_read_cr8(void)
arch/x86/kernel/alternative.c

@@ -625,10 +625,23 @@ extern struct paravirt_patch_site __start_parainstructions[],
  *
  * See entry_{32,64}.S for more details.
  */
-static void __init int3_magic(unsigned int *ptr)
-{
-	*ptr = 1;
-}
+
+/*
+ * We define the int3_magic() function in assembly to control the calling
+ * convention such that we can 'call' it from assembly.
+ */
+
+extern void int3_magic(unsigned int *ptr); /* defined in asm */
+
+asm (
+"	.pushsection	.init.text, \"ax\", @progbits\n"
+"	.type		int3_magic, @function\n"
+"int3_magic:\n"
+"	movl	$1, (%" _ASM_ARG1 ")\n"
+"	ret\n"
+"	.size		int3_magic, .-int3_magic\n"
+"	.popsection\n"
+);
 
 extern __initdata unsigned long int3_selftest_ip; /* defined in asm below */
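The int3_magic() conversion relies on a compiler feature worth spelling out: a top-level asm() block can define a whole function, which the compiler then cannot instrument (no KASAN hooks, no stack protector) and whose register usage is exactly what is written. A userspace sketch of the same technique, assuming x86-64 SysV ABI (first argument in %rdi); the function name here is illustrative:

/* Userspace sketch of defining a function in top-level asm so the
 * compiler cannot KASANify it; x86-64 Linux only. */
#include <stdio.h>

extern void set_flag(unsigned int *ptr);	/* defined in asm below */

asm (
"	.pushsection	.text\n"
"	.type		set_flag, @function\n"
"set_flag:\n"
"	movl	$1, (%rdi)\n"	/* first SysV argument register */
"	ret\n"
"	.size		set_flag, .-set_flag\n"
"	.popsection\n"
);

int main(void)
{
	unsigned int val = 0;

	set_flag(&val);
	printf("val = %u\n", val);	/* prints 1 */
	return 0;
}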
@@ -676,7 +689,9 @@ static void __init int3_selftest(void)
 		      "int3_selftest_ip:\n\t"
 		      __ASM_SEL(.long, .quad) " 1b\n\t"
 		      ".popsection\n\t"
-		      : : __ASM_SEL_RAW(a, D) (&val) : "memory");
+		      : ASM_CALL_CONSTRAINT
+		      : __ASM_SEL_RAW(a, D) (&val)
+		      : "memory");
 
 	BUG_ON(val != 1);
 
arch/x86/kernel/cpu/common.c

@@ -366,10 +366,62 @@ out:
 	cr4_clear_bits(X86_CR4_UMIP);
 }
 
-DEFINE_STATIC_KEY_FALSE_RO(cr_pinning);
-EXPORT_SYMBOL(cr_pinning);
-unsigned long cr4_pinned_bits __ro_after_init;
-EXPORT_SYMBOL(cr4_pinned_bits);
+static DEFINE_STATIC_KEY_FALSE_RO(cr_pinning);
+static unsigned long cr4_pinned_bits __ro_after_init;
+
+void native_write_cr0(unsigned long val)
+{
+	unsigned long bits_missing = 0;
+
+set_register:
+	asm volatile("mov %0,%%cr0": "+r" (val), "+m" (__force_order));
+
+	if (static_branch_likely(&cr_pinning)) {
+		if (unlikely((val & X86_CR0_WP) != X86_CR0_WP)) {
+			bits_missing = X86_CR0_WP;
+			val |= bits_missing;
+			goto set_register;
+		}
+		/* Warn after we've set the missing bits. */
+		WARN_ONCE(bits_missing, "CR0 WP bit went missing!?\n");
+	}
+}
+EXPORT_SYMBOL(native_write_cr0);
+
+void native_write_cr4(unsigned long val)
+{
+	unsigned long bits_missing = 0;
+
+set_register:
+	asm volatile("mov %0,%%cr4": "+r" (val), "+m" (cr4_pinned_bits));
+
+	if (static_branch_likely(&cr_pinning)) {
+		if (unlikely((val & cr4_pinned_bits) != cr4_pinned_bits)) {
+			bits_missing = ~val & cr4_pinned_bits;
+			val |= bits_missing;
+			goto set_register;
+		}
+		/* Warn after we've set the missing bits. */
+		WARN_ONCE(bits_missing, "CR4 bits went missing: %lx!?\n",
+			  bits_missing);
+	}
+}
+EXPORT_SYMBOL(native_write_cr4);
+
+void cr4_init(void)
+{
+	unsigned long cr4 = __read_cr4();
+
+	if (boot_cpu_has(X86_FEATURE_PCID))
+		cr4 |= X86_CR4_PCIDE;
+	if (static_branch_likely(&cr_pinning))
+		cr4 |= cr4_pinned_bits;
+
+	__write_cr4(cr4);
+
+	/* Initialize cr4 shadow for this CPU. */
+	this_cpu_write(cpu_tlbstate.cr4, cr4);
+}
 
 /*
  * Once CPU feature detection is finished (and boot params have been
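The write-and-retry shape of native_write_cr0/4() above is a deliberate hardening pattern: commit the value first, then check it, force any pinned bits that went missing back on, and only warn once the register is safe again. A self-contained sketch of the same control flow, with a plain variable standing in for the privileged register write (all names illustrative):

/* Userspace sketch of the pinning pattern centralized in
 * native_write_cr0/4(): clearing a pinned bit is undone before the
 * value sticks, then reported. */
#include <stdio.h>

static unsigned long pinned_bits = 0x1 | 0x4;	/* stand-ins for CR4 bits */
static unsigned long fake_cr4;

static void write_cr4_pinned(unsigned long val)
{
	unsigned long bits_missing = 0;

set_register:
	fake_cr4 = val;			/* stands in for mov %0,%%cr4 */

	if ((val & pinned_bits) != pinned_bits) {
		bits_missing = ~val & pinned_bits;
		val |= bits_missing;
		goto set_register;
	}
	/* Warn only after the missing bits have been restored. */
	if (bits_missing)
		fprintf(stderr, "bits went missing: %#lx!?\n", bits_missing);
}

int main(void)
{
	write_cr4_pinned(0x2);			/* tries to drop both pins */
	printf("fake_cr4 = %#lx\n", fake_cr4);	/* 0x7: pins restored */
	return 0;
}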
@@ -1723,12 +1775,6 @@ void cpu_init(void)
 
 	wait_for_master_cpu(cpu);
 
-	/*
-	 * Initialize the CR4 shadow before doing anything that could
-	 * try to read it.
-	 */
-	cr4_init_shadow();
-
 	if (cpu)
 		load_ucode_ap();
 
@@ -1823,12 +1869,6 @@ void cpu_init(void)
 
 	wait_for_master_cpu(cpu);
 
-	/*
-	 * Initialize the CR4 shadow before doing anything that could
-	 * try to read it.
-	 */
-	cr4_init_shadow();
-
 	show_ucode_info_early();
 
 	pr_info("Initializing CPU#%d\n", cpu);
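Both cpu_init() hunks can drop the explicit cr4_init_shadow() call because cr4_init() already writes CR4 and refreshes the per-CPU shadow earlier in each CPU's bringup path. A toy sketch of the shadowing idea, with plain variables standing in for the real register and the kernel's per-CPU tlbstate:

/* Toy sketch of register shadowing: keep a cheap-to-read copy next to
 * the (expensive, serializing) real register and update both in one
 * place, as cr4_init() now does for CR4 and cpu_tlbstate.cr4. */
#include <stdio.h>

static unsigned long hw_cr4;		/* stands in for the real %cr4 */
static unsigned long cr4_shadow;	/* per-CPU copy in the kernel */

static void write_cr4_shadowed(unsigned long val)
{
	hw_cr4 = val;			/* the real mov to %cr4 */
	cr4_shadow = val;		/* keep the shadow coherent */
}

static unsigned long read_cr4_fast(void)
{
	return cr4_shadow;		/* no hardware access needed */
}

int main(void)
{
	write_cr4_shadowed(0x620);
	printf("cr4 = %#lx\n", read_cr4_fast());
	return 0;
}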
arch/x86/kernel/smpboot.c

@@ -210,28 +210,16 @@ static int enable_start_cpu0;
  */
 static void notrace start_secondary(void *unused)
 {
-	unsigned long cr4 = __read_cr4();
-
 	/*
 	 * Don't put *anything* except direct CPU state initialization
 	 * before cpu_init(), SMP booting is too fragile that we want to
 	 * limit the things done here to the most necessary things.
 	 */
-	if (boot_cpu_has(X86_FEATURE_PCID))
-		cr4 |= X86_CR4_PCIDE;
-	if (static_branch_likely(&cr_pinning))
-		cr4 |= cr4_pinned_bits;
-
-	__write_cr4(cr4);
+	cr4_init();
 
 #ifdef CONFIG_X86_32
 	/* switch away from the initial page table */
 	load_cr3(swapper_pg_dir);
-	/*
-	 * Initialize the CR4 shadow before doing anything that could
-	 * try to read it.
-	 */
-	cr4_init_shadow();
 	__flush_tlb_all();
 #endif
 	load_current_idt();
arch/x86/kernel/stacktrace.c

@@ -129,11 +129,9 @@ void arch_stack_walk_user(stack_trace_consume_fn consume_entry, void *cookie,
 			break;
 		if ((unsigned long)fp < regs->sp)
 			break;
-		if (frame.ret_addr) {
-			if (!consume_entry(cookie, frame.ret_addr, false))
-				return;
-		}
-		if (fp == frame.next_fp)
+		if (!frame.ret_addr)
+			break;
+		if (!consume_entry(cookie, frame.ret_addr, false))
 			break;
 		fp = frame.next_fp;
 	}
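The arch_stack_walk_user() change is small but the failure mode is subtle: a frame whose saved return address is 0 used to be skipped rather than treated as the end of the stack, so crafted frame pointers forming a cycle could spin the walker forever. A userspace sketch of the fixed termination rule; the struct and names are illustrative, loosely mirroring the kernel's stack_frame_user:

/* The two-node cycle below defeats a naive 'fp == frame.next_fp'
 * self-reference check but is caught immediately by the new
 * '!frame.ret_addr' test, which treats a zero return address as the
 * end of the user stack. */
#include <stdio.h>

struct frame {
	struct frame	*next_fp;
	unsigned long	ret_addr;
};

static void walk(struct frame *fp)
{
	while (fp) {
		struct frame cur = *fp;

		if (!cur.ret_addr)	/* the fix: 0 means end of stack */
			break;
		printf("ret = %#lx\n", cur.ret_addr);
		fp = cur.next_fp;
	}
}

int main(void)
{
	struct frame a, b;

	a = (struct frame){ .next_fp = &b, .ret_addr = 0x401000 };
	b = (struct frame){ .next_fp = &a, .ret_addr = 0 };

	walk(&a);	/* prints one entry, then terminates */
	return 0;
}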
arch/x86/kernel/vmlinux.lds.S

@@ -141,10 +141,10 @@ SECTIONS
 		*(.text.__x86.indirect_thunk)
 		__indirect_thunk_end = .;
 #endif
+	} :text = 0x9090
 
-		/* End of text section */
-		_etext = .;
-	} :text = 0x9090
+	/* End of text section */
+	_etext = .;
 
 	NOTES :text :note
arch/x86/xen/smp_pv.c

@@ -58,6 +58,7 @@ static void cpu_bringup(void)
 {
 	int cpu;
 
+	cr4_init();
 	cpu_init();
 	touch_softlockup_watchdog();
 	preempt_disable();