x86: unify paravirt parts of system.h

This patch moves the i386 control register manipulation functions,
wbinvd, and clts to system.h. They are essentially the same as in
x86_64.

With this, the paravirt parts of system.h come for free on x86_64.

[ mingo@elte.hu: reintroduced the cr8 bits - needed for resume images ]

Signed-off-by: Glauber de Oliveira Costa <gcosta@redhat.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Glauber de Oliveira Costa authored 2008-01-30 13:31:08 +01:00; committed by Ingo Molnar
Parent: a6b4655258
Commit: d3ca901f94
3 changed files, 122 additions and 167 deletions
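For orientation, a hedged usage sketch (not part of the patch, name hypothetical) of why one shared set of accessors pays off: the classic TLB flush is just a CR3 round-trip, and after this unification the same source builds on 32-bit and 64-bit, while CONFIG_PARAVIRT remains free to turn read_cr3()/write_cr3() into hypervisor calls.

static inline void flush_tlb_sketch(void)
{
	/* reloading CR3 with its own value drops non-global TLB entries */
	write_cr3(read_cr3());
}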

system.h

@@ -3,6 +3,8 @@
 #include <asm/asm.h>
+#include <linux/kernel.h>
 #ifdef CONFIG_X86_32
 # include "system_32.h"
 #else
@@ -38,6 +40,8 @@ __asm__ __volatile__ ("movw %%dx,%1\n\t" \
 #define set_base(ldt, base) _set_base(((char *)&(ldt)) , (base))
 #define set_limit(ldt, limit) _set_limit(((char *)&(ldt)) , ((limit)-1))
+extern void load_gs_index(unsigned);
+
 /*
  * Load a segment. Fall back on loading the zero
  * segment if something goes wrong..
@@ -72,6 +76,112 @@ static inline unsigned long get_limit(unsigned long segment)
 		:"=r" (__limit):"r" (segment));
 	return __limit+1;
 }
+
+static inline void native_clts(void)
+{
+	asm volatile ("clts");
+}
+
+/*
+ * Volatile isn't enough to prevent the compiler from reordering the
+ * read/write functions for the control registers and messing everything up.
+ * A memory clobber would solve the problem, but would prevent reordering of
+ * all loads stores around it, which can hurt performance. Solution is to
+ * use a variable and mimic reads and writes to it to enforce serialization
+ */
+static unsigned long __force_order;
+
+static inline unsigned long native_read_cr0(void)
+{
+	unsigned long val;
+	asm volatile("mov %%cr0,%0\n\t" :"=r" (val), "=m" (__force_order));
+	return val;
+}
+
+static inline void native_write_cr0(unsigned long val)
+{
+	asm volatile("mov %0,%%cr0": :"r" (val), "m" (__force_order));
+}
+
+static inline unsigned long native_read_cr2(void)
+{
+	unsigned long val;
+	asm volatile("mov %%cr2,%0\n\t" :"=r" (val), "=m" (__force_order));
+	return val;
+}
+
+static inline void native_write_cr2(unsigned long val)
+{
+	asm volatile("mov %0,%%cr2": :"r" (val), "m" (__force_order));
+}
+
+static inline unsigned long native_read_cr3(void)
+{
+	unsigned long val;
+	asm volatile("mov %%cr3,%0\n\t" :"=r" (val), "=m" (__force_order));
+	return val;
+}
+
+static inline void native_write_cr3(unsigned long val)
+{
+	asm volatile("mov %0,%%cr3": :"r" (val), "m" (__force_order));
+}
+
+static inline unsigned long native_read_cr4(void)
+{
+	unsigned long val;
+	asm volatile("mov %%cr4,%0\n\t" :"=r" (val), "=m" (__force_order));
+	return val;
+}
+
+static inline unsigned long native_read_cr4_safe(void)
+{
+	unsigned long val;
+	/* This could fault if %cr4 does not exist. In x86_64, a cr4 always
+	 * exists, so it will never fail. */
+#ifdef CONFIG_X86_32
+	asm volatile("1: mov %%cr4, %0		\n"
+		     "2:			\n"
+		     ".section __ex_table,\"a\"	\n"
+		     ".long 1b,2b		\n"
+		     ".previous			\n"
+		     : "=r" (val), "=m" (__force_order) : "0" (0));
+#else
+	val = native_read_cr4();
+#endif
+	return val;
+}
+
+static inline void native_write_cr4(unsigned long val)
+{
+	asm volatile("mov %0,%%cr4": :"r" (val), "m" (__force_order));
+}
+
+static inline void native_wbinvd(void)
+{
+	asm volatile("wbinvd": : :"memory");
+}
+
+#ifdef CONFIG_PARAVIRT
+#include <asm/paravirt.h>
+#else
+#define read_cr0()	(native_read_cr0())
+#define write_cr0(x)	(native_write_cr0(x))
+#define read_cr2()	(native_read_cr2())
+#define write_cr2(x)	(native_write_cr2(x))
+#define read_cr3()	(native_read_cr3())
+#define write_cr3(x)	(native_write_cr3(x))
+#define read_cr4()	(native_read_cr4())
+#define read_cr4_safe()	(native_read_cr4_safe())
+#define write_cr4(x)	(native_write_cr4(x))
+#define wbinvd()	(native_wbinvd())
+
+/* Clear the 'TS' bit */
+#define clts()		(native_clts())
+
+#endif /* CONFIG_PARAVIRT */
+
+#define stts() write_cr0(8 | read_cr0())
+
 #endif /* __KERNEL__ */

 static inline void clflush(void *__p)
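A note on the __force_order trick added above, restated as a minimal standalone sketch (illustrative, not kernel code; order_token and the sketch_* names are hypothetical): each read declares a fake store to the dummy variable ("=m") and each write declares a fake load of it ("m"), so the compiler sees a data dependency between any two accessors and cannot swap them, while unrelated loads and stores stay free to move, unlike with a full "memory" clobber.

static unsigned long order_token;	/* stand-in for __force_order */

static inline unsigned long sketch_read(void)
{
	unsigned long val;
	/* pretends to write order_token: later accessors must stay later */
	asm volatile("mov %%cr0,%0" : "=r" (val), "=m" (order_token));
	return val;
}

static inline void sketch_write(unsigned long val)
{
	/* pretends to read order_token: must stay after the fake write */
	asm volatile("mov %0,%%cr0" : : "r" (val), "m" (order_token));
}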

system_32.h

@@ -1,7 +1,6 @@
 #ifndef __ASM_SYSTEM_H
 #define __ASM_SYSTEM_H
-#include <linux/kernel.h>
 #include <asm/segment.h>
 #include <asm/cpufeature.h>
 #include <asm/cmpxchg.h>
@@ -34,99 +33,6 @@ extern struct task_struct * FASTCALL(__switch_to(struct task_struct *prev, struc
 		      "2" (prev), "d" (next));	\
 } while (0)
-
-static inline void native_clts(void)
-{
-	asm volatile ("clts");
-}
-
-static inline unsigned long native_read_cr0(void)
-{
-	unsigned long val;
-	asm volatile("movl %%cr0,%0\n\t" :"=r" (val));
-	return val;
-}
-
-static inline void native_write_cr0(unsigned long val)
-{
-	asm volatile("movl %0,%%cr0": :"r" (val));
-}
-
-static inline unsigned long native_read_cr2(void)
-{
-	unsigned long val;
-	asm volatile("movl %%cr2,%0\n\t" :"=r" (val));
-	return val;
-}
-
-static inline void native_write_cr2(unsigned long val)
-{
-	asm volatile("movl %0,%%cr2": :"r" (val));
-}
-
-static inline unsigned long native_read_cr3(void)
-{
-	unsigned long val;
-	asm volatile("movl %%cr3,%0\n\t" :"=r" (val));
-	return val;
-}
-
-static inline void native_write_cr3(unsigned long val)
-{
-	asm volatile("movl %0,%%cr3": :"r" (val));
-}
-
-static inline unsigned long native_read_cr4(void)
-{
-	unsigned long val;
-	asm volatile("movl %%cr4,%0\n\t" :"=r" (val));
-	return val;
-}
-
-static inline unsigned long native_read_cr4_safe(void)
-{
-	unsigned long val;
-	/* This could fault if %cr4 does not exist */
-	asm volatile("1: movl %%cr4, %0		\n"
-		     "2:			\n"
-		     ".section __ex_table,\"a\"	\n"
-		     ".long 1b,2b		\n"
-		     ".previous			\n"
-		     : "=r" (val): "0" (0));
-	return val;
-}
-
-static inline void native_write_cr4(unsigned long val)
-{
-	asm volatile("movl %0,%%cr4": :"r" (val));
-}
-
-static inline void native_wbinvd(void)
-{
-	asm volatile("wbinvd": : :"memory");
-}
-
-#ifdef CONFIG_PARAVIRT
-#include <asm/paravirt.h>
-#else
-#define read_cr0()	(native_read_cr0())
-#define write_cr0(x)	(native_write_cr0(x))
-#define read_cr2()	(native_read_cr2())
-#define write_cr2(x)	(native_write_cr2(x))
-#define read_cr3()	(native_read_cr3())
-#define write_cr3(x)	(native_write_cr3(x))
-#define read_cr4()	(native_read_cr4())
-#define read_cr4_safe()	(native_read_cr4_safe())
-#define write_cr4(x)	(native_write_cr4(x))
-#define wbinvd()	(native_wbinvd())
-
-/* Clear the 'TS' bit */
-#define clts()		(native_clts())
-
-#endif /* CONFIG_PARAVIRT */
-
-/* Set the 'TS' bit */
-#define stts() write_cr0(8 | read_cr0())
-
 #endif /* __KERNEL__ */
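The native_read_cr4_safe() variant kept in the unified header (first file above) deserves a remark: on CPUs without %cr4 the mov faults, and the exception-table fixup turns that fault into a harmless zero return. A hedged sketch of the pattern (function name hypothetical):

unsigned long cr4_or_zero(void)
{
	unsigned long val;
	/* "0" (0) preloads the output register with 0; if the mov at
	 * label 1 faults, the fault handler finds the 1b->2b pair in
	 * __ex_table, resumes at label 2, and 0 is returned instead. */
	asm volatile("1: mov %%cr4, %0		\n"
		     "2:			\n"
		     ".section __ex_table,\"a\"	\n"
		     ".long 1b,2b		\n"
		     ".previous			\n"
		     : "=r" (val) : "0" (0));
	return val;
}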

system_64.h

@@ -1,7 +1,6 @@
 #ifndef __ASM_SYSTEM_H
 #define __ASM_SYSTEM_H
-#include <linux/kernel.h>
 #include <asm/segment.h>
 #include <asm/cmpxchg.h>
@@ -47,78 +46,6 @@
 		     [pda_pcurrent] "i" (offsetof(struct x8664_pda, pcurrent))	\
 	: "memory", "cc" __EXTRA_CLOBBER)
-
-extern void load_gs_index(unsigned);
-
-/*
- * Clear and set 'TS' bit respectively
- */
-#define clts() __asm__ __volatile__ ("clts")
-
-static inline unsigned long read_cr0(void)
-{
-	unsigned long cr0;
-	asm volatile("movq %%cr0,%0" : "=r" (cr0));
-	return cr0;
-}
-
-static inline void write_cr0(unsigned long val)
-{
-	asm volatile("movq %0,%%cr0" :: "r" (val));
-}
-
-static inline unsigned long read_cr2(void)
-{
-	unsigned long cr2;
-	asm volatile("movq %%cr2,%0" : "=r" (cr2));
-	return cr2;
-}
-
-static inline void write_cr2(unsigned long val)
-{
-	asm volatile("movq %0,%%cr2" :: "r" (val));
-}
-
-static inline unsigned long read_cr3(void)
-{
-	unsigned long cr3;
-	asm volatile("movq %%cr3,%0" : "=r" (cr3));
-	return cr3;
-}
-
-static inline void write_cr3(unsigned long val)
-{
-	asm volatile("movq %0,%%cr3" :: "r" (val) : "memory");
-}
-
-static inline unsigned long read_cr4(void)
-{
-	unsigned long cr4;
-	asm volatile("movq %%cr4,%0" : "=r" (cr4));
-	return cr4;
-}
-
-static inline void write_cr4(unsigned long val)
-{
-	asm volatile("movq %0,%%cr4" :: "r" (val) : "memory");
-}
-
-static inline unsigned long read_cr8(void)
-{
-	unsigned long cr8;
-	asm volatile("movq %%cr8,%0" : "=r" (cr8));
-	return cr8;
-}
-
-static inline void write_cr8(unsigned long val)
-{
-	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
-}
-
-#define stts() write_cr0(8 | read_cr0())
-
-#define wbinvd() \
-	__asm__ __volatile__ ("wbinvd": : :"memory")
-
 #endif /* __KERNEL__ */

 #ifdef CONFIG_SMP
@@ -148,6 +75,18 @@ static inline void write_cr8(unsigned long val)
 #define warn_if_not_ulong(x) do { unsigned long foo; (void) (&(x) == &foo); } while (0)
+
+static inline unsigned long read_cr8(void)
+{
+	unsigned long cr8;
+	asm volatile("movq %%cr8,%0" : "=r" (cr8));
+	return cr8;
+}
+
+static inline void write_cr8(unsigned long val)
+{
+	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
+}
+
 #include <linux/irqflags.h>
 #endif
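The last hunk is the cr8 reintroduction mentioned in the changelog note: on x86-64, CR8 holds the task-priority register (TPR), and the suspend/resume path saves and restores it alongside the other control registers, which is why these helpers must survive the unification. A hedged sketch of that consumer (struct and function names hypothetical):

struct cr_state {
	unsigned long cr0, cr2, cr3, cr4, cr8;
};

static void save_cr_state(struct cr_state *s)
{
	s->cr0 = read_cr0();
	s->cr2 = read_cr2();
	s->cr3 = read_cr3();
	s->cr4 = read_cr4();
	s->cr8 = read_cr8();	/* the accessor reinstated above */
}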