powerpc: Define 32/64 bit asm macros and use them in fpu.S
These macros help in writing assembly code that works for both ppc32 and ppc64. With this we now have a common fpu.S. This takes load_up_fpu out of head_64.S.

Signed-off-by: Paul Mackerras <paulus@samba.org>
parent 6ce52e6438
commit b85a046af3
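The pattern the new macros enable — a sketch, with a hypothetical symbol my_var standing in for real users such as last_task_used_math — is common assembly that picks the right instruction form at preprocessing time:

	LOADBASE(r3, my_var)		/* @ha pair on ppc32, TOC load on ppc64 */
	LDL	r4,OFF(my_var)(r3)	/* lwz on ppc32, ld on ppc64 */
	CMPI	0,r4,0			/* cmpwi on ppc32, cmpdi on ppc64 */
	beq	1f
	STL	r4,OFF(my_var)(r3)	/* stw on ppc32, std on ppc64 */
1: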
fpu.S:
@@ -27,13 +27,9 @@
  * Load up this task's FP registers from its thread_struct,
  * enable the FPU for the current task and return to the task.
  */
-	.globl	load_up_fpu
-load_up_fpu:
+_GLOBAL(load_up_fpu)
 	mfmsr	r5
 	ori	r5,r5,MSR_FP
-#ifdef CONFIG_PPC64BRIDGE
-	clrldi	r5,r5,1			/* turn off 64-bit mode */
-#endif /* CONFIG_PPC64BRIDGE */
 	SYNC
 	MTMSRD(r5)			/* enable use of fpu now */
 	isync
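_GLOBAL() replaces the hand-written .globl/label pair here. On ppc32 it expands to essentially that pair, while the ppc64 definition must also emit the function descriptor the 64-bit ELF ABI requires — which is why the common source cannot simply keep the explicit label. A minimal sketch of the 32-bit flavour, assumed rather than quoted from the header:

	/* assumed 32-bit definition; the 64-bit one also emits a .opd
	   function descriptor entry for the symbol */
	#define _GLOBAL(n)	\
		.globl n;	\
	n: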
@@ -43,67 +39,57 @@ load_up_fpu:
  * to another.  Instead we call giveup_fpu in switch_to.
  */
 #ifndef CONFIG_SMP
-	tophys(r6,0)			/* get __pa constant */
-	addis	r3,r6,last_task_used_math@ha
-	lwz	r4,last_task_used_math@l(r3)
-	cmpwi	0,r4,0
+	LOADBASE(r3, last_task_used_math)
+	tophys(r3,r3)
+	LDL	r4,OFF(last_task_used_math)(r3)
+	CMPI	0,r4,0
 	beq	1f
-	add	r4,r4,r6
+	tophys(r4,r4)
 	addi	r4,r4,THREAD		/* want last_task_used_math->thread */
 	SAVE_32FPRS(0, r4)
 	mffs	fr0
 	stfd	fr0,THREAD_FPSCR-4(r4)
-	lwz	r5,PT_REGS(r4)
-	add	r5,r5,r6
-	lwz	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
+	LDL	r5,PT_REGS(r4)
+	tophys(r5,r5)
+	LDL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
 	li	r10,MSR_FP|MSR_FE0|MSR_FE1
 	andc	r4,r4,r10		/* disable FP for previous task */
-	stw	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
+	STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
 1:
 #endif /* CONFIG_SMP */
 	/* enable use of FP after return */
+#ifdef CONFIG_PPC32
 	mfspr	r5,SPRN_SPRG3		/* current task's THREAD (phys) */
 	lwz	r4,THREAD_FPEXC_MODE(r5)
 	ori	r9,r9,MSR_FP		/* enable FP for current */
 	or	r9,r9,r4
+#else
+	ld	r4,PACACURRENT(r13)
+	addi	r5,r4,THREAD		/* Get THREAD */
+	ld	r4,THREAD_FPEXC_MODE(r5)
+	ori	r12,r12,MSR_FP
+	or	r12,r12,r4
+	std	r12,_MSR(r1)
+#endif
 	lfd	fr0,THREAD_FPSCR-4(r5)
 	mtfsf	0xff,fr0
 	REST_32FPRS(0, r5)
 #ifndef CONFIG_SMP
 	subi	r4,r5,THREAD
-	sub	r4,r4,r6
-	stw	r4,last_task_used_math@l(r3)
+	tovirt(r4,r4)
+	STL	r4,OFF(last_task_used_math)(r3)
 #endif /* CONFIG_SMP */
 	/* restore registers and return */
 	/* we haven't used ctr or xer or lr */
 	b	fast_exception_return
 
-/*
- * FP unavailable trap from kernel - print a message, but let
- * the task use FP in the kernel until it returns to user mode.
- */
-	.globl	KernelFP
-KernelFP:
-	lwz	r3,_MSR(r1)
-	ori	r3,r3,MSR_FP
-	stw	r3,_MSR(r1)		/* enable use of FP after return */
-	lis	r3,86f@h
-	ori	r3,r3,86f@l
-	mr	r4,r2			/* current */
-	lwz	r5,_NIP(r1)
-	bl	printk
-	b	ret_from_except
-86:	.string	"floating point used in kernel (task=%p, pc=%x)\n"
-	.align	4,0
-
 /*
  * giveup_fpu(tsk)
  * Disable FP for the task given as the argument,
  * and save the floating-point registers in its thread_struct.
  * Enables the FPU for use in the kernel on return.
  */
-	.globl	giveup_fpu
-giveup_fpu:
+_GLOBAL(giveup_fpu)
 	mfmsr	r5
 	ori	r5,r5,MSR_FP
 	SYNC_601
@@ -111,23 +97,23 @@ giveup_fpu:
 	MTMSRD(r5)			/* enable use of fpu now */
 	SYNC_601
 	isync
-	cmpwi	0,r3,0
+	CMPI	0,r3,0
 	beqlr-				/* if no previous owner, done */
 	addi	r3,r3,THREAD		/* want THREAD of task */
-	lwz	r5,PT_REGS(r3)
-	cmpwi	0,r5,0
+	LDL	r5,PT_REGS(r3)
+	CMPI	0,r5,0
 	SAVE_32FPRS(0, r3)
 	mffs	fr0
 	stfd	fr0,THREAD_FPSCR-4(r3)
 	beq	1f
-	lwz	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
+	LDL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
 	li	r3,MSR_FP|MSR_FE0|MSR_FE1
 	andc	r4,r4,r3		/* disable FP for previous task */
-	stw	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
+	STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
 1:
 #ifndef CONFIG_SMP
 	li	r5,0
-	lis	r4,last_task_used_math@ha
-	stw	r5,last_task_used_math@l(r4)
+	LOADBASE(r4,last_task_used_math)
+	STL	r5,OFF(last_task_used_math)(r4)
 #endif /* CONFIG_SMP */
 	blr
head_64.S:
@@ -80,7 +80,7 @@ _stext:
 _GLOBAL(__start)
 	/* NOP this out unconditionally */
 BEGIN_FTR_SECTION
-	b	.__start_initialization_multiplatform
+	b	.__start_initialization_multiplatform
 END_FTR_SECTION(0, 1)
 #endif /* CONFIG_PPC_MULTIPLATFORM */
@@ -857,62 +857,6 @@ fp_unavailable_common:
 	bl	.kernel_fp_unavailable_exception
 	BUG_OPCODE
 
-/*
- * load_up_fpu(unused, unused, tsk)
- * Disable FP for the task which had the FPU previously,
- * and save its floating-point registers in its thread_struct.
- * Enables the FPU for use in the kernel on return.
- * On SMP we know the fpu is free, since we give it up every
- * switch (ie, no lazy save of the FP registers).
- * On entry: r13 == 'current' && last_task_used_math != 'current'
- */
-_STATIC(load_up_fpu)
-	mfmsr	r5			/* grab the current MSR */
-	ori	r5,r5,MSR_FP
-	mtmsrd	r5			/* enable use of fpu now */
-	isync
-/*
- * For SMP, we don't do lazy FPU switching because it just gets too
- * horrendously complex, especially when a task switches from one CPU
- * to another.  Instead we call giveup_fpu in switch_to.
- *
- */
-#ifndef CONFIG_SMP
-	ld	r3,last_task_used_math@got(r2)
-	ld	r4,0(r3)
-	cmpdi	0,r4,0
-	beq	1f
-	/* Save FP state to last_task_used_math's THREAD struct */
-	addi	r4,r4,THREAD
-	SAVE_32FPRS(0, r4)
-	mffs	fr0
-	stfd	fr0,THREAD_FPSCR(r4)
-	/* Disable FP for last_task_used_math */
-	ld	r5,PT_REGS(r4)
-	ld	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
-	li	r6,MSR_FP|MSR_FE0|MSR_FE1
-	andc	r4,r4,r6
-	std	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
-1:
-#endif /* CONFIG_SMP */
-	/* enable use of FP after return */
-	ld	r4,PACACURRENT(r13)
-	addi	r5,r4,THREAD		/* Get THREAD */
-	ld	r4,THREAD_FPEXC_MODE(r5)
-	ori	r12,r12,MSR_FP
-	or	r12,r12,r4
-	std	r12,_MSR(r1)
-	lfd	fr0,THREAD_FPSCR(r5)
-	mtfsf	0xff,fr0
-	REST_32FPRS(0, r5)
-#ifndef CONFIG_SMP
-	/* Update last_task_used_math to 'current' */
-	subi	r4,r5,THREAD		/* Back to 'current' */
-	std	r4,0(r3)
-#endif /* CONFIG_SMP */
-	/* restore registers and return */
-	b	fast_exception_return
-
 	.align	7
 	.globl altivec_unavailable_common
 altivec_unavailable_common:
ppc_asm.h:
@@ -103,12 +103,13 @@
 	oris	rn,rn,name##@h;	\
 	ori	rn,rn,name##@l
 
 #define LOADBASE(rn,name)	\
-	lis	rn,name@highest;	\
-	ori	rn,rn,name@higher;	\
-	rldicr	rn,rn,32,31;	\
-	oris	rn,rn,name@ha
+	.section .toc,"aw";	\
+1:	.tc name[TC],name;	\
+	.previous;		\
+	ld	rn,1b@toc(r2)
 
+#define OFF(name)	0
 
 #define SET_REG_TO_CONST(reg, value)	\
 	lis	reg,(((value)>>48)&0xFFFF); \
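The 64-bit LOADBASE no longer builds the address with the four-instruction lis/ori/rldicr/oris immediate sequence; it places the symbol in the TOC and loads the address with a single ld through the TOC pointer in r2. OFF() is 0 on 64-bit because that ld already yields the complete address. For example, LOADBASE(r3, last_task_used_math) now expands to:

	.section .toc,"aw"
1:	.tc last_task_used_math[TC],last_task_used_math
	.previous
	ld	r3,1b@toc(r2)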
@@ -123,6 +124,23 @@
 	rldicr	reg,reg,32,31;	\
 	oris	reg,reg,(label)@h;	\
 	ori	reg,reg,(label)@l;
+
+/* operations for longs and pointers */
+#define LDL	ld
+#define STL	std
+#define CMPI	cmpdi
+
+#else /* 32-bit */
+
+#define LOADBASE(rn,name)	\
+	lis	rn,name@ha
+
+#define OFF(name)	name@l
+
+/* operations for longs and pointers */
+#define LDL	lwz
+#define STL	stw
+#define CMPI	cmpwi
 #endif
 
 /* various errata or part fixups */
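On ppc32 the same macro pair collapses to the traditional @ha/@l addressing idiom, so a fpu.S sequence such as

	LOADBASE(r3, last_task_used_math)
	LDL	r4,OFF(last_task_used_math)(r3)

assembles to:

	lis	r3,last_task_used_math@ha
	lwz	r4,last_task_used_math@l(r3)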