MIPS: Optimize TLB refill for RI/XI configurations.

We don't have to do a separate shift to eliminate the software bits; just rotate them into the EntryLo fill bits, where the hardware ignores them (see the sketch below the sign-offs).

Signed-off-by: David Daney <ddaney@caviumnetworks.com>
Cc: linux-mips@linux-mips.org
Patchwork: https://patchwork.linux-mips.org/patch/4294/
Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
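
For illustration, here is a minimal user-space C sketch of the bit-level equivalence this change relies on. The *_SHIFT values and the hardware mask are simplified assumptions chosen for the demo, not the kernel's actual pgtable-bits.h or EntryLo layout; only the ordering (software bits below _PAGE_NO_EXEC, then _PAGE_NO_READ, then _PAGE_GLOBAL) matters. It checks that dropping the leading SRL and rotating by ilog2(_PAGE_GLOBAL) alone yields the same value in every bit the TLB actually latches, because the leftover software bits land in the ignored fill field.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/*
 * Illustrative bit positions (assumptions for this demo, not the kernel's
 * real layout); only the ordering  software bits < NO_EXEC < NO_READ <
 * GLOBAL  matters.
 */
#define NO_EXEC_SHIFT 6
#define NO_READ_SHIFT 7
#define GLOBAL_SHIFT  8

static uint64_t rotr64(uint64_t x, unsigned int n)
{
	/* n is always 1..63 here, so both shifts are well defined */
	return (x >> n) | (x << (64 - n));
}

/* Old sequence: SRL to discard the software bits, then ROTR to move
 * NO_EXEC/NO_READ into the top (XI/RI) bits of EntryLo. */
static uint64_t convert_old(uint64_t pte)
{
	pte >>= NO_EXEC_SHIFT;                            /* UASM_i_SRL  */
	return rotr64(pte, GLOBAL_SHIFT - NO_EXEC_SHIFT); /* UASM_i_ROTR */
}

/* New sequence: a single ROTR; the software bits end up in the fill
 * field, which the hardware ignores, so they need not be cleared. */
static uint64_t convert_new(uint64_t pte)
{
	return rotr64(pte, GLOBAL_SHIFT);                 /* UASM_i_ROTR */
}

int main(void)
{
	/*
	 * Bits the TLB actually latches in this simplified model: RI (63)
	 * and XI (62) at the top, G/V/D/C/PFN at the bottom; the fill bits
	 * in between are ignored on a TLB write.
	 */
	const uint64_t hw_mask = (3ULL << 62) |
				 ((1ULL << (64 - GLOBAL_SHIFT)) - 1);
	uint64_t pte;

	for (pte = 0; pte < (1ULL << 16); pte++)
		assert((convert_old(pte) & hw_mask) ==
		       (convert_new(pte) & hw_mask));

	printf("old and new conversions agree on all hardware-visible bits\n");
	return 0;
}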
David Daney 2012-08-23 10:02:03 -07:00, committed by Ralf Baechle
Parent 981ef0de49
Commit 748e787eb6
1 changed file: 7 additions and 16 deletions


--- a/arch/mips/mm/tlbex.c
+++ b/arch/mips/mm/tlbex.c
@@ -599,8 +599,7 @@ static __cpuinit __maybe_unused void build_convert_pte_to_entrylo(u32 **p,
 						  unsigned int reg)
 {
 	if (cpu_has_rixi) {
-		UASM_i_SRL(p, reg, reg, ilog2(_PAGE_NO_EXEC));
-		UASM_i_ROTR(p, reg, reg, ilog2(_PAGE_GLOBAL) - ilog2(_PAGE_NO_EXEC));
+		UASM_i_ROTR(p, reg, reg, ilog2(_PAGE_GLOBAL));
 	} else {
 #ifdef CONFIG_64BIT_PHYS_ADDR
 		uasm_i_dsrl_safe(p, reg, reg, ilog2(_PAGE_GLOBAL));
@@ -1019,11 +1018,9 @@ static void __cpuinit build_update_entries(u32 **p, unsigned int tmp,
 	uasm_i_ld(p, tmp, 0, ptep); /* get even pte */
 	uasm_i_ld(p, ptep, sizeof(pte_t), ptep); /* get odd pte */
 	if (cpu_has_rixi) {
-		UASM_i_SRL(p, tmp, tmp, ilog2(_PAGE_NO_EXEC));
-		UASM_i_SRL(p, ptep, ptep, ilog2(_PAGE_NO_EXEC));
-		UASM_i_ROTR(p, tmp, tmp, ilog2(_PAGE_GLOBAL) - ilog2(_PAGE_NO_EXEC));
+		UASM_i_ROTR(p, tmp, tmp, ilog2(_PAGE_GLOBAL));
 		UASM_i_MTC0(p, tmp, C0_ENTRYLO0); /* load it */
-		UASM_i_ROTR(p, ptep, ptep, ilog2(_PAGE_GLOBAL) - ilog2(_PAGE_NO_EXEC));
+		UASM_i_ROTR(p, ptep, ptep, ilog2(_PAGE_GLOBAL));
 	} else {
 		uasm_i_dsrl_safe(p, tmp, tmp, ilog2(_PAGE_GLOBAL)); /* convert to entrylo0 */
 		UASM_i_MTC0(p, tmp, C0_ENTRYLO0); /* load it */
@@ -1046,13 +1043,11 @@ static void __cpuinit build_update_entries(u32 **p, unsigned int tmp,
 	if (r45k_bvahwbug())
 		build_tlb_probe_entry(p);
 	if (cpu_has_rixi) {
-		UASM_i_SRL(p, tmp, tmp, ilog2(_PAGE_NO_EXEC));
-		UASM_i_SRL(p, ptep, ptep, ilog2(_PAGE_NO_EXEC));
-		UASM_i_ROTR(p, tmp, tmp, ilog2(_PAGE_GLOBAL) - ilog2(_PAGE_NO_EXEC));
+		UASM_i_ROTR(p, tmp, tmp, ilog2(_PAGE_GLOBAL));
 		if (r4k_250MHZhwbug())
 			UASM_i_MTC0(p, 0, C0_ENTRYLO0);
 		UASM_i_MTC0(p, tmp, C0_ENTRYLO0); /* load it */
-		UASM_i_ROTR(p, ptep, ptep, ilog2(_PAGE_GLOBAL) - ilog2(_PAGE_NO_EXEC));
+		UASM_i_ROTR(p, ptep, ptep, ilog2(_PAGE_GLOBAL));
 	} else {
 		UASM_i_SRL(p, tmp, tmp, ilog2(_PAGE_GLOBAL)); /* convert to entrylo0 */
 		if (r4k_250MHZhwbug())
@@ -1212,13 +1207,9 @@ build_fast_tlb_refill_handler (u32 **p, struct uasm_label **l,
 		UASM_i_LW(p, odd, sizeof(pte_t), ptr); /* get odd pte */
 	}
 	if (cpu_has_rixi) {
-		uasm_i_dsrl_safe(p, even, even, ilog2(_PAGE_NO_EXEC));
-		uasm_i_dsrl_safe(p, odd, odd, ilog2(_PAGE_NO_EXEC));
-		uasm_i_drotr(p, even, even,
-			     ilog2(_PAGE_GLOBAL) - ilog2(_PAGE_NO_EXEC));
+		uasm_i_drotr(p, even, even, ilog2(_PAGE_GLOBAL));
 		UASM_i_MTC0(p, even, C0_ENTRYLO0); /* load it */
-		uasm_i_drotr(p, odd, odd,
-			     ilog2(_PAGE_GLOBAL) - ilog2(_PAGE_NO_EXEC));
+		uasm_i_drotr(p, odd, odd, ilog2(_PAGE_GLOBAL));
 	} else {
 		uasm_i_dsrl_safe(p, even, even, ilog2(_PAGE_GLOBAL));
 		UASM_i_MTC0(p, even, C0_ENTRYLO0); /* load it */