/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 */

#ifndef __ASM_TLB_MMU_V1_H__
#define __ASM_TLB_MMU_V1_H__

#include <asm/mmu.h>

#if defined(__ASSEMBLY__) && (CONFIG_ARC_MMU_VER == 1)

.macro TLB_WRITE_HEURISTICS

#define JH_HACK1
#undef JH_HACK2
#undef JH_HACK3

#ifdef JH_HACK3
; Calculate set index for 2-way MMU
; -avoiding use of GetIndex from MMU
;   and its unpleasant LFSR pseudo-random sequence
;
; r1 = TLBPD0 from TLB_RELOAD above
;
; -- jh_ex_way_sel not cleared on startup
;    didn't want to change setup.c
;    hence extra instruction to clean
;
; -- should be in cache since in same line
;    as r0/r1 saves above
;
	ld	r0,[jh_ex_way_sel]	; victim pointer
	and	r0,r0,1			; clean
	xor.f	r0,r0,1			; flip
	st	r0,[jh_ex_way_sel]	; store back
	asr	r0,r1,12		; get set # <<1, note bit 12=R=0
	or.nz	r0,r0,1			; set way bit
	and	r0,r0,0xff		; clean
	sr	r0,[ARC_REG_TLBINDEX]
#endif

#ifdef JH_HACK2
; JH hack #2
;  Faster than hack #1 in non-thrash case, but hard-coded for 2-way MMU
;  Slower in thrash case (where it matters) because more code is executed
;  Inefficient due to two-register paradigm of this miss handler
;
/* r1 = data TLBPD0 at this point */
	lr	r0,[eret]		/* instruction address */
	xor	r0,r0,r1		/* compare set # */
	and.f	r0,r0,0x000fe000	/* 2-way MMU mask */
	bne	88f			/* not in same set - no need to probe */

	lr	r0,[eret]		/* instruction address */
	and	r0,r0,PAGE_MASK		/* VPN of instruction address */
;	lr	r1,[ARC_REG_TLBPD0]	/* Data VPN+ASID - already in r1 from TLB_RELOAD */
	and	r1,r1,0xff		/* Data ASID */
	or	r0,r0,r1		/* Instruction address + Data ASID */

	lr	r1,[ARC_REG_TLBPD0]	/* save TLBPD0 containing data TLB */
	sr	r0,[ARC_REG_TLBPD0]	/* write instruction address to TLBPD0 */
	sr	TLBProbe, [ARC_REG_TLBCOMMAND]	/* Look for instruction */
	lr	r0,[ARC_REG_TLBINDEX]	/* r0 = index where instruction is, if at all */
	sr	r1,[ARC_REG_TLBPD0]	/* restore TLBPD0 */

	xor	r0,r0,1			/* flip bottom bit of data index */
	b.d	89f
	sr	r0,[ARC_REG_TLBINDEX]	/* and put it back */
88:
	sr	TLBGetIndex, [ARC_REG_TLBCOMMAND]
89:
#endif

#ifdef JH_HACK1
;
; Always checks whether instruction will be kicked out by dtlb miss
;
	mov_s	r3, r1			; save PD0 prepared by TLB_RELOAD in r3
	lr	r0,[eret]		/* instruction address */
	and	r0,r0,PAGE_MASK		/* VPN of instruction address */
	bmsk	r1,r3,7			/* Data ASID, bits 7-0 */
	or_s	r0,r0,r1		/* Instruction address + Data ASID */

	sr	r0,[ARC_REG_TLBPD0]	/* write instruction address to TLBPD0 */
	sr	TLBProbe, [ARC_REG_TLBCOMMAND]	/* Look for instruction */
	lr	r0,[ARC_REG_TLBINDEX]	/* r0 = index where instruction is, if at all */
	sr	r3,[ARC_REG_TLBPD0]	/* restore TLBPD0 */

	sr	TLBGetIndex, [ARC_REG_TLBCOMMAND]
	lr	r1,[ARC_REG_TLBINDEX]	/* r1 = index where MMU wants to put data */
	cmp	r0,r1			/* if no match on indices, go around */
	xor.eq	r1,r1,1			/* flip bottom bit of data index */
	sr	r1,[ARC_REG_TLBINDEX]	/* and put it back */
#endif

.endm

#endif

#endif
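
/*
 * Usage sketch (illustrative only, kept inside a comment): TLB_WRITE_HEURISTICS
 * is meant to be expanded in the data TLB miss handler, after the TLB_RELOAD
 * step the comments above refer to (so r1 still holds the data TLBPD0) and
 * before the command that commits the entry to the MMU.  The actual handler
 * lives elsewhere (e.g. tlbex.S); the ordering below is an assumption for
 * illustration, not verbatim kernel code.
 *
 *	TLB_RELOAD				; sets up TLBPD0/TLBPD1, r1 = TLBPD0
 * #if (CONFIG_ARC_MMU_VER == 1)
 *	TLB_WRITE_HEURISTICS			; override the MMU's LFSR victim choice
 * #endif
 *	sr	TLBWrite, [ARC_REG_TLBCOMMAND]	; commit the entry
 */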