/* arch/sparc64/kernel/ktlb.S: Kernel mapping TLB miss handling.
 *
 * Copyright (C) 1995, 1997, 2005, 2008 David S. Miller <davem@davemloft.net>
 * Copyright (C) 1996 Eddie C. Dost        (ecd@brainaid.de)
 * Copyright (C) 1996 Miguel de Icaza      (miguel@nuclecu.unam.mx)
 * Copyright (C) 1996,98,99 Jakub Jelinek  (jj@sunsite.mff.cuni.cz)
 */

#include <asm/head.h>
#include <asm/asi.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/tsb.h>

        .text
        .align          32

kvmap_itlb:
        /* g6: TAG TARGET */
        mov             TLB_TAG_ACCESS, %g4
        ldxa            [%g4] ASI_IMMU, %g4

        /* sun4v_itlb_miss branches here with the missing virtual
         * address already loaded into %g4
         */
kvmap_itlb_4v:

kvmap_itlb_nonlinear:
        /* Catch kernel NULL pointer calls.  */
        sethi           %hi(PAGE_SIZE), %g5
        cmp             %g4, %g5
        bleu,pn         %xcc, kvmap_itlb_longpath
         nop
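        /* Roughly, in C (illustrative sketch only; the missing
         * vaddr is in %g4):
         *
         *      if (vaddr <= PAGE_SIZE)
         *              goto kvmap_itlb_longpath;
         *
         * i.e. an instruction fetch through a NULL (or nearly
         * NULL) function pointer.
         */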

        KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_itlb_load)

kvmap_itlb_tsb_miss:
        sethi           %hi(LOW_OBP_ADDRESS), %g5
        cmp             %g4, %g5
        blu,pn          %xcc, kvmap_itlb_vmalloc_addr
         mov            0x1, %g5
        sllx            %g5, 32, %g5
        cmp             %g4, %g5
        blu,pn          %xcc, kvmap_itlb_obp
         nop
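        /* The dispatch above, as a C sketch:
         *
         *      if (vaddr < LOW_OBP_ADDRESS || vaddr >= (1UL << 32))
         *              goto kvmap_itlb_vmalloc_addr;   (the second
         *              case simply falls through below)
         *      else
         *              goto kvmap_itlb_obp;
         */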

kvmap_itlb_vmalloc_addr:
        KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)

        KTSB_LOCK_TAG(%g1, %g2, %g7)

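        /* KTSB_LOCK_TAG left this TSB entry marked busy.  The
         * annulled delay slot below runs only when the brgez is
         * taken (PTE bit 63, _PAGE_VALID, clear): it stores the
         * TSB_TAG_INVALID pattern, dropping the lock again.  On
         * the valid path the slot is skipped and KTSB_WRITE
         * installs the real tag and PTE.
         */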
        /* Load and check PTE.  */
        ldxa            [%g5] ASI_PHYS_USE_EC, %g5
        mov             1, %g7
        sllx            %g7, TSB_TAG_INVALID_BIT, %g7
        brgez,a,pn      %g5, kvmap_itlb_longpath
         KTSB_STORE(%g1, %g7)

        KTSB_WRITE(%g1, %g5, %g6)

        /* fallthrough to TLB load */

kvmap_itlb_load:

661:    stxa            %g5, [%g0] ASI_ITLB_DATA_IN
        retry
        .section        .sun4v_2insn_patch, "ax"
        .word           661b
        nop
        nop
        .previous

        /* For sun4v the ASI_ITLB_DATA_IN store and the retry
         * instruction get nop'd out and we get here to branch
         * to the sun4v tlb load code.  The registers are set up
         * as follows:
         *
         * %g4: vaddr
         * %g5: PTE
         * %g6: TAG
         *
         * The sun4v TLB load wants the PTE in %g3 so we fix that
         * up here.
         */
        ba,pt           %xcc, sun4v_itlb_load
         mov            %g5, %g3

kvmap_itlb_longpath:

661:    rdpr    %pstate, %g5
        wrpr    %g5, PSTATE_AG | PSTATE_MG, %pstate
        .section .sun4v_2insn_patch, "ax"
        .word   661b
        SET_GL(1)
        nop
        .previous

        rdpr    %tpc, %g5
        ba,pt   %xcc, sparc64_realfault_common
         mov    FAULT_CODE_ITLB, %g4

kvmap_itlb_obp:
        OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath)

        KTSB_LOCK_TAG(%g1, %g2, %g7)

        KTSB_WRITE(%g1, %g5, %g6)

        ba,pt           %xcc, kvmap_itlb_load
         nop

kvmap_dtlb_obp:
        OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath)

        KTSB_LOCK_TAG(%g1, %g2, %g7)

        KTSB_WRITE(%g1, %g5, %g6)

        ba,pt           %xcc, kvmap_dtlb_load
         nop

        .align          32
kvmap_dtlb_tsb4m_load:
        KTSB_LOCK_TAG(%g1, %g2, %g7)
        KTSB_WRITE(%g1, %g5, %g6)
        ba,pt           %xcc, kvmap_dtlb_load
         nop

kvmap_dtlb:
        /* %g6: TAG TARGET */
        mov             TLB_TAG_ACCESS, %g4
        ldxa            [%g4] ASI_DMMU, %g4

        /* sun4v_dtlb_miss branches here with the missing virtual
         * address already loaded into %g4
         */
kvmap_dtlb_4v:
        brgez,pn        %g4, kvmap_dtlb_nonlinear
         nop

#ifdef CONFIG_DEBUG_PAGEALLOC
        /* Index through the base page size TSB even for linear
         * mappings when using page allocation debugging.
         */
        KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#else
        /* Correct TAG_TARGET is already in %g6, check 4mb TSB.  */
        KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#endif
        /* TSB entry address left in %g1, look up linear PTE.
         * Must preserve %g1 and %g6 (TAG).
         */
kvmap_dtlb_tsb4m_miss:
        /* Clear the PAGE_OFFSET top virtual bits, shift
         * down to get PFN, and make sure PFN is in range.
         */
        sllx            %g4, 21, %g5

        /* Check to see if we know about valid memory at the 4MB
         * chunk this physical address will reside within.
         */
        srlx            %g5, 21 + 41, %g2
        brnz,pn         %g2, kvmap_dtlb_longpath
         nop
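        /* The two shifts above, in C terms (sketch; %g5 keeps
         * vaddr << 21 for the bitmap lookups below):
         *
         *      paddr = vaddr & ((1UL << 43) - 1);
         *      if (paddr >> 41)
         *              goto kvmap_dtlb_longpath;
         */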

        /* This unconditional branch and delay-slot nop get patched
         * by the sethi sequence once the bitmap is properly set up.
         */
        .globl          valid_addr_bitmap_insn
valid_addr_bitmap_insn:
        ba,pt           %xcc, 2f
         nop
        .subsection     2
        .globl          valid_addr_bitmap_patch
valid_addr_bitmap_patch:
        sethi           %hi(sparc64_valid_addr_bitmap), %g7
        or              %g7, %lo(sparc64_valid_addr_bitmap), %g7
        .previous

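        /* The bitmap test below, as a C sketch (paddr as in the
         * sketch above):
         *
         *      chunk = paddr >> 22;    (4MB chunk number)
         *      if (!(sparc64_valid_addr_bitmap[chunk >> 6] &
         *            (1UL << (chunk & 63))))
         *              goto kvmap_dtlb_longpath;
         */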
        srlx            %g5, 21 + 22, %g2
        srlx            %g2, 6, %g5
        and             %g2, 63, %g2
        sllx            %g5, 3, %g5
        ldx             [%g7 + %g5], %g5
        mov             1, %g7
        sllx            %g7, %g2, %g7
        andcc           %g5, %g7, %g0
        be,pn           %xcc, kvmap_dtlb_longpath

2:       sethi          %hi(kpte_linear_bitmap), %g2
        or              %g2, %lo(kpte_linear_bitmap), %g2

        /* Get the 256MB physical address index. */
        sllx            %g4, 21, %g5
        mov             1, %g7
        srlx            %g5, 21 + 28, %g5

        /* Don't try this at home kids... this depends upon srlx
         * only taking the low 6 bits of the shift count in %g5.
         */
        sllx            %g7, %g5, %g7

        /* Divide by 64 to get the offset into the bitmask.  */
        srlx            %g5, 6, %g5
        sllx            %g5, 3, %g5

        /* kern_linear_pte_xor[((mask & bit) ? 1 : 0)] */
        ldx             [%g2 + %g5], %g2
        andcc           %g2, %g7, %g0
        sethi           %hi(kern_linear_pte_xor), %g5
        or              %g5, %lo(kern_linear_pte_xor), %g5
        bne,a,pt        %xcc, 1f
         add            %g5, 8, %g5

1:      ldx             [%g5], %g2

        .globl          kvmap_linear_patch
kvmap_linear_patch:
        ba,pt           %xcc, kvmap_dtlb_tsb4m_load
         xor            %g2, %g4, %g5
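        /* Net effect of the sequence above, as a C sketch (index 1
         * of kern_linear_pte_xor presumably being the larger-page
         * variant selected by kpte_linear_bitmap):
         *
         *      i = paddr >> 28;        (256MB chunk number)
         *      bit = kpte_linear_bitmap[i >> 6] & (1UL << (i & 63));
         *      pte = vaddr ^ kern_linear_pte_xor[bit ? 1 : 0];
         *
         * The branch carries the .globl kvmap_linear_patch label
         * so that boot code can rewrite this path.
         */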

kvmap_dtlb_vmalloc_addr:
        KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)

        KTSB_LOCK_TAG(%g1, %g2, %g7)

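        /* Same validity-check-and-unlock sequence as in the ITLB
         * vmalloc path above.
         */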
        /* Load and check PTE.  */
        ldxa            [%g5] ASI_PHYS_USE_EC, %g5
        mov             1, %g7
        sllx            %g7, TSB_TAG_INVALID_BIT, %g7
        brgez,a,pn      %g5, kvmap_dtlb_longpath
         KTSB_STORE(%g1, %g7)

        KTSB_WRITE(%g1, %g5, %g6)

        /* fallthrough to TLB load */

kvmap_dtlb_load:

661:    stxa            %g5, [%g0] ASI_DTLB_DATA_IN     ! Reload TLB
        retry
        .section        .sun4v_2insn_patch, "ax"
        .word           661b
        nop
        nop
        .previous

        /* For sun4v the ASI_DTLB_DATA_IN store and the retry
         * instruction get nop'd out and we get here to branch
         * to the sun4v tlb load code.  The registers are set up
         * as follows:
         *
         * %g4: vaddr
         * %g5: PTE
         * %g6: TAG
         *
         * The sun4v TLB load wants the PTE in %g3 so we fix that
         * up here.
         */
        ba,pt           %xcc, sun4v_dtlb_load
         mov            %g5, %g3

#ifdef CONFIG_SPARSEMEM_VMEMMAP
kvmap_vmemmap:
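        /* %g5 holds VMEMMAP_BASE on entry (set by the caller
         * below).  Roughly:
         *
         *      pte = vmemmap_table[(vaddr - VMEMMAP_BASE) >> 22];
         *
         * i.e. one pre-built 4MB translation per vmemmap chunk.
         */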
        sub             %g4, %g5, %g5
        srlx            %g5, 22, %g5
        sethi           %hi(vmemmap_table), %g1
        sllx            %g5, 3, %g5
        or              %g1, %lo(vmemmap_table), %g1
        ba,pt           %xcc, kvmap_dtlb_load
         ldx            [%g1 + %g5], %g5
#endif

kvmap_dtlb_nonlinear:
        /* Catch kernel NULL pointer derefs.  */
        sethi           %hi(PAGE_SIZE), %g5
        cmp             %g4, %g5
        bleu,pn         %xcc, kvmap_dtlb_longpath
         nop

#ifdef CONFIG_SPARSEMEM_VMEMMAP
        /* Do not use the TSB for vmemmap.  */
        mov             (VMEMMAP_BASE >> 40), %g5
        sllx            %g5, 40, %g5
        cmp             %g4, %g5
        bgeu,pn         %xcc, kvmap_vmemmap
         nop
#endif

        KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)

kvmap_dtlb_tsbmiss:
        sethi           %hi(MODULES_VADDR), %g5
        cmp             %g4, %g5
        blu,pn          %xcc, kvmap_dtlb_longpath
         mov            (VMALLOC_END >> 40), %g5
        sllx            %g5, 40, %g5
        cmp             %g4, %g5
        bgeu,pn         %xcc, kvmap_dtlb_longpath
         nop

kvmap_check_obp:
        sethi           %hi(LOW_OBP_ADDRESS), %g5
        cmp             %g4, %g5
        blu,pn          %xcc, kvmap_dtlb_vmalloc_addr
         mov            0x1, %g5
        sllx            %g5, 32, %g5
        cmp             %g4, %g5
        blu,pn          %xcc, kvmap_dtlb_obp
         nop
        ba,pt           %xcc, kvmap_dtlb_vmalloc_addr
         nop
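        /* The whole nonlinear dispatch above, as a C sketch:
         *
         *      if (vaddr < MODULES_VADDR || vaddr >= VMALLOC_END)
         *              goto kvmap_dtlb_longpath;
         *      if (vaddr >= LOW_OBP_ADDRESS && vaddr < (1UL << 32))
         *              goto kvmap_dtlb_obp;
         *      goto kvmap_dtlb_vmalloc_addr;
         */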

kvmap_dtlb_longpath:

661:    rdpr    %pstate, %g5
        wrpr    %g5, PSTATE_AG | PSTATE_MG, %pstate
        .section .sun4v_2insn_patch, "ax"
        .word   661b
        SET_GL(1)
        ldxa            [%g0] ASI_SCRATCHPAD, %g5
        .previous

        rdpr    %tl, %g3
        cmp     %g3, 1

661:    mov     TLB_TAG_ACCESS, %g4
        ldxa    [%g4] ASI_DMMU, %g5
        .section .sun4v_2insn_patch, "ax"
        .word   661b
        ldx     [%g5 + HV_FAULT_D_ADDR_OFFSET], %g5
        nop
        .previous

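        /* %g3 still holds the trap level compared above.  At
         * TL == 1 the miss came from ordinary kernel code, so the
         * common fault path can handle it; at TL > 1 we faulted
         * inside another trap handler (a register window
         * spill/fill), and winfix_trampoline must repair %tpc
         * first.
         */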
        be,pt   %xcc, sparc64_realfault_common
         mov    FAULT_CODE_DTLB, %g4
        ba,pt   %xcc, winfix_trampoline
         nop