uboot/arch/powerpc/cpu/mpc85xx/start.S
   1/*
   2 * Copyright 2004, 2007-2012 Freescale Semiconductor, Inc.
   3 * Copyright (C) 2003  Motorola,Inc.
   4 *
   5 * SPDX-License-Identifier:     GPL-2.0+
   6 */
   7
   8/* U-Boot Startup Code for Motorola 85xx PowerPC based Embedded Boards
   9 *
  10 * The processor starts at 0xfffffffc and the code is first executed in the
  11 * last 4K page (0xfffff000-0xffffffff) in flash/rom.
  12 *
  13 */
  14
  15#include <asm-offsets.h>
  16#include <config.h>
  17#include <mpc85xx.h>
  18#include <version.h>
  19
  20#define _LINUX_CONFIG_H 1       /* avoid reading Linux autoconf.h file  */
  21
  22#include <ppc_asm.tmpl>
  23#include <ppc_defs.h>
  24
  25#include <asm/cache.h>
  26#include <asm/mmu.h>
  27
  28#undef  MSR_KERNEL
  29#define MSR_KERNEL ( MSR_ME )   /* Machine Check */
  30
  31#if defined(CONFIG_NAND_SPL) || \
  32        (defined(CONFIG_SPL_BUILD) && defined(CONFIG_SPL_INIT_MINIMAL))
  33#define MINIMAL_SPL
  34#endif
  35
  36#if !defined(CONFIG_SPL) && !defined(CONFIG_SYS_RAMBOOT) && \
  37        !defined(CONFIG_SECURE_BOOT) && !defined(CONFIG_SRIO_PCIE_BOOT_SLAVE)
  38#define NOR_BOOT
  39#endif
  40
  41/*
  42 * Set up GOT: Global Offset Table
  43 *
  44 * Use r12 to access the GOT
  45 */
  46        START_GOT
  47        GOT_ENTRY(_GOT2_TABLE_)
  48        GOT_ENTRY(_FIXUP_TABLE_)
  49
  50#ifndef MINIMAL_SPL
  51        GOT_ENTRY(_start)
  52        GOT_ENTRY(_start_of_vectors)
  53        GOT_ENTRY(_end_of_vectors)
  54        GOT_ENTRY(transfer_to_handler)
  55#endif
  56
  57        GOT_ENTRY(__init_end)
  58        GOT_ENTRY(__bss_end)
  59        GOT_ENTRY(__bss_start)
  60        END_GOT
  61
  62/*
  63 * e500 Startup -- after reset only the last 4KB of the effective
  64 * address space is mapped in the MMU L2 TLB1 Entry0. The .bootpg
  65 * section is located at THIS LAST page and basically does three
  66 * things: clear some registers, set up exception tables and
  67 * add more TLB entries for 'larger spaces' (e.g. the boot rom) to
  68 * continue the boot procedure.
  69 *
  70 * Once the boot rom is mapped by TLB entries we can proceed
  71 * with normal startup.
  72 *
  73 */
  74
  75        .section .bootpg,"ax"
  76        .globl _start_e500
  77
  78_start_e500:
  79/* Enable debug exception */
  80        li      r1,MSR_DE
  81        mtmsr   r1
  82
  83#ifdef CONFIG_SYS_FSL_ERRATUM_A004510
  84        mfspr   r3,SPRN_SVR
  85        rlwinm  r3,r3,0,0xff
  86        li      r4,CONFIG_SYS_FSL_ERRATUM_A004510_SVR_REV
  87        cmpw    r3,r4
  88        beq     1f
  89
  90#ifdef CONFIG_SYS_FSL_ERRATUM_A004510_SVR_REV2
  91        li      r4,CONFIG_SYS_FSL_ERRATUM_A004510_SVR_REV2
  92        cmpw    r3,r4
  93        beq     1f
  94#endif
  95
  96        /* Not a silicon revision affected by the erratum */
  97        li      r27,0
  98        b       2f
  99
 1001:      li      r27,1   /* Remember for later that we have the erratum */
 101        /* Erratum says set bits 55:60 to 001001 */
 102        msync
 103        isync
 104        mfspr   r3,SPRN_HDBCR0
 105        li      r4,0x48
 106        rlwimi  r3,r4,0,0x1f8
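            /*
             * The rlwimi mask 0x1f8 selects the GPR bits that correspond to
             * bits 55:60 in the erratum's 64-bit numbering, and 0x48 within
             * that field is the required 001001 pattern.
             */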
 107        mtspr   SPRN_HDBCR0,r3
 108        isync
 1092:
 110#endif
 111#ifdef CONFIG_SYS_FSL_ERRATUM_A005125
 112        msync
 113        isync
 114        mfspr   r3, SPRN_HDBCR0
 115        oris    r3, r3, 0x0080
 116        mtspr   SPRN_HDBCR0, r3
 117#endif
 118
 119
 120#if defined(CONFIG_SECURE_BOOT) && defined(CONFIG_E500MC)
 121        /* ISBC uses L2 as stack.
 122         * Disable the L2 cache here so that U-Boot can enable it later
 123         * as part of its normal flow
 124         */
 125
 126        /* Check if L2 is enabled */
 127        mfspr   r3, SPRN_L2CSR0
 128        lis     r2, L2CSR0_L2E@h
 129        ori     r2, r2, L2CSR0_L2E@l
 130        and.    r4, r3, r2
 131        beq     l2_disabled
 132
 133        mfspr r3, SPRN_L2CSR0
 134        /* Flush L2 cache */
 135        lis     r2,(L2CSR0_L2FL)@h
 136        ori     r2, r2, (L2CSR0_L2FL)@l
 137        or      r3, r2, r3
 138        sync
 139        isync
 140        mtspr   SPRN_L2CSR0,r3
 141        isync
 1421:
 143        mfspr r3, SPRN_L2CSR0
 144        and. r1, r3, r2
 145        bne 1b
 146
 147        mfspr r3, SPRN_L2CSR0
 148        lis r2, L2CSR0_L2E@h
 149        ori r2, r2, L2CSR0_L2E@l
 150        andc r4, r3, r2
 151        sync
 152        isync
 153        mtspr SPRN_L2CSR0,r4
 154        isync
 155
 156l2_disabled:
 157#endif
 158
 159/* clear registers/arrays not reset by hardware */
 160
 161        /* L1 */
 162        li      r0,2
 163        mtspr   L1CSR0,r0       /* invalidate d-cache */
 164        mtspr   L1CSR1,r0       /* invalidate i-cache */
 165
 166        mfspr   r1,DBSR
 167        mtspr   DBSR,r1         /* Clear all valid bits */
 168
 169
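            /*
             * The create_tlb{0,1}_entry macros below program one TLB entry
             * through the MAS registers and commit it with tlbwe:
             *   esel     - entry select within the chosen TLB array
             *   ts       - address space (AS) the entry translates
             *   tsize    - BOOKE_PAGESZ_* size code
             *   epn      - effective (virtual) page address
             *   wimg     - MAS2 attribute bits
             *   rpn      - low 32 bits of the real (physical) address
             *   perm     - MAS3 permission bits
             *   phy_high - upper physical address bits, written to MAS7
             *   scratch  - GPR clobbered as a temporary
             */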
 170        .macro  create_tlb1_entry esel ts tsize epn wimg rpn perm phy_high scratch
 171        lis     \scratch, FSL_BOOKE_MAS0(1, \esel, 0)@h
 172        ori     \scratch, \scratch, FSL_BOOKE_MAS0(1, \esel, 0)@l
 173        mtspr   MAS0, \scratch
 174        lis     \scratch, FSL_BOOKE_MAS1(1, 1, 0, \ts, \tsize)@h
 175        ori     \scratch, \scratch, FSL_BOOKE_MAS1(1, 1, 0, \ts, \tsize)@l
 176        mtspr   MAS1, \scratch
 177        lis     \scratch, FSL_BOOKE_MAS2(\epn, \wimg)@h
 178        ori     \scratch, \scratch, FSL_BOOKE_MAS2(\epn, \wimg)@l
 179        mtspr   MAS2, \scratch
 180        lis     \scratch, FSL_BOOKE_MAS3(\rpn, 0, \perm)@h
 181        ori     \scratch, \scratch, FSL_BOOKE_MAS3(\rpn, 0, \perm)@l
 182        mtspr   MAS3, \scratch
 183        lis     \scratch, \phy_high@h
 184        ori     \scratch, \scratch, \phy_high@l
 185        mtspr   MAS7, \scratch
 186        isync
 187        msync
 188        tlbwe
 189        isync
 190        .endm
 191
 192        .macro  create_tlb0_entry esel ts tsize epn wimg rpn perm phy_high scratch
 193        lis     \scratch, FSL_BOOKE_MAS0(0, \esel, 0)@h
 194        ori     \scratch, \scratch, FSL_BOOKE_MAS0(0, \esel, 0)@l
 195        mtspr   MAS0, \scratch
 196        lis     \scratch, FSL_BOOKE_MAS1(1, 0, 0, \ts, \tsize)@h
 197        ori     \scratch, \scratch, FSL_BOOKE_MAS1(1, 0, 0, \ts, \tsize)@l
 198        mtspr   MAS1, \scratch
 199        lis     \scratch, FSL_BOOKE_MAS2(\epn, \wimg)@h
 200        ori     \scratch, \scratch, FSL_BOOKE_MAS2(\epn, \wimg)@l
 201        mtspr   MAS2, \scratch
 202        lis     \scratch, FSL_BOOKE_MAS3(\rpn, 0, \perm)@h
 203        ori     \scratch, \scratch, FSL_BOOKE_MAS3(\rpn, 0, \perm)@l
 204        mtspr   MAS3, \scratch
 205        lis     \scratch, \phy_high@h
 206        ori     \scratch, \scratch, \phy_high@l
 207        mtspr   MAS7, \scratch
 208        isync
 209        msync
 210        tlbwe
 211        isync
 212        .endm
 213
 214        .macro  delete_tlb1_entry esel scratch
 215        lis     \scratch, FSL_BOOKE_MAS0(1, \esel, 0)@h
 216        ori     \scratch, \scratch, FSL_BOOKE_MAS0(1, \esel, 0)@l
 217        mtspr   MAS0, \scratch
 218        li      \scratch, 0
 219        mtspr   MAS1, \scratch
 220        isync
 221        msync
 222        tlbwe
 223        isync
 224        .endm
 225
 226        .macro  delete_tlb0_entry esel epn wimg scratch
 227        lis     \scratch, FSL_BOOKE_MAS0(0, \esel, 0)@h
 228        ori     \scratch, \scratch, FSL_BOOKE_MAS0(0, \esel, 0)@l
 229        mtspr   MAS0, \scratch
 230        li      \scratch, 0
 231        mtspr   MAS1, \scratch
 232        lis     \scratch, FSL_BOOKE_MAS2(\epn, \wimg)@h
 233        ori     \scratch, \scratch, FSL_BOOKE_MAS2(\epn, \wimg)@l
 234        mtspr   MAS2, \scratch
 235        isync
 236        msync
 237        tlbwe
 238        isync
 239        .endm
 240
 241/* Interrupt vectors do not fit in minimal SPL. */
 242#if !defined(MINIMAL_SPL)
 243        /* Setup interrupt vectors */
 244        lis     r1,CONFIG_SYS_MONITOR_BASE@h
 245        mtspr   IVPR,r1
 246
 247        lis     r3,(CONFIG_SYS_MONITOR_BASE & 0xffff)@h
 248        ori     r3,r3,(CONFIG_SYS_MONITOR_BASE & 0xffff)@l
 249
 250        addi    r4,r3,CriticalInput - _start + _START_OFFSET
 251        mtspr   IVOR0,r4        /* 0: Critical input */
 252        addi    r4,r3,MachineCheck - _start + _START_OFFSET
 253        mtspr   IVOR1,r4        /* 1: Machine check */
 254        addi    r4,r3,DataStorage - _start + _START_OFFSET
 255        mtspr   IVOR2,r4        /* 2: Data storage */
 256        addi    r4,r3,InstStorage - _start + _START_OFFSET
 257        mtspr   IVOR3,r4        /* 3: Instruction storage */
 258        addi    r4,r3,ExtInterrupt - _start + _START_OFFSET
 259        mtspr   IVOR4,r4        /* 4: External interrupt */
 260        addi    r4,r3,Alignment - _start + _START_OFFSET
 261        mtspr   IVOR5,r4        /* 5: Alignment */
 262        addi    r4,r3,ProgramCheck - _start + _START_OFFSET
 263        mtspr   IVOR6,r4        /* 6: Program check */
 264        addi    r4,r3,FPUnavailable - _start + _START_OFFSET
 265        mtspr   IVOR7,r4        /* 7: floating point unavailable */
 266        addi    r4,r3,SystemCall - _start + _START_OFFSET
 267        mtspr   IVOR8,r4        /* 8: System call */
 268        /* 9: Auxiliary processor unavailable(unsupported) */
 269        addi    r4,r3,Decrementer - _start + _START_OFFSET
 270        mtspr   IVOR10,r4       /* 10: Decrementer */
 271        addi    r4,r3,IntervalTimer - _start + _START_OFFSET
 272        mtspr   IVOR11,r4       /* 11: Interval timer */
 273        addi    r4,r3,WatchdogTimer - _start + _START_OFFSET
 274        mtspr   IVOR12,r4       /* 12: Watchdog timer */
 275        addi    r4,r3,DataTLBError - _start + _START_OFFSET
 276        mtspr   IVOR13,r4       /* 13: Data TLB error */
 277        addi    r4,r3,InstructionTLBError - _start + _START_OFFSET
 278        mtspr   IVOR14,r4       /* 14: Instruction TLB error */
 279        addi    r4,r3,DebugBreakpoint - _start + _START_OFFSET
 280        mtspr   IVOR15,r4       /* 15: Debug */
 281#endif
 282
 283        /* Clear and set up some registers. */
 284        li      r0,0x0000
 285        lis     r1,0xffff
 286        mtspr   DEC,r0                  /* prevent dec exceptions */
 287        mttbl   r0                      /* prevent fit & wdt exceptions */
 288        mttbu   r0
 289        mtspr   TSR,r1                  /* clear all timer exception status */
 290        mtspr   TCR,r0                  /* disable all */
 291        mtspr   ESR,r0                  /* clear exception syndrome register */
 292        mtspr   MCSR,r0                 /* machine check syndrome register */
 293        mtxer   r0                      /* clear integer exception register */
 294
 295#ifdef CONFIG_SYS_BOOK3E_HV
 296        mtspr   MAS8,r0                 /* make sure MAS8 is clear */
 297#endif
 298
 299        /* Enable Time Base and Select Time Base Clock */
 300        lis     r0,HID0_EMCP@h          /* Enable machine check */
 301#if defined(CONFIG_ENABLE_36BIT_PHYS)
 302        ori     r0,r0,HID0_ENMAS7@l     /* Enable MAS7 */
 303#endif
 304#ifndef CONFIG_E500MC
 305        ori     r0,r0,HID0_TBEN@l       /* Enable Timebase */
 306#endif
 307        mtspr   HID0,r0
 308
 309#ifndef CONFIG_E500MC
 310        li      r0,(HID1_ASTME|HID1_ABE)@l      /* Addr streaming & broadcast */
 311        mfspr   r3,PVR
 312        andi.   r3,r3, 0xff
 313        cmpwi   r3,0x50@l       /* if we are rev 5.0 or greater set MBDD */
 314        blt 1f
 315        /* Set MBDD bit also */
 316        ori r0, r0, HID1_MBDD@l
 3171:
 318        mtspr   HID1,r0
 319#endif
 320
 321#ifdef CONFIG_SYS_FSL_ERRATUM_CPU_A003999
 322        mfspr   r3,SPRN_HDBCR1
 323        oris    r3,r3,0x0100
 324        mtspr   SPRN_HDBCR1,r3
 325#endif
 326
 327        /* Enable Branch Prediction */
 328#if defined(CONFIG_BTB)
 329        lis     r0,BUCSR_ENABLE@h
 330        ori     r0,r0,BUCSR_ENABLE@l
 331        mtspr   SPRN_BUCSR,r0
 332#endif
 333
 334#if defined(CONFIG_SYS_INIT_DBCR)
 335        lis     r1,0xffff
 336        ori     r1,r1,0xffff
 337        mtspr   DBSR,r1                 /* Clear all status bits */
 338        lis     r0,CONFIG_SYS_INIT_DBCR@h       /* DBCR0[IDM] must be set */
 339        ori     r0,r0,CONFIG_SYS_INIT_DBCR@l
 340        mtspr   DBCR0,r0
 341#endif
 342
 343#ifdef CONFIG_MPC8569
 344#define CONFIG_SYS_LBC_ADDR (CONFIG_SYS_CCSRBAR_DEFAULT + 0x5000)
 345#define CONFIG_SYS_LBCR_ADDR (CONFIG_SYS_LBC_ADDR + 0xd0)
 346
 347        /* MPC8569 Rev.0 silicon needs bit 13 of LBCR set to allow the eLBC
 348         * to use an address space wider than 12 bits, and this must be done
 349         * in the 4K boot page, so we set the bit here.
 350         */
 351
 352        /* create a temp mapping TLB0[0] for LBCR  */
 353        create_tlb0_entry 0, \
 354                0, BOOKE_PAGESZ_4K, \
 355                CONFIG_SYS_LBC_ADDR, MAS2_I|MAS2_G, \
 356                CONFIG_SYS_LBC_ADDR, MAS3_SW|MAS3_SR, \
 357                0, r6
 358
 359        /* Set LBCR register */
 360        lis     r4,CONFIG_SYS_LBCR_ADDR@h
 361        ori     r4,r4,CONFIG_SYS_LBCR_ADDR@l
 362
 363        lis     r5,CONFIG_SYS_LBC_LBCR@h
 364        ori     r5,r5,CONFIG_SYS_LBC_LBCR@l
 365        stw     r5,0(r4)
 366        isync
 367
 368        /* invalidate this temp TLB */
 369        lis     r4,CONFIG_SYS_LBC_ADDR@h
 370        ori     r4,r4,CONFIG_SYS_LBC_ADDR@l
 371        tlbivax 0,r4
 372        isync
 373
 374#endif /* CONFIG_MPC8569 */
 375
 376/*
 377 * Search for the TLB that covers the code we're executing, and shrink it
 378 * so that it covers only this 4K page.  That will ensure that any other
 379 * TLB we create won't interfere with it.  We assume that the TLB exists,
 380 * which is why we don't check the Valid bit of MAS1.  We also assume
 381 * it is in TLB1.
 382 *
 383 * This is necessary, for example, when booting from the on-chip ROM,
 384 * which (oddly) creates a single 4GB TLB that covers CCSR and DDR.
 385 */
 386        bl      nexti           /* Find our address */
 387nexti:  mflr    r1              /* R1 = our PC */
 388        li      r2, 0
 389        mtspr   MAS6, r2        /* Assume the current PID and AS are 0 */
 390        isync
 391        msync
 392        tlbsx   0, r1           /* This must succeed */
 393
 394        mfspr   r14, MAS0       /* Save ESEL for later */
 395        rlwinm  r14, r14, 16, 0xfff
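            /*
             * MAS0 holds ESEL as (entry << 16), so rotating left by 16 and
             * masking with 0xfff leaves just the entry index in r14.
             */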
 396
 397        /* Set the size of the TLB to 4KB */
 398        mfspr   r3, MAS1
 399        li      r2, 0xF80
 400        andc    r3, r3, r2      /* Clear the TSIZE bits */
 401        ori     r3, r3, MAS1_TSIZE(BOOKE_PAGESZ_4K)@l
 402        oris    r3, r3, MAS1_IPROT@h
 403        mtspr   MAS1, r3
 404
 405        /*
 406         * Set the base address of the TLB to our PC.  We assume that
 407         * virtual == physical.  We also assume that MAS2_EPN == MAS3_RPN.
 408         */
 409        lis     r3, MAS2_EPN@h
 410        ori     r3, r3, MAS2_EPN@l      /* R3 = MAS2_EPN */
 411
 412        and     r1, r1, r3      /* Our PC, rounded down to the nearest page */
 413
 414        mfspr   r2, MAS2
 415        andc    r2, r2, r3
 416        or      r2, r2, r1
 417#ifdef CONFIG_SYS_FSL_ERRATUM_A004510
 418        cmpwi   r27,0
 419        beq     1f
 420        andi.   r15, r2, MAS2_I|MAS2_G /* save the old I/G for later */
 421        rlwinm  r2, r2, 0, ~MAS2_I
 422        ori     r2, r2, MAS2_G
 4231:
 424#endif
 425        mtspr   MAS2, r2        /* Set the EPN to our PC base address */
 426
 427        mfspr   r2, MAS3
 428        andc    r2, r2, r3
 429        or      r2, r2, r1
 430        mtspr   MAS3, r2        /* Set the RPN to our PC base address */
 431
 432        isync
 433        msync
 434        tlbwe
 435
 436/*
 437 * Clear out any other TLB entries that may exist, to avoid conflicts.
 438 * Our TLB entry is in r14.
 439 */
 440        li      r0, TLBIVAX_ALL | TLBIVAX_TLB0
 441        tlbivax 0, r0
 442        tlbsync
 443
 444        mfspr   r4, SPRN_TLB1CFG
 445        rlwinm  r4, r4, 0, TLBnCFG_NENTRY_MASK
 446
 447        li      r3, 0
 448        mtspr   MAS1, r3
 4491:      cmpw    r3, r14
 450        rlwinm  r5, r3, 16, MAS0_ESEL_MSK
 451        addi    r3, r3, 1
 452        beq     2f              /* skip the entry we're executing from */
 453
 454        oris    r5, r5, MAS0_TLBSEL(1)@h
 455        mtspr   MAS0, r5
 456
 457        isync
 458        tlbwe
 459        isync
 460        msync
 461
 4622:      cmpw    r3, r4
 463        blt     1b
 464
 465#if defined(CONFIG_SYS_PPC_E500_DEBUG_TLB) && !defined(MINIMAL_SPL)
 466/*
 467 * TLB entry for debugging in AS1
 468 * Create a temporary TLB entry in AS0 to handle debug exceptions.
 469 * Since the MSR is cleared on a debug exception (i.e. the address space
 470 * switches to 0), a TLB entry in AS0 is required to handle a debug
 471 * exception generated in AS1.
 472 */
 473
 474#ifdef NOR_BOOT
 475/*
 476 * The TLB entry is created so that IVPR + IVOR15 maps to a valid opcode
 477 * address, because the flash's virtual addresses map to
 478 * 0xff800000 - 0xffffffff and this window lies outside the 4K boot window.
 479 */
 480        create_tlb1_entry CONFIG_SYS_PPC_E500_DEBUG_TLB, \
 481                0, BOOKE_PAGESZ_4M, \
 482                CONFIG_SYS_MONITOR_BASE & 0xffc00000,  MAS2_I|MAS2_G, \
 483                0xffc00000, MAS3_SX|MAS3_SW|MAS3_SR, \
 484                0, r6
 485
 486#elif !defined(CONFIG_SYS_RAMBOOT) && defined(CONFIG_SECURE_BOOT)
 487        create_tlb1_entry CONFIG_SYS_PPC_E500_DEBUG_TLB, \
 488                0, BOOKE_PAGESZ_1M, \
 489                CONFIG_SYS_MONITOR_BASE, MAS2_I|MAS2_G, \
 490                CONFIG_SYS_PBI_FLASH_WINDOW, MAS3_SX|MAS3_SW|MAS3_SR, \
 491                0, r6
 492#else
 493/*
 494 * The TLB entry is created so that IVPR + IVOR15 maps to a valid opcode
 495 * address, because "nexti" will shrink the boot TLB entry to 4K.
 496 */
 497        create_tlb1_entry CONFIG_SYS_PPC_E500_DEBUG_TLB, \
 498                0, BOOKE_PAGESZ_256K, \
 499                CONFIG_SYS_MONITOR_BASE & 0xfffc0000, MAS2_I, \
 500                CONFIG_SYS_MONITOR_BASE & 0xfffc0000, MAS3_SX|MAS3_SW|MAS3_SR, \
 501                0, r6
 502#endif
 503#endif
 504
 505/*
 506 * Relocate CCSR, if necessary.  We relocate CCSR if (obviously) the default
 507 * location is not where we want it.  This typically happens on a 36-bit
 508 * system, where we want to move CCSR to near the top of 36-bit address space.
 509 *
 510 * To move CCSR, we create two temporary TLBs, one for the old location, and
 511 * another for the new location.  On CoreNet systems, we also need to create
 512 * a special, temporary LAW.
 513 *
 514 * As a general rule, TLB0 is used for short-term TLBs, and TLB1 is used for
 515 * long-term TLBs, so we use TLB0 here.
 516 */
 517#if (CONFIG_SYS_CCSRBAR_DEFAULT != CONFIG_SYS_CCSRBAR_PHYS)
 518
 519#if !defined(CONFIG_SYS_CCSRBAR_PHYS_HIGH) || !defined(CONFIG_SYS_CCSRBAR_PHYS_LOW)
 520#error "CONFIG_SYS_CCSRBAR_PHYS_HIGH and CONFIG_SYS_CCSRBAR_PHYS_LOW must be defined."
 521#endif
 522
 523create_ccsr_new_tlb:
 524        /*
 525         * Create a TLB for the new location of CCSR.  Register R8 is reserved
 526         * for the virtual address of this TLB (CONFIG_SYS_CCSRBAR).
 527         */
 528        lis     r8, CONFIG_SYS_CCSRBAR@h
 529        ori     r8, r8, CONFIG_SYS_CCSRBAR@l
 530        lis     r9, (CONFIG_SYS_CCSRBAR + 0x1000)@h
 531        ori     r9, r9, (CONFIG_SYS_CCSRBAR + 0x1000)@l
 532        create_tlb0_entry 0, \
 533                0, BOOKE_PAGESZ_4K, \
 534                CONFIG_SYS_CCSRBAR, MAS2_I|MAS2_G, \
 535                CONFIG_SYS_CCSRBAR_PHYS_LOW, MAS3_SW|MAS3_SR, \
 536                CONFIG_SYS_CCSRBAR_PHYS_HIGH, r3
 537        /*
 538         * Create a TLB for the current location of CCSR.  Register R9 is reserved
 539         * for the virtual address of this TLB (CONFIG_SYS_CCSRBAR + 0x1000).
 540         */
 541create_ccsr_old_tlb:
 542        create_tlb0_entry 1, \
 543                0, BOOKE_PAGESZ_4K, \
 544                CONFIG_SYS_CCSRBAR + 0x1000, MAS2_I|MAS2_G, \
 545                CONFIG_SYS_CCSRBAR_DEFAULT, MAS3_SW|MAS3_SR, \
 546                0, r3 /* The default CCSR address is always a 32-bit number */
 547
 548
 549        /*
 550         * We have a TLB for what we think is the current (old) CCSR.  Let's
 551         * verify that, otherwise we won't be able to move it.
 552         * CONFIG_SYS_CCSRBAR_DEFAULT is always a 32-bit number, so we only
 553         * need to compare the lower 32 bits of CCSRBAR on CoreNet systems.
 554         */
 555verify_old_ccsr:
 556        lis     r0, CONFIG_SYS_CCSRBAR_DEFAULT@h
 557        ori     r0, r0, CONFIG_SYS_CCSRBAR_DEFAULT@l
 558#ifdef CONFIG_FSL_CORENET
 559        lwz     r1, 4(r9)               /* CCSRBARL */
 560#else
 561        lwz     r1, 0(r9)               /* CCSRBAR, shifted right by 12 */
 562        slwi    r1, r1, 12
 563#endif
 564
 565        cmpl    0, r0, r1
 566
 567        /*
 568         * If the value we read from CCSRBARL is not what we expect, then
 569         * enter an infinite loop.  This will at least allow a debugger to
 570         * halt execution and examine TLBs, etc.  There's no point in going
 571         * on.
 572         */
 573infinite_debug_loop:
 574        bne     infinite_debug_loop
 575
 576#ifdef CONFIG_FSL_CORENET
 577
 578#define CCSR_LAWBARH0   (CONFIG_SYS_CCSRBAR + 0x1000)
 579#define LAW_EN          0x80000000
 580#define LAW_SIZE_4K     0xb
 581#define CCSRBAR_LAWAR   (LAW_EN | (0x1e << 20) | LAW_SIZE_4K)
 582#define CCSRAR_C        0x80000000      /* Commit */
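    /*
     * LAWAR[SIZE] encodes a window of 2**(SIZE+1) bytes, so LAW_SIZE_4K
     * (0xb) describes a 4 KB window; the 0x1e shifted into the target ID
     * field selects the special LAW target mentioned below.
     */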
 583
 584create_temp_law:
 585        /*
 586         * On CoreNet systems, we create the temporary LAW using a special LAW
 587         * target ID of 0x1e.  LAWBARH is at offset 0xc00 in CCSR.
 588         */
 589        lis     r0, CONFIG_SYS_CCSRBAR_PHYS_HIGH@h
 590        ori     r0, r0, CONFIG_SYS_CCSRBAR_PHYS_HIGH@l
 591        lis     r1, CONFIG_SYS_CCSRBAR_PHYS_LOW@h
 592        ori     r1, r1, CONFIG_SYS_CCSRBAR_PHYS_LOW@l
 593        lis     r2, CCSRBAR_LAWAR@h
 594        ori     r2, r2, CCSRBAR_LAWAR@l
 595
 596        stw     r0, 0xc00(r9)   /* LAWBARH0 */
 597        stw     r1, 0xc04(r9)   /* LAWBARL0 */
 598        sync
 599        stw     r2, 0xc08(r9)   /* LAWAR0 */
 600
 601        /*
 602         * Read back from LAWAR to ensure the update is complete.  e500mc
 603         * cores also require an isync.
 604         */
 605        lwz     r0, 0xc08(r9)   /* LAWAR0 */
 606        isync
 607
 608        /*
 609         * Read the current CCSRBARH and CCSRBARL using load word instructions.
 610         * Follow this with an isync instruction. This forces any outstanding
 611         * accesses to configuration space to completion.
 612         */
 613read_old_ccsrbar:
 614        lwz     r0, 0(r9)       /* CCSRBARH */
 615        lwz     r0, 4(r9)       /* CCSRBARL */
 616        isync
 617
 618        /*
 619         * Write the new values for CCSRBARH and CCSRBARL to their old
 620         * locations.  The CCSRBARH has a shadow register. When the CCSRBARH
 621         * has a new value written it loads a CCSRBARH shadow register. When
 622         * the CCSRBARL is written, the CCSRBARH shadow register contents
 623         * along with the CCSRBARL value are loaded into the CCSRBARH and
 624         * CCSRBARL registers, respectively.  Follow this with a sync
 625         * instruction.
 626         */
 627write_new_ccsrbar:
 628        lis     r0, CONFIG_SYS_CCSRBAR_PHYS_HIGH@h
 629        ori     r0, r0, CONFIG_SYS_CCSRBAR_PHYS_HIGH@l
 630        lis     r1, CONFIG_SYS_CCSRBAR_PHYS_LOW@h
 631        ori     r1, r1, CONFIG_SYS_CCSRBAR_PHYS_LOW@l
 632        lis     r2, CCSRAR_C@h
 633        ori     r2, r2, CCSRAR_C@l
 634
 635        stw     r0, 0(r9)       /* Write to CCSRBARH */
 636        sync                    /* Make sure we write to CCSRBARH first */
 637        stw     r1, 4(r9)       /* Write to CCSRBARL */
 638        sync
 639
 640        /*
 641         * Write a 1 to the commit bit (C) of CCSRAR at the old location.
 642         * Follow this with a sync instruction.
 643         */
 644        stw     r2, 8(r9)
 645        sync
 646
 647        /* Delete the temporary LAW */
 648delete_temp_law:
 649        li      r1, 0
 650        stw     r1, 0xc08(r8)
 651        sync
 652        stw     r1, 0xc00(r8)
 653        stw     r1, 0xc04(r8)
 654        sync
 655
 656#else /* #ifdef CONFIG_FSL_CORENET */
 657
 658write_new_ccsrbar:
 659        /*
 660         * Read the current value of CCSRBAR using a load word instruction
 661         * followed by an isync. This forces all accesses to configuration
 662         * space to complete.
 663         */
 664        sync
 665        lwz     r0, 0(r9)
 666        isync
 667
 668/* CONFIG_SYS_CCSRBAR_PHYS right shifted by 12 */
 669#define CCSRBAR_PHYS_RS12 ((CONFIG_SYS_CCSRBAR_PHYS_HIGH << 20) | \
 670                           (CONFIG_SYS_CCSRBAR_PHYS_LOW >> 12))
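    /*
     * CONFIG_SYS_CCSRBAR_PHYS_HIGH holds the physical address bits above
     * bit 31 (e.g. 0xf on a 36-bit system) and CONFIG_SYS_CCSRBAR_PHYS_LOW
     * the lower 32 bits, so (HIGH << 20) | (LOW >> 12) is the full physical
     * address shifted right by 12, the format CCSRBAR expects here.
     */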
 671
 672        /* Write the new value to CCSRBAR. */
 673        lis     r0, CCSRBAR_PHYS_RS12@h
 674        ori     r0, r0, CCSRBAR_PHYS_RS12@l
 675        stw     r0, 0(r9)
 676        sync
 677
 678        /*
 679         * The manual says to perform a load of an address that does not
 680         * access configuration space or the on-chip SRAM using an existing TLB,
 681         * but that doesn't appear to be necessary.  We will do the isync,
 682         * though.
 683         */
 684        isync
 685
 686        /*
 687         * Read the contents of CCSRBAR from its new location, followed by
 688         * another isync.
 689         */
 690        lwz     r0, 0(r8)
 691        isync
 692
 693#endif  /* #ifdef CONFIG_FSL_CORENET */
 694
 695        /* Delete the temporary TLBs */
 696delete_temp_tlbs:
 697        delete_tlb0_entry 0, CONFIG_SYS_CCSRBAR, MAS2_I|MAS2_G, r3
 698        delete_tlb0_entry 1, CONFIG_SYS_CCSRBAR + 0x1000, MAS2_I|MAS2_G, r3
 699
 700#endif /* #if (CONFIG_SYS_CCSRBAR_DEFAULT != CONFIG_SYS_CCSRBAR_PHYS) */
 701
 702#ifdef CONFIG_SYS_FSL_QORIQ_CHASSIS2
 703create_ccsr_l2_tlb:
 704        /*
 705         * Create a TLB for the MMR location of CCSR
 706         * to access L2CSR0 register
 707         */
 708        create_tlb0_entry 0, \
 709                0, BOOKE_PAGESZ_4K, \
 710                CONFIG_SYS_CCSRBAR + 0xC20000, MAS2_I|MAS2_G, \
 711                CONFIG_SYS_CCSRBAR_PHYS_LOW + 0xC20000, MAS3_SW|MAS3_SR, \
 712                CONFIG_SYS_CCSRBAR_PHYS_HIGH, r3
 713
 714enable_l2_cluster_l2:
 715        /* enable L2 cache */
 716        lis     r3, (CONFIG_SYS_CCSRBAR + 0xC20000)@h
 717        ori     r3, r3, (CONFIG_SYS_CCSRBAR + 0xC20000)@l
 718        li      r4, 33  /* stash id */
 719        stw     r4, 4(r3)
 720        lis     r4, (L2CSR0_L2FI|L2CSR0_L2LFC)@h
 721        ori     r4, r4, (L2CSR0_L2FI|L2CSR0_L2LFC)@l
 722        sync
 723        stw     r4, 0(r3)       /* invalidate L2 */
 7241:      sync
 725        lwz     r0, 0(r3)
 726        twi     0, r0, 0
 727        isync
 728        and.    r1, r0, r4
 729        bne     1b
 730        lis     r4, (L2CSR0_L2E|L2CSR0_L2PE)@h
 731        ori     r4, r4, (L2CSR0_L2REP_MODE)@l
 732        sync
 733        stw     r4, 0(r3)       /* enable L2 */
 734delete_ccsr_l2_tlb:
 735        delete_tlb0_entry 0, CONFIG_SYS_CCSRBAR + 0xC20000, MAS2_I|MAS2_G, r3
 736#endif
 737
 738        /*
 739         * Enable the L1. On e6500, this has to be done
 740         * after the L2 is up.
 741         */
 742
 743#ifdef CONFIG_SYS_CACHE_STASHING
 744        /* set stash id to (coreID) * 2 + 32 + L1 CT (0) */
 745        li      r2,(32 + 0)
 746        mtspr   L1CSR2,r2
 747#endif
 748
 749        /* Enable/invalidate the I-Cache */
 750        lis     r2,(L1CSR1_ICFI|L1CSR1_ICLFR)@h
 751        ori     r2,r2,(L1CSR1_ICFI|L1CSR1_ICLFR)@l
 752        mtspr   SPRN_L1CSR1,r2
 7531:
 754        mfspr   r3,SPRN_L1CSR1
 755        and.    r1,r3,r2
 756        bne     1b
 757
 758        lis     r3,(L1CSR1_CPE|L1CSR1_ICE)@h
 759        ori     r3,r3,(L1CSR1_CPE|L1CSR1_ICE)@l
 760        mtspr   SPRN_L1CSR1,r3
 761        isync
 7622:
 763        mfspr   r3,SPRN_L1CSR1
 764        andi.   r1,r3,L1CSR1_ICE@l
 765        beq     2b
 766
 767        /* Enable/invalidate the D-Cache */
 768        lis     r2,(L1CSR0_DCFI|L1CSR0_DCLFR)@h
 769        ori     r2,r2,(L1CSR0_DCFI|L1CSR0_DCLFR)@l
 770        mtspr   SPRN_L1CSR0,r2
 7711:
 772        mfspr   r3,SPRN_L1CSR0
 773        and.    r1,r3,r2
 774        bne     1b
 775
 776        lis     r3,(L1CSR0_CPE|L1CSR0_DCE)@h
 777        ori     r3,r3,(L1CSR0_CPE|L1CSR0_DCE)@l
 778        mtspr   SPRN_L1CSR0,r3
 779        isync
 7802:
 781        mfspr   r3,SPRN_L1CSR0
 782        andi.   r1,r3,L1CSR0_DCE@l
 783        beq     2b
 784#ifdef CONFIG_SYS_FSL_ERRATUM_A004510
 785#define DCSR_LAWBARH0   (CONFIG_SYS_CCSRBAR + 0x1000)
 786#define LAW_SIZE_1M     0x13
 787#define DCSRBAR_LAWAR   (LAW_EN | (0x1d << 20) | LAW_SIZE_1M)
 788
 789        cmpwi   r27,0
 790        beq     9f
 791
 792        /*
 793         * Create a TLB entry for CCSR
 794         *
 795         * We're executing out of the TLB1 entry whose ESEL is in r14, and that's the only
 796         * TLB entry that exists.  To allocate some TLB entries for our
 797         * own use, flip a bit high enough that we won't flip it again
 798         * via incrementing.
 799         */
 800
 801        xori    r8, r14, 32
 802        lis     r0, MAS0_TLBSEL(1)@h
 803        rlwimi  r0, r8, 16, MAS0_ESEL_MSK
 804        lis     r1, FSL_BOOKE_MAS1(1, 1, 0, 0, BOOKE_PAGESZ_16M)@h
 805        ori     r1, r1, FSL_BOOKE_MAS1(1, 1, 0, 0, BOOKE_PAGESZ_16M)@l
 806        lis     r7, CONFIG_SYS_CCSRBAR@h
 807        ori     r7, r7, CONFIG_SYS_CCSRBAR@l
 808        ori     r2, r7, MAS2_I|MAS2_G
 809        lis     r3, FSL_BOOKE_MAS3(CONFIG_SYS_CCSRBAR_PHYS_LOW, 0, (MAS3_SW|MAS3_SR))@h
 810        ori     r3, r3, FSL_BOOKE_MAS3(CONFIG_SYS_CCSRBAR_PHYS_LOW, 0, (MAS3_SW|MAS3_SR))@l
 811        lis     r4, CONFIG_SYS_CCSRBAR_PHYS_HIGH@h
 812        ori     r4, r4, CONFIG_SYS_CCSRBAR_PHYS_HIGH@l
 813        mtspr   MAS0, r0
 814        mtspr   MAS1, r1
 815        mtspr   MAS2, r2
 816        mtspr   MAS3, r3
 817        mtspr   MAS7, r4
 818        isync
 819        tlbwe
 820        isync
 821        msync
 822
 823        /* Map DCSR temporarily to physical address zero */
 824        li      r0, 0
 825        lis     r3, DCSRBAR_LAWAR@h
 826        ori     r3, r3, DCSRBAR_LAWAR@l
 827
 828        stw     r0, 0xc00(r7)   /* LAWBARH0 */
 829        stw     r0, 0xc04(r7)   /* LAWBARL0 */
 830        sync
 831        stw     r3, 0xc08(r7)   /* LAWAR0 */
 832
 833        /* Read back from LAWAR to ensure the update is complete. */
 834        lwz     r3, 0xc08(r7)   /* LAWAR0 */
 835        isync
 836
 837        /* Create a TLB entry for DCSR at zero */
 838
 839        addi    r9, r8, 1
 840        lis     r0, MAS0_TLBSEL(1)@h
 841        rlwimi  r0, r9, 16, MAS0_ESEL_MSK
 842        lis     r1, FSL_BOOKE_MAS1(1, 1, 0, 0, BOOKE_PAGESZ_1M)@h
 843        ori     r1, r1, FSL_BOOKE_MAS1(1, 1, 0, 0, BOOKE_PAGESZ_1M)@l
 844        li      r6, 0   /* DCSR effective address */
 845        ori     r2, r6, MAS2_I|MAS2_G
 846        li      r3, MAS3_SW|MAS3_SR
 847        li      r4, 0
 848        mtspr   MAS0, r0
 849        mtspr   MAS1, r1
 850        mtspr   MAS2, r2
 851        mtspr   MAS3, r3
 852        mtspr   MAS7, r4
 853        isync
 854        tlbwe
 855        isync
 856        msync
 857
 858        /* enable the timebase */
 859#define CTBENR  0xe2084
 860        li      r3, 1
 861        addis   r4, r7, CTBENR@ha
 862        stw     r3, CTBENR@l(r4)
 863        lwz     r3, CTBENR@l(r4)
 864        twi     0,r3,0
 865        isync
 866
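            /*
             * Helper macros for the A004510 workaround: r7 holds the CCSR
             * virtual base and r6 the temporary DCSR mapping at effective
             * address 0 created above.  Each macro forms the register
             * address in r3 and the value in r4, then calls
             * erratum_set_value to perform the write from locked I-cache
             * lines.
             */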
 867        .macro  erratum_set_ccsr offset value
 868        addis   r3, r7, \offset@ha
 869        lis     r4, \value@h
 870        addi    r3, r3, \offset@l
 871        ori     r4, r4, \value@l
 872        bl      erratum_set_value
 873        .endm
 874
 875        .macro  erratum_set_dcsr offset value
 876        addis   r3, r6, \offset@ha
 877        lis     r4, \value@h
 878        addi    r3, r3, \offset@l
 879        ori     r4, r4, \value@l
 880        bl      erratum_set_value
 881        .endm
 882
 883        erratum_set_dcsr 0xb0e08 0xe0201800
 884        erratum_set_dcsr 0xb0e18 0xe0201800
 885        erratum_set_dcsr 0xb0e38 0xe0400000
 886        erratum_set_dcsr 0xb0008 0x00900000
 887        erratum_set_dcsr 0xb0e40 0xe00a0000
 888        erratum_set_ccsr 0x18600 CONFIG_SYS_FSL_CORENET_SNOOPVEC_COREONLY
 889        erratum_set_ccsr 0x10f00 0x415e5000
 890        erratum_set_ccsr 0x11f00 0x415e5000
 891
 892        /* Make temp mapping uncacheable again, if it was initially */
 893        bl      2f
 8942:      mflr    r3
 895        tlbsx   0, r3
 896        mfspr   r4, MAS2
 897        rlwimi  r4, r15, 0, MAS2_I
 898        rlwimi  r4, r15, 0, MAS2_G
 899        mtspr   MAS2, r4
 900        isync
 901        tlbwe
 902        isync
 903        msync
 904
 905        /* Clear the cache */
 906        lis     r3,(L1CSR1_ICFI|L1CSR1_ICLFR)@h
 907        ori     r3,r3,(L1CSR1_ICFI|L1CSR1_ICLFR)@l
 908        sync
 909        isync
 910        mtspr   SPRN_L1CSR1,r3
 911        isync
 9122:      sync
 913        mfspr   r4,SPRN_L1CSR1
 914        and.    r4,r4,r3
 915        bne     2b
 916
 917        lis     r3,(L1CSR1_CPE|L1CSR1_ICE)@h
 918        ori     r3,r3,(L1CSR1_CPE|L1CSR1_ICE)@l
 919        sync
 920        isync
 921        mtspr   SPRN_L1CSR1,r3
 922        isync
 9232:      sync
 924        mfspr   r4,SPRN_L1CSR1
 925        and.    r4,r4,r3
 926        beq     2b
 927
 928        /* Remove temporary mappings */
 929        lis     r0, MAS0_TLBSEL(1)@h
 930        rlwimi  r0, r9, 16, MAS0_ESEL_MSK
 931        li      r3, 0
 932        mtspr   MAS0, r0
 933        mtspr   MAS1, r3
 934        isync
 935        tlbwe
 936        isync
 937        msync
 938
 939        li      r3, 0
 940        stw     r3, 0xc08(r7)   /* LAWAR0 */
 941        lwz     r3, 0xc08(r7)
 942        isync
 943
 944        lis     r0, MAS0_TLBSEL(1)@h
 945        rlwimi  r0, r8, 16, MAS0_ESEL_MSK
 946        li      r3, 0
 947        mtspr   MAS0, r0
 948        mtspr   MAS1, r3
 949        isync
 950        tlbwe
 951        isync
 952        msync
 953
 954        b       9f
 955
 956        /* r3 = addr, r4 = value, clobbers r5, r11, r12 */
 957erratum_set_value:
 958        /* Lock two cache lines into I-Cache */
 959        sync
 960        mfspr   r11, SPRN_L1CSR1
 961        rlwinm  r11, r11, 0, ~L1CSR1_ICUL
 962        sync
 963        isync
 964        mtspr   SPRN_L1CSR1, r11
 965        isync
 966
 967        mflr    r12
 968        bl      5f
 9695:      mflr    r5
 970        addi    r5, r5, 2f - 5b
 971        icbtls  0, 0, r5
 972        addi    r5, r5, 64
 973
 974        sync
 975        mfspr   r11, SPRN_L1CSR1
 9763:      andi.   r11, r11, L1CSR1_ICUL
 977        bne     3b
 978
 979        icbtls  0, 0, r5
 980        addi    r5, r5, 64
 981
 982        sync
 983        mfspr   r11, SPRN_L1CSR1
 9843:      andi.   r11, r11, L1CSR1_ICUL
 985        bne     3b
 986
 987        b       2f
 988        .align  6
 989        /* Inside a locked cacheline, wait a while, write, then wait a while */
 9902:      sync
 991
 992        mfspr   r5, SPRN_TBRL
 993        addis   r11, r5, 0x10000@h /* wait 65536 timebase ticks */
 9944:      mfspr   r5, SPRN_TBRL
 995        subf.   r5, r5, r11
 996        bgt     4b
 997
 998        stw     r4, 0(r3)
 999
1000        mfspr   r5, SPRN_TBRL
1001        addis   r11, r5, 0x10000@h /* wait 65536 timebase ticks */
10024:      mfspr   r5, SPRN_TBRL
1003        subf.   r5, r5, r11
1004        bgt     4b
1005
1006        sync
1007
1008        /*
1009         * Fill out the rest of this cache line and the next with nops,
1010         * to ensure that nothing outside the locked area will be
1011         * fetched due to a branch.
1012         */
1013        .rept 19
1014        nop
1015        .endr
1016
1017        sync
1018        mfspr   r11, SPRN_L1CSR1
1019        rlwinm  r11, r11, 0, ~L1CSR1_ICUL
1020        sync
1021        isync
1022        mtspr   SPRN_L1CSR1, r11
1023        isync
1024
1025        mtlr    r12
1026        blr
1027
10289:
1029#endif
1030
1031create_init_ram_area:
1032        lis     r6,FSL_BOOKE_MAS0(1, 15, 0)@h
1033        ori     r6,r6,FSL_BOOKE_MAS0(1, 15, 0)@l
1034
1035#ifdef NOR_BOOT
1036        /* create a temp mapping in AS=1 to the 4M boot window */
1037        create_tlb1_entry 15, \
1038                1, BOOKE_PAGESZ_4M, \
1039                CONFIG_SYS_MONITOR_BASE & 0xffc00000, MAS2_I|MAS2_G, \
1040                0xffc00000, MAS3_SX|MAS3_SW|MAS3_SR, \
1041                0, r6
1042
1043#elif !defined(CONFIG_SYS_RAMBOOT) && defined(CONFIG_SECURE_BOOT)
1044        /* create a temp mapping in AS = 1 for the flash mapping
1045         * created by the PBL for the ISBC code
1046         */
1047        create_tlb1_entry 15, \
1048                1, BOOKE_PAGESZ_1M, \
1049                CONFIG_SYS_MONITOR_BASE & 0xfff00000, MAS2_I|MAS2_G, \
1050                CONFIG_SYS_PBI_FLASH_WINDOW & 0xfff00000, MAS3_SX|MAS3_SW|MAS3_SR, \
1051                0, r6
1052#else
1053        /*
1054         * create a temp mapping in AS=1 to the 1M CONFIG_SYS_MONITOR_BASE space;
1055         * the main image has already been relocated to CONFIG_SYS_MONITOR_BASE for this second stage.
1056         */
1057        create_tlb1_entry 15, \
1058                1, BOOKE_PAGESZ_1M, \
1059                CONFIG_SYS_MONITOR_BASE & 0xfff00000, MAS2_I|MAS2_G, \
1060                CONFIG_SYS_MONITOR_BASE & 0xfff00000, MAS3_SX|MAS3_SW|MAS3_SR, \
1061                0, r6
1062#endif
1063
1064        /* create a temp mapping in AS=1 to the stack */
1065#if defined(CONFIG_SYS_INIT_RAM_ADDR_PHYS_LOW) && \
1066    defined(CONFIG_SYS_INIT_RAM_ADDR_PHYS_HIGH)
1067        create_tlb1_entry 14, \
1068                1, BOOKE_PAGESZ_16K, \
1069                CONFIG_SYS_INIT_RAM_ADDR, 0, \
1070                CONFIG_SYS_INIT_RAM_ADDR_PHYS_LOW, MAS3_SX|MAS3_SW|MAS3_SR, \
1071                CONFIG_SYS_INIT_RAM_ADDR_PHYS_HIGH, r6
1072
1073#else
1074        create_tlb1_entry 14, \
1075                1, BOOKE_PAGESZ_16K, \
1076                CONFIG_SYS_INIT_RAM_ADDR, 0, \
1077                CONFIG_SYS_INIT_RAM_ADDR, MAS3_SX|MAS3_SW|MAS3_SR, \
1078                0, r6
1079#endif
1080
1081        lis     r6,MSR_IS|MSR_DS|MSR_DE@h
1082        ori     r6,r6,MSR_IS|MSR_DS|MSR_DE@l
1083        lis     r7,switch_as@h
1084        ori     r7,r7,switch_as@l
1085
1086        mtspr   SPRN_SRR0,r7
1087        mtspr   SPRN_SRR1,r6
1088        rfi
1089
1090switch_as:
1091/* L1 DCache is used for initial RAM */
1092
1093        /* Allocate Initial RAM in data cache.
1094         */
1095        lis     r3,CONFIG_SYS_INIT_RAM_ADDR@h
1096        ori     r3,r3,CONFIG_SYS_INIT_RAM_ADDR@l
1097        mfspr   r2, L1CFG0
1098        andi.   r2, r2, 0x1ff
1099        /* cache size * 1024 / (2 * L1 line size) */
1100        slwi    r2, r2, (10 - 1 - L1_CACHE_SHIFT)
1101        mtctr   r2
1102        li      r0,0
11031:
1104        dcbz    r0,r3
1105        dcbtls  0,r0,r3
1106        addi    r3,r3,CONFIG_SYS_CACHELINE_SIZE
1107        bdnz    1b
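            /*
             * Each dcbz establishes and zeroes one line of the init-RAM
             * region in the d-cache, and dcbtls locks it there, so the
             * initial stack has backing store before any real memory is
             * set up.
             */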
1108
1109        /* Jump out of the last 4K page and continue with the 'normal' start */
1110#if defined(CONFIG_SYS_RAMBOOT) || defined(CONFIG_SPL)
1111        /* We assume that we're already running at the address we're linked at */
1112        b       _start_cont
1113#else
1114        /* Calculate absolute address in FLASH and jump there           */
1115        /*--------------------------------------------------------------*/
1116        lis     r3,CONFIG_SYS_MONITOR_BASE@h
1117        ori     r3,r3,CONFIG_SYS_MONITOR_BASE@l
1118        addi    r3,r3,_start_cont - _start + _START_OFFSET
1119        mtlr    r3
1120        blr
1121#endif
1122
1123        .text
1124        .globl  _start
1125_start:
1126        .long   0x27051956              /* U-BOOT Magic Number */
1127        .globl  version_string
1128version_string:
1129        .ascii U_BOOT_VERSION_STRING, "\0"
1130
1131        .align  4
1132        .globl  _start_cont
1133_start_cont:
1134        /* Set up the stack in initial RAM, which could be L2-as-SRAM or L1 dcache */
1135        lis     r3,(CONFIG_SYS_INIT_RAM_ADDR)@h
1136        ori     r3,r3,((CONFIG_SYS_INIT_SP_OFFSET-16)&~0xf)@l /* Align to 16 */
1137        li      r0,0
1138        stw     r0,0(r3)        /* Terminate Back Chain */
1139        stw     r0,+4(r3)       /* NULL return address. */
1140        mr      r1,r3           /* Transfer to SP(r1) */
1141
1142        GET_GOT
1143        bl      cpu_init_early_f
1144
1145        /* switch back to AS = 0 */
1146        lis     r3,(MSR_CE|MSR_ME|MSR_DE)@h
1147        ori     r3,r3,(MSR_CE|MSR_ME|MSR_DE)@l
1148        mtmsr   r3
1149        isync
1150
1151        bl      cpu_init_f
1152        bl      board_init_f
1153        isync
1154
1155        /* NOTREACHED - board_init_f() does not return */
1156
1157#ifndef MINIMAL_SPL
1158        . = EXC_OFF_SYS_RESET
1159        .globl  _start_of_vectors
1160_start_of_vectors:
1161
1162/* Critical input. */
1163        CRIT_EXCEPTION(0x0100, CriticalInput, CritcalInputException)
1164
1165/* Machine check */
1166        MCK_EXCEPTION(0x200, MachineCheck, MachineCheckException)
1167
1168/* Data Storage exception. */
1169        STD_EXCEPTION(0x0300, DataStorage, UnknownException)
1170
1171/* Instruction Storage exception. */
1172        STD_EXCEPTION(0x0400, InstStorage, UnknownException)
1173
1174/* External Interrupt exception. */
1175        STD_EXCEPTION(0x0500, ExtInterrupt, ExtIntException)
1176
1177/* Alignment exception. */
1178        . = 0x0600
1179Alignment:
1180        EXCEPTION_PROLOG(SRR0, SRR1)
1181        mfspr   r4,DAR
1182        stw     r4,_DAR(r21)
1183        mfspr   r5,DSISR
1184        stw     r5,_DSISR(r21)
1185        addi    r3,r1,STACK_FRAME_OVERHEAD
1186        EXC_XFER_TEMPLATE(Alignment, AlignmentException, MSR_KERNEL, COPY_EE)
1187
1188/* Program check exception */
1189        . = 0x0700
1190ProgramCheck:
1191        EXCEPTION_PROLOG(SRR0, SRR1)
1192        addi    r3,r1,STACK_FRAME_OVERHEAD
1193        EXC_XFER_TEMPLATE(ProgramCheck, ProgramCheckException,
1194                MSR_KERNEL, COPY_EE)
1195
1196        /* No FPU on MPC85xx.  This exception is not supposed to happen.
1197        */
1198        STD_EXCEPTION(0x0800, FPUnavailable, UnknownException)
1199
1200        . = 0x0900
1201/*
1202 * r0 - SYSCALL number
1203 * r3-... arguments
1204 */
1205SystemCall:
1206        addis   r11,r0,0        /* get functions table addr */
1207        ori     r11,r11,0       /* Note: this code is patched in trap_init */
1208        addis   r12,r0,0        /* get number of functions */
1209        ori     r12,r12,0
1210
1211        cmplw   0,r0,r12
1212        bge     1f
1213
1214        rlwinm  r0,r0,2,0,31    /* fn_addr = fn_tbl[r0] */
1215        add     r11,r11,r0
1216        lwz     r11,0(r11)
1217
1218        li      r20,0xd00-4     /* Get stack pointer */
1219        lwz     r12,0(r20)
1220        subi    r12,r12,12      /* Adjust stack pointer */
1221        li      r0,0xc00+_end_back-SystemCall
1222        cmplw   0,r0,r12        /* Check stack overflow */
1223        bgt     1f
1224        stw     r12,0(r20)
1225
1226        mflr    r0
1227        stw     r0,0(r12)
1228        mfspr   r0,SRR0
1229        stw     r0,4(r12)
1230        mfspr   r0,SRR1
1231        stw     r0,8(r12)
1232
1233        li      r12,0xc00+_back-SystemCall
1234        mtlr    r12
1235        mtspr   SRR0,r11
1236
12371:      SYNC
1238        rfi
1239_back:
1240
1241        mfmsr   r11                     /* Disable interrupts */
1242        li      r12,0
1243        ori     r12,r12,MSR_EE
1244        andc    r11,r11,r12
1245        SYNC                            /* Some chip revs need this... */
1246        mtmsr   r11
1247        SYNC
1248
1249        li      r12,0xd00-4             /* restore regs */
1250        lwz     r12,0(r12)
1251
1252        lwz     r11,0(r12)
1253        mtlr    r11
1254        lwz     r11,4(r12)
1255        mtspr   SRR0,r11
1256        lwz     r11,8(r12)
1257        mtspr   SRR1,r11
1258
1259        addi    r12,r12,12              /* Adjust stack pointer */
1260        li      r20,0xd00-4
1261        stw     r12,0(r20)
1262
1263        SYNC
1264        rfi
1265_end_back:
1266
1267        STD_EXCEPTION(0x0a00, Decrementer, timer_interrupt)
1268        STD_EXCEPTION(0x0b00, IntervalTimer, UnknownException)
1269        STD_EXCEPTION(0x0c00, WatchdogTimer, UnknownException)
1270
1271        STD_EXCEPTION(0x0d00, DataTLBError, UnknownException)
1272        STD_EXCEPTION(0x0e00, InstructionTLBError, UnknownException)
1273
1274        CRIT_EXCEPTION(0x0f00, DebugBreakpoint, DebugException )
1275
1276        .globl  _end_of_vectors
1277_end_of_vectors:
1278
1279
1280        . = . + (0x100 - ( . & 0xff ))  /* align for debug */
1281
1282/*
1283 * This code finishes saving the registers to the exception frame
1284 * and jumps to the appropriate handler for the exception.
1285 * Register r21 is a pointer into the trap frame; r1 has the new stack pointer.
1286 */
1287        .globl  transfer_to_handler
1288transfer_to_handler:
1289        stw     r22,_NIP(r21)
1290        lis     r22,MSR_POW@h
1291        andc    r23,r23,r22
1292        stw     r23,_MSR(r21)
1293        SAVE_GPR(7, r21)
1294        SAVE_4GPRS(8, r21)
1295        SAVE_8GPRS(12, r21)
1296        SAVE_8GPRS(24, r21)
1297
1298        mflr    r23
1299        andi.   r24,r23,0x3f00          /* get vector offset */
1300        stw     r24,TRAP(r21)
1301        li      r22,0
1302        stw     r22,RESULT(r21)
1303        mtspr   SPRG2,r22               /* r1 is now kernel sp */
1304
1305        lwz     r24,0(r23)              /* virtual address of handler */
1306        lwz     r23,4(r23)              /* where to go when done */
1307        mtspr   SRR0,r24
1308        mtspr   SRR1,r20
1309        mtlr    r23
1310        SYNC
1311        rfi                             /* jump to handler, enable MMU */
1312
1313int_return:
1314        mfmsr   r28             /* Disable interrupts */
1315        li      r4,0
1316        ori     r4,r4,MSR_EE
1317        andc    r28,r28,r4
1318        SYNC                    /* Some chip revs need this... */
1319        mtmsr   r28
1320        SYNC
1321        lwz     r2,_CTR(r1)
1322        lwz     r0,_LINK(r1)
1323        mtctr   r2
1324        mtlr    r0
1325        lwz     r2,_XER(r1)
1326        lwz     r0,_CCR(r1)
1327        mtspr   XER,r2
1328        mtcrf   0xFF,r0
1329        REST_10GPRS(3, r1)
1330        REST_10GPRS(13, r1)
1331        REST_8GPRS(23, r1)
1332        REST_GPR(31, r1)
1333        lwz     r2,_NIP(r1)     /* Restore environment */
1334        lwz     r0,_MSR(r1)
1335        mtspr   SRR0,r2
1336        mtspr   SRR1,r0
1337        lwz     r0,GPR0(r1)
1338        lwz     r2,GPR2(r1)
1339        lwz     r1,GPR1(r1)
1340        SYNC
1341        rfi
1342
1343crit_return:
1344        mfmsr   r28             /* Disable interrupts */
1345        li      r4,0
1346        ori     r4,r4,MSR_EE
1347        andc    r28,r28,r4
1348        SYNC                    /* Some chip revs need this... */
1349        mtmsr   r28
1350        SYNC
1351        lwz     r2,_CTR(r1)
1352        lwz     r0,_LINK(r1)
1353        mtctr   r2
1354        mtlr    r0
1355        lwz     r2,_XER(r1)
1356        lwz     r0,_CCR(r1)
1357        mtspr   XER,r2
1358        mtcrf   0xFF,r0
1359        REST_10GPRS(3, r1)
1360        REST_10GPRS(13, r1)
1361        REST_8GPRS(23, r1)
1362        REST_GPR(31, r1)
1363        lwz     r2,_NIP(r1)     /* Restore environment */
1364        lwz     r0,_MSR(r1)
1365        mtspr   SPRN_CSRR0,r2
1366        mtspr   SPRN_CSRR1,r0
1367        lwz     r0,GPR0(r1)
1368        lwz     r2,GPR2(r1)
1369        lwz     r1,GPR1(r1)
1370        SYNC
1371        rfci
1372
1373mck_return:
1374        mfmsr   r28             /* Disable interrupts */
1375        li      r4,0
1376        ori     r4,r4,MSR_EE
1377        andc    r28,r28,r4
1378        SYNC                    /* Some chip revs need this... */
1379        mtmsr   r28
1380        SYNC
1381        lwz     r2,_CTR(r1)
1382        lwz     r0,_LINK(r1)
1383        mtctr   r2
1384        mtlr    r0
1385        lwz     r2,_XER(r1)
1386        lwz     r0,_CCR(r1)
1387        mtspr   XER,r2
1388        mtcrf   0xFF,r0
1389        REST_10GPRS(3, r1)
1390        REST_10GPRS(13, r1)
1391        REST_8GPRS(23, r1)
1392        REST_GPR(31, r1)
1393        lwz     r2,_NIP(r1)     /* Restore environment */
1394        lwz     r0,_MSR(r1)
1395        mtspr   SPRN_MCSRR0,r2
1396        mtspr   SPRN_MCSRR1,r0
1397        lwz     r0,GPR0(r1)
1398        lwz     r2,GPR2(r1)
1399        lwz     r1,GPR1(r1)
1400        SYNC
1401        rfmci
1402
1403/* Cache functions.
1404*/
1405.globl flush_icache
1406flush_icache:
1407.globl invalidate_icache
1408invalidate_icache:
1409        mfspr   r0,L1CSR1
1410        ori     r0,r0,L1CSR1_ICFI
1411        msync
1412        isync
1413        mtspr   L1CSR1,r0
1414        isync
1415        blr                             /* entire I cache */
1416
1417.globl invalidate_dcache
1418invalidate_dcache:
1419        mfspr   r0,L1CSR0
1420        ori     r0,r0,L1CSR0_DCFI
1421        msync
1422        isync
1423        mtspr   L1CSR0,r0
1424        isync
1425        blr
1426
1427        .globl  icache_enable
1428icache_enable:
1429        mflr    r8
1430        bl      invalidate_icache
1431        mtlr    r8
1432        isync
1433        mfspr   r4,L1CSR1
1434        ori     r4,r4,0x0001
1435        oris    r4,r4,0x0001
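            /* 0x00010001 = L1CSR1_CPE|L1CSR1_ICE, the same enable bits
             * used in the startup path above */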
1436        mtspr   L1CSR1,r4
1437        isync
1438        blr
1439
1440        .globl  icache_disable
1441icache_disable:
1442        mfspr   r0,L1CSR1
1443        lis     r3,0
1444        ori     r3,r3,L1CSR1_ICE
1445        andc    r0,r0,r3
1446        mtspr   L1CSR1,r0
1447        isync
1448        blr
1449
1450        .globl  icache_status
1451icache_status:
1452        mfspr   r3,L1CSR1
1453        andi.   r3,r3,L1CSR1_ICE
1454        blr
1455
1456        .globl  dcache_enable
1457dcache_enable:
1458        mflr    r8
1459        bl      invalidate_dcache
1460        mtlr    r8
1461        isync
1462        mfspr   r0,L1CSR0
1463        ori     r0,r0,0x0001
1464        oris    r0,r0,0x0001
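            /* 0x00010001 = L1CSR0_CPE|L1CSR0_DCE, matching the startup path */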
1465        msync
1466        isync
1467        mtspr   L1CSR0,r0
1468        isync
1469        blr
1470
1471        .globl  dcache_disable
1472dcache_disable:
1473        mfspr   r3,L1CSR0
1474        lis     r4,0
1475        ori     r4,r4,L1CSR0_DCE
1476        andc    r3,r3,r4
1477        mtspr   L1CSR0,r3
1478        isync
1479        blr
1480
1481        .globl  dcache_status
1482dcache_status:
1483        mfspr   r3,L1CSR0
1484        andi.   r3,r3,L1CSR0_DCE
1485        blr
1486
1487        .globl get_pir
1488get_pir:
1489        mfspr   r3,PIR
1490        blr
1491
1492        .globl get_pvr
1493get_pvr:
1494        mfspr   r3,PVR
1495        blr
1496
1497        .globl get_svr
1498get_svr:
1499        mfspr   r3,SVR
1500        blr
1501
1502        .globl wr_tcr
1503wr_tcr:
1504        mtspr   TCR,r3
1505        blr
1506
1507/*------------------------------------------------------------------------------- */
1508/* Function:     in8 */
1509/* Description:  Input 8 bits */
1510/*------------------------------------------------------------------------------- */
1511        .globl  in8
1512in8:
1513        lbz     r3,0x0000(r3)
1514        blr
1515
1516/*------------------------------------------------------------------------------- */
1517/* Function:     out8 */
1518/* Description:  Output 8 bits */
1519/*------------------------------------------------------------------------------- */
1520        .globl  out8
1521out8:
1522        stb     r4,0x0000(r3)
1523        sync
1524        blr
1525
1526/*------------------------------------------------------------------------------- */
1527/* Function:     out16 */
1528/* Description:  Output 16 bits */
1529/*------------------------------------------------------------------------------- */
1530        .globl  out16
1531out16:
1532        sth     r4,0x0000(r3)
1533        sync
1534        blr
1535
1536/*------------------------------------------------------------------------------- */
1537/* Function:     out16r */
1538/* Description:  Byte reverse and output 16 bits */
1539/*------------------------------------------------------------------------------- */
1540        .globl  out16r
1541out16r:
1542        sthbrx  r4,r0,r3
1543        sync
1544        blr
1545
1546/*------------------------------------------------------------------------------- */
1547/* Function:     out32 */
1548/* Description:  Output 32 bits */
1549/*------------------------------------------------------------------------------- */
1550        .globl  out32
1551out32:
1552        stw     r4,0x0000(r3)
1553        sync
1554        blr
1555
1556/*------------------------------------------------------------------------------- */
1557/* Function:     out32r */
1558/* Description:  Byte reverse and output 32 bits */
1559/*------------------------------------------------------------------------------- */
1560        .globl  out32r
1561out32r:
1562        stwbrx  r4,r0,r3
1563        sync
1564        blr
1565
1566/*------------------------------------------------------------------------------- */
1567/* Function:     in16 */
1568/* Description:  Input 16 bits */
1569/*------------------------------------------------------------------------------- */
1570        .globl  in16
1571in16:
1572        lhz     r3,0x0000(r3)
1573        blr
1574
1575/*------------------------------------------------------------------------------- */
1576/* Function:     in16r */
1577/* Description:  Input 16 bits and byte reverse */
1578/*------------------------------------------------------------------------------- */
1579        .globl  in16r
1580in16r:
1581        lhbrx   r3,r0,r3
1582        blr
1583
1584/*------------------------------------------------------------------------------- */
1585/* Function:     in32 */
1586/* Description:  Input 32 bits */
1587/*------------------------------------------------------------------------------- */
1588        .globl  in32
1589in32:
1590        lwz     r3,0x0000(r3)
1591        blr
1592
1593/*------------------------------------------------------------------------------- */
1594/* Function:     in32r */
1595/* Description:  Input 32 bits and byte reverse */
1596/*------------------------------------------------------------------------------- */
1597        .globl  in32r
1598in32r:
1599        lwbrx   r3,r0,r3
1600        blr
1601#endif  /* !MINIMAL_SPL */
1602
1603/*------------------------------------------------------------------------------*/
1604
1605/*
1606 * void write_tlb(mas0, mas1, mas2, mas3, mas7)
1607 */
1608        .globl  write_tlb
1609write_tlb:
1610        mtspr   MAS0,r3
1611        mtspr   MAS1,r4
1612        mtspr   MAS2,r5
1613        mtspr   MAS3,r6
1614#ifdef CONFIG_ENABLE_36BIT_PHYS
1615        mtspr   MAS7,r7
1616#endif
1617        li      r3,0
1618#ifdef CONFIG_SYS_BOOK3E_HV
1619        mtspr   MAS8,r3
1620#endif
1621        isync
1622        tlbwe
1623        msync
1624        isync
1625        blr
1626
1627/*
1628 * void relocate_code (addr_sp, gd, addr_moni)
1629 *
1630 * This "function" does not return, instead it continues in RAM
1631 * after relocating the monitor code.
1632 *
1633 * r3 = dest
1634 * r4 = src
1635 * r5 = length in bytes
1636 * r6 = cachelinesize
1637 */
1638        .globl  relocate_code
1639relocate_code:
1640        mr      r1,r3           /* Set new stack pointer                */
1641        mr      r9,r4           /* Save copy of Init Data pointer       */
1642        mr      r10,r5          /* Save copy of Destination Address     */
1643
1644        GET_GOT
1645        mr      r3,r5                           /* Destination Address  */
1646        lis     r4,CONFIG_SYS_MONITOR_BASE@h            /* Source      Address  */
1647        ori     r4,r4,CONFIG_SYS_MONITOR_BASE@l
1648        lwz     r5,GOT(__init_end)
1649        sub     r5,r5,r4
1650        li      r6,CONFIG_SYS_CACHELINE_SIZE            /* Cache Line Size      */
1651
1652        /*
1653         * Fix GOT pointer:
1654         *
1655         * New GOT-PTR = (old GOT-PTR - CONFIG_SYS_MONITOR_BASE) + Destination Address
1656         *
1657         * Offset:
1658         */
1659        sub     r15,r10,r4
1660
1661        /* First our own GOT */
1662        add     r12,r12,r15
1663        /* then the one used by the C code */
1664        add     r30,r30,r15
1665
1666        /*
1667         * Now relocate code
1668         */
1669
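        /*
         * Copy the image a word at a time.  When the destination is below
         * the source, copy forwards; otherwise copy backwards, so that
         * overlapping regions are handled correctly.  An in-place "copy"
         * (dest == src) is skipped entirely.
         */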
1670        cmplw   cr1,r3,r4
1671        addi    r0,r5,3
1672        srwi.   r0,r0,2
1673        beq     cr1,4f          /* In place copy is not necessary       */
1674        beq     7f              /* Protect against 0 count              */
1675        mtctr   r0
1676        bge     cr1,2f
1677
1678        la      r8,-4(r4)
1679        la      r7,-4(r3)
16801:      lwzu    r0,4(r8)
1681        stwu    r0,4(r7)
1682        bdnz    1b
1683        b       4f
1684
16852:      slwi    r0,r0,2
1686        add     r8,r4,r0
1687        add     r7,r3,r0
16883:      lwzu    r0,-4(r8)
1689        stwu    r0,-4(r7)
1690        bdnz    3b
1691
1692/*
1693 * Now flush the cache: note that we must start from a cache aligned
1694 * address. Otherwise we might miss one cache line.
1695 */
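/*
 * dcbst pushes each modified data cache line out to memory and icbi
 * invalidates the matching instruction cache lines, so the CPU fetches the
 * freshly copied code rather than stale instructions; the sync and isync
 * order these operations before execution continues.
 */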
16964:      cmpwi   r6,0
1697        add     r5,r3,r5
1698        beq     7f              /* No cache line size: skip flush, still sync/isync below */
1699        subi    r0,r6,1
1700        andc    r3,r3,r0
1701        mr      r4,r3
17025:      dcbst   0,r4
1703        add     r4,r4,r6
1704        cmplw   r4,r5
1705        blt     5b
1706        sync                    /* Wait for all dcbst to complete on bus */
1707        mr      r4,r3
17086:      icbi    0,r4
1709        add     r4,r4,r6
1710        cmplw   r4,r5
1711        blt     6b
17127:      sync                    /* Wait for all icbi to complete on bus */
1713        isync
1714
1715/*
1716 * We are done. Do not return, instead branch to second part of board
1717 * initialization, now running from RAM.
1718 */
1719
1720        addi    r0,r10,in_ram - _start + _START_OFFSET
1721
1722        /*
1723         * As IVPR is going to point to a RAM address,
1724         * make sure IVOR15 points at valid code so the debugger keeps working
1725         */
1726        mtspr   IVOR15,r0
1727
1728        /*
1729         * Re-point the IVPR at RAM
1730         */
1731        mtspr   IVPR,r10
1732
1733        mtlr    r0
1734        blr                             /* NEVER RETURNS! */
1735        .globl  in_ram
1736in_ram:
1737
1738        /*
1739         * Relocation Function, r12 points to got2+0x8000
1740         *
1741         * Adjust got2 pointers; no need to check for a zero count, since
1742         * this code already puts a few entries in the table.
1743         */
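        /*
         * Roughly, in C terms (an illustrative sketch only, not part of
         * the build):
         *
         *      off = runtime_table_addr - link_time_table_addr;
         *      for (i = 0; i < __got2_entries; i++)
         *              if (table[i] != 0)
         *                      table[i] += off;
         */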
1744        li      r0,__got2_entries@sectoff@l
1745        la      r3,GOT(_GOT2_TABLE_)
1746        lwz     r11,GOT(_GOT2_TABLE_)
1747        mtctr   r0
1748        sub     r11,r3,r11
1749        addi    r3,r3,-4
17501:      lwzu    r0,4(r3)
1751        cmpwi   r0,0
1752        beq-    2f
1753        add     r0,r0,r11
1754        stw     r0,0(r3)
17552:      bdnz    1b
1756
1757        /*
1758         * Now adjust the fixups and the pointers to the fixups
1759         * in case we need to move ourselves again.
1760         */
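        /*
         * Each fixup table entry points at a word holding an absolute
         * address.  The entry itself is moved up by the relocation offset
         * (r11), and the word it points to is adjusted as well unless it
         * currently holds 0.
         */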
1761        li      r0,__fixup_entries@sectoff@l
1762        lwz     r3,GOT(_FIXUP_TABLE_)
1763        cmpwi   r0,0
1764        mtctr   r0
1765        addi    r3,r3,-4
1766        beq     4f
17673:      lwzu    r4,4(r3)
1768        lwzux   r0,r4,r11
1769        cmpwi   r0,0
1770        add     r0,r0,r11
1771        stw     r4,0(r3)
1772        beq-    5f
1773        stw     r0,0(r4)
17745:      bdnz    3b
17754:
1776clear_bss:
1777        /*
1778         * Now clear BSS segment
1779         */
1780        lwz     r3,GOT(__bss_start)
1781        lwz     r4,GOT(__bss_end)
1782
1783        cmplw   0,r3,r4
1784        beq     6f
1785
1786        li      r0,0
17875:
1788        stw     r0,0(r3)
1789        addi    r3,r3,4
1790        cmplw   0,r3,r4
1791        blt     5b
17926:
1793
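        /*
         * Hand over to the C phase of initialisation: r3 carries the global
         * data pointer and r4 the relocation destination.  board_init_r()
         * does not return here.
         */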
1794        mr      r3,r9           /* Init Data pointer            */
1795        mr      r4,r10          /* Destination Address          */
1796        bl      board_init_r
1797
1798#ifndef MINIMAL_SPL
1799        /*
1800         * Copy exception vector code to low memory
1801         *
1802         * r7: source address, r8: end address,
1803         * r9: target address (the vector table starts at 0x100)
1804         */
1805        .globl  trap_init
1806trap_init:
1807        mflr    r4                      /* save link register           */
1808        GET_GOT
1809        lwz     r7,GOT(_start_of_vectors)
1810        lwz     r8,GOT(_end_of_vectors)
1811
1812        li      r9,0x100                /* vector table starts at 0x100 */
1813
1814        cmplw   0,r7,r8
1815        bgelr                           /* return if r7>=r8 - just in case */
18161:
1817        lwz     r0,0(r7)
1818        stw     r0,0(r9)
1819        addi    r7,r7,4
1820        addi    r9,r9,4
1821        cmplw   0,r7,r8
1822        bne     1b
1823
1824        /*
1825         * relocate `hdlr' and `int_return' entries
1826         */
1827        li      r7,.L_CriticalInput - _start + _START_OFFSET
1828        bl      trap_reloc
1829        li      r7,.L_MachineCheck - _start + _START_OFFSET
1830        bl      trap_reloc
1831        li      r7,.L_DataStorage - _start + _START_OFFSET
1832        bl      trap_reloc
1833        li      r7,.L_InstStorage - _start + _START_OFFSET
1834        bl      trap_reloc
1835        li      r7,.L_ExtInterrupt - _start + _START_OFFSET
1836        bl      trap_reloc
1837        li      r7,.L_Alignment - _start + _START_OFFSET
1838        bl      trap_reloc
1839        li      r7,.L_ProgramCheck - _start + _START_OFFSET
1840        bl      trap_reloc
1841        li      r7,.L_FPUnavailable - _start + _START_OFFSET
1842        bl      trap_reloc
1843        li      r7,.L_Decrementer - _start + _START_OFFSET
1844        bl      trap_reloc
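        /*
         * The remaining vectors are laid out 0x100 apart, so relocate them
         * in a loop up to _end_of_vectors.
         */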
1845        li      r7,.L_IntervalTimer - _start + _START_OFFSET
1846        li      r8,_end_of_vectors - _start + _START_OFFSET
18472:
1848        bl      trap_reloc
1849        addi    r7,r7,0x100             /* next exception vector        */
1850        cmplw   0,r7,r8
1851        blt     2b
1852
1853        /* Update IVORs as per relocated vector table address */
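        /*
         * Each IVORn holds the offset of its handler from IVPR; with IVPR
         * cleared below, the vectors just copied to low memory (0x100,
         * 0x200, ...) are the ones taken.
         */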
1854        li      r7,0x0100
1855        mtspr   IVOR0,r7        /* 0: Critical input */
1856        li      r7,0x0200
1857        mtspr   IVOR1,r7        /* 1: Machine check */
1858        li      r7,0x0300
1859        mtspr   IVOR2,r7        /* 2: Data storage */
1860        li      r7,0x0400
1861        mtspr   IVOR3,r7        /* 3: Instruction storage */
1862        li      r7,0x0500
1863        mtspr   IVOR4,r7        /* 4: External interrupt */
1864        li      r7,0x0600
1865        mtspr   IVOR5,r7        /* 5: Alignment */
1866        li      r7,0x0700
1867        mtspr   IVOR6,r7        /* 6: Program check */
1868        li      r7,0x0800
1869        mtspr   IVOR7,r7        /* 7: floating point unavailable */
1870        li      r7,0x0900
1871        mtspr   IVOR8,r7        /* 8: System call */
1872        /* 9: Auxiliary processor unavailable (unsupported) */
1873        li      r7,0x0a00
1874        mtspr   IVOR10,r7       /* 10: Decrementer */
1875        li      r7,0x0b00
1876        mtspr   IVOR11,r7       /* 11: Interval timer */
1877        li      r7,0x0c00
1878        mtspr   IVOR12,r7       /* 12: Watchdog timer */
1879        li      r7,0x0d00
1880        mtspr   IVOR13,r7       /* 13: Data TLB error */
1881        li      r7,0x0e00
1882        mtspr   IVOR14,r7       /* 14: Instruction TLB error */
1883        li      r7,0x0f00
1884        mtspr   IVOR15,r7       /* 15: Debug */
1885
1886        lis     r7,0x0
1887        mtspr   IVPR,r7
1888
1889        mtlr    r4                      /* restore link register        */
1890        blr
1891
1892.globl unlock_ram_in_cache
1893unlock_ram_in_cache:
1894        /* invalidate the INIT_RAM section */
1895        lis     r3,(CONFIG_SYS_INIT_RAM_ADDR & ~(CONFIG_SYS_CACHELINE_SIZE-1))@h
1896        ori     r3,r3,(CONFIG_SYS_INIT_RAM_ADDR & ~(CONFIG_SYS_CACHELINE_SIZE-1))@l
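        /*
         * The low bits of L1CFG0 give the data cache size in KiB; the shift
         * below converts that into a loop count of cache lines covering the
         * locked init-RAM region (the extra "- 1" halves the count, which
         * appears to assume init-RAM occupies half the L1 data cache).
         */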
1897        mfspr   r4,L1CFG0
1898        andi.   r4,r4,0x1ff
1899        slwi    r4,r4,(10 - 1 - L1_CACHE_SHIFT)
1900        mtctr   r4
19011:      dcbi    r0,r3
1902        dcblc   r0,r3
1903        addi    r3,r3,CONFIG_SYS_CACHELINE_SIZE
1904        bdnz    1b
1905        sync
1906
1907        /* Invalidate the TLB entries for the cache */
1908        lis     r3,CONFIG_SYS_INIT_RAM_ADDR@h
1909        ori     r3,r3,CONFIG_SYS_INIT_RAM_ADDR@l
1910        tlbivax 0,r3
1911        addi    r3,r3,0x1000
1912        tlbivax 0,r3
1913        addi    r3,r3,0x1000
1914        tlbivax 0,r3
1915        addi    r3,r3,0x1000
1916        tlbivax 0,r3
1917        isync
1918        blr
1919
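/*
 * Flush the entire L1 data cache.
 *
 * With HID0[DCFA] (data cache flush assist) set, the first loop loads
 * enough sequential addresses to displace every line in the cache, the
 * second runs dcbf over the same range to push any remaining dirty data
 * to memory, and HID0 is then restored.
 */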
1920.globl flush_dcache
1921flush_dcache:
1922        mfspr   r3,SPRN_L1CFG0
1923
1924        rlwinm  r5,r3,9,3       /* Extract cache block size */
1925        twlgti  r5,1            /* Only 32 and 64 byte cache blocks
1926                                 * are currently defined.
1927                                 */
1928        li      r4,32
1929        subfic  r6,r5,2         /* r6 = log2(1KiB / cache block size) -
1930                                 *      log2(number of ways)
1931                                 */
1932        slw     r5,r4,r5        /* r5 = cache block size */
1933
1934        rlwinm  r7,r3,0,0xff    /* Extract number of KiB in the cache */
1935        mulli   r7,r7,13        /* An 8-way cache will require 13
1936                                 * loads per set.
1937                                 */
1938        slw     r7,r7,r6
1939
1940        /* save off HID0 and set DCFA */
1941        mfspr   r8,SPRN_HID0
1942        ori     r9,r8,HID0_DCFA@l
1943        mtspr   SPRN_HID0,r9
1944        isync
1945
1946        lis     r4,0
1947        mtctr   r7
1948
19491:      lwz     r3,0(r4)        /* Load... */
1950        add     r4,r4,r5
1951        bdnz    1b
1952
1953        msync
1954        lis     r4,0
1955        mtctr   r7
1956
19571:      dcbf    0,r4            /* ...and flush. */
1958        add     r4,r4,r5
1959        bdnz    1b
1960
1961        /* restore HID0 */
1962        mtspr   SPRN_HID0,r8
1963        isync
1964
1965        blr
1966
1967.globl setup_ivors
1968setup_ivors:
1969
1970#include "fixed_ivor.S"
1971        blr
1972#endif /* !MINIMAL_SPL */
1973