uboot/arch/powerpc/cpu/mpc512x/start.S
/*
 * Copyright (C) 1998  Dan Malek <dmalek@jlc.net>
 * Copyright (C) 1999  Magnus Damm <kieraypc01.p.y.kie.era.ericsson.se>
 * Copyright (C) 2000-2009 Wolfgang Denk <wd@denx.de>
 * Copyright Freescale Semiconductor, Inc. 2004, 2006.
 *
 * SPDX-License-Identifier:     GPL-2.0+
 *
 * Based on the MPC83xx code.
 */

/*
 *  U-Boot - Startup Code for MPC512x based Embedded Boards
 */

#include <asm-offsets.h>
#include <config.h>
#ifndef  CONFIG_IDENT_STRING
#define  CONFIG_IDENT_STRING "MPC512X"
#endif
#include <version.h>

#define CONFIG_521X     1               /* needed for Linux kernel header files */

#include <asm/immap_512x.h>
#include "asm-offsets.h"

#include <ppc_asm.tmpl>
#include <ppc_defs.h>

#include <asm/cache.h>
#include <asm/mmu.h>
#include <asm/u-boot.h>

/*
 * Floating Point enable, Machine Check and Recoverable Interrupt
 */
#undef  MSR_KERNEL
#ifdef DEBUG
#define MSR_KERNEL (MSR_FP|MSR_RI)
#else
#define MSR_KERNEL (MSR_FP|MSR_ME|MSR_RI)
#endif

/* Macros for manipulating CSx_START/STOP */
#define START_REG(start)        ((start) >> 16)
#define STOP_REG(start, size)   (((start) + (size) - 1) >> 16)

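/*
 * Worked example (illustrative only; the 0xFC000000 base and 32 MB size
 * below are assumed values, not taken from any particular board config):
 *   START_REG(0xFC000000)            = 0xFC00
 *   STOP_REG(0xFC000000, 0x02000000) = 0xFDFF
 * so the LPCS0AW word programmed in boot_cold below would read 0xFC00FDFF,
 * i.e. the start address in the upper halfword, the stop address in the
 * lower one.
 */
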
/*
 * Set up GOT: Global Offset Table
 *
 * Use r12 to access the GOT
 */
        START_GOT
        GOT_ENTRY(_GOT2_TABLE_)
        GOT_ENTRY(_FIXUP_TABLE_)

        GOT_ENTRY(_start)
        GOT_ENTRY(_start_of_vectors)
        GOT_ENTRY(_end_of_vectors)
        GOT_ENTRY(transfer_to_handler)

        GOT_ENTRY(__init_end)
        GOT_ENTRY(__bss_end)
        GOT_ENTRY(__bss_start)
        END_GOT

/*
 * Magic number and version string
 */
        .long   0x27051956              /* U-Boot Magic Number */
        .globl  version_string
version_string:
        .ascii U_BOOT_VERSION_STRING, "\0"

/*
 * Vector Table
 */
        .text
        . = EXC_OFF_SYS_RESET

        .globl  _start
        /* Start from here after reset/power on */
_start:
        b       boot_cold

        .globl  _start_of_vectors
_start_of_vectors:

/* Machine check */
        STD_EXCEPTION(0x200, MachineCheck, MachineCheckException)

/* Data Storage exception. */
        STD_EXCEPTION(0x300, DataStorage, UnknownException)

/* Instruction Storage exception. */
        STD_EXCEPTION(0x400, InstStorage, UnknownException)

/* External Interrupt exception. */
        STD_EXCEPTION(0x500, ExtInterrupt, UnknownException)

/* Alignment exception. */
        . = 0x600
Alignment:
        EXCEPTION_PROLOG(SRR0, SRR1)
        mfspr   r4,DAR
        stw     r4,_DAR(r21)
        mfspr   r5,DSISR
        stw     r5,_DSISR(r21)
        addi    r3,r1,STACK_FRAME_OVERHEAD
        EXC_XFER_TEMPLATE(Alignment, AlignmentException, MSR_KERNEL, COPY_EE)

/* Program check exception */
        . = 0x700
ProgramCheck:
        EXCEPTION_PROLOG(SRR0, SRR1)
        addi    r3,r1,STACK_FRAME_OVERHEAD
        EXC_XFER_TEMPLATE(ProgramCheck, ProgramCheckException,
                MSR_KERNEL, COPY_EE)

/* Floating Point Unit unavailable exception */
        STD_EXCEPTION(0x800, FPUnavailable, UnknownException)

/* Decrementer */
        STD_EXCEPTION(0x900, Decrementer, timer_interrupt)

/* Critical interrupt */
        STD_EXCEPTION(0xa00, Critical, UnknownException)

/* System Call */
        STD_EXCEPTION(0xc00, SystemCall, UnknownException)

/* Trace interrupt */
        STD_EXCEPTION(0xd00, Trace, UnknownException)

/* Performance Monitor interrupt */
        STD_EXCEPTION(0xf00, PerfMon, UnknownException)

/* Instruction Translation Miss */
        STD_EXCEPTION(0x1000, InstructionTLBMiss, UnknownException)

/* Data Load Translation Miss */
        STD_EXCEPTION(0x1100, DataLoadTLBMiss, UnknownException)

/* Data Store Translation Miss */
        STD_EXCEPTION(0x1200, DataStoreTLBMiss, UnknownException)

/* Instruction Address Breakpoint */
        STD_EXCEPTION(0x1300, InstructionAddrBreakpoint, DebugException)

/* System Management interrupt */
        STD_EXCEPTION(0x1400, SystemMgmtInterrupt, UnknownException)

        .globl  _end_of_vectors
_end_of_vectors:

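/*
 * The cold-boot entry point is placed at offset 0x3000, well past the
 * exception vector area filled in above, so the boot code never overlaps
 * a vector slot.
 */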
        . = 0x3000
boot_cold:
        /* Save msr contents */
        mfmsr   r5

        /* Set IMMR area to our preferred location */
        lis     r4, CONFIG_DEFAULT_IMMR@h
        lis     r3, CONFIG_SYS_IMMR@h
        ori     r3, r3, CONFIG_SYS_IMMR@l
        stw     r3, IMMRBAR(r4)
        mtspr   MBAR, r3                /* IMMRBAR is mirrored into the MBAR SPR (311) */
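        /*
         * Note: the IMMRBAR write above must go through the *default*
         * (power-on) register base held in r4, because the register block
         * is still mapped there; r3 already holds the new base used by the
         * rest of the code.
         */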

        /* Initialise the machine */
        bl      cpu_early_init

        /*
         * Set up Local Access Windows:
         *
         * 1) Boot/CS0 (boot FLASH)
         * 2) On-chip SRAM (initial stack purposes)
         */

        /* Boot CS/CS0 window range */
        lis     r3, CONFIG_SYS_IMMR@h
        ori     r3, r3, CONFIG_SYS_IMMR@l

        lis     r4, START_REG(CONFIG_SYS_FLASH_BASE)
        ori     r4, r4, STOP_REG(CONFIG_SYS_FLASH_BASE, CONFIG_SYS_FLASH_SIZE)
        stw     r4, LPCS0AW(r3)

        /*
         * The SRAM window has a fixed size (256K), so only the start address
         * is necessary
         */
        lis     r4, START_REG(CONFIG_SYS_SRAM_BASE) & 0xff00
        stw     r4, SRAMBAR(r3)

        /*
         * According to MPC5121e RM, configuring local access windows should
         * be followed by a dummy read of the config register that was
         * modified last and an isync
         */
        lwz     r4, SRAMBAR(r3)
        isync

        /*
         * Set the configuration of Boot/CS0; the SRAM window has no config
         * register, so no parameters can be set for it
         */
        lis     r3, (CONFIG_SYS_IMMR + LPC_OFFSET)@h
        ori     r3, r3, (CONFIG_SYS_IMMR + LPC_OFFSET)@l

        lis     r4, CONFIG_SYS_CS0_CFG@h
        ori     r4, r4, CONFIG_SYS_CS0_CFG@l
        stw     r4, CS0_CONFIG(r3)

        /* Master enable all CS's */
        lis     r4, CS_CTRL_ME@h
        ori     r4, r4, CS_CTRL_ME@l
        stw     r4, CS_CTRL(r3)

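        /*
         * Branch into the monitor image at its link address in flash:
         * compute CONFIG_SYS_MONITOR_BASE plus the offset of in_flash from
         * _start (which itself sits at EXC_OFF_SYS_RESET), load that into
         * LR and "return" there.
         */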
        lis     r4, (CONFIG_SYS_MONITOR_BASE)@h
        ori     r4, r4, (CONFIG_SYS_MONITOR_BASE)@l
        addi    r5, r4, in_flash - _start + EXC_OFF_SYS_RESET
        mtlr    r5
        blr

in_flash:
        lis     r1, (CONFIG_SYS_INIT_RAM_ADDR + CONFIG_SYS_GBL_DATA_OFFSET)@h
        ori     r1, r1, (CONFIG_SYS_INIT_RAM_ADDR + CONFIG_SYS_GBL_DATA_OFFSET)@l

        li      r0, 0           /* Make room for stack frame header and */
        stwu    r0, -4(r1)      /* clear final stack frame so that      */
        stwu    r0, -4(r1)      /* stack backtraces terminate cleanly   */

        /* let the C-code set up the rest                       */
        /*                                                      */
        /* Be careful to keep code relocatable & stack humble   */
        /*------------------------------------------------------*/

        GET_GOT                 /* initialize GOT access        */

        /* r3: IMMR */
        lis     r3, CONFIG_SYS_IMMR@h
        /* run low-level CPU init code (in Flash) */
        bl      cpu_init_f

        /* run 1st part of board init code (in Flash) */
        bl      board_init_f

        /* NOTREACHED - board_init_f() does not return */

/*
 * This code finishes saving the registers to the exception frame
 * and jumps to the appropriate handler for the exception.
 * Register r21 is a pointer into the trap frame; r1 holds the new stack
 * pointer.
 */
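/*
 * On entry LR points at the two-word transfer table emitted by the
 * STD_EXCEPTION/EXC_XFER_TEMPLATE macros (word 0: handler address,
 * word 1: return address), which is what the two loads from 0(r23) and
 * 4(r23) below pick up. (Descriptive note added for clarity; the table
 * layout is defined by those macros, not in this file.)
 */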
        .globl  transfer_to_handler
transfer_to_handler:
        stw     r22,_NIP(r21)
        lis     r22,MSR_POW@h
        andc    r23,r23,r22
        stw     r23,_MSR(r21)
        SAVE_GPR(7, r21)
        SAVE_4GPRS(8, r21)
        SAVE_8GPRS(12, r21)
        SAVE_8GPRS(24, r21)
        mflr    r23
        andi.   r24,r23,0x3f00          /* get vector offset */
        stw     r24,TRAP(r21)
        li      r22,0
        stw     r22,RESULT(r21)
        lwz     r24,0(r23)              /* virtual address of handler */
        lwz     r23,4(r23)              /* where to go when done */
        mtspr   SRR0,r24
        mtspr   SRR1,r20
        mtlr    r23
        SYNC
        rfi                             /* jump to handler, enable MMU */

int_return:
        mfmsr   r28             /* Disable interrupts */
        li      r4,0
        ori     r4,r4,MSR_EE
        andc    r28,r28,r4
        SYNC                    /* Some chip revs need this... */
        mtmsr   r28
        SYNC
        lwz     r2,_CTR(r1)
        lwz     r0,_LINK(r1)
        mtctr   r2
        mtlr    r0
        lwz     r2,_XER(r1)
        lwz     r0,_CCR(r1)
        mtspr   XER,r2
        mtcrf   0xFF,r0
        REST_10GPRS(3, r1)
        REST_10GPRS(13, r1)
        REST_8GPRS(23, r1)
        REST_GPR(31, r1)
        lwz     r2,_NIP(r1)     /* Restore environment */
        lwz     r0,_MSR(r1)
        mtspr   SRR0,r2
        mtspr   SRR1,r0
        lwz     r0,GPR0(r1)
        lwz     r2,GPR2(r1)
        lwz     r1,GPR1(r1)
        SYNC
        rfi

/*
 * This code initialises the machine; it expects the original MSR contents
 * to be in r5.
 */
cpu_early_init:
        /* Initialize machine status; enable machine check interrupt */
        /*-----------------------------------------------------------*/

        li      r3, MSR_KERNEL                  /* Set ME and RI flags */
        rlwimi  r3, r5, 0, 25, 25               /* preserve IP bit */
#ifdef DEBUG
        rlwimi  r3, r5, 0, 21, 22               /* debugger might set SE, BE bits */
#endif
        mtmsr   r3
        SYNC
        mtspr   SRR1, r3                        /* Mirror current MSR state in SRR1 */

        lis     r3, CONFIG_SYS_IMMR@h

#if defined(CONFIG_WATCHDOG)
        /* Initialise the watchdog and reset it */
        /*--------------------------------------*/
        lis r4, CONFIG_SYS_WATCHDOG_VALUE
        ori r4, r4, (SWCRR_SWEN | SWCRR_SWRI | SWCRR_SWPR)
        stw r4, SWCRR(r3)

        /* reset */
        li      r4, 0x556C
        sth     r4, SWSRR@l(r3)
        li      r4, 0x0
        ori     r4, r4, 0xAA39
        sth     r4, SWSRR@l(r3)
#else
        /* Disable the watchdog */
        /*----------------------*/
        lwz r4, SWCRR(r3)
        /*
         * Only disable the watchdog if it is currently enabled: once it has
         * been disabled by software it cannot be re-enabled
         */
        andi. r4, r4, 0x4
        beq 1f
        xor r4, r4, r4
        stw r4, SWCRR(r3)
1:
#endif /* CONFIG_WATCHDOG */

        /* Initialize the Hardware Implementation-dependent Registers */
        /* HID0 also contains cache control                     */
        /*------------------------------------------------------*/
        lis     r3, CONFIG_SYS_HID0_INIT@h
        ori     r3, r3, CONFIG_SYS_HID0_INIT@l
        SYNC
        mtspr   HID0, r3

        lis     r3, CONFIG_SYS_HID0_FINAL@h
        ori     r3, r3, CONFIG_SYS_HID0_FINAL@l
        SYNC
        mtspr   HID0, r3

        lis     r3, CONFIG_SYS_HID2@h
        ori     r3, r3, CONFIG_SYS_HID2@l
        SYNC
        mtspr   HID2, r3
        sync
        blr


/* Cache functions.
 *
 * Note: requires that all cache bits in
 * HID0 are in the low half word.
 */
        .globl  icache_enable
icache_enable:
        mfspr   r3, HID0
        ori     r3, r3, HID0_ICE
        lis     r4, 0
        ori     r4, r4, HID0_ILOCK
        andc    r3, r3, r4
        ori     r4, r3, HID0_ICFI
        isync
        mtspr   HID0, r4        /* sets enable and invalidate, clears lock */
        isync
        mtspr   HID0, r3        /* clears invalidate */
        blr

        .globl  icache_disable
icache_disable:
        mfspr   r3, HID0
        lis     r4, 0
        ori     r4, r4, HID0_ICE|HID0_ILOCK
        andc    r3, r3, r4
        ori     r4, r3, HID0_ICFI
        isync
        mtspr   HID0, r4        /* sets invalidate, clears enable and lock */
        isync
        mtspr   HID0, r3        /* clears invalidate */
        blr

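/*
 * icache_status (and dcache_status below) rotate the ICE/DCE bit of HID0
 * into the least significant bit position and mask everything else, so the
 * function returns a plain 0 or 1.
 */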
        .globl  icache_status
icache_status:
        mfspr   r3, HID0
        rlwinm  r3, r3, (31 - HID0_ICE_SHIFT + 1), 31, 31
        blr

        .globl  dcache_enable
dcache_enable:
        mfspr   r3, HID0
        li      r5, HID0_DCFI|HID0_DLOCK
        andc    r3, r3, r5
        mtspr   HID0, r3                /* no invalidate, unlock */
        ori     r3, r3, HID0_DCE
        ori     r5, r3, HID0_DCFI
        mtspr   HID0, r5                /* enable + invalidate */
        mtspr   HID0, r3                /* enable */
        sync
        blr

        .globl  dcache_disable
dcache_disable:
        mfspr   r3, HID0
        lis     r4, 0
        ori     r4, r4, HID0_DCE|HID0_DLOCK
        andc    r3, r3, r4
        ori     r4, r3, HID0_DCI
        sync
        mtspr   HID0, r4        /* sets invalidate, clears enable and lock */
        sync
        mtspr   HID0, r3        /* clears invalidate */
        blr

        .globl  dcache_status
dcache_status:
        mfspr   r3, HID0
        rlwinm  r3, r3, (31 - HID0_DCE_SHIFT + 1), 31, 31
        blr

        .globl get_pvr
get_pvr:
        mfspr   r3, PVR
        blr

/*-------------------------------------------------------------------*/

/*
 * void relocate_code (addr_sp, gd, addr_moni)
 *
 * This "function" does not return; instead it continues in RAM
 * after relocating the monitor code.
 *
 * On entry: r3 = addr_sp (new stack pointer), r4 = gd (global data
 * pointer), r5 = addr_moni (destination address).
 *
 * The copy loop below then uses:
 * r3 = dest
 * r4 = src
 * r5 = length in bytes
 * r6 = cache line size
 */
        .globl  relocate_code
relocate_code:
        mr      r1,  r3         /* Set new stack pointer        */
        mr      r9,  r4         /* Save copy of Global Data pointer */
        mr      r10, r5         /* Save copy of Destination Address */

        GET_GOT
        mr      r3,  r5                         /* Destination Address */
        lis     r4, CONFIG_SYS_MONITOR_BASE@h           /* Source      Address */
        ori     r4, r4, CONFIG_SYS_MONITOR_BASE@l
        lwz     r5, GOT(__init_end)
        sub     r5, r5, r4
        li      r6, CONFIG_SYS_CACHELINE_SIZE           /* Cache Line Size */

        /*
         * Fix GOT pointer:
         *
         * New GOT-PTR = (old GOT-PTR - CONFIG_SYS_MONITOR_BASE)
         *              + Destination Address
         *
         * Offset:
         */
        sub     r15, r10, r4

        /* First our own GOT */
        add     r12, r12, r15
        /* then the one used by the C code */
        add     r30, r30, r15

        /*
         * Now relocate code
         */
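        /*
         * If the destination lies below the source, copy forward word by
         * word and then read both regions back to verify the copy;
         * otherwise copy backwards, so an overlapping destination cannot
         * clobber source words before they have been copied.
         */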
        cmplw   cr1,r3,r4
        addi    r0,r5,3
        srwi.   r0,r0,2
        beq     cr1,4f          /* In place copy is not necessary */
        beq     7f              /* Protect against 0 count        */
        mtctr   r0
        bge     cr1,2f
        la      r8,-4(r4)
        la      r7,-4(r3)

        /* copy */
1:      lwzu    r0,4(r8)
        stwu    r0,4(r7)
        bdnz    1b

        addi    r0,r5,3
        srwi.   r0,r0,2
        mtctr   r0
        la      r8,-4(r4)
        la      r7,-4(r3)

        /* and compare */
20:     lwzu    r20,4(r8)
        lwzu    r21,4(r7)
        xor. r22, r20, r21
        bne  30f
        bdnz    20b
        b 4f

        /* compare failed */
30:     li r3, 0
        blr

2:      slwi    r0,r0,2 /* copy in reverse order to handle overlapping regions */
        add     r8,r4,r0
        add     r7,r3,r0
3:      lwzu    r0,-4(r8)
        stwu    r0,-4(r7)
        bdnz    3b

/*
 * Now flush the cache: note that we must start from a cache aligned
 * address. Otherwise we might miss one cache line.
 */
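/*
 * The dcbst loop pushes the freshly copied image out of the data cache to
 * memory; the icbi loop then invalidates the corresponding instruction
 * cache lines, so stale instructions cannot be executed once we jump into
 * the relocated code.
 */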
4:      cmpwi   r6,0
        add     r5,r3,r5
        beq     7f              /* Always flush prefetch queue in any case */
        subi    r0,r6,1
        andc    r3,r3,r0
        mr      r4,r3
5:      dcbst   0,r4
        add     r4,r4,r6
        cmplw   r4,r5
        blt     5b
        sync                    /* Wait for all dcbst to complete on bus */
        mr      r4,r3
6:      icbi    0,r4
        add     r4,r4,r6
        cmplw   r4,r5
        blt     6b
7:      sync                    /* Wait for all icbi to complete on bus */
        isync

/*
 * We are done. Do not return; instead branch to the second part of the
 * board initialization, now running from RAM.
 */
        addi    r0, r10, in_ram - _start + EXC_OFF_SYS_RESET
        mtlr    r0
        blr

in_ram:
        /*
         * Relocation function: r12 points to got2+0x8000
         *
         * Adjust got2 pointers; no need to check for 0, this code
         * already puts a few entries in the table.
         */
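        /*
         * The loop below works out the relocation offset as the difference
         * between the run-time address of the GOT2 table (r3) and the
         * link-time address stored in its own GOT slot (r11), then adds
         * that offset to every non-zero entry. (Explanatory note; the
         * _GOT2_TABLE_ layout itself comes from the START_GOT/GOT_ENTRY
         * macros, not from this file.)
         */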
        li      r0,__got2_entries@sectoff@l
        la      r3,GOT(_GOT2_TABLE_)
        lwz     r11,GOT(_GOT2_TABLE_)
        mtctr   r0
        sub     r11,r3,r11
        addi    r3,r3,-4
1:      lwzu    r0,4(r3)
        cmpwi   r0,0
        beq-    2f
        add     r0,r0,r11
        stw     r0,0(r3)
2:      bdnz    1b

        /*
         * Now adjust the fixups and the pointers to the fixups
         * in case we need to move ourselves again.
         */
        li      r0,__fixup_entries@sectoff@l
        lwz     r3,GOT(_FIXUP_TABLE_)
        cmpwi   r0,0
        mtctr   r0
        addi    r3,r3,-4
        beq     4f
3:      lwzu    r4,4(r3)
        lwzux   r0,r4,r11
        cmpwi   r0,0
        add     r0,r0,r11
        stw     r4,0(r3)
        beq-    5f
        stw     r0,0(r4)
5:      bdnz    3b
4:
clear_bss:
        /*
         * Now clear BSS segment
         */
        lwz     r3,GOT(__bss_start)
        lwz     r4,GOT(__bss_end)

        cmplw   0, r3, r4
        beq     6f

        li      r0, 0
5:
        stw     r0, 0(r3)
        addi    r3, r3, 4
        cmplw   0, r3, r4
        bne     5b
6:
        mr      r3, r9          /* Global Data pointer          */
        mr      r4, r10         /* Destination Address          */
        bl      board_init_r

        /*
         * Copy exception vector code to low memory
         *
         * r3: dest_addr (not used by this implementation; the vectors are
         *     copied to the fixed reset-vector area at 0x100)
         * r7: source address, r8: end address, r9: target address
         */
        .globl  trap_init
trap_init:
        mflr    r4              /* save link register */
        GET_GOT
        lwz     r7, GOT(_start)
        lwz     r8, GOT(_end_of_vectors)

        li      r9, 0x100       /* reset vector at 0x100 */

        cmplw   0, r7, r8
        bgelr                   /* return if r7>=r8 - just in case */
1:
        lwz     r0, 0(r7)
        stw     r0, 0(r9)
        addi    r7, r7, 4
        addi    r9, r9, 4
        cmplw   0, r7, r8
        bne     1b

        /*
         * relocate `hdlr' and `int_return' entries
         */
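        /*
         * Each exception set up above leaves a small .L_<name> transfer
         * table holding the absolute `hdlr' and `int_return' addresses;
         * trap_reloc (assumed here to be the common U-Boot PowerPC helper)
         * patches those addresses for the copies now living in low memory.
         * r7 walks the tables, r8 bounds each run of vectors.
         */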
        li      r7, .L_MachineCheck - _start + EXC_OFF_SYS_RESET
        li      r8, Alignment - _start + EXC_OFF_SYS_RESET
2:
        bl      trap_reloc
        addi    r7, r7, 0x100           /* next exception vector */
        cmplw   0, r7, r8
        blt     2b

        li      r7, .L_Alignment - _start + EXC_OFF_SYS_RESET
        bl      trap_reloc

        li      r7, .L_ProgramCheck - _start + EXC_OFF_SYS_RESET
        bl      trap_reloc

        li      r7, .L_FPUnavailable - _start + EXC_OFF_SYS_RESET
        li      r8, SystemCall - _start + EXC_OFF_SYS_RESET
3:
        bl      trap_reloc
        addi    r7, r7, 0x100           /* next exception vector */
        cmplw   0, r7, r8
        blt     3b

        li      r7, .L_Trace - _start + EXC_OFF_SYS_RESET
        li      r8, _end_of_vectors - _start + EXC_OFF_SYS_RESET
4:
        bl      trap_reloc
        addi    r7, r7, 0x100           /* next exception vector */
        cmplw   0, r7, r8
        blt     4b

        mfmsr   r3                      /* now that the vectors have */
        lis     r7, MSR_IP@h            /* relocated into low memory */
        ori     r7, r7, MSR_IP@l        /* MSR[IP] can be turned off */
        andc    r3, r3, r7              /* (if it was on) */
        SYNC                            /* Some chip revs need this... */
        mtmsr   r3
        SYNC

        mtlr    r4                      /* restore link register    */
        blr