linux/arch/arm64/kernel/relocate_kernel.S
<<
>>
Prefs
   1/*
   2 * kexec for arm64
   3 *
   4 * Copyright (C) Linaro.
   5 * Copyright (C) Huawei Futurewei Technologies.
   6 *
   7 * This program is free software; you can redistribute it and/or modify
   8 * it under the terms of the GNU General Public License version 2 as
   9 * published by the Free Software Foundation.
  10 */
  11
  12#include <linux/kexec.h>
  13#include <linux/linkage.h>
  14
  15#include <asm/assembler.h>
  16#include <asm/kexec.h>
  17#include <asm/page.h>
  18#include <asm/sysreg.h>
  19
/*
 * arm64_relocate_new_kernel - Put a 2nd stage image in place and boot it.
 *
 * The memory that the old kernel occupies may be overwritten when copying the
 * new image to its final location.  To ensure that the
 * arm64_relocate_new_kernel routine which does that copy is not overwritten,
 * all code and data needed by arm64_relocate_new_kernel must be between the
 * symbols arm64_relocate_new_kernel and .Lcopy_end (the span whose length is
 * exported as arm64_relocate_new_kernel_size).  The machine_kexec() routine
 * will copy arm64_relocate_new_kernel to the kexec control_code_page, a
 * special page which has been set up to be preserved during the copy
 * operation.
 */
ENTRY(arm64_relocate_new_kernel)

        /*
         * Register roles for the whole routine (inputs per machine_kexec):
         *   x0  = kimage_head  - first entry of the kimage relocation list
         *   x1  = kimage_start - entry point of the new image
         *   x2  = dtb address  - handed to the new image in x0 at the end
         *
         * The inputs are parked in high registers so that x0-x7 remain
         * free as scratch for the cache maintenance and copy_page work
         * below.  x12 (addr), x20, x21 are additional scratch.
         */
        /* Setup the list loop variables. */
        mov     x18, x2                         /* x18 = dtb address */
        mov     x17, x1                         /* x17 = kimage_start */
        mov     x16, x0                         /* x16 = kimage_head */
        raw_dcache_line_size x15, x0            /* x15 = dcache line size */
        mov     x14, xzr                        /* x14 = entry ptr */
        mov     x13, xzr                        /* x13 = copy dest */

        /*
         * Clear the sctlr_el2 flags.  Only done when actually running at
         * EL2; SCTLR_ELx_FLAGS presumably covers the MMU/cache enable
         * bits (see asm/sysreg.h), so this turns off the EL2 MMU before
         * the new image is entered.
         */
        mrs     x0, CurrentEL
        cmp     x0, #CurrentEL_EL2
        b.ne    1f                              /* not at EL2: nothing to clear */
        mrs     x0, sctlr_el2
        mov_q   x1, SCTLR_ELx_FLAGS
        bic     x0, x0, x1
        pre_disable_mmu_workaround
        msr     sctlr_el2, x0
        isb
1:

        /* Check if the new image needs relocation. */
        tbnz    x16, IND_DONE_BIT, .Ldone       /* empty list: skip straight to boot */

        /*
         * Walk the kimage entry list.  Each 64-bit entry is a page address
         * in the high bits plus IND_* flag bits in the low bits; the flags
         * say whether the address is a source page, a destination pointer,
         * or the next indirection page of the list itself.
         */
.Lloop:
        and     x12, x16, PAGE_MASK             /* x12 = addr (flag bits stripped) */

        /* Test the entry flags. */
.Ltest_source:
        tbz     x16, IND_SOURCE_BIT, .Ltest_indirection

        /*
         * Invalidate dest page to PoC, one d-cache line at a time, so no
         * stale cached lines shadow the data copied below once the new
         * image runs with caches off.
         */
        mov     x0, x13
        add     x20, x0, #PAGE_SIZE             /* x20 = end of dest page */
        sub     x1, x15, #1                     /* x1 = line-size - 1 (alignment mask) */
        bic     x0, x0, x1                      /* round x0 down to a line boundary */
2:      dc      ivac, x0                        /* invalidate line to Point of Coherency */
        add     x0, x0, x15
        cmp     x0, x20
        b.lo    2b
        dsb     sy                              /* complete all invalidates before the copy */

        /* Copy one source page to its destination (x20/x21 = dest/src). */
        mov x20, x13
        mov x21, x12
        copy_page x20, x21, x0, x1, x2, x3, x4, x5, x6, x7

        /* dest += PAGE_SIZE */
        add     x13, x13, PAGE_SIZE
        b       .Lnext

.Ltest_indirection:
        tbz     x16, IND_INDIRECTION_BIT, .Ltest_destination

        /* ptr = addr -- switch to the next page of list entries */
        mov     x14, x12
        b       .Lnext

.Ltest_destination:
        tbz     x16, IND_DESTINATION_BIT, .Lnext

        /* dest = addr -- following source pages are copied here */
        mov     x13, x12

.Lnext:
        /* entry = *ptr++ */
        ldr     x16, [x14], #8

        /* while (!(entry & DONE)) */
        tbz     x16, IND_DONE_BIT, .Lloop

.Ldone:
        /* wait for writes from copy_page to finish */
        dsb     nsh
        ic      iallu                           /* drop stale i-cache contents for the new image */
        dsb     nsh
        isb

        /*
         * Start new image: dtb in x0, x1-x3 zeroed (reserved registers in
         * the arm64 boot protocol), jump to the image's entry point.
         */
        mov     x0, x18
        mov     x1, xzr
        mov     x2, xzr
        mov     x3, xzr
        br      x17

ENDPROC(arm64_relocate_new_kernel)
 118
.align 3        /* To keep the 64-bit values below naturally aligned. */

.Lcopy_end:
/*
 * .org cannot move the location counter backwards, so this line fails the
 * assembly if the relocation code above ever outgrows the single
 * control_code_page it must fit in.
 */
.org    KEXEC_CONTROL_PAGE_SIZE

/*
 * arm64_relocate_new_kernel_size - Number of bytes to copy to the
 * control_code_page.
 */
.globl arm64_relocate_new_kernel_size
arm64_relocate_new_kernel_size:
        .quad   .Lcopy_end - arm64_relocate_new_kernel
 131