linux/arch/arm64/kvm/hyp/hyp-entry.S
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2015-2018 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 */

#include <linux/arm-smccc.h>
#include <linux/linkage.h>

#include <asm/alternative.h>
#include <asm/assembler.h>
#include <asm/cpufeature.h>
#include <asm/kvm_arm.h>
#include <asm/kvm_asm.h>
#include <asm/mmu.h>
#include <asm/spectre.h>

.macro save_caller_saved_regs_vect
        /* x0 and x1 were saved in the vector entry */
        stp     x2, x3,   [sp, #-16]!
        stp     x4, x5,   [sp, #-16]!
        stp     x6, x7,   [sp, #-16]!
        stp     x8, x9,   [sp, #-16]!
        stp     x10, x11, [sp, #-16]!
        stp     x12, x13, [sp, #-16]!
        stp     x14, x15, [sp, #-16]!
        stp     x16, x17, [sp, #-16]!
.endm

.macro restore_caller_saved_regs_vect
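        /*
         * Pop in reverse order, finishing with the x0/x1 pair that the
         * vector entry preamble pushed.
         */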
        ldp     x16, x17, [sp], #16
        ldp     x14, x15, [sp], #16
        ldp     x12, x13, [sp], #16
        ldp     x10, x11, [sp], #16
        ldp     x8, x9,   [sp], #16
        ldp     x6, x7,   [sp], #16
        ldp     x4, x5,   [sp], #16
        ldp     x2, x3,   [sp], #16
        ldp     x0, x1,   [sp], #16
.endm

        .text

el1_sync:                               // Guest trapped into EL2

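        /*
         * Check ESR_EL2.EC for an HVC from either AArch64 or AArch32.
         * The ccmp forces the "equal" condition (NZCV = #4 sets Z) when
         * the first compare already matched, so b.ne only fires if the
         * exception class is neither HVC64 nor HVC32.
         */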
        mrs     x0, esr_el2
        lsr     x0, x0, #ESR_ELx_EC_SHIFT
        cmp     x0, #ESR_ELx_EC_HVC64
        ccmp    x0, #ESR_ELx_EC_HVC32, #4, ne
        b.ne    el1_trap

        /*
         * Fastest possible path for ARM_SMCCC_ARCH_WORKAROUND_1.
         * The workaround has already been applied on the host,
         * so let's quickly get back to the guest. We don't bother
         * restoring x1, as it can be clobbered anyway.
         */
        ldr     x1, [sp]                                // Guest's x0
        eor     w1, w1, #ARM_SMCCC_ARCH_WORKAROUND_1
        cbz     w1, wa_epilogue

        /* ARM_SMCCC_ARCH_WORKAROUND_2 handling */
        eor     w1, w1, #(ARM_SMCCC_ARCH_WORKAROUND_1 ^ \
                          ARM_SMCCC_ARCH_WORKAROUND_2)
        cbnz    w1, el1_trap
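        /* Fall through: the ID was ARM_SMCCC_ARCH_WORKAROUND_2, return 0 too */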

wa_epilogue:
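        /*
         * Return SMCCC_RET_SUCCESS (0) and drop the stashed x0/x1 before
         * returning to the guest; the sb keeps speculation from running
         * past the eret.
         */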
        mov     x0, xzr
        add     sp, sp, #16
        eret
        sb

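/*
 * Everything else is a genuine trap: stash the vcpu pointer in x1 and the
 * exit code in x0, then leave via the common __guest_exit path.
 */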
el1_trap:
        get_vcpu_ptr    x1, x0
        mov     x0, #ARM_EXCEPTION_TRAP
        b       __guest_exit

el1_irq:
        get_vcpu_ptr    x1, x0
        mov     x0, #ARM_EXCEPTION_IRQ
        b       __guest_exit

el1_error:
        get_vcpu_ptr    x1, x0
        mov     x0, #ARM_EXCEPTION_EL1_SERROR
        b       __guest_exit

el2_sync:
        /* Check for an illegal exception return (SPSR_EL2.IL, bit 20) */
        mrs     x0, spsr_el2
        tbnz    x0, #20, 1f

        save_caller_saved_regs_vect
        stp     x29, x30, [sp, #-16]!
        bl      kvm_unexpected_el2_exception
        ldp     x29, x30, [sp], #16
        restore_caller_saved_regs_vect

        eret
        sb

1:
        /* Let's attempt a recovery from the illegal exception return */
        get_vcpu_ptr    x1, x0
        mov     x0, #ARM_EXCEPTION_IL
        b       __guest_exit

el2_error:
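        /* An SError taken while running at EL2 is unexpected; hand it to C */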
        save_caller_saved_regs_vect
        stp     x29, x30, [sp, #-16]!

        bl      kvm_unexpected_el2_exception

        ldp     x29, x30, [sp], #16
        restore_caller_saved_regs_vect

        eret
        sb

.macro invalid_vector   label, target = __guest_exit_panic
        .align  2
SYM_CODE_START_LOCAL(\label)
        b \target
SYM_CODE_END(\label)
.endm

        /* None of these should ever happen */
        invalid_vector  el2t_sync_invalid
        invalid_vector  el2t_irq_invalid
        invalid_vector  el2t_fiq_invalid
        invalid_vector  el2t_error_invalid
        invalid_vector  el2h_irq_invalid
        invalid_vector  el2h_fiq_invalid
        invalid_vector  el1_fiq_invalid

        .ltorg

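/* The vector table below must be 2KB-aligned, as VBAR_EL2[10:0] are res0 */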
        .align 11

.macro check_preamble_length start, end
/* kvm_patch_vector_branch() generates code that jumps over the preamble. */
.if ((\end-\start) != KVM_VECTOR_PREAMBLE)
        .error "KVM vector preamble length mismatch"
.endif
.endm

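/*
 * Each vector starts with a two-instruction preamble that stashes x0/x1.
 * valid_vect leads with an ESB to synchronize any pending SError;
 * invalid_vect substitutes a NOP so the preamble length stays identical.
 */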
.macro valid_vect target
        .align 7
661:
        esb
        stp     x0, x1, [sp, #-16]!
662:
        b       \target

check_preamble_length 661b, 662b
.endm

.macro invalid_vect target
        .align 7
661:
        nop
        stp     x0, x1, [sp, #-16]!
662:
        b       \target

check_preamble_length 661b, 662b
.endm

SYM_CODE_START(__kvm_hyp_vector)
        invalid_vect    el2t_sync_invalid       // Synchronous EL2t
        invalid_vect    el2t_irq_invalid        // IRQ EL2t
        invalid_vect    el2t_fiq_invalid        // FIQ EL2t
        invalid_vect    el2t_error_invalid      // Error EL2t

        valid_vect      el2_sync                // Synchronous EL2h
        invalid_vect    el2h_irq_invalid        // IRQ EL2h
        invalid_vect    el2h_fiq_invalid        // FIQ EL2h
        valid_vect      el2_error               // Error EL2h

        valid_vect      el1_sync                // Synchronous 64-bit EL1
        valid_vect      el1_irq                 // IRQ 64-bit EL1
        invalid_vect    el1_fiq_invalid         // FIQ 64-bit EL1
        valid_vect      el1_error               // Error 64-bit EL1

        valid_vect      el1_sync                // Synchronous 32-bit EL1
        valid_vect      el1_irq                 // IRQ 32-bit EL1
        invalid_vect    el1_fiq_invalid         // FIQ 32-bit EL1
        valid_vect      el1_error               // Error 32-bit EL1
SYM_CODE_END(__kvm_hyp_vector)

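/*
 * Invoke the ARM_SMCCC_ARCH_WORKAROUND_1 firmware call (the Spectre-v2
 * mitigation) on an exception taken from the guest. x0-x3 are preserved
 * across the SMC, and only half of the frame is popped so that x0/x1 stay
 * on the stack exactly as the plain "stp x0, x1, [sp, #-16]!" preamble
 * would have left them.
 */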
.macro spectrev2_smccc_wa1_smc
        sub     sp, sp, #(8 * 4)
        stp     x2, x3, [sp, #(8 * 0)]
        stp     x0, x1, [sp, #(8 * 2)]
        mov     w0, #ARM_SMCCC_ARCH_WORKAROUND_1
        smc     #0
        ldp     x2, x3, [sp, #(8 * 0)]
        add     sp, sp, #(8 * 2)
.endm

.macro hyp_ventry       indirect, spectrev2
        .align  7
1:      esb
        .if \spectrev2 != 0
        spectrev2_smccc_wa1_smc
        .else
        stp     x0, x1, [sp, #-16]!
        .endif
        .if \indirect != 0
        alternative_cb  kvm_patch_vector_branch
        /*
         * For ARM64_SPECTRE_V3A configurations, these NOPs get replaced with:
         *
         * movz x0, #(addr & 0xffff)
         * movk x0, #((addr >> 16) & 0xffff), lsl #16
         * movk x0, #((addr >> 32) & 0xffff), lsl #32
         * br   x0
         *
         * Where:
         * addr = kern_hyp_va(__kvm_hyp_vector) + vector-offset + KVM_VECTOR_PREAMBLE.
         * See kvm_patch_vector_branch() for details.
         */
        nop
        nop
        nop
        nop
        alternative_cb_end
        .endif
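        /*
         * Branch into the matching slot of __kvm_hyp_vector, skipping its
         * preamble: x0/x1 are already stashed. (1b - 0b) is this vector's
         * offset from the start of the generated 2KB page.
         */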
        b       __kvm_hyp_vector + (1b - 0b + KVM_VECTOR_PREAMBLE)
.endm

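/*
 * Each hyp_ventry slot is 128 bytes (.align 7), so 16 of them fill SZ_2K
 * exactly; the .org below turns any overflow into a build failure.
 */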
.macro generate_vectors indirect, spectrev2
0:
        .rept 16
        hyp_ventry      \indirect, \spectrev2
        .endr
        .org 0b + SZ_2K         // Safety measure
.endm

        .align  11
SYM_CODE_START(__bp_harden_hyp_vecs)
        generate_vectors indirect = 0, spectrev2 = 1 // HYP_VECTOR_SPECTRE_DIRECT
        generate_vectors indirect = 1, spectrev2 = 0 // HYP_VECTOR_INDIRECT
        generate_vectors indirect = 1, spectrev2 = 1 // HYP_VECTOR_SPECTRE_INDIRECT
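        /*
         * Assert that the three sets above fill __BP_HARDEN_HYP_VECS_SZ
         * exactly: .org cannot move backwards, so either directive below
         * fails the build on a size mismatch.
         */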
1:      .org __bp_harden_hyp_vecs + __BP_HARDEN_HYP_VECS_SZ
        .org 1b
SYM_CODE_END(__bp_harden_hyp_vecs)