/* arch/mips/include/asm/msa.h */
   1/* SPDX-License-Identifier: GPL-2.0-or-later */
   2/*
   3 * Copyright (C) 2013 Imagination Technologies
   4 * Author: Paul Burton <paul.burton@mips.com>
   5 */
   6#ifndef _ASM_MSA_H
   7#define _ASM_MSA_H
   8
   9#include <asm/mipsregs.h>
  10
  11#ifndef __ASSEMBLY__
  12
  13#include <asm/inst.h>
  14
  15extern void _save_msa(struct task_struct *);
  16extern void _restore_msa(struct task_struct *);
  17extern void _init_msa_upper(void);
  18
  19extern void read_msa_wr_b(unsigned idx, union fpureg *to);
  20extern void read_msa_wr_h(unsigned idx, union fpureg *to);
  21extern void read_msa_wr_w(unsigned idx, union fpureg *to);
  22extern void read_msa_wr_d(unsigned idx, union fpureg *to);
  23
  24/**
  25 * read_msa_wr() - Read a single MSA vector register
  26 * @idx:        The index of the vector register to read
  27 * @to:         The FPU register union to store the registers value in
  28 * @fmt:        The format of the data in the vector register
  29 *
  30 * Read the value of MSA vector register idx into the FPU register
  31 * union to, using the format fmt.
  32 */
  33static inline void read_msa_wr(unsigned idx, union fpureg *to,
  34                               enum msa_2b_fmt fmt)
  35{
  36        switch (fmt) {
  37        case msa_fmt_b:
  38                read_msa_wr_b(idx, to);
  39                break;
  40
  41        case msa_fmt_h:
  42                read_msa_wr_h(idx, to);
  43                break;
  44
  45        case msa_fmt_w:
  46                read_msa_wr_w(idx, to);
  47                break;
  48
  49        case msa_fmt_d:
  50                read_msa_wr_d(idx, to);
  51                break;
  52
  53        default:
  54                BUG();
  55        }
  56}
  57
  58extern void write_msa_wr_b(unsigned idx, union fpureg *from);
  59extern void write_msa_wr_h(unsigned idx, union fpureg *from);
  60extern void write_msa_wr_w(unsigned idx, union fpureg *from);
  61extern void write_msa_wr_d(unsigned idx, union fpureg *from);
  62
  63/**
  64 * write_msa_wr() - Write a single MSA vector register
  65 * @idx:        The index of the vector register to write
  66 * @from:       The FPU register union to take the registers value from
  67 * @fmt:        The format of the data in the vector register
  68 *
  69 * Write the value from the FPU register union from into MSA vector
  70 * register idx, using the format fmt.
  71 */
  72static inline void write_msa_wr(unsigned idx, union fpureg *from,
  73                                enum msa_2b_fmt fmt)
  74{
  75        switch (fmt) {
  76        case msa_fmt_b:
  77                write_msa_wr_b(idx, from);
  78                break;
  79
  80        case msa_fmt_h:
  81                write_msa_wr_h(idx, from);
  82                break;
  83
  84        case msa_fmt_w:
  85                write_msa_wr_w(idx, from);
  86                break;
  87
  88        case msa_fmt_d:
  89                write_msa_wr_d(idx, from);
  90                break;
  91
  92        default:
  93                BUG();
  94        }
  95}
  96
  97static inline void enable_msa(void)
  98{
  99        if (cpu_has_msa) {
 100                set_c0_config5(MIPS_CONF5_MSAEN);
 101                enable_fpu_hazard();
 102        }
 103}
 104
 105static inline void disable_msa(void)
 106{
 107        if (cpu_has_msa) {
 108                clear_c0_config5(MIPS_CONF5_MSAEN);
 109                disable_fpu_hazard();
 110        }
 111}
 112
 113static inline int is_msa_enabled(void)
 114{
 115        if (!cpu_has_msa)
 116                return 0;
 117
 118        return read_c0_config5() & MIPS_CONF5_MSAEN;
 119}
 120
 121static inline int thread_msa_context_live(void)
 122{
 123        /*
 124         * Check cpu_has_msa only if it's a constant. This will allow the
 125         * compiler to optimise out code for CPUs without MSA without adding
 126         * an extra redundant check for CPUs with MSA.
 127         */
 128        if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
 129                return 0;
 130
 131        return test_thread_flag(TIF_MSA_CTX_LIVE);
 132}
 133
 134static inline void save_msa(struct task_struct *t)
 135{
 136        if (cpu_has_msa)
 137                _save_msa(t);
 138}
 139
 140static inline void restore_msa(struct task_struct *t)
 141{
 142        if (cpu_has_msa)
 143                _restore_msa(t);
 144}
 145
 146static inline void init_msa_upper(void)
 147{
 148        /*
 149         * Check cpu_has_msa only if it's a constant. This will allow the
 150         * compiler to optimise out code for CPUs without MSA without adding
 151         * an extra redundant check for CPUs with MSA.
 152         */
 153        if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
 154                return;
 155
 156        _init_msa_upper();
 157}
 158
#ifndef TOOLCHAIN_SUPPORTS_MSA
/*
 * Define assembler macros using .word for the c[ft]cmsa instructions in order
 * to allow compilation with toolchains that do not support MSA. Once all
 * toolchains in use support MSA these can be removed.
 */
/* cfcmsa rd, $cs - raw MIPS and microMIPS encodings of the register read */
_ASM_MACRO_2R(cfcmsa, rd, cs,
	_ASM_INSN_IF_MIPS(0x787e0019 | __cs << 11 | __rd << 6)
	_ASM_INSN32_IF_MM(0x587e0016 | __cs << 11 | __rd << 6));
/* ctcmsa $cd, rs - raw MIPS and microMIPS encodings of the register write */
_ASM_MACRO_2R(ctcmsa, cd, rs,
	_ASM_INSN_IF_MIPS(0x783e0019 | __rs << 11 | __cd << 6)
	_ASM_INSN32_IF_MM(0x583e0016 | __rs << 11 | __cd << 6));
/* The .word macros above need no assembler mode switches. */
#define _ASM_SET_MSA ""
#else /* TOOLCHAIN_SUPPORTS_MSA */
/* The toolchain assembles c[ft]cmsa itself; switch on fp=64 and msa modes. */
#define _ASM_SET_MSA ".set\tfp=64\n\t"				\
		     ".set\tmsa\n\t"
#endif
 176
/*
 * __BUILD_MSA_CTL_REG() - generate a read_msa_<name>()/write_msa_<name>()
 * accessor pair for MSA control register number cs, using the cfcmsa
 * (read) and ctcmsa (write) instructions wrapped in a .set push/pop so
 * the assembler mode change from _ASM_SET_MSA stays local.
 */
#define __BUILD_MSA_CTL_REG(name, cs)				\
static inline unsigned int read_msa_##name(void)		\
{								\
	unsigned int reg;					\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	_ASM_SET_MSA						\
	"	cfcmsa	%0, $" #cs "\n"				\
	"	.set	pop\n"					\
	: "=r"(reg));						\
	return reg;						\
}								\
								\
static inline void write_msa_##name(unsigned int val)		\
{								\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	_ASM_SET_MSA						\
	"	ctcmsa	$" #cs ", %0\n"				\
	"	.set	pop\n"					\
	: : "r"(val));						\
}
 199
/* Accessor pairs for each of the eight MSA control registers ($0..$7). */
__BUILD_MSA_CTL_REG(ir, 0)
__BUILD_MSA_CTL_REG(csr, 1)
__BUILD_MSA_CTL_REG(access, 2)
__BUILD_MSA_CTL_REG(save, 3)
__BUILD_MSA_CTL_REG(modify, 4)
__BUILD_MSA_CTL_REG(request, 5)
__BUILD_MSA_CTL_REG(map, 6)
__BUILD_MSA_CTL_REG(unmap, 7)
 208
 209#endif /* !__ASSEMBLY__ */
 210
/*
 * MSA control register indices - these match the register numbers used by
 * the __BUILD_MSA_CTL_REG() accessors above, and are usable from assembly
 * (this section is outside the !__ASSEMBLY__ guard).
 */
#define MSA_IR		0
#define MSA_CSR		1
#define MSA_ACCESS	2
#define MSA_SAVE	3
#define MSA_MODIFY	4
#define MSA_REQUEST	5
#define MSA_MAP		6
#define MSA_UNMAP	7
 219
/*
 * Bitfield definitions: a *B macro is a field's starting bit offset, the
 * matching *F macro is the field's mask (value shifted into place).
 */

/* MSA Implementation Register (MSAIR) */
#define MSA_IR_REVB		0
#define MSA_IR_REVF		(_ULCAST_(0xff) << MSA_IR_REVB)
#define MSA_IR_PROCB		8
#define MSA_IR_PROCF		(_ULCAST_(0xff) << MSA_IR_PROCB)
#define MSA_IR_WRPB		16
#define MSA_IR_WRPF		(_ULCAST_(0x1) << MSA_IR_WRPB)

/* MSA Control & Status Register (MSACSR) */
#define MSA_CSR_RMB		0
#define MSA_CSR_RMF		(_ULCAST_(0x3) << MSA_CSR_RMB)
#define MSA_CSR_RM_NEAREST	0
#define MSA_CSR_RM_TO_ZERO	1
#define MSA_CSR_RM_TO_POS	2
#define MSA_CSR_RM_TO_NEG	3
/*
 * NOTE(review): the I/U/O/Z/V bits below presumably mirror the usual IEEE
 * exception set (inexact/underflow/overflow/divide-by-zero/invalid) as in
 * the FPU FCSR - confirm against the MSA architecture manual.
 */
#define MSA_CSR_FLAGSB		2
#define MSA_CSR_FLAGSF		(_ULCAST_(0x1f) << MSA_CSR_FLAGSB)
#define MSA_CSR_FLAGS_IB	2
#define MSA_CSR_FLAGS_IF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_IB)
#define MSA_CSR_FLAGS_UB	3
#define MSA_CSR_FLAGS_UF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_UB)
#define MSA_CSR_FLAGS_OB	4
#define MSA_CSR_FLAGS_OF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_OB)
#define MSA_CSR_FLAGS_ZB	5
#define MSA_CSR_FLAGS_ZF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_ZB)
#define MSA_CSR_FLAGS_VB	6
#define MSA_CSR_FLAGS_VF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_VB)
#define MSA_CSR_ENABLESB	7
#define MSA_CSR_ENABLESF	(_ULCAST_(0x1f) << MSA_CSR_ENABLESB)
#define MSA_CSR_ENABLES_IB	7
#define MSA_CSR_ENABLES_IF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_IB)
#define MSA_CSR_ENABLES_UB	8
#define MSA_CSR_ENABLES_UF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_UB)
#define MSA_CSR_ENABLES_OB	9
#define MSA_CSR_ENABLES_OF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_OB)
#define MSA_CSR_ENABLES_ZB	10
#define MSA_CSR_ENABLES_ZF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_ZB)
#define MSA_CSR_ENABLES_VB	11
#define MSA_CSR_ENABLES_VF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_VB)
#define MSA_CSR_CAUSEB		12
#define MSA_CSR_CAUSEF		(_ULCAST_(0x3f) << MSA_CSR_CAUSEB)
#define MSA_CSR_CAUSE_IB	12
#define MSA_CSR_CAUSE_IF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_IB)
#define MSA_CSR_CAUSE_UB	13
#define MSA_CSR_CAUSE_UF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_UB)
#define MSA_CSR_CAUSE_OB	14
#define MSA_CSR_CAUSE_OF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_OB)
#define MSA_CSR_CAUSE_ZB	15
#define MSA_CSR_CAUSE_ZF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_ZB)
#define MSA_CSR_CAUSE_VB	16
#define MSA_CSR_CAUSE_VF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_VB)
#define MSA_CSR_CAUSE_EB	17
#define MSA_CSR_CAUSE_EF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_EB)
#define MSA_CSR_NXB		18
#define MSA_CSR_NXF		(_ULCAST_(0x1) << MSA_CSR_NXB)
#define MSA_CSR_FSB		24
#define MSA_CSR_FSF		(_ULCAST_(0x1) << MSA_CSR_FSB)
 277
 278#endif /* _ASM_MSA_H */
 279