/* linux/tools/testing/selftests/powerpc/include/vmx_asm.h */
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Copyright 2015, Cyril Bur, IBM Corp.
 */

#include "basic_asm.h"
/*
 * PUSH_VMX(pos, reg): store the twelve VMX registers v20-v31 into the
 * stack frame, starting at offset 'pos' from the stack pointer (%r1).
 * Consumes 12 * 16 = 192 bytes of stack beginning at 'pos'.
 *
 * 'pos' MUST be 16-byte aligned: the stvx instruction ignores the low
 * four bits of the effective address, so a misaligned offset would
 * silently store to the rounded-down address instead of faulting.
 *
 * 'reg' is a scratch GPR used as the index register; it is clobbered
 * (left pointing at the last slot, pos + 176).
 *
 * v20-v31 are the non-volatile (callee-saved) vector registers under the
 * ELF ABI, hence they are the ones worth saving around clobbering code.
 * Pair with POP_VMX(pos, reg) to restore.
 */
#define PUSH_VMX(pos,reg) \
	li	reg,pos; \
	stvx	v20,reg,%r1; \
	addi	reg,reg,16; \
	stvx	v21,reg,%r1; \
	addi	reg,reg,16; \
	stvx	v22,reg,%r1; \
	addi	reg,reg,16; \
	stvx	v23,reg,%r1; \
	addi	reg,reg,16; \
	stvx	v24,reg,%r1; \
	addi	reg,reg,16; \
	stvx	v25,reg,%r1; \
	addi	reg,reg,16; \
	stvx	v26,reg,%r1; \
	addi	reg,reg,16; \
	stvx	v27,reg,%r1; \
	addi	reg,reg,16; \
	stvx	v28,reg,%r1; \
	addi	reg,reg,16; \
	stvx	v29,reg,%r1; \
	addi	reg,reg,16; \
	stvx	v30,reg,%r1; \
	addi	reg,reg,16; \
	stvx	v31,reg,%r1;

/*
 * POP_VMX(pos, reg): reload the twelve VMX registers v20-v31 from the
 * stack frame, starting at offset 'pos' from the stack pointer (%r1).
 * Restores exactly the layout written by PUSH_VMX(pos, reg).
 *
 * 'pos' MUST be 16-byte aligned: the lvx instruction ignores the low
 * four bits of the effective address, so a misaligned offset would
 * silently load from the rounded-down address.
 *
 * 'reg' is a scratch GPR used as the index register; it is clobbered
 * (left pointing at the last slot, pos + 176).
 */
#define POP_VMX(pos,reg) \
	li	reg,pos; \
	lvx	v20,reg,%r1; \
	addi	reg,reg,16; \
	lvx	v21,reg,%r1; \
	addi	reg,reg,16; \
	lvx	v22,reg,%r1; \
	addi	reg,reg,16; \
	lvx	v23,reg,%r1; \
	addi	reg,reg,16; \
	lvx	v24,reg,%r1; \
	addi	reg,reg,16; \
	lvx	v25,reg,%r1; \
	addi	reg,reg,16; \
	lvx	v26,reg,%r1; \
	addi	reg,reg,16; \
	lvx	v27,reg,%r1; \
	addi	reg,reg,16; \
	lvx	v28,reg,%r1; \
	addi	reg,reg,16; \
	lvx	v29,reg,%r1; \
	addi	reg,reg,16; \
	lvx	v30,reg,%r1; \
	addi	reg,reg,16; \
	lvx	v31,reg,%r1;

/*
 * load_vmx: fill the non-volatile VMX registers v20-v31 from a buffer.
 *
 * In:      r3 = pointer to twelve consecutive 16-byte vector values
 *               (should be 16-byte aligned -- lvx ignores the low four
 *               bits of the effective address, so a misaligned buffer
 *               loads from the rounded-down addresses instead)
 * Out:     v20-v31 = buffer[0] .. buffer[11]
 * Clobbers: r5, v20-v31
 *
 * Careful: this 'clobbers' vmx (by design) -- v20-v31 are callee-saved
 * under the ELF ABI, and this function deliberately overwrites them
 * without saving. Don't call this from C: a compiled caller expects
 * those registers to be preserved across the call.
 */
FUNC_START(load_vmx)
	li	r5,0		/* r5 = running byte offset into the buffer */
	lvx	v20,r5,r3
	addi	r5,r5,16
	lvx	v21,r5,r3
	addi	r5,r5,16
	lvx	v22,r5,r3
	addi	r5,r5,16
	lvx	v23,r5,r3
	addi	r5,r5,16
	lvx	v24,r5,r3
	addi	r5,r5,16
	lvx	v25,r5,r3
	addi	r5,r5,16
	lvx	v26,r5,r3
	addi	r5,r5,16
	lvx	v27,r5,r3
	addi	r5,r5,16
	lvx	v28,r5,r3
	addi	r5,r5,16
	lvx	v29,r5,r3
	addi	r5,r5,16
	lvx	v30,r5,r3
	addi	r5,r5,16
	lvx	v31,r5,r3	/* twelfth and last register, offset 176 */
	blr
FUNC_END(load_vmx)
