linux/arch/ia64/include/uapi/asm/intrinsics.h
/* SPDX-License-Identifier: GPL-2.0 WITH Linux-syscall-note */
/*
 * Compiler-dependent intrinsics.
 *
 * Copyright (C) 2002-2003 Hewlett-Packard Co
 *      David Mosberger-Tang <davidm@hpl.hp.com>
 */
#ifndef _UAPI_ASM_IA64_INTRINSICS_H
#define _UAPI_ASM_IA64_INTRINSICS_H


#ifndef __ASSEMBLY__

#include <linux/types.h>
/* include compiler specific intrinsics */
#include <asm/ia64regs.h>
#ifdef __INTEL_COMPILER
# include <asm/intel_intrin.h>
#else
# include <asm/gcc_intrin.h>
#endif
#include <asm/cmpxchg.h>

#define ia64_native_get_psr_i() (ia64_native_getreg(_IA64_REG_PSR) & IA64_PSR_I)

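/*
 * The constants below appear to be the base addresses of regions 0-4:
 * bits 63:61 of an IA-64 virtual address select the region register, so
 * each argument to ia64_native_set_rr() steps by 1UL << 61.
 */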
#define ia64_native_set_rr0_to_rr4(val0, val1, val2, val3, val4)        \
do {                                                                     \
        ia64_native_set_rr(0x0000000000000000UL, (val0));               \
        ia64_native_set_rr(0x2000000000000000UL, (val1));               \
        ia64_native_set_rr(0x4000000000000000UL, (val2));               \
        ia64_native_set_rr(0x6000000000000000UL, (val3));               \
        ia64_native_set_rr(0x8000000000000000UL, (val4));               \
} while (0)

/*
 * Force an unresolved reference if someone tries to use
 * ia64_fetch_and_add() with a bad value.
 */
extern unsigned long __bad_size_for_ia64_fetch_and_add (void);
extern unsigned long __bad_increment_for_ia64_fetch_and_add (void);
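/*
 * Neither helper above is ever defined, so any call the compiler cannot
 * optimize away (i.e. a size or increment the macros below do not handle)
 * becomes a link-time error instead of silently broken code.
 */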

#define IA64_FETCHADD(tmp,v,n,sz,sem)                                           \
({                                                                              \
        switch (sz) {                                                           \
              case 4:                                                           \
                tmp = ia64_fetchadd4_##sem((unsigned int *) v, n);              \
                break;                                                          \
                                                                                \
              case 8:                                                           \
                tmp = ia64_fetchadd8_##sem((unsigned long *) v, n);             \
                break;                                                          \
                                                                                \
              default:                                                          \
                __bad_size_for_ia64_fetch_and_add();                            \
        }                                                                       \
})

#define ia64_fetchadd(i,v,sem)                                                          \
({                                                                                      \
        __u64 _tmp;                                                                     \
        volatile __typeof__(*(v)) *_v = (v);                                            \
        /* Can't use a switch () here: gcc isn't always smart enough for that... */     \
        if ((i) == -16)                                                                 \
                IA64_FETCHADD(_tmp, _v, -16, sizeof(*(v)), sem);                        \
        else if ((i) == -8)                                                             \
                IA64_FETCHADD(_tmp, _v, -8, sizeof(*(v)), sem);                         \
        else if ((i) == -4)                                                             \
                IA64_FETCHADD(_tmp, _v, -4, sizeof(*(v)), sem);                         \
        else if ((i) == -1)                                                             \
                IA64_FETCHADD(_tmp, _v, -1, sizeof(*(v)), sem);                         \
        else if ((i) == 1)                                                              \
                IA64_FETCHADD(_tmp, _v, 1, sizeof(*(v)), sem);                          \
        else if ((i) == 4)                                                              \
                IA64_FETCHADD(_tmp, _v, 4, sizeof(*(v)), sem);                          \
        else if ((i) == 8)                                                              \
                IA64_FETCHADD(_tmp, _v, 8, sizeof(*(v)), sem);                          \
        else if ((i) == 16)                                                             \
                IA64_FETCHADD(_tmp, _v, 16, sizeof(*(v)), sem);                         \
        else                                                                            \
                _tmp = __bad_increment_for_ia64_fetch_and_add();                        \
        (__typeof__(*(v))) (_tmp);      /* return old value */                          \
})

#define ia64_fetch_and_add(i,v) (ia64_fetchadd(i, v, rel) + (i)) /* return new value */
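/*
 * Usage sketch (illustrative only; "counter" is a hypothetical variable, not
 * part of this header).  ia64_fetchadd() returns the value the location held
 * *before* the add, while ia64_fetch_and_add() returns the value *after* it;
 * the increment must be one of the immediates the fetchadd instruction can
 * encode (+/-1, 4, 8, 16), and the third argument selects the acq/rel
 * completer:
 *
 *      static unsigned long counter;
 *
 *      unsigned long old_val = ia64_fetchadd(1, &counter, acq);
 *      unsigned long new_val = ia64_fetch_and_add(1, &counter);
 */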

#endif /* !__ASSEMBLY__ */


#ifndef __ASSEMBLY__

#define IA64_INTRINSIC_API(name)        ia64_native_ ## name
#define IA64_INTRINSIC_MACRO(name)      ia64_native_ ## name
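/*
 * In this native build both wrappers just paste the "ia64_native_" prefix
 * onto the intrinsic name, e.g. ia64_get_cpuid below expands to
 * ia64_native_get_cpuid; presumably a paravirtualized build would map the
 * same names onto its own implementations instead.
 */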


/************************************************/
/* Instructions paravirtualized for correctness */
/************************************************/
/* fc, thash, get_cpuid, get_pmd, get_eflags, set_eflags */
/* Note that "ttag" and "cover" are also privilege-sensitive; "ttag"
 * is not currently used (though it may be in a long-format VHPT system!)
 */
#define ia64_fc                         IA64_INTRINSIC_API(fc)
#define ia64_thash                      IA64_INTRINSIC_API(thash)
#define ia64_get_cpuid                  IA64_INTRINSIC_API(get_cpuid)
#define ia64_get_pmd                    IA64_INTRINSIC_API(get_pmd)


/************************************************/
/* Instructions paravirtualized for performance */
/************************************************/
#define ia64_ssm                        IA64_INTRINSIC_MACRO(ssm)
#define ia64_rsm                        IA64_INTRINSIC_MACRO(rsm)
#define ia64_getreg                     IA64_INTRINSIC_MACRO(getreg)
#define ia64_setreg                     IA64_INTRINSIC_API(setreg)
#define ia64_set_rr                     IA64_INTRINSIC_API(set_rr)
#define ia64_get_rr                     IA64_INTRINSIC_API(get_rr)
#define ia64_ptcga                      IA64_INTRINSIC_API(ptcga)
#define ia64_get_psr_i                  IA64_INTRINSIC_API(get_psr_i)
#define ia64_intrin_local_irq_restore   \
        IA64_INTRINSIC_API(intrin_local_irq_restore)
#define ia64_set_rr0_to_rr4             IA64_INTRINSIC_API(set_rr0_to_rr4)

#endif /* !__ASSEMBLY__ */

#endif /* _UAPI_ASM_IA64_INTRINSICS_H */