uboot/arch/xtensa/include/asm/asmmacro.h
/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * Copyright (C) 2005 - 2013 Tensilica Inc.
 * Copyright (C) 2014 - 2016 Cadence Design Systems Inc.
 */

#ifndef _XTENSA_ASMMACRO_H
#define _XTENSA_ASMMACRO_H

#include <asm/arch/core.h>

/*
 * Function entry and return macros for supported ABIs.
 */

#if defined(__XTENSA_WINDOWED_ABI__)
#define abi_entry       entry   sp, 16
#define abi_ret         retw
#elif defined(__XTENSA_CALL0_ABI__)
#define abi_entry
#define abi_ret         ret
#else
#error Unsupported Xtensa ABI
#endif
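
/*
 * Usage sketch (the function name and body are hypothetical): a leaf
 * function written against these macros. With the windowed ABI this
 * expands to entry/retw; with the call0 ABI the entry is empty and the
 * return is a plain ret.
 *
 *      .global example_func
 *      .type   example_func, @function
 * example_func:
 *      abi_entry
 *      movi    a2, 0           # body: return 0 in a2
 *      abi_ret
 */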

/*
 * Some little helpers for loops. Use zero-overhead loops
 * where applicable and if supported by the processor.
 *
 * __loopi ar, at, size, incr
 *         ar   register initialized with the start address
 *         at   scratch register used by macro
 *         size size immediate value
 *         incr increment
 *
 * __loops ar, as, at, incr_log2[, mask_log2][, cond][, ncond]
 *         ar   register initialized with the start address
 *         as   register initialized with the size
 *         at   scratch register used by macro
 *         incr_log2    increment [in log2]
 *         mask_log2    mask [in log2]
 *         cond         true condition (used in loop'cond')
 *         ncond        false condition (used in b'ncond')
 *
 * __loopt ar, as, at, incr_log2
 *         ar   register initialized with the start address
 *         as   register initialized with the end address
 *         at   scratch register used by macro
 *         incr_log2    increment [in log2]
 *
 * __loop  as
 *         restart loop. 'as' register must not have been modified!
 *
 * __endla ar, as, incr
 *         ar   start address (modified)
 *         as   scratch register used by __loops/__loopi macros or
 *              end address used by __loopt macro
 *         incr increment
 */
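
/*
 * Usage sketch (registers and sizes are hypothetical): clear 16 bytes
 * starting at a2 with 4-byte stores. With zero-overhead loops __loopi
 * emits a LOOP instruction; without them a4 receives the end address
 * and __endla branches back.
 *
 *      movi    a5, 0
 *      __loopi a2, a4, 16, 4
 *      s32i    a5, a2, 0
 *      __endla a2, a4, 4
 */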

#if XCHAL_HAVE_LOOPS

.macro  __loopi ar, at, size, incr
        movi    \at, ((\size + \incr - 1) / (\incr))
        loop    \at, 99f
.endm


.macro  __loops ar, as, at, incr_log2, mask_log2, cond, ncond
        .ifgt \incr_log2 - 1
                addi    \at, \as, (1 << \incr_log2) - 1
                .ifnc \mask_log2,
                        extui   \at, \at, \incr_log2, \mask_log2
                .else
                        srli    \at, \at, \incr_log2
                .endif
        .endif
        loop\cond       \at, 99f
.endm


.macro  __loopt ar, as, at, incr_log2
        sub     \at, \as, \ar
        .ifgt   \incr_log2 - 1
                addi    \at, \at, (1 << \incr_log2) - 1
                srli    \at, \at, \incr_log2
        .endif
        loop    \at, 99f
.endm


.macro  __loop  as
        loop    \as, 99f
.endm


.macro  __endl  ar, as
99:
.endm


#else

.macro  __loopi ar, at, size, incr
        addi    \at, \ar, \size
98:
.endm


.macro  __loops ar, as, at, incr_log2, mask_log2, cond, ncond
        .ifnc \mask_log2,
                extui   \at, \as, \incr_log2, \mask_log2
        .else
                .ifnc \ncond,
                        srli    \at, \as, \incr_log2
                .endif
        .endif
        .ifnc \ncond,
                b\ncond \at, 99f
        .endif
        .ifnc \mask_log2,
                slli    \at, \at, \incr_log2
                add     \at, \ar, \at
        .else
                add     \at, \ar, \as
        .endif
98:
.endm

.macro  __loopt ar, as, at, incr_log2
98:
.endm


.macro  __loop  as
98:
.endm


.macro  __endl  ar, as
        bltu    \ar, \as, 98b
99:
.endm


#endif


.macro  __endla ar, as, incr
        addi    \ar, \ar, \incr
        __endl  \ar \as
.endm
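
/*
 * Usage sketch with explicit start/end addresses (registers are
 * hypothetical): walk words from a2 up to a3. __loopt derives the trip
 * count when zero-overhead loops are available; otherwise __endl
 * compares the running pointer against the end address in a3.
 *
 *      __loopt a2, a3, a4, 2
 *      l32i    a5, a2, 0
 *      __endla a2, a3, 4
 */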


#endif /* _XTENSA_ASMMACRO_H */