linux/arch/riscv/include/asm/bitops.h
/*
 * Copyright (C) 2012 Regents of the University of California
 *
 *   This program is free software; you can redistribute it and/or
 *   modify it under the terms of the GNU General Public License
 *   as published by the Free Software Foundation, version 2.
 *
 *   This program is distributed in the hope that it will be useful,
 *   but WITHOUT ANY WARRANTY; without even the implied warranty of
 *   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *   GNU General Public License for more details.
 */

#ifndef _ASM_RISCV_BITOPS_H
#define _ASM_RISCV_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error "Only <linux/bitops.h> can be included directly"
#endif /* _LINUX_BITOPS_H */

#include <linux/compiler.h>
#include <linux/irqflags.h>
#include <asm/barrier.h>
#include <asm/bitsperlong.h>

#ifndef smp_mb__before_clear_bit
#define smp_mb__before_clear_bit()  smp_mb()
#define smp_mb__after_clear_bit()   smp_mb()
#endif /* smp_mb__before_clear_bit */

#include <asm-generic/bitops/__ffs.h>
#include <asm-generic/bitops/ffz.h>
#include <asm-generic/bitops/fls.h>
#include <asm-generic/bitops/__fls.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/ffs.h>

#include <asm-generic/bitops/hweight.h>

#if (BITS_PER_LONG == 64)
#define __AMO(op)       "amo" #op ".d"
#elif (BITS_PER_LONG == 32)
#define __AMO(op)       "amo" #op ".w"
#else
#error "Unexpected BITS_PER_LONG"
#endif
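
/*
 * Editorial note, not part of the original header: as a sketch of how
 * __AMO is used, on a 64-bit kernel __AMO(or) expands to the string
 * "amoor.d", so __AMO(or) ".aq" pastes into the mnemonic "amoor.d.aq";
 * a 32-bit kernel uses the ".w" word-sized forms instead.
 */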

#define __test_and_op_bit_ord(op, mod, nr, addr, ord)           \
({                                                              \
        unsigned long __res, __mask;                            \
        __mask = BIT_MASK(nr);                                  \
        __asm__ __volatile__ (                                  \
                __AMO(op) #ord " %0, %2, %1"                    \
                : "=r" (__res), "+A" (addr[BIT_WORD(nr)])       \
                : "r" (mod(__mask))                             \
                : "memory");                                    \
        ((__res & __mask) != 0);                                \
})

#define __op_bit_ord(op, mod, nr, addr, ord)                    \
        __asm__ __volatile__ (                                  \
                __AMO(op) #ord " zero, %1, %0"                  \
                : "+A" (addr[BIT_WORD(nr)])                     \
                : "r" (mod(BIT_MASK(nr)))                       \
                : "memory");

#define __test_and_op_bit(op, mod, nr, addr)                    \
        __test_and_op_bit_ord(op, mod, nr, addr, .aqrl)
#define __op_bit(op, mod, nr, addr)                             \
        __op_bit_ord(op, mod, nr, addr, )

/* Bitmask modifiers */
#define __NOP(x)        (x)
#define __NOT(x)        (~(x))
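
/*
 * Editorial sketch, not part of the original header: the "mod" argument
 * controls how the mask reaches the AMO instruction.  Set and toggle
 * operations pass it through unchanged (__NOP), while clear operations
 * invert it (__NOT), so that, for example,
 *
 *      __op_bit(and, __NOT, nr, addr)
 *
 * atomically ANDs addr[BIT_WORD(nr)] with ~BIT_MASK(nr), clearing just
 * that one bit.
 */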

/**
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation may be reordered on architectures other than x86.
 */
static inline int test_and_set_bit(int nr, volatile unsigned long *addr)
{
        return __test_and_op_bit(or, __NOP, nr, addr);
}

/**
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation can be reordered on architectures other than x86.
 */
static inline int test_and_clear_bit(int nr, volatile unsigned long *addr)
{
        return __test_and_op_bit(and, __NOT, nr, addr);
}

/**
 * test_and_change_bit - Change a bit and return its old value
 * @nr: Bit to change
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
{
        return __test_and_op_bit(xor, __NOP, nr, addr);
}
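
/*
 * Editorial example, not part of the original header: the test_and_*
 * forms are typically used to claim something exactly once across CPUs.
 * The identifiers "flags", "MY_BUSY_BIT" and do_work() below are
 * hypothetical.
 *
 *      static unsigned long flags;
 *
 *      if (!test_and_set_bit(MY_BUSY_BIT, &flags))
 *              do_work();      only the caller that flipped 0 -> 1 gets here
 */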

/**
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * Note: there are no guarantees that this function will not be reordered
 * on non-x86 architectures, so if you are writing portable code,
 * make sure not to rely on its reordering guarantees.
 *
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(int nr, volatile unsigned long *addr)
{
        __op_bit(or, __NOP, nr, addr);
}

/**
 * clear_bit - Clear a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * Note: there are no guarantees that this function will not be reordered
 * on non-x86 architectures, so if you are writing portable code,
 * make sure not to rely on its reordering guarantees.
 */
static inline void clear_bit(int nr, volatile unsigned long *addr)
{
        __op_bit(and, __NOT, nr, addr);
}

/**
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() may be reordered on architectures other than x86.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(int nr, volatile unsigned long *addr)
{
        __op_bit(xor, __NOP, nr, addr);
}
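
/*
 * Editorial example, not part of the original header: because @nr is
 * split into a word index (BIT_WORD) and a bit offset (BIT_MASK), these
 * helpers operate on multi-word bitmaps.  The name "map" below is
 * hypothetical.
 *
 *      DECLARE_BITMAP(map, 128);
 *
 *      set_bit(70, map);       bit 6 of map[1] on a 64-bit kernel
 *      clear_bit(70, map);
 */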

/**
 * test_and_set_bit_lock - Set a bit and return its old value, for lock
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and provides acquire barrier semantics.
 * It can be used to implement bit locks.
 */
static inline int test_and_set_bit_lock(
        unsigned long nr, volatile unsigned long *addr)
{
        return __test_and_op_bit_ord(or, __NOP, nr, addr, .aq);
}

/**
 * clear_bit_unlock - Clear a bit in memory, for unlock
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * This operation is atomic and provides release barrier semantics.
 */
static inline void clear_bit_unlock(
        unsigned long nr, volatile unsigned long *addr)
{
        __op_bit_ord(and, __NOT, nr, addr, .rl);
}

/**
 * __clear_bit_unlock - Clear a bit in memory, for unlock
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * This operation is like clear_bit_unlock(), however it is not atomic.
 * It does provide release barrier semantics so it can be used to unlock
 * a bit lock, but it should only be used when no other CPU can modify
 * any bits in the memory until the lock is released (a good example is
 * if the bit lock itself protects access to the other bits in the word).
 *
 * On RISC-V systems there seems to be no benefit to taking advantage of the
 * non-atomic property here: it's a lot more instructions and we still have to
 * provide release semantics anyway.
 */
static inline void __clear_bit_unlock(
        unsigned long nr, volatile unsigned long *addr)
{
        clear_bit_unlock(nr, addr);
}
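
/*
 * Editorial example, not part of the original header: a minimal bit
 * lock built from the helpers above.  The identifiers "MY_LOCK_BIT" and
 * "lock_word" are hypothetical.
 *
 *      while (test_and_set_bit_lock(MY_LOCK_BIT, &lock_word))
 *              cpu_relax();
 *      ... critical section, ordered after the acquiring AMO ...
 *      clear_bit_unlock(MY_LOCK_BIT, &lock_word);
 */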

#undef __test_and_op_bit
#undef __op_bit
#undef __NOP
#undef __NOT
#undef __AMO

#include <asm-generic/bitops/non-atomic.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic.h>

#endif /* _ASM_RISCV_BITOPS_H */