linux/arch/powerpc/include/asm/bitops.h
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * PowerPC atomic bit operations.
 *
 * Merged version by David Gibson <david@gibson.dropbear.id.au>.
 * Based on ppc64 versions by: Dave Engebretsen, Todd Inglett, Don
 * Reed, Pat McCarthy, Peter Bergner, Anton Blanchard.  They
 * originally took it from the ppc32 code.
 *
 * Within a word, bits are numbered LSB first.  Lots of places make
 * this assumption by directly testing bits with (val & (1<<nr)).
 * This can cause confusion for large (> 1 word) bitmaps on a
 * big-endian system because, unlike little endian, the number of each
 * bit depends on the word size.
 *
 * The bitop functions are defined to work on unsigned longs, so for a
 * ppc64 system the bits end up numbered:
 *   |63..............0|127............64|191...........128|255...........192|
 * and on ppc32:
 *   |31.....0|63....32|95....64|127...96|159..128|191..160|223..192|255..224|
 *
 * There are a few little-endian macros used mostly for filesystem
 * bitmaps; these work on similar bit array layouts, but
 * byte-oriented:
 *   |7...0|15...8|23...16|31...24|39...32|47...40|55...48|63...56|
 *
 * The main difference is that bits 3-5 (64b) or 3-4 (32b) of the bit
 * number need to be inverted compared to the big-endian bit fields.
 * This can be achieved by XOR with 0x38 (64b) or 0x18 (32b).
 */
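/*
 * For example (illustrative, assuming BITS_PER_LONG == 64): bit 0 of
 * such a byte-oriented little-endian bitmap is bit 56 of the layout
 * shown above (0 ^ 0x38 == 56), and little-endian bit 9 is bit 49
 * (9 ^ 0x38 == 49).
 */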

#ifndef _ASM_POWERPC_BITOPS_H
#define _ASM_POWERPC_BITOPS_H

#ifdef __KERNEL__

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>
#include <asm/asm-compat.h>
#include <asm/synch.h>

/* PPC bit number conversion */
#define PPC_BITLSHIFT(be)       (BITS_PER_LONG - 1 - (be))
#define PPC_BIT(bit)            (1UL << PPC_BITLSHIFT(bit))
#define PPC_BITMASK(bs, be)     ((PPC_BIT(bs) - PPC_BIT(be)) | PPC_BIT(bs))

/* Put a PPC bit into a "normal" bit position */
#define PPC_BITEXTRACT(bits, ppc_bit, dst_bit)                  \
        ((((bits) >> PPC_BITLSHIFT(ppc_bit)) & 1) << (dst_bit))

#define PPC_BITLSHIFT32(be)     (32 - 1 - (be))
#define PPC_BIT32(bit)          (1UL << PPC_BITLSHIFT32(bit))
#define PPC_BITMASK32(bs, be)   ((PPC_BIT32(bs) - PPC_BIT32(be))|PPC_BIT32(bs))

#define PPC_BITLSHIFT8(be)      (8 - 1 - (be))
#define PPC_BIT8(bit)           (1UL << PPC_BITLSHIFT8(bit))
#define PPC_BITMASK8(bs, be)    ((PPC_BIT8(bs) - PPC_BIT8(be))|PPC_BIT8(bs))
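/*
 * Illustrative expansions of the conversion macros above (the 64-bit
 * values assume BITS_PER_LONG == 64):
 *
 *      PPC_BIT(0)              == 0x8000000000000000UL (IBM bit 0 is the MSB)
 *      PPC_BIT(63)             == 0x0000000000000001UL
 *      PPC_BITMASK(0, 3)       == 0xF000000000000000UL (IBM bits 0..3)
 *      PPC_BITEXTRACT(PPC_BIT(0), 0, 5) == 0x20        (IBM bit 0 at "normal" bit 5)
 *      PPC_BITMASK32(28, 31)   == 0xF                  (IBM bits 28..31 of a 32-bit word)
 */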

#include <asm/barrier.h>

/* Macro for generating the ***_bits() functions */
#define DEFINE_BITOP(fn, op, prefix)            \
static inline void fn(unsigned long mask,       \
                volatile unsigned long *_p)     \
{                                               \
        unsigned long old;                      \
        unsigned long *p = (unsigned long *)_p; \
        __asm__ __volatile__ (                  \
        prefix                                  \
"1:"    PPC_LLARX "%0,0,%3,0\n"                 \
        stringify_in_c(op) "%0,%0,%2\n"         \
        PPC_STLCX "%0,0,%3\n"                   \
        "bne- 1b\n"                             \
        : "=&r" (old), "+m" (*p)                \
        : "r" (mask), "r" (p)                   \
        : "cc", "memory");                      \
}

DEFINE_BITOP(set_bits, or, "")
DEFINE_BITOP(clear_bits, andc, "")
DEFINE_BITOP(clear_bits_unlock, andc, PPC_RELEASE_BARRIER)
DEFINE_BITOP(change_bits, xor, "")
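/*
 * Each function generated above performs an atomic read-modify-write of
 * one word with a larx/stcx. (load-reserve/store-conditional) loop,
 * retrying until the conditional store succeeds.  Illustrative usage:
 *
 *      unsigned long flags = 0;
 *
 *      set_bits(0x5UL, &flags);        // flags == 0x5 (bits 0 and 2 set)
 *      clear_bits(0x1UL, &flags);      // flags == 0x4
 *      change_bits(0x6UL, &flags);     // flags == 0x2 (bits 1 and 2 toggled)
 */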

static inline void arch_set_bit(int nr, volatile unsigned long *addr)
{
        set_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
}

static inline void arch_clear_bit(int nr, volatile unsigned long *addr)
{
        clear_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
}

static inline void arch_clear_bit_unlock(int nr, volatile unsigned long *addr)
{
        clear_bits_unlock(BIT_MASK(nr), addr + BIT_WORD(nr));
}

static inline void arch_change_bit(int nr, volatile unsigned long *addr)
{
        change_bits(BIT_MASK(nr), addr + BIT_WORD(nr));
}
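/*
 * Illustrative example (assuming BITS_PER_LONG == 64):
 * arch_set_bit(70, bitmap) computes BIT_WORD(70) == 1 and
 * BIT_MASK(70) == 1UL << 6, i.e. it atomically sets bit 6 of bitmap[1].
 */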

/* Like DEFINE_BITOP(), with changes to the arguments to 'op' and the output
 * operands. */
#define DEFINE_TESTOP(fn, op, prefix, postfix, eh)      \
static inline unsigned long fn(                 \
                unsigned long mask,                     \
                volatile unsigned long *_p)             \
{                                                       \
        unsigned long old, t;                           \
        unsigned long *p = (unsigned long *)_p;         \
        __asm__ __volatile__ (                          \
        prefix                                          \
"1:"    PPC_LLARX "%0,0,%3,%4\n"                        \
        stringify_in_c(op) "%1,%0,%2\n"                 \
        PPC_STLCX "%1,0,%3\n"                           \
        "bne- 1b\n"                                     \
        postfix                                         \
        : "=&r" (old), "=&r" (t)                        \
        : "r" (mask), "r" (p), "i" (IS_ENABLED(CONFIG_PPC64) ? eh : 0)  \
        : "cc", "memory");                              \
        return (old & mask);                            \
}

DEFINE_TESTOP(test_and_set_bits, or, PPC_ATOMIC_ENTRY_BARRIER,
              PPC_ATOMIC_EXIT_BARRIER, 0)
DEFINE_TESTOP(test_and_set_bits_lock, or, "",
              PPC_ACQUIRE_BARRIER, 1)
DEFINE_TESTOP(test_and_clear_bits, andc, PPC_ATOMIC_ENTRY_BARRIER,
              PPC_ATOMIC_EXIT_BARRIER, 0)
DEFINE_TESTOP(test_and_change_bits, xor, PPC_ATOMIC_ENTRY_BARRIER,
              PPC_ATOMIC_EXIT_BARRIER, 0)
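/*
 * The test-and-* variants above return (old & mask): non-zero if any of
 * the masked bits was already set.  Illustrative usage:
 *
 *      unsigned long flags = 0x2;
 *
 *      test_and_set_bits(0x3UL, &flags);       // returns 0x2, flags == 0x3
 *      test_and_clear_bits(0x1UL, &flags);     // returns 0x1, flags == 0x2
 */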

static inline int arch_test_and_set_bit(unsigned long nr,
                                        volatile unsigned long *addr)
{
        return test_and_set_bits(BIT_MASK(nr), addr + BIT_WORD(nr)) != 0;
}

static inline int arch_test_and_set_bit_lock(unsigned long nr,
                                             volatile unsigned long *addr)
{
        return test_and_set_bits_lock(BIT_MASK(nr),
                                addr + BIT_WORD(nr)) != 0;
}

static inline int arch_test_and_clear_bit(unsigned long nr,
                                          volatile unsigned long *addr)
{
        return test_and_clear_bits(BIT_MASK(nr), addr + BIT_WORD(nr)) != 0;
}

static inline int arch_test_and_change_bit(unsigned long nr,
                                           volatile unsigned long *addr)
{
        return test_and_change_bits(BIT_MASK(nr), addr + BIT_WORD(nr)) != 0;
}

#ifdef CONFIG_PPC64
static inline unsigned long
clear_bit_unlock_return_word(int nr, volatile unsigned long *addr)
{
        unsigned long old, t;
        unsigned long *p = (unsigned long *)addr + BIT_WORD(nr);
        unsigned long mask = BIT_MASK(nr);

        __asm__ __volatile__ (
        PPC_RELEASE_BARRIER
"1:"    PPC_LLARX "%0,0,%3,0\n"
        "andc %1,%0,%2\n"
        PPC_STLCX "%1,0,%3\n"
        "bne- 1b\n"
        : "=&r" (old), "=&r" (t)
        : "r" (mask), "r" (p)
        : "cc", "memory");

        return old;
}

/*
 * This is a special function for mm/filemap.c
 * Bit 7 corresponds to PG_waiters.
 */
#define arch_clear_bit_unlock_is_negative_byte(nr, addr)                \
        (clear_bit_unlock_return_word(nr, addr) & BIT_MASK(7))
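/*
 * In other words: bit 'nr' is cleared with release semantics, and the
 * macro evaluates to a non-zero value if bit 7 of the same word
 * (PG_waiters) was set at that moment, so the caller knows whether any
 * page waiters may need waking.
 */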

#endif /* CONFIG_PPC64 */

#include <asm-generic/bitops/non-atomic.h>

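/*
 * Non-atomic unlock: a release barrier followed by a plain (non
 * larx/stcx.) clear of the bit.  Like the generic __clear_bit_unlock(),
 * this is only suitable when no other CPU can be concurrently modifying
 * the word.
 */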
static inline void arch___clear_bit_unlock(int nr, volatile unsigned long *addr)
{
        __asm__ __volatile__(PPC_RELEASE_BARRIER "" ::: "memory");
        __clear_bit(nr, addr);
}

/*
 * Return the zero-based bit position (LE, not IBM bit numbering) of
 * the most significant 1-bit in a double word.
 */
#define __ilog2(x)      ilog2(x)

#include <asm-generic/bitops/ffz.h>

#include <asm-generic/bitops/builtin-__ffs.h>

#include <asm-generic/bitops/builtin-ffs.h>

/*
 * fls: find last (most-significant) bit set.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(unsigned int x)
{
        int lz;

        if (__builtin_constant_p(x))
                return x ? 32 - __builtin_clz(x) : 0;
        asm("cntlzw %0,%1" : "=r" (lz) : "r" (x));
        return 32 - lz;
}

#include <asm-generic/bitops/builtin-__fls.h>

/*
 * 64-bit can do this using one cntlzd (count leading zeroes doubleword)
 * instruction; for 32-bit we use the generic version, which does two
 * 32-bit fls calls.
 */
#ifdef CONFIG_PPC64
static inline int fls64(__u64 x)
{
        int lz;

        if (__builtin_constant_p(x))
                return x ? 64 - __builtin_clzll(x) : 0;
        asm("cntlzd %0,%1" : "=r" (lz) : "r" (x));
        return 64 - lz;
}
#else
#include <asm-generic/bitops/fls64.h>
#endif
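/* Analogously to fls(): fls64(0) = 0, fls64(1) = 1, fls64(1ULL << 63) = 64. */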

#ifdef CONFIG_PPC64
unsigned int __arch_hweight8(unsigned int w);
unsigned int __arch_hweight16(unsigned int w);
unsigned int __arch_hweight32(unsigned int w);
unsigned long __arch_hweight64(__u64 w);
#include <asm-generic/bitops/const_hweight.h>
#else
#include <asm-generic/bitops/hweight.h>
#endif
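/*
 * The hweight*() helpers return the number of set bits (the Hamming
 * weight), e.g. hweight32(0xF0F0F0F0) == 16.
 */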

#include <asm-generic/bitops/find.h>

/* wrappers that deal with KASAN instrumentation */
#include <asm-generic/bitops/instrumented-atomic.h>
#include <asm-generic/bitops/instrumented-lock.h>

/* Little-endian versions */
#include <asm-generic/bitops/le.h>

/* Bitmap functions for the ext2 filesystem */

#include <asm-generic/bitops/ext2-atomic-setbit.h>

#include <asm-generic/bitops/sched.h>

#endif /* __KERNEL__ */

#endif /* _ASM_POWERPC_BITOPS_H */