linux/arch/arc/include/asm/bitops.h
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#ifndef _ASM_BITOPS_H
#define _ASM_BITOPS_H

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#ifndef __ASSEMBLY__

#include <linux/types.h>
#include <linux/compiler.h>
#include <asm/barrier.h>
#ifndef CONFIG_ARC_HAS_LLSC
#include <asm/smp.h>
#endif

#if defined(CONFIG_ARC_HAS_LLSC)

/*
 * Hardware assisted Atomic-R-M-W
 */

#define BIT_OP(op, c_op, asm_op)					\
static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned int temp;						\
									\
	m += nr >> 5;							\
									\
	nr &= 0x1f;							\
									\
	__asm__ __volatile__(						\
	"1:	llock       %0, [%1]	\n"				\
	"	" #asm_op " %0, %0, %2	\n"				\
	"	scond       %0, [%1]	\n"				\
	"	bnz         1b		\n"				\
	: "=&r"(temp)	/* Early clobber, to prevent reg reuse */	\
	: "r"(m),	/* Not "m": llock only supports reg direct addr mode */	\
	  "ir"(nr)							\
	: "cc");							\
}

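/*
 * For illustration (a sketch, not part of this header's API surface):
 * BIT_OP(set, |, bset) above expands to roughly the following, i.e. a
 * single LLOCK/SCOND retry loop with no barriers (set_bit() and friends
 * have no ordering requirements):
 *
 *	static inline void set_bit(unsigned long nr, volatile unsigned long *m)
 *	{
 *		unsigned int temp;
 *
 *		m += nr >> 5;	// advance to the word holding bit @nr
 *		nr &= 0x1f;	// bit position within that word
 *
 *		__asm__ __volatile__(
 *		"1:	llock	%0, [%1]	\n"
 *		"	bset	%0, %0, %2	\n"
 *		"	scond	%0, [%1]	\n"
 *		"	bnz	1b		\n"
 *		: "=&r"(temp) : "r"(m), "ir"(nr) : "cc");
 *	}
 */
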
/*
 * Semantically:
 *    Test the bit
 *    if clear
 *        set it and return 0 (old value)
 *    else
 *        return 1 (old value)
 *
 * Since ARC lacks an equivalent h/w primitive, the bit is set unconditionally
 * and the old value of the bit is returned
 */
#define TEST_N_BIT_OP(op, c_op, asm_op)					\
static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned long old, temp;					\
									\
	m += nr >> 5;							\
									\
	nr &= 0x1f;							\
									\
	/*								\
	 * Explicit full memory barrier needed before/after as		\
	 * LLOCK/SCOND themselves don't provide any such semantics	\
	 */								\
	smp_mb();							\
									\
	__asm__ __volatile__(						\
	"1:	llock       %0, [%2]	\n"				\
	"	" #asm_op " %1, %0, %3	\n"				\
	"	scond       %1, [%2]	\n"				\
	"	bnz         1b		\n"				\
	: "=&r"(old), "=&r"(temp)					\
	: "r"(m), "ir"(nr)						\
	: "cc");							\
									\
	smp_mb();							\
									\
	return (old & (1UL << nr)) != 0;				\
}

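/*
 * Usage sketch (illustrative only, with a hypothetical caller-owned
 * bitmap): test_and_set_bit() makes "claim exactly once" race-free,
 * since only the caller that flips the bit from 0 to 1 sees 0 returned:
 *
 *	unsigned long in_use[1] = { 0 };
 *
 *	if (!test_and_set_bit(3, in_use)) {
 *		// bit 3 was clear: this caller owns slot 3
 *	}
 */
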
#else	/* !CONFIG_ARC_HAS_LLSC */

/*
 * Non hardware assisted Atomic-R-M-W
 * Locking here falls back to irq-disabling only (UP) or spinlocks (SMP)
 *
 * There's a "significant" micro-optimization in writing our own variants of
 * bitops (over the generic variants):
 *
 * (1) The generic APIs take a "signed" @nr while ours is "unsigned".
 *     This avoids extra code being generated for the pointer arithmetic,
 *     since the compiler can't be sure a signed index is not negative.
 * (2) The ARCompact bit fiddling instructions (BSET/BCLR/ASL etc.) only
 *     consider the bottom 5 bits of @nr, so there is NO need to mask them
 *     off at runtime.
 *     (GCC quirk: for a constant @nr the masking still needs to be done
 *      at compile time)
 */

#define BIT_OP(op, c_op, asm_op)					\
static inline void op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned long temp, flags;					\
	m += nr >> 5;							\
									\
	/*								\
	 * spin lock/unlock provide the needed smp_mb() before/after	\
	 */								\
	bitops_lock(flags);						\
									\
	temp = *m;							\
	*m = temp c_op (1UL << (nr & 0x1f));				\
									\
	bitops_unlock(flags);						\
}

#define TEST_N_BIT_OP(op, c_op, asm_op)					\
static inline int test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned long old, flags;					\
	m += nr >> 5;							\
									\
	bitops_lock(flags);						\
									\
	old = *m;							\
	*m = old c_op (1UL << (nr & 0x1f));				\
									\
	bitops_unlock(flags);						\
									\
	return (old & (1UL << (nr & 0x1f))) != 0;			\
}

#endif /* CONFIG_ARC_HAS_LLSC */

/***************************************
 * Non atomic variants
 **************************************/

#define __BIT_OP(op, c_op, asm_op)					\
static inline void __##op##_bit(unsigned long nr, volatile unsigned long *m)	\
{									\
	unsigned long temp;						\
	m += nr >> 5;							\
									\
	temp = *m;							\
	*m = temp c_op (1UL << (nr & 0x1f));				\
}

#define __TEST_N_BIT_OP(op, c_op, asm_op)				\
static inline int __test_and_##op##_bit(unsigned long nr, volatile unsigned long *m)\
{									\
	unsigned long old;						\
	m += nr >> 5;							\
									\
	old = *m;							\
	*m = old c_op (1UL << (nr & 0x1f));				\
									\
	return (old & (1UL << (nr & 0x1f))) != 0;			\
}

#define BIT_OPS(op, c_op, asm_op)					\
									\
	/* set_bit(), clear_bit(), change_bit() */			\
	BIT_OP(op, c_op, asm_op)					\
									\
	/* test_and_set_bit(), test_and_clear_bit(), test_and_change_bit() */\
	TEST_N_BIT_OP(op, c_op, asm_op)					\
									\
	/* __set_bit(), __clear_bit(), __change_bit() */		\
	__BIT_OP(op, c_op, asm_op)					\
									\
	/* __test_and_set_bit(), __test_and_clear_bit(), __test_and_change_bit() */\
	__TEST_N_BIT_OP(op, c_op, asm_op)

BIT_OPS(set, |, bset)
BIT_OPS(clear, & ~, bclr)
BIT_OPS(change, ^, bxor)
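
/*
 * Example (illustrative only) of the APIs generated above, assuming a
 * hypothetical two-word bitmap; bit 40 lives in word 40 >> 5 = 1, at
 * position 40 & 0x1f = 8:
 *
 *	unsigned long map[2] = { 0, 0 };
 *
 *	set_bit(40, map);			// atomic: map[1] |= 1UL << 8
 *	__clear_bit(40, map);			// non-atomic variant
 *	if (test_and_change_bit(40, map))	// atomic RMW: returns old bit
 *		;
 */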

/*
 * This routine doesn't need to be atomic.
 */
static inline int
test_bit(unsigned int nr, const volatile unsigned long *addr)
{
	unsigned long mask;

	addr += nr >> 5;

	mask = 1UL << (nr & 0x1f);

	return ((mask & *addr) != 0);
}
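
/*
 * e.g. test_bit(70, addr): addr += 70 >> 5 (third word),
 * mask = 1UL << (70 & 0x1f), i.e. bit 6 of that word.
 */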

#ifdef CONFIG_ISA_ARCOMPACT

/*
 * Count the number of zeros, starting from MSB
 * Helper for fls() and friends
 * This is a pure count, so neither the (1-32) nor the (0-31) convention
 * applies: the result ranges from 0 to 32, based on the number of leading
 * zeros.
 * clz(0x8000_0000) = 0, clz(0xFFFF_FFFF) = 0, clz(0) = 32, clz(1) = 31
 */
static inline __attribute__ ((const)) int clz(unsigned int x)
{
	unsigned int res;

	__asm__ __volatile__(
	"	norm.f	%0, %1		\n"
	"	mov.n	%0, 0		\n"
	"	add.p	%0, %0, 1	\n"
	: "=r"(res)
	: "r"(x)
	: "cc");

	return res;
}

static inline int constant_fls(int x)
{
	int r = 32;

	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}
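
/*
 * Worked example: constant_fls(0xF0)
 *	upper 16 bits clear: x <<= 16 -> 0x00F00000, r = 16
 *	upper  8 bits clear: x <<=  8 -> 0xF0000000, r =  8
 *	remaining tests all see a set bit, so the result is 8,
 *	i.e. the highest set bit of 0xF0 is bit 7, reported 1-based.
 */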

/*
 * fls = Find Last Set in word
 * @result: [1-32]
 * fls(1) = 1, fls(0x80000000) = 32, fls(0) = 0
 */
static inline __attribute__ ((const)) int fls(unsigned long x)
{
	if (__builtin_constant_p(x))
		return constant_fls(x);

	return 32 - clz(x);
}
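
/*
 * e.g. fls(0x80000000): clz() = 0,  so 32 - 0  = 32
 *      fls(1):          clz() = 31, so 32 - 31 = 1
 */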

/*
 * __fls: Similar to fls, but zero based (0-31)
 */
static inline __attribute__ ((const)) int __fls(unsigned long x)
{
	if (!x)
		return 0;
	else
		return fls(x) - 1;
}

/*
 * ffs = Find First Set in word (LSB to MSB)
 * @result: [1-32], 0 if all 0's
 */
#define ffs(x)	({ unsigned long __t = (x); fls(__t & -__t); })
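
/*
 * (__t & -__t) isolates the lowest set bit, so fls() of that yields its
 * 1-based index: e.g. x = 0x28, x & -x = 0x8, fls(0x8) = 4, hence
 * ffs(0x28) = 4. For x = 0 the intermediate is 0 and fls(0) = 0.
 */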

/*
 * __ffs: Similar to ffs, but zero based (0-31)
 */
static inline __attribute__ ((const)) int __ffs(unsigned long word)
{
	if (!word)
		return word;

	return ffs(word) - 1;
}

#else	/* CONFIG_ISA_ARCV2 */

/*
 * fls = Find Last Set in word
 * @result: [1-32]
 * fls(1) = 1, fls(0x80000000) = 32, fls(0) = 0
 */
static inline __attribute__ ((const)) int fls(unsigned long x)
{
	int n;

	asm volatile(
	"	fls.f	%0, %1		\n"  /* 0:31; 0(Z) if src 0 */
	"	add.nz	%0, %0, 1	\n"  /* 0:31 -> 1:32 */
	: "=r"(n)	/* Early clobber not needed */
	: "r"(x)
	: "cc");

	return n;
}

/*
 * __fls: Similar to fls, but zero based (0-31). Also 0 if no bit set
 */
static inline __attribute__ ((const)) int __fls(unsigned long x)
{
	/* FLS insn has exactly the same semantics as the API */
	return __builtin_arc_fls(x);
}

/*
 * ffs = Find First Set in word (LSB to MSB)
 * @result: [1-32], 0 if all 0's
 */
static inline __attribute__ ((const)) int ffs(unsigned long x)
{
	int n;

	asm volatile(
	"	ffs.f	%0, %1		\n"  /* 0:31; 31(Z) if src 0 */
	"	add.nz	%0, %0, 1	\n"  /* 0:31 -> 1:32 */
	"	mov.z	%0, 0		\n"  /* 31(Z) -> 0 */
	: "=r"(n)	/* Early clobber not needed */
	: "r"(x)
	: "cc");

	return n;
}

/*
 * __ffs: Similar to ffs, but zero based (0-31)
 */
static inline __attribute__ ((const)) int __ffs(unsigned long x)
{
	int n;

	asm volatile(
	"	ffs.f	%0, %1		\n"  /* 0:31; 31(Z) if src 0 */
	"	mov.z	%0, 0		\n"  /* 31(Z) -> 0 */
	: "=r"(n)
	: "r"(x)
	: "cc");

	return n;
}

#endif	/* CONFIG_ISA_ARCOMPACT */

/*
 * ffz = Find First Zero in word.
 * @return: [0-31]; result is undefined if all bits are 1 (there is no
 * zero bit to find, and __ffs(0) returns 0 here)
 */
#define ffz(x)	__ffs(~(x))
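
/*
 * e.g. ffz(0xFF) = __ffs(~0xFF) = __ffs(0xFFFFFF00) = 8
 */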

#include <asm-generic/bitops/hweight.h>
#include <asm-generic/bitops/fls64.h>
#include <asm-generic/bitops/sched.h>
#include <asm-generic/bitops/lock.h>

#include <asm-generic/bitops/find.h>
#include <asm-generic/bitops/le.h>
#include <asm-generic/bitops/ext2-atomic-setbit.h>

#endif /* !__ASSEMBLY__ */

#endif /* _ASM_BITOPS_H */