linux/arch/sh/include/asm/bitops-llsc.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_BITOPS_LLSC_H
#define __ASM_SH_BITOPS_LLSC_H

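/*
 * LL/SC-based atomic bitops built on the SH-4A movli.l/movco.l pair:
 * movli.l loads the word and opens a link, movco.l stores it back only
 * if the link is still intact and records the outcome in the T bit,
 * and "bf 1b" retries the whole read-modify-write until the
 * conditional store succeeds.
 *
 * Bits are addressed as 32-bit words: nr >> 5 selects the word and
 * 1 << (nr & 0x1f) the bit within it.
 */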
static inline void set_bit(int nr, volatile void *addr)
{
        int     mask;
        volatile unsigned int *a = addr;
        unsigned long tmp;

        a += nr >> 5;
        mask = 1 << (nr & 0x1f);

        __asm__ __volatile__ (
                "1:                                             \n\t"
                "movli.l        @%1, %0 ! set_bit               \n\t"
                "or             %2, %0                          \n\t"
                "movco.l        %0, @%1                         \n\t"
                "bf             1b                              \n\t"
                : "=&z" (tmp)
                : "r" (a), "r" (mask)
                : "t", "memory"
        );
}

static inline void clear_bit(int nr, volatile void *addr)
{
        int     mask;
        volatile unsigned int *a = addr;
        unsigned long tmp;

        a += nr >> 5;
        mask = 1 << (nr & 0x1f);

        __asm__ __volatile__ (
                "1:                                             \n\t"
                "movli.l        @%1, %0 ! clear_bit             \n\t"
                "and            %2, %0                          \n\t"
                "movco.l        %0, @%1                         \n\t"
                "bf             1b                              \n\t"
                : "=&z" (tmp)
                : "r" (a), "r" (~mask)
                : "t", "memory"
        );
}

static inline void change_bit(int nr, volatile void *addr)
{
        int     mask;
        volatile unsigned int *a = addr;
        unsigned long tmp;

        a += nr >> 5;
        mask = 1 << (nr & 0x1f);

        __asm__ __volatile__ (
                "1:                                             \n\t"
                "movli.l        @%1, %0 ! change_bit            \n\t"
                "xor            %2, %0                          \n\t"
                "movco.l        %0, @%1                         \n\t"
                "bf             1b                              \n\t"
                : "=&z" (tmp)
                : "r" (a), "r" (mask)
                : "t", "memory"
        );
}

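/*
 * The test_and_*() variants also return the old value of the bit: the
 * word read by movli.l is saved in %1 before being modified, and the
 * trailing "and" masks that copy down to the tested bit.  The final
 * synco is the SH-4A memory barrier, giving these value-returning
 * operations the ordering the kernel expects of them.
 */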
static inline int test_and_set_bit(int nr, volatile void *addr)
{
        int     mask, retval;
        volatile unsigned int *a = addr;
        unsigned long tmp;

        a += nr >> 5;
        mask = 1 << (nr & 0x1f);

        __asm__ __volatile__ (
                "1:                                             \n\t"
                "movli.l        @%2, %0 ! test_and_set_bit      \n\t"
                "mov            %0, %1                          \n\t"
                "or             %3, %0                          \n\t"
                "movco.l        %0, @%2                         \n\t"
                "bf             1b                              \n\t"
                "and            %3, %1                          \n\t"
                "synco                                          \n\t"
                : "=&z" (tmp), "=&r" (retval)
                : "r" (a), "r" (mask)
                : "t", "memory"
        );

        return retval != 0;
}

static inline int test_and_clear_bit(int nr, volatile void *addr)
{
        int     mask, retval;
        volatile unsigned int *a = addr;
        unsigned long tmp;

        a += nr >> 5;
        mask = 1 << (nr & 0x1f);

        __asm__ __volatile__ (
                "1:                                             \n\t"
                "movli.l        @%2, %0 ! test_and_clear_bit    \n\t"
                "mov            %0, %1                          \n\t"
                "and            %4, %0                          \n\t"
                "movco.l        %0, @%2                         \n\t"
                "bf             1b                              \n\t"
                "and            %3, %1                          \n\t"
                "synco                                          \n\t"
                : "=&z" (tmp), "=&r" (retval)
                : "r" (a), "r" (mask), "r" (~mask)
                : "t", "memory"
        );

        return retval != 0;
}

static inline int test_and_change_bit(int nr, volatile void *addr)
{
        int     mask, retval;
        volatile unsigned int *a = addr;
        unsigned long tmp;

        a += nr >> 5;
        mask = 1 << (nr & 0x1f);

        __asm__ __volatile__ (
                "1:                                             \n\t"
                "movli.l        @%2, %0 ! test_and_change_bit   \n\t"
                "mov            %0, %1                          \n\t"
                "xor            %3, %0                          \n\t"
                "movco.l        %0, @%2                         \n\t"
                "bf             1b                              \n\t"
                "and            %3, %1                          \n\t"
                "synco                                          \n\t"
                : "=&z" (tmp), "=&r" (retval)
                : "r" (a), "r" (mask)
                : "t", "memory"
        );

        return retval != 0;
}

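/*
 * The non-atomic (double-underscore) variants come from the generic
 * implementation.
 */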
#include <asm-generic/bitops/non-atomic.h>

#endif /* __ASM_SH_BITOPS_LLSC_H */