/* linux/arch/mips/include/asm/pmc-sierra/msp71xx/msp_regops.h */
   1/*
   2 * SMP/VPE-safe functions to access "registers" (see note).
   3 *
   4 * NOTES:
   5* - These macros use ll/sc instructions, so it is your responsibility to
   6 * ensure these are available on your platform before including this file.
   7 * - The MIPS32 spec states that ll/sc results are undefined for uncached
   8 * accesses. This means they can't be used on HW registers accessed
   9 * through kseg1. Code which requires these macros for this purpose must
  10 * front-end the registers with cached memory "registers" and have a single
  11 * thread update the actual HW registers.
  12 * - A maximum of 2k of code can be inserted between ll and sc. Every
  13 * memory accesses between the instructions will increase the chance of
  14 * sc failing and having to loop.
  15 * - When using custom_read_reg32/custom_write_reg32 only perform the
  16 * necessary logical operations on the register value in between these
  17 * two calls. All other logic should be performed before the first call.
  18  * - There is a bug on the R10000 chips which has a workaround. If you
  19 * are affected by this bug, make sure to define the symbol 'R10000_LLSC_WAR'
  20 * to be non-zero.  If you are using this header from within linux, you may
  21 * include <asm/war.h> before including this file to have this defined
  22 * appropriately for you.
  23 *
  24 * Copyright 2005-2007 PMC-Sierra, Inc.
  25 *
  26 *  This program is free software; you can redistribute  it and/or modify it
  27 *  under  the terms of  the GNU General  Public License as published by the
  28 *  Free Software Foundation;  either version 2 of the  License, or (at your
  29 *  option) any later version.
  30 *
  31 *  THIS  SOFTWARE  IS PROVIDED   ``AS  IS'' AND   ANY  EXPRESS OR IMPLIED
  32 *  WARRANTIES,   INCLUDING, BUT NOT  LIMITED  TO, THE IMPLIED WARRANTIES OF
  33 *  MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO
  34 *  EVENT  SHALL   THE AUTHOR  BE    LIABLE FOR ANY   DIRECT, INDIRECT,
  35 *  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
  36 *  LIMITED   TO, PROCUREMENT OF  SUBSTITUTE GOODS  OR SERVICES; LOSS OF USE,
  37 *  DATA,  OR PROFITS; OR  BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  38 *  THEORY OF LIABILITY, WHETHER IN  CONTRACT, STRICT LIABILITY, OR TORT
  39 *  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
  40 *  THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  41 *
  42 *  You should have received a copy of the  GNU General Public License along
  43 *  with this program; if not, write  to the Free Software Foundation, Inc., 675
  44 *  Mass Ave, Cambridge, MA 02139, USA.
  45 */
  46
  47#ifndef __ASM_REGOPS_H__
  48#define __ASM_REGOPS_H__
  49
  50#include <linux/types.h>
  51
  52#include <asm/war.h>
  53
/* Default to "no R10000 ll/sc erratum workaround" unless the includer
 * (e.g. via <asm/war.h>) already defined it -- see file header notes. */
#ifndef R10000_LLSC_WAR
#define R10000_LLSC_WAR 0
#endif

/* Branch used after 'sc' to retry the ll/sc loop on failure.  With the
 * R10000 workaround enabled, use the branch-likely form 'beqzl'. */
#if R10000_LLSC_WAR == 1
#define __beqz  "beqzl  "
#else
#define __beqz  "beqz   "
#endif

/* Standalone (non-kernel) use: <linux/types.h> was not included, so
 * provide the u32 type these helpers rely on. */
#ifndef _LINUX_TYPES_H
typedef unsigned int u32;
#endif
  67
/*
 * set_value_reg32 - atomically set the masked bits of *addr to the
 * corresponding bits of 'value': *addr = (*addr & ~mask) | value.
 *
 * The read-modify-write runs inside an ll/sc retry loop, so it is safe
 * against concurrent updates from other CPUs/VPEs.  Per the file header,
 * addr must be a cached memory "register" (ll/sc is undefined on kseg1).
 *
 * NOTE(review): bits of 'value' outside 'mask' are ORed in unchanged --
 * callers are expected to pass (value & ~mask) == 0.
 */
static inline void set_value_reg32(volatile u32 *const addr,
                                        u32 const mask,
                                        u32 const value)
{
        u32 temp;

        __asm__ __volatile__(
        "       .set    push                            \n"
        "       .set    mips3                           \n" /* so the assembler accepts ll/sc */
        "1:     ll      %0, %1  # set_value_reg32       \n" /* load-linked current value */
        "       and     %0, %2                          \n" /* temp &= ~mask: clear target bits */
        "       or      %0, %3                          \n" /* temp |= value: merge new bits */
        "       sc      %0, %1                          \n" /* store-conditional; %0 = 0 on failure */
        "       "__beqz"%0, 1b                          \n" /* lost the reservation -> retry */
        "       nop                                     \n" /* branch delay slot */
        "       .set    pop                             \n"
        : "=&r" (temp), "=m" (*addr)
        : "ir" (~mask), "ir" (value), "m" (*addr));
}
  90
/*
 * set_reg32 - atomically set all masked bits of *addr to '1':
 * *addr |= mask, performed inside an ll/sc retry loop so it is safe
 * against concurrent updates from other CPUs/VPEs.
 */
static inline void set_reg32(volatile u32 *const addr,
                                u32 const mask)
{
        u32 temp;

        __asm__ __volatile__(
        "       .set    push                            \n"
        "       .set    mips3                           \n" /* so the assembler accepts ll/sc */
        "1:     ll      %0, %1          # set_reg32     \n" /* load-linked current value */
        "       or      %0, %2                          \n" /* temp |= mask */
        "       sc      %0, %1                          \n" /* store-conditional; %0 = 0 on failure */
        "       "__beqz"%0, 1b                          \n" /* lost the reservation -> retry */
        "       nop                                     \n" /* branch delay slot */
        "       .set    pop                             \n"
        : "=&r" (temp), "=m" (*addr)
        : "ir" (mask), "m" (*addr));
}
 111
/*
 * clear_reg32 - atomically set all masked bits of *addr to '0':
 * *addr &= ~mask, performed inside an ll/sc retry loop so it is safe
 * against concurrent updates from other CPUs/VPEs.
 */
static inline void clear_reg32(volatile u32 *const addr,
                                u32 const mask)
{
        u32 temp;

        __asm__ __volatile__(
        "       .set    push                            \n"
        "       .set    mips3                           \n" /* so the assembler accepts ll/sc */
        "1:     ll      %0, %1          # clear_reg32   \n" /* load-linked current value */
        "       and     %0, %2                          \n" /* temp &= ~mask */
        "       sc      %0, %1                          \n" /* store-conditional; %0 = 0 on failure */
        "       "__beqz"%0, 1b                          \n" /* lost the reservation -> retry */
        "       nop                                     \n" /* branch delay slot */
        "       .set    pop                             \n"
        : "=&r" (temp), "=m" (*addr)
        : "ir" (~mask), "m" (*addr));
}
 132
/*
 * toggle_reg32 - atomically flip all masked bits of *addr
 * ('0' -> '1' and '1' -> '0'): *addr ^= mask, performed inside an
 * ll/sc retry loop so it is safe against concurrent updates from
 * other CPUs/VPEs.
 */
static inline void toggle_reg32(volatile u32 *const addr,
                                u32 const mask)
{
        u32 temp;

        __asm__ __volatile__(
        "       .set    push                            \n"
        "       .set    mips3                           \n" /* so the assembler accepts ll/sc */
        "1:     ll      %0, %1          # toggle_reg32  \n" /* load-linked current value */
        "       xor     %0, %2                          \n" /* temp ^= mask */
        "       sc      %0, %1                          \n" /* store-conditional; %0 = 0 on failure */
        "       "__beqz"%0, 1b                          \n" /* lost the reservation -> retry */
        "       nop                                     \n" /* branch delay slot */
        "       .set    pop                             \n"
        : "=&r" (temp), "=m" (*addr)
        : "ir" (mask), "m" (*addr));
}
 153
/*
 * read_reg32 - read *addr and return only the masked bits; all other
 * bits of the result are '0' (i.e. returns *addr & mask).
 *
 * This is a plain 'lw', not an ll/sc sequence: there is no atomicity
 * beyond the single 32-bit load itself.
 */
static inline u32 read_reg32(volatile u32 *const addr,
                                u32 const mask)
{
        u32 temp;

        __asm__ __volatile__(
        "       .set    push                            \n"
        "       .set    noreorder                       \n"
        "       lw      %0, %1          # read          \n"
        "       and     %0, %2          # mask          \n"
        "       .set    pop                             \n"
        : "=&r" (temp)
        : "m" (*addr), "ir" (mask));

        return temp;
}
 173
/*
 * blocking_read_reg32 - Read address with blocking load
 *
 * Uncached writes need to be read back to ensure they reach RAM.
 * The returned value must be 'used' by the caller to prevent the
 * compiler/CPU from turning this into a non-blocking load.
 */
static inline u32 blocking_read_reg32(volatile u32 *const addr)
{
        u32 temp;

        __asm__ __volatile__(
        "       .set    push                            \n"
        "       .set    noreorder                       \n" /* keep the lw/move pair adjacent */
        "       lw      %0, %1          # read          \n"
        "       move    %0, %0          # block         \n" /* consume the loaded value so the
                                                             * pipeline waits for the load */
        "       .set    pop                             \n"
        : "=&r" (temp)
        : "m" (*addr));

        return temp;
}
 196
/*
 * For special strange cases only:
 *
 * If you need custom processing within a ll/sc loop, use the following macros
 * VERY CAREFULLY:
 *
 *   u32 tmp;                           <-- Define a variable to hold the data
 *
 *   custom_read_reg32(address, tmp);   <-- Reads the address and puts the
 *                                              value in the 'tmp' variable
 *
 *      From here on out, you are (basically) atomic, so don't do anything too
 *      fancy!
 *      Also, this code may loop if the end of this block fails to write
 *      everything back safely due to the other CPU, so do NOT do anything
 *      with side-effects!
 *
 *   custom_write_reg32(address, tmp);  <-- Writes back 'tmp' safely.
 */
/*
 * custom_read_reg32 - load-link *address into 'tmp' and open the ll/sc
 * critical section.  The local label "1:" defined here is the retry
 * target for the sc-failure branch in custom_write_reg32, so the two
 * macros MUST be used as a pair within the same function.
 */
#define custom_read_reg32(address, tmp)                         \
        __asm__ __volatile__(                                   \
        "       .set    push                            \n"     \
        "       .set    mips3                           \n"     \
        "1:     ll      %0, %1  #custom_read_reg32      \n"     \
        "       .set    pop                             \n"     \
        : "=r" (tmp), "=m" (*address)                           \
        : "m" (*address))
 224
/*
 * custom_write_reg32 - store-conditionally write 'tmp' back to *address.
 * If the reservation taken by the paired custom_read_reg32 was lost,
 * branch back to its "1:" label, re-running everything since that read.
 * The "0" (tmp) input constraint ties 'tmp' to operand %0 so its current
 * value is what gets stored.
 */
#define custom_write_reg32(address, tmp)                        \
        __asm__ __volatile__(                                   \
        "       .set    push                            \n"     \
        "       .set    mips3                           \n"     \
        "       sc      %0, %1  #custom_write_reg32     \n"     \
        "       "__beqz"%0, 1b                          \n"     \
        "       nop                                     \n"     \
        "       .set    pop                             \n"     \
        : "=&r" (tmp), "=m" (*address)                          \
        : "0" (tmp), "m" (*address))
 235
 236#endif  /* __ASM_REGOPS_H__ */
 237