uboot/arch/mips/include/asm/system.h
/*
 * Copyright (C) 1994 - 1999 by Ralf Baechle
 * Copyright (C) 1996 by Paul M. Antoine
 *
 * Changed set_except_vector declaration to allow return of previous
 * vector address value - necessary for "borrowing" vectors.
 *
 * Kevin D. Kissell, kevink@mips.org and Carsten Langgaard, carstenl@mips.com
 * Copyright (C) 2000 MIPS Technologies, Inc.
 *
 * SPDX-License-Identifier:     GPL-2.0
 */
#ifndef _ASM_SYSTEM_H
#define _ASM_SYSTEM_H

#include <asm/asm.h>
#include <asm/sgidefs.h>
#include <asm/ptrace.h>
#include <linux/stringify.h>
#if 0
#include <linux/kernel.h>
#endif

/*
 * Enable interrupts: ori/xori set the low five bits of the CP0
 * Status register and then clear all of them except IE, so EXL and
 * ERL end up cleared and interrupts enabled.
 */
static __inline__ void
__sti(void)
{
        __asm__ __volatile__(
                ".set\tpush\n\t"
                ".set\treorder\n\t"
                ".set\tnoat\n\t"
                "mfc0\t$1,$12\n\t"
                "ori\t$1,0x1f\n\t"
                "xori\t$1,0x1e\n\t"
                "mtc0\t$1,$12\n\t"
                ".set\tpop\n\t"
                : /* no outputs */
                : /* no inputs */
                : "$1", "memory");
}

/*
 * For cli() we have to insert nops to make sure that the new value
 * has actually arrived in the status register before the end of this
 * macro.
 * The R4000/R4400 need three nops, the R4600 two, and the R10000
 * needs none at all.
 */
static __inline__ void
__cli(void)
{
        __asm__ __volatile__(
                ".set\tpush\n\t"
                ".set\treorder\n\t"
                ".set\tnoat\n\t"
                "mfc0\t$1,$12\n\t"
                "ori\t$1,1\n\t"
                "xori\t$1,1\n\t"
                ".set\tnoreorder\n\t"
                "mtc0\t$1,$12\n\t"
                "nop\n\t"
                "nop\n\t"
                "nop\n\t"
                ".set\tpop\n\t"
                : /* no outputs */
                : /* no inputs */
                : "$1", "memory");
}

#define __save_flags(x)                                                 \
__asm__ __volatile__(                                                   \
        ".set\tpush\n\t"                                                \
        ".set\treorder\n\t"                                             \
        "mfc0\t%0,$12\n\t"                                              \
        ".set\tpop\n\t"                                                 \
        : "=r" (x))

#define __save_and_cli(x)                                               \
__asm__ __volatile__(                                                   \
        ".set\tpush\n\t"                                                \
        ".set\treorder\n\t"                                             \
        ".set\tnoat\n\t"                                                \
        "mfc0\t%0,$12\n\t"                                              \
        "ori\t$1,%0,1\n\t"                                              \
        "xori\t$1,1\n\t"                                                \
        ".set\tnoreorder\n\t"                                           \
        "mtc0\t$1,$12\n\t"                                              \
        "nop\n\t"                                                       \
        "nop\n\t"                                                       \
        "nop\n\t"                                                       \
        ".set\tpop\n\t"                                                 \
        : "=r" (x)                                                      \
        : /* no inputs */                                               \
        : "$1", "memory")

#define __restore_flags(flags)                                          \
do {                                                                    \
        unsigned long __tmp1;                                           \
                                                                        \
        __asm__ __volatile__(                                           \
                ".set\tnoreorder\t\t\t# __restore_flags\n\t"            \
                ".set\tnoat\n\t"                                        \
                "mfc0\t$1, $12\n\t"                                     \
                "andi\t%0, 1\n\t"                                       \
                "ori\t$1, 1\n\t"                                        \
                "xori\t$1, 1\n\t"                                       \
                "or\t%0, $1\n\t"                                        \
                "mtc0\t%0, $12\n\t"                                     \
                "nop\n\t"                                               \
                "nop\n\t"                                               \
                "nop\n\t"                                               \
                ".set\tat\n\t"                                          \
                ".set\treorder"                                         \
                : "=r" (__tmp1)                                         \
                : "0" (flags)                                           \
                : "$1", "memory");                                      \
} while(0)

#ifdef CONFIG_SMP

extern void __global_sti(void);
extern void __global_cli(void);
extern unsigned long __global_save_flags(void);
extern void __global_restore_flags(unsigned long);
#  define sti() __global_sti()
#  define cli() __global_cli()
#  define save_flags(x) do { x = __global_save_flags(); } while (0)
#  define restore_flags(x) __global_restore_flags(x)
#  define save_and_cli(x) do { save_flags(x); cli(); } while(0)

#else /* Single processor */

#  define sti() __sti()
#  define cli() __cli()
#  define save_flags(x) __save_flags(x)
#  define save_and_cli(x) __save_and_cli(x)
#  define restore_flags(x) __restore_flags(x)

#endif /* SMP */

/* For spinlocks etc */
#define local_irq_save(x)       __save_and_cli(x)
#define local_irq_restore(x)    __restore_flags(x)
#define local_irq_disable()     __cli()
#define local_irq_enable()      __sti()
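
/*
 * Example usage (illustrative only; update_count() is hypothetical and
 * not part of this header): disable interrupts around a non-reentrant
 * read-modify-write, then restore the caller's previous Status
 * register contents instead of unconditionally re-enabling:
 *
 *        static void update_count(volatile unsigned long *count)
 *        {
 *                unsigned long flags;
 *
 *                local_irq_save(flags);
 *                (*count)++;
 *                local_irq_restore(flags);
 *        }
 */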

/*
 * These are probably overly paranoid ...
 */
#ifdef CONFIG_CPU_HAS_WB

#include <asm/wbflush.h>
#define rmb()   do { } while(0)
#define wmb()   wbflush()
#define mb()    wbflush()

#else /* CONFIG_CPU_HAS_WB  */

#define mb()                                            \
__asm__ __volatile__(                                   \
        "# prevent instructions being moved around\n\t" \
        ".set\tnoreorder\n\t"                           \
        "# 8 nops to fool the R4400 pipeline\n\t"       \
        "nop;nop;nop;nop;nop;nop;nop;nop\n\t"           \
        ".set\treorder"                                 \
        : /* no output */                               \
        : /* no input */                                \
        : "memory")
#define rmb() mb()
#define wmb() mb()

#endif /* CONFIG_CPU_HAS_WB  */

#ifdef CONFIG_SMP
#define smp_mb()        mb()
#define smp_rmb()       rmb()
#define smp_wmb()       wmb()
#else
#define smp_mb()        barrier()
#define smp_rmb()       barrier()
#define smp_wmb()       barrier()
#endif

#define set_mb(var, value) \
do { var = value; mb(); } while (0)

#define set_wmb(var, value) \
do { var = value; wmb(); } while (0)
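
/*
 * Illustrative producer-side ordering (the data/ready pair is
 * hypothetical, not part of this header): wmb() keeps the payload
 * store ahead of the flag store that announces it, whether the
 * barrier is a wbflush() or the nop sequence above:
 *
 *        static volatile int data, ready;
 *
 *        static void publish(int value)
 *        {
 *                data = value;
 *                wmb();
 *                ready = 1;
 *        }
 */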

#if !defined (_LANGUAGE_ASSEMBLY)
/*
 * switch_to(n) should switch tasks to task nr n, first
 * checking that n isn't the current task, in which case it does nothing.
 */
#if 0
extern asmlinkage void *resume(void *last, void *next);
#endif
#endif /* !defined (_LANGUAGE_ASSEMBLY) */

#define prepare_to_switch()     do { } while(0)
#define switch_to(prev,next,last) \
do { \
        (last) = resume(prev, next); \
} while(0)

/*
 * For 32 and 64 bit operands we can take advantage of ll and sc.
 * FIXME: This doesn't work for R3000 machines.
 */
static __inline__ unsigned long xchg_u32(volatile int * m, unsigned long val)
{
#ifdef CONFIG_CPU_HAS_LLSC
        unsigned long dummy;

        __asm__ __volatile__(
                ".set\tnoreorder\t\t\t# xchg_u32\n\t"
                ".set\tnoat\n\t"
                "ll\t%0, %3\n"
                "1:\tmove\t$1, %2\n\t"
                "sc\t$1, %1\n\t"
                "beqzl\t$1, 1b\n\t"
                " ll\t%0, %3\n\t"
                ".set\tat\n\t"
                ".set\treorder"
                : "=r" (val), "=o" (*m), "=r" (dummy)
                : "o" (*m), "2" (val)
                : "memory");

        return val;
#else
        unsigned long flags, retval;

        save_flags(flags);
        cli();
        retval = *m;
        *m = val;
        restore_flags(flags);
        return retval;
#endif /* Processor-dependent optimization */
}

#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
#define tas(ptr) (xchg((ptr),1))

static __inline__ unsigned long
__xchg(unsigned long x, volatile void * ptr, int size)
{
        switch (size) {
                case 4:
                        return xchg_u32(ptr, x);
        }
        return x;
}
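
/*
 * Illustrative use of tas() (the lock variable is hypothetical, not
 * part of this header): a crude test-and-set spin lock on top of the
 * atomic exchange above. tas() returns the previous value, so 0 means
 * the lock was free and the caller now owns it:
 *
 *        static volatile int lock;
 *
 *        static void crude_lock(void)
 *        {
 *                while (tas(&lock))
 *                        ;
 *        }
 *
 *        static void crude_unlock(void)
 *        {
 *                lock = 0;
 *        }
 */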

extern void *set_except_vector(int n, void *addr);

extern void __die(const char *, struct pt_regs *, const char *where,
        unsigned long line) __attribute__((noreturn));
extern void __die_if_kernel(const char *, struct pt_regs *, const char *where,
        unsigned long line);

#define die(msg, regs)                                                  \
        __die(msg, regs, __FILE__ ":"__FUNCTION__, __LINE__)
#define die_if_kernel(msg, regs)                                        \
        __die_if_kernel(msg, regs, __FILE__ ":"__FUNCTION__, __LINE__)

static inline void execution_hazard_barrier(void)
{
        __asm__ __volatile__(
                ".set noreorder\n"
                "ehb\n"
                ".set reorder");
}

static inline void instruction_hazard_barrier(void)
{
        unsigned long tmp;

        asm volatile(
        __stringify(PTR_LA) "\t%0, 1f\n"
        "       jr.hb   %0\n"
        "1:     .insn"
        : "=&r"(tmp));
}
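
/*
 * Illustrative use (the CP0 accessors and flush_cache() come from
 * other headers and are shown here only as an assumed example): after
 * an mtc0 that changes a CP0 register, clear the execution hazard
 * before depending on the new value; after rewriting instruction
 * memory, use the jr.hb variant so instruction fetch sees the new
 * code:
 *
 *        write_c0_status(read_c0_status() | ST0_IE);
 *        execution_hazard_barrier();
 *
 *        flush_cache(start, size);
 *        instruction_hazard_barrier();
 */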

#endif /* _ASM_SYSTEM_H */