uboot/arch/mips/include/asm/system.h
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 1994 - 1999 by Ralf Baechle
 * Copyright (C) 1996 by Paul M. Antoine
 * Copyright (C) 1994 - 1999 by Ralf Baechle
 *
 * Changed set_except_vector declaration to allow return of previous
 * vector address value - necessary for "borrowing" vectors.
 *
 * Kevin D. Kissell, kevink@mips.org and Carsten Langgaard, carstenl@mips.com
 * Copyright (C) 2000 MIPS Technologies, Inc.
 */
#ifndef _ASM_SYSTEM_H
#define _ASM_SYSTEM_H

#include <asm/asm.h>
#include <asm/sgidefs.h>
#include <asm/ptrace.h>
#include <linux/stringify.h>
#if 0
#include <linux/kernel.h>
#endif

static __inline__ void
__sti(void)
{
        __asm__ __volatile__(
                ".set\tpush\n\t"
                ".set\treorder\n\t"
                ".set\tnoat\n\t"
                "mfc0\t$1,$12\n\t"
                "ori\t$1,0x1f\n\t"
                "xori\t$1,0x1e\n\t"
                "mtc0\t$1,$12\n\t"
                ".set\tpop\n\t"
                : /* no outputs */
                : /* no inputs */
                : "$1", "memory");
}

/*
 * For cli() we have to insert nops to make sure that the new value
 * has actually arrived in the status register before the end of this
 * macro.
 * The R4000/R4400 need three nops, the R4600 two and the R10000 needs
 * none at all.
 */
static __inline__ void
__cli(void)
{
        __asm__ __volatile__(
                ".set\tpush\n\t"
                ".set\treorder\n\t"
                ".set\tnoat\n\t"
                "mfc0\t$1,$12\n\t"
                "ori\t$1,1\n\t"
                "xori\t$1,1\n\t"
                ".set\tnoreorder\n\t"
                "mtc0\t$1,$12\n\t"
                "nop\n\t"
                "nop\n\t"
                "nop\n\t"
                ".set\tpop\n\t"
                : /* no outputs */
                : /* no inputs */
                : "$1", "memory");
}

#define __save_flags(x)                                                 \
__asm__ __volatile__(                                                   \
        ".set\tpush\n\t"                                                \
        ".set\treorder\n\t"                                             \
        "mfc0\t%0,$12\n\t"                                              \
        ".set\tpop\n\t"                                                 \
        : "=r" (x))

#define __save_and_cli(x)                                               \
__asm__ __volatile__(                                                   \
        ".set\tpush\n\t"                                                \
        ".set\treorder\n\t"                                             \
        ".set\tnoat\n\t"                                                \
        "mfc0\t%0,$12\n\t"                                              \
        "ori\t$1,%0,1\n\t"                                              \
        "xori\t$1,1\n\t"                                                \
        ".set\tnoreorder\n\t"                                           \
        "mtc0\t$1,$12\n\t"                                              \
        "nop\n\t"                                                       \
        "nop\n\t"                                                       \
        "nop\n\t"                                                       \
        ".set\tpop\n\t"                                                 \
        : "=r" (x)                                                      \
        : /* no inputs */                                               \
        : "$1", "memory")

#define __restore_flags(flags)                                          \
do {                                                                    \
        unsigned long __tmp1;                                           \
                                                                        \
        __asm__ __volatile__(                                           \
                ".set\tnoreorder\t\t\t# __restore_flags\n\t"            \
                ".set\tnoat\n\t"                                        \
                "mfc0\t$1, $12\n\t"                                     \
                "andi\t%0, 1\n\t"                                       \
                "ori\t$1, 1\n\t"                                        \
                "xori\t$1, 1\n\t"                                       \
                "or\t%0, $1\n\t"                                        \
                "mtc0\t%0, $12\n\t"                                     \
                "nop\n\t"                                               \
                "nop\n\t"                                               \
                "nop\n\t"                                               \
                ".set\tat\n\t"                                          \
                ".set\treorder"                                         \
                : "=r" (__tmp1)                                         \
                : "0" (flags)                                           \
                : "$1", "memory");                                      \
} while(0)

#ifdef CONFIG_SMP

extern void __global_sti(void);
extern void __global_cli(void);
extern unsigned long __global_save_flags(void);
extern void __global_restore_flags(unsigned long);
#  define sti() __global_sti()
#  define cli() __global_cli()
#  define save_flags(x) do { x = __global_save_flags(); } while (0)
#  define restore_flags(x) __global_restore_flags(x)
#  define save_and_cli(x) do { save_flags(x); cli(); } while(0)

#else /* Single processor */

#  define sti() __sti()
#  define cli() __cli()
#  define save_flags(x) __save_flags(x)
#  define save_and_cli(x) __save_and_cli(x)
#  define restore_flags(x) __restore_flags(x)

#endif /* SMP */

/* For spinlocks etc */
#define local_irq_save(x)       __save_and_cli(x)
#define local_irq_restore(x)    __restore_flags(x)
#define local_irq_disable()     __cli()
#define local_irq_enable()      __sti()
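
/*
 * Illustrative sketch only (not part of the original header, hence
 * the #if 0): a typical critical section built from the macros
 * above.  The counter pointer is a hypothetical example parameter.
 */
#if 0
static __inline__ void example_counter_inc(volatile unsigned long *count)
{
        unsigned long flags;

        local_irq_save(flags);          /* disable interrupts, save old Status */
        (*count)++;                     /* update cannot be torn by an interrupt */
        local_irq_restore(flags);       /* restore the previous IE bit */
}
#endif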

/*
 * These are probably defined overly paranoid ...
 */
#ifdef CONFIG_CPU_HAS_WB

#include <asm/wbflush.h>
#define rmb()   do { } while(0)
#define wmb()   wbflush()
#define mb()    wbflush()

#else /* CONFIG_CPU_HAS_WB  */

#define mb()                                            \
__asm__ __volatile__(                                   \
        "# prevent instructions being moved around\n\t" \
        ".set\tnoreorder\n\t"                           \
        "# 8 nops to fool the R4400 pipeline\n\t"       \
        "nop;nop;nop;nop;nop;nop;nop;nop\n\t"           \
        ".set\treorder"                                 \
        : /* no output */                               \
        : /* no input */                                \
        : "memory")
#define rmb() mb()
#define wmb() mb()

#endif /* CONFIG_CPU_HAS_WB  */

#ifdef CONFIG_SMP
#define smp_mb()        mb()
#define smp_rmb()       rmb()
#define smp_wmb()       wmb()
#else
#define smp_mb()        barrier()
#define smp_rmb()       barrier()
#define smp_wmb()       barrier()
#endif

#define set_mb(var, value) \
do { var = value; mb(); } while (0)

#define set_wmb(var, value) \
do { var = value; wmb(); } while (0)

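/*
 * Illustrative sketch only (hypothetical types and names, disabled
 * with #if 0): publishing data to another agent, e.g. a DMA engine
 * or second CPU.  wmb() orders the payload store before the flag
 * store, so a consumer that sees ready == 1 also sees the payload.
 */
#if 0
struct example_msg {
        unsigned long payload;
        volatile int ready;
};

static __inline__ void example_publish(struct example_msg *m,
                                       unsigned long val)
{
        m->payload = val;       /* write the data first */
        wmb();                  /* order payload before the ready flag */
        m->ready = 1;           /* consumer polls this flag */
}
#endif
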
#if !defined (_LANGUAGE_ASSEMBLY)
/*
 * switch_to(n) should switch tasks to task nr n, first
 * checking that n isn't the current task, in which case it does nothing.
 */
#if 0
extern asmlinkage void *resume(void *last, void *next);
#endif
#endif /* !defined (_LANGUAGE_ASSEMBLY) */

#define prepare_to_switch()     do { } while(0)
#define switch_to(prev,next,last) \
do { \
        (last) = resume(prev, next); \
} while(0)

/*
 * For 32 and 64 bit operands we can take advantage of ll and sc;
 * only the 32 bit variant is implemented below.
 * FIXME: This doesn't work for R3000 machines.
 */
static __inline__ unsigned long xchg_u32(volatile int * m, unsigned long val)
{
#ifdef CONFIG_CPU_HAS_LLSC
        unsigned long dummy;

        __asm__ __volatile__(
                ".set\tnoreorder\t\t\t# xchg_u32\n\t"
                ".set\tnoat\n\t"
                "ll\t%0, %3\n"
                "1:\tmove\t$1, %2\n\t"
                "sc\t$1, %1\n\t"
                "beqzl\t$1, 1b\n\t"
                " ll\t%0, %3\n\t"
                ".set\tat\n\t"
                ".set\treorder"
                : "=r" (val), "=o" (*m), "=r" (dummy)
                : "o" (*m), "2" (val)
                : "memory");

        return val;
#else
        unsigned long flags, retval;

        save_flags(flags);
        cli();
        retval = *m;
        *m = val;
        restore_flags(flags);
        return retval;
#endif /* Processor-dependent optimization */
}

#define xchg(ptr,x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
#define tas(ptr) (xchg((ptr),1))

static __inline__ unsigned long
__xchg(unsigned long x, volatile void * ptr, int size)
{
        switch (size) {
                case 4:
                        return xchg_u32(ptr, x);
        }
        return x;
}

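/*
 * Illustrative sketch only (hypothetical helpers, disabled with
 * #if 0): a minimal test-and-set lock on top of tas().  tas()
 * atomically stores 1 and returns the previous value, so a return
 * of 0 means this caller acquired the lock.
 */
#if 0
static __inline__ void example_lock(volatile int *lock)
{
        while (tas(lock) != 0)
                ;               /* spin until the old value was 0 */
}

static __inline__ void example_unlock(volatile int *lock)
{
        mb();                   /* order the critical section before release */
        *lock = 0;
}
#endif
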
extern void *set_except_vector(int n, void *addr);

extern void __die(const char *, struct pt_regs *, const char *where,
        unsigned long line) __attribute__((noreturn));
extern void __die_if_kernel(const char *, struct pt_regs *, const char *where,
        unsigned long line);

#define die(msg, regs)                                                  \
        __die(msg, regs, __FILE__ ":"__FUNCTION__, __LINE__)
#define die_if_kernel(msg, regs)                                        \
        __die_if_kernel(msg, regs, __FILE__ ":"__FUNCTION__, __LINE__)
 265
 266static inline void execution_hazard_barrier(void)
 267{
 268        __asm__ __volatile__(
 269                ".set noreorder\n"
 270                "ehb\n"
 271                ".set reorder");
 272}
 273
 274static inline void instruction_hazard_barrier(void)
 275{
 276        unsigned long tmp;
 277
 278        asm volatile(
 279        __stringify(PTR_LA) "\t%0, 1f\n"
 280        "       jr.hb   %0\n"
 281        "1:     .insn"
 282        : "=&r"(tmp));
 283}
 284
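/*
 * Illustrative sketch only (hypothetical handler and vector number,
 * disabled with #if 0): after installing a new exception vector the
 * pipeline may still hold stale instructions, so an instruction
 * hazard barrier is needed before the new handler can be relied on.
 */
#if 0
extern void example_handler(void);

static inline void *example_install_handler(void)
{
        /* returns the previous vector, useful for "borrowing" it */
        void *old = set_except_vector(0, (void *)example_handler);

        instruction_hazard_barrier();   /* flush hazards before first use */
        return old;
}
#endif
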
#endif /* _ASM_SYSTEM_H */