linux/arch/s390/include/asm/preempt.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_PREEMPT_H
#define __ASM_PREEMPT_H

#include <asm/current.h>
#include <linux/thread_info.h>
#include <asm/atomic_ops.h>

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

/* We use the MSB mostly because it's available */
#define PREEMPT_NEED_RESCHED    0x80000000
#define PREEMPT_ENABLED (0 + PREEMPT_NEED_RESCHED)
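
/*
 * The need_resched flag is folded into the preempt count as an
 * inverted bit: the MSB is set while preemption is enabled and no
 * reschedule is pending, and cleared by set_preempt_need_resched().
 * That way a single atomic decrement that hits zero signals both
 * "count dropped to zero" and "reschedule needed" at once.
 */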

static inline int preempt_count(void)
{
        return READ_ONCE(S390_lowcore.preempt_count) & ~PREEMPT_NEED_RESCHED;
}

static inline void preempt_count_set(int pc)
{
        int old, new;

        do {
                old = READ_ONCE(S390_lowcore.preempt_count);
                new = (old & PREEMPT_NEED_RESCHED) |
                        (pc & ~PREEMPT_NEED_RESCHED);
        } while (__atomic_cmpxchg(&S390_lowcore.preempt_count,
                                  old, new) != old);
}

#define init_task_preempt_count(p)      do { } while (0)

#define init_idle_preempt_count(p, cpu) do { \
        S390_lowcore.preempt_count = PREEMPT_ENABLED; \
} while (0)

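/*
 * Note the inverted polarity: set_preempt_need_resched() clears the
 * PREEMPT_NEED_RESCHED bit and clear_preempt_need_resched() sets it,
 * matching the inverted encoding described above.
 */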
static inline void set_preempt_need_resched(void)
{
        __atomic_and(~PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);
}

static inline void clear_preempt_need_resched(void)
{
        __atomic_or(PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);
}

static inline bool test_preempt_need_resched(void)
{
        return !(READ_ONCE(S390_lowcore.preempt_count) & PREEMPT_NEED_RESCHED);
}

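/*
 * Compile-time constants that fit a signed 8-bit immediate can use
 * __atomic_add_const(), which maps to an interlocked add-immediate
 * and does not have to produce the old value; anything else goes
 * through the generic __atomic_add().
 */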
static inline void __preempt_count_add(int val)
{
        if (__builtin_constant_p(val) && (val >= -128) && (val <= 127))
                __atomic_add_const(val, &S390_lowcore.preempt_count);
        else
                __atomic_add(val, &S390_lowcore.preempt_count);
}

static inline void __preempt_count_sub(int val)
{
        __preempt_count_add(-val);
}

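/*
 * __atomic_add() returns the old value, so an old value of 1 means
 * the count just dropped to zero with the (inverted) need_resched
 * bit clear, i.e. preemption is possible and a reschedule is pending.
 */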
static inline bool __preempt_count_dec_and_test(void)
{
        return __atomic_add(-1, &S390_lowcore.preempt_count) == 1;
}

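/*
 * A plain equality test is sufficient here: with the need_resched bit
 * folded in, the count can only equal the bare preempt_offset when
 * the MSB is clear, i.e. when a reschedule has been requested.
 */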
static inline bool should_resched(int preempt_offset)
{
        return unlikely(READ_ONCE(S390_lowcore.preempt_count) ==
                        preempt_offset);
}

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */

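/*
 * Pre-z196 machines lack the interlocked-access instructions used
 * above, so the need_resched state is not folded into the preempt
 * count; the TIF_NEED_RESCHED flag has to be checked explicitly
 * instead.
 */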
#define PREEMPT_ENABLED (0)

static inline int preempt_count(void)
{
        return READ_ONCE(S390_lowcore.preempt_count);
}

static inline void preempt_count_set(int pc)
{
        S390_lowcore.preempt_count = pc;
}

#define init_task_preempt_count(p)      do { } while (0)

#define init_idle_preempt_count(p, cpu) do { \
        S390_lowcore.preempt_count = PREEMPT_ENABLED; \
} while (0)

static inline void set_preempt_need_resched(void)
{
}

static inline void clear_preempt_need_resched(void)
{
}

static inline bool test_preempt_need_resched(void)
{
        return false;
}

static inline void __preempt_count_add(int val)
{
        S390_lowcore.preempt_count += val;
}

static inline void __preempt_count_sub(int val)
{
        S390_lowcore.preempt_count -= val;
}

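/*
 * Without the folded bit both conditions have to be checked
 * separately: the count must drop to zero and TIF_NEED_RESCHED must
 * be set for the current task.
 */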
static inline bool __preempt_count_dec_and_test(void)
{
        return !--S390_lowcore.preempt_count && tif_need_resched();
}

static inline bool should_resched(int preempt_offset)
{
        return unlikely(preempt_count() == preempt_offset &&
                        tif_need_resched());
}

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#ifdef CONFIG_PREEMPT
extern asmlinkage void preempt_schedule(void);
#define __preempt_schedule() preempt_schedule()
extern asmlinkage void preempt_schedule_notrace(void);
#define __preempt_schedule_notrace() preempt_schedule_notrace()
#endif /* CONFIG_PREEMPT */

#endif /* __ASM_PREEMPT_H */