linux/include/asm-generic/preempt.h
#ifndef __ASM_PREEMPT_H
#define __ASM_PREEMPT_H

#include <linux/thread_info.h>

#define PREEMPT_ENABLED (0)

static __always_inline int preempt_count(void)
{
        return current_thread_info()->preempt_count;
}

static __always_inline int *preempt_count_ptr(void)
{
        return &current_thread_info()->preempt_count;
}

static __always_inline void preempt_count_set(int pc)
{
        *preempt_count_ptr() = pc;
}

/*
 * must be macros to avoid header recursion hell
 */
#define init_task_preempt_count(p) do { \
        task_thread_info(p)->preempt_count = FORK_PREEMPT_COUNT; \
} while (0)

#define init_idle_preempt_count(p, cpu) do { \
        task_thread_info(p)->preempt_count = PREEMPT_ENABLED; \
} while (0)
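
/*
 * Background sketch (an inference, not text from this file): the macros
 * above expand task_thread_info(), which is defined alongside the full
 * struct task_struct in <linux/sched.h>, while sched.h itself ends up
 * pulling in the preempt/thread_info headers.  Making these initializers
 * inline functions here would require including sched.h and would
 * recreate exactly the recursive-include problem the comment warns
 * about; as macros, the expansion happens at the call site, where
 * sched.h is already visible.
 */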

static __always_inline void set_preempt_need_resched(void)
{
}

static __always_inline void clear_preempt_need_resched(void)
{
}

static __always_inline bool test_preempt_need_resched(void)
{
        return false;
}
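
/*
 * Why the three helpers above are no-ops (background, hedged): some
 * architectures (x86, for example) fold the need-resched flag into the
 * preempt count itself as an inverted high bit, roughly:
 *
 *        #define PREEMPT_NEED_RESCHED        0x80000000
 *
 * so that a single decrement-and-test can observe "count hit zero and a
 * reschedule is pending" in one operation.  This generic variant keeps
 * the flag solely in TIF_NEED_RESCHED, so there is no per-count bit to
 * set, clear, or test, and callers fall back to tif_need_resched().
 */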

/*
 * The various preempt_count add/sub methods
 */

static __always_inline void __preempt_count_add(int val)
{
        *preempt_count_ptr() += val;
}

static __always_inline void __preempt_count_sub(int val)
{
        *preempt_count_ptr() -= val;
}

static __always_inline bool __preempt_count_dec_and_test(void)
{
        /*
         * Because load-store architectures cannot do per-cpu atomic
         * operations, we cannot use PREEMPT_NEED_RESCHED here: a bit
         * folded into the count could get lost across the non-atomic
         * read-modify-write of preempt_count.
         */
        return !--*preempt_count_ptr() && tif_need_resched();
}
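
/*
 * Caller-side sketch (adapted from linux/preempt.h under CONFIG_PREEMPT;
 * shown only to illustrate how the result is consumed, details may vary
 * by kernel version):
 *
 *        #define preempt_enable() \
 *        do { \
 *                barrier(); \
 *                if (unlikely(preempt_count_dec_and_test())) \
 *                        __preempt_schedule(); \
 *        } while (0)
 *
 * A true return from the helper above is thus what turns
 * preempt_enable() into an actual preemption point.
 */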

/*
 * Returns true when we need to resched and can (barring IRQ state).
 */
static __always_inline bool should_resched(int preempt_offset)
{
        return unlikely(preempt_count() == preempt_offset &&
                        tif_need_resched());
}
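
/*
 * Usage sketch (callers live outside this file; hedged): cond_resched()
 * style paths pass the preempt_count value they expect at the call site,
 * e.g. 0 from plain kernel code or PREEMPT_LOCK_OFFSET while a spinlock
 * is still held, so "can resched" means the count carries nothing beyond
 * what the caller already accounts for.
 */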

#ifdef CONFIG_PREEMPT
extern asmlinkage void preempt_schedule(void);
#define __preempt_schedule() preempt_schedule()
extern asmlinkage void preempt_schedule_notrace(void);
#define __preempt_schedule_notrace() preempt_schedule_notrace()
#endif /* CONFIG_PREEMPT */

#endif /* __ASM_PREEMPT_H */