linux/include/asm-generic/percpu.h
#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to the address of
 * a percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif
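
/*
 * Example (illustrative only), assuming a hypothetical
 * DEFINE_PER_CPU(int, foo): once __per_cpu_offset[] has been filled in
 * at boot, the address of cpu N's instance of foo is roughly
 *
 *	(int *)((char *)&per_cpu_var(foo) + per_cpu_offset(N))
 *
 * The accessor macros further down wrap up this arithmetic.
 */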

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif
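
/*
 * Note: my_cpu_offset and __my_cpu_offset yield the same value.  The
 * CONFIG_DEBUG_PREEMPT flavour goes through smp_processor_id(), which
 * complains if the caller is preemptible and could therefore migrate to
 * another processor; raw_smp_processor_id() performs no such check.
 */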

/*
 * Add an offset to a pointer but keep the pointer as is.
 *
 * Only S390 provides its own means of moving the pointer.
 */
#ifndef SHIFT_PERCPU_PTR
#define SHIFT_PERCPU_PTR(__p, __offset) RELOC_HIDE((__p), (__offset))
#endif
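
/*
 * Example (illustrative only), assuming a hypothetical
 * DEFINE_PER_CPU(int, foo):
 *
 *	int *p = SHIFT_PERCPU_PTR(&per_cpu_var(foo), per_cpu_offset(cpu));
 *
 * is plain pointer-plus-offset arithmetic.  RELOC_HIDE() (see
 * linux/compiler.h) hides that arithmetic from gcc so that it cannot
 * make assumptions based on the object the original pointer refers to.
 */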

/*
 * A percpu variable may point to a discarded region. The following are
 * established ways to produce a usable pointer from the percpu variable
 * offset.
 */
#define per_cpu(var, cpu) \
	(*SHIFT_PERCPU_PTR(&per_cpu_var(var), per_cpu_offset(cpu)))
#define __get_cpu_var(var) \
	(*SHIFT_PERCPU_PTR(&per_cpu_var(var), my_cpu_offset))
#define __raw_get_cpu_var(var) \
	(*SHIFT_PERCPU_PTR(&per_cpu_var(var), __my_cpu_offset))

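/*
 * Example (illustrative only), assuming a hypothetical
 * DEFINE_PER_CPU(int, foo) defined elsewhere:
 *
 *	per_cpu(foo, cpu) = 0;		instance of a given processor
 *	__get_cpu_var(foo)++;		this processor, offset checked
 *					under CONFIG_DEBUG_PREEMPT
 *	__raw_get_cpu_var(foo)++;	this processor, no debug checking
 *
 * Callers must ensure they cannot migrate to another processor between
 * computing the address and using it, e.g. by disabling preemption.
 */
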
#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#else /* ! SMP */

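/*
 * There is only one instance of each per cpu variable on UP, so no
 * offset needs to be added.  The "(void)(cpu)," below still evaluates
 * the cpu argument, so side effects happen and an otherwise unused
 * variable does not trigger a compiler warning; the comma operator then
 * discards its value.
 */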
#define per_cpu(var, cpu)			(*((void)(cpu), &per_cpu_var(var)))
#define __get_cpu_var(var)			per_cpu_var(var)
#define __raw_get_cpu_var(var)			per_cpu_var(var)

#endif	/* SMP */

#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data.percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif
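
/*
 * Illustrative only: DEFINE_PER_CPU() and friends (linux/percpu-defs.h)
 * place their variables into the section named here, so on SMP a
 * hypothetical DEFINE_PER_CPU(int, foo) ends up roughly as
 *
 *	__attribute__((section(".data.percpu"))) int per_cpu__foo;
 *
 * while on UP the variable lives in plain .data and is accessed
 * directly.
 */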

#ifdef CONFIG_SMP

#ifdef MODULE
#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION ""
#else
#define PER_CPU_SHARED_ALIGNED_SECTION ".shared_aligned"
#define PER_CPU_ALIGNED_SECTION ".shared_aligned"
#endif
#define PER_CPU_FIRST_SECTION ".first"

#else

#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION ".shared_aligned"
#define PER_CPU_FIRST_SECTION ""

#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif

#endif /* _ASM_GENERIC_PERCPU_H_ */