#ifndef _ASM_X86_SMP_H
#define _ASM_X86_SMP_H
#ifndef __ASSEMBLY__
#include <linux/cpumask.h>
#include <asm/percpu.h>
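/*
 * The APIC definitions are pulled in automatically as part of smp.h.
 */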
#ifdef CONFIG_X86_LOCAL_APIC
# include <asm/mpspec.h>
# include <asm/apic.h>
# ifdef CONFIG_X86_IO_APIC
#  include <asm/io_apic.h>
# endif
#endif
#include <asm/thread_info.h>
#include <asm/cpumask.h>

extern int smp_num_siblings;
extern unsigned int num_processors;

DECLARE_PER_CPU_READ_MOSTLY(cpumask_var_t, cpu_sibling_map);
DECLARE_PER_CPU_READ_MOSTLY(cpumask_var_t, cpu_core_map);
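/* CPUs sharing the last-level cache: */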
DECLARE_PER_CPU_READ_MOSTLY(cpumask_var_t, cpu_llc_shared_map);
DECLARE_PER_CPU_READ_MOSTLY(u16, cpu_llc_id);
DECLARE_PER_CPU_READ_MOSTLY(int, cpu_number);

static inline struct cpumask *cpu_llc_shared_mask(int cpu)
{
        return per_cpu(cpu_llc_shared_map, cpu);
}

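/* Early per-CPU maps from Linux CPU number to APIC and ACPI IDs. */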
DECLARE_EARLY_PER_CPU_READ_MOSTLY(u16, x86_cpu_to_apicid);
DECLARE_EARLY_PER_CPU_READ_MOSTLY(u32, x86_cpu_to_acpiid);
DECLARE_EARLY_PER_CPU_READ_MOSTLY(u16, x86_bios_cpu_apicid);
#if defined(CONFIG_X86_LOCAL_APIC) && defined(CONFIG_X86_32)
DECLARE_EARLY_PER_CPU_READ_MOSTLY(int, x86_cpu_to_logical_apicid);
#endif

struct task_struct;

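/*
 * SMP operations vector: hooks for CPU bring-up and teardown, stopping
 * other CPUs, rescheduling, and the cross-CPU function-call IPIs.
 */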
struct smp_ops {
        void (*smp_prepare_boot_cpu)(void);
        void (*smp_prepare_cpus)(unsigned max_cpus);
        void (*smp_cpus_done)(unsigned max_cpus);

        void (*stop_other_cpus)(int wait);
        void (*crash_stop_other_cpus)(void);
        void (*smp_send_reschedule)(int cpu);

        int (*cpu_up)(unsigned cpu, struct task_struct *tidle);
        int (*cpu_disable)(void);
        void (*cpu_die)(unsigned int cpu);
        void (*play_dead)(void);

        void (*send_call_func_ipi)(const struct cpumask *mask);
        void (*send_call_func_single_ipi)(int cpu);
};

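/* Fills in the sibling/core/LLC maps for a CPU as it comes online. */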
extern void set_cpu_sibling_map(int cpu);

#ifdef CONFIG_SMP
extern struct smp_ops smp_ops;

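/* The generic SMP entry points below dispatch through smp_ops. */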
static inline void smp_send_stop(void)
{
        smp_ops.stop_other_cpus(0);
}

static inline void stop_other_cpus(void)
{
        smp_ops.stop_other_cpus(1);
}

static inline void smp_prepare_boot_cpu(void)
{
        smp_ops.smp_prepare_boot_cpu();
}

static inline void smp_prepare_cpus(unsigned int max_cpus)
{
        smp_ops.smp_prepare_cpus(max_cpus);
}

static inline void smp_cpus_done(unsigned int max_cpus)
{
        smp_ops.smp_cpus_done(max_cpus);
}

static inline int __cpu_up(unsigned int cpu, struct task_struct *tidle)
{
        return smp_ops.cpu_up(cpu, tidle);
}

static inline int __cpu_disable(void)
{
        return smp_ops.cpu_disable();
}

static inline void __cpu_die(unsigned int cpu)
{
        smp_ops.cpu_die(cpu);
}

static inline void play_dead(void)
{
        smp_ops.play_dead();
}

static inline void smp_send_reschedule(int cpu)
{
        smp_ops.smp_send_reschedule(cpu);
}

static inline void arch_send_call_function_single_ipi(int cpu)
{
        smp_ops.send_call_func_single_ipi(cpu);
}

static inline void arch_send_call_function_ipi_mask(const struct cpumask *mask)
{
        smp_ops.send_call_func_ipi(mask);
}

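/* Native implementations backing the default smp_ops, plus common helpers. */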
void cpu_disable_common(void);
void native_smp_prepare_boot_cpu(void);
void native_smp_prepare_cpus(unsigned int max_cpus);
void calculate_max_logical_packages(void);
void native_smp_cpus_done(unsigned int max_cpus);
void common_cpu_up(unsigned int cpunum, struct task_struct *tidle);
int native_cpu_up(unsigned int cpunum, struct task_struct *tidle);
int native_cpu_disable(void);
int common_cpu_die(unsigned int cpu);
void native_cpu_die(unsigned int cpu);
void hlt_play_dead(void);
void native_play_dead(void);
void play_dead_common(void);
void wbinvd_on_cpu(int cpu);
int wbinvd_on_all_cpus(void);

void native_send_call_func_ipi(const struct cpumask *mask);
void native_send_call_func_single_ipi(int cpu);
void x86_idle_thread_init(unsigned int cpu, struct task_struct *idle);

void smp_store_boot_cpu_info(void);
void smp_store_cpu_info(int id);
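/* Map a Linux CPU number to its APIC ID / ACPI ID. */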
#define cpu_physical_id(cpu)    per_cpu(x86_cpu_to_apicid, cpu)
#define cpu_acpi_id(cpu)        per_cpu(x86_cpu_to_acpiid, cpu)

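/*
 * raw_smp_processor_id() reads the per-CPU cpu_number directly, without
 * the preemption checks done by smp_processor_id().
 */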
#define raw_smp_processor_id() (this_cpu_read(cpu_number))

#ifdef CONFIG_X86_32
extern int safe_smp_processor_id(void);
#else
# define safe_smp_processor_id() smp_processor_id()
#endif

#else /* !CONFIG_SMP */
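/* UP fallback: only the local CPU exists, so wbinvd runs directly. */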
#define wbinvd_on_cpu(cpu) wbinvd()
static inline int wbinvd_on_all_cpus(void)
{
        wbinvd();
        return 0;
}
#endif /* CONFIG_SMP */

extern unsigned disabled_cpus;

#ifdef CONFIG_X86_LOCAL_APIC
extern int hard_smp_processor_id(void);

#else /* CONFIG_X86_LOCAL_APIC */
#define hard_smp_processor_id() 0
#endif /* CONFIG_X86_LOCAL_APIC */

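/* The NMI selftest is only built with CONFIG_DEBUG_NMI_SELFTEST. */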
#ifdef CONFIG_DEBUG_NMI_SELFTEST
extern void nmi_selftest(void);
#else
#define nmi_selftest() do { } while (0)
#endif

#endif /* __ASSEMBLY__ */
#endif /* _ASM_X86_SMP_H */