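/*
 * Out-of-line implementations of the arch_local_irq_* helpers for MIPS
 * CPUs that predate MIPS R2/R6 and therefore lack the DI/EI
 * interrupt-disable instructions.  Interrupts are masked by a
 * read-modify-write of the CP0 Status register ($12) instead.
 */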
#include <asm/irqflags.h>
#include <asm/hazards.h>
#include <linux/compiler.h>
#include <linux/preempt.h>
#include <linux/export.h>
#include <linux/stringify.h>

#if !defined(CONFIG_CPU_MIPSR2) && !defined(CONFIG_CPU_MIPSR6)
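/*
 * Disable interrupts by clearing Status.IE with a read-modify-write
 * sequence: read CP0 Status, set and then clear the low five bits with
 * ori/xori (which needs no second scratch register for a mask), and
 * write the result back.  The sequence is not atomic, so preemption is
 * disabled around it to keep the mfc0/mtc0 pair on one CPU, and the
 * hazard barrier ensures the new Status value has taken effect before
 * we return.
 */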
notrace void arch_local_irq_disable(void)
{
	preempt_disable();

	__asm__ __volatile__(
	"	.set	push					\n"
	"	.set	noat					\n"
	"	mfc0	$1, $12					\n"
	"	ori	$1, 0x1f				\n"
	"	xori	$1, 0x1f				\n"
	"	.set	noreorder				\n"
	"	mtc0	$1, $12					\n"
	"	" __stringify(__irq_disable_hazard) "		\n"
	"	.set	pop					\n"
	:
	:
	: "memory");

	preempt_enable();
}
EXPORT_SYMBOL(arch_local_irq_disable);

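/*
 * Return the current CP0 Status value in 'flags' and disable interrupts,
 * using the same ori/xori/mtc0 sequence as above.  Only the IE bit of
 * the returned value is examined by arch_local_irq_restore().
 */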
notrace unsigned long arch_local_irq_save(void)
{
	unsigned long flags;

	preempt_disable();

	__asm__ __volatile__(
	"	.set	push					\n"
	"	.set	reorder					\n"
	"	.set	noat					\n"
	"	mfc0	%[flags], $12				\n"
	"	ori	$1, %[flags], 0x1f			\n"
	"	xori	$1, 0x1f				\n"
	"	.set	noreorder				\n"
	"	mtc0	$1, $12					\n"
	"	" __stringify(__irq_disable_hazard) "		\n"
	"	.set	pop					\n"
	: [flags] "=r" (flags)
	:
	: "memory");

	preempt_enable();

	return flags;
}
EXPORT_SYMBOL(arch_local_irq_save);

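/*
 * Restore the interrupt-enable state saved by arch_local_irq_save():
 * keep only the IE bit of 'flags', merge it into the current Status
 * value with the low bits cleared, and write the result back.  The
 * "0" input constraint ties 'flags' to the same register as the output
 * temporary so the asm may modify it in place.
 */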
notrace void arch_local_irq_restore(unsigned long flags)
{
	unsigned long __tmp1;

	preempt_disable();

	__asm__ __volatile__(
	"	.set	push					\n"
	"	.set	noreorder				\n"
	"	.set	noat					\n"
	"	mfc0	$1, $12					\n"
	"	andi	%[flags], 1				\n"
	"	ori	$1, 0x1f				\n"
	"	xori	$1, 0x1f				\n"
	"	or	%[flags], $1				\n"
	"	mtc0	%[flags], $12				\n"
	"	" __stringify(__irq_disable_hazard) "		\n"
	"	.set	pop					\n"
	: [flags] "=r" (__tmp1)
	: "0" (flags)
	: "memory");

	preempt_enable();
}
EXPORT_SYMBOL(arch_local_irq_restore);

#endif /* !CONFIG_CPU_MIPSR2 && !CONFIG_CPU_MIPSR6 */