1
2#ifdef CONFIG_VIRT_CPU_ACCOUNTING_NATIVE
3
/*
 * On entry from user space (predicate pUStk), read the interval time
 * counter into r20 so the entry can be timestamped for native CPU time
 * accounting.  MOV_FROM_ITC is the paravirtualized ar.itc read; r2 and
 * p6 are presumably scratch operands consumed by that macro -- confirm
 * against the MOV_FROM_ITC definition.
 */
4#define XEN_ACCOUNT_GET_STAMP \
5 MOV_FROM_ITC(pUStk, p6, r20, r2);
6#else
/* Accounting disabled: the stamp expands to nothing. */
7#define XEN_ACCOUNT_GET_STAMP
8#endif
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
/*
 * XEN_DO_SAVE_MIN: Xen paravirtualized variant of the ia64 DO_SAVE_MIN
 * interruption-entry macro.  Saves the minimal machine state into a
 * pt_regs frame on the kernel stack, switching both the memory stack and
 * the RSE backing store when the interruption came from user mode.
 *
 * Caller-supplied hooks:
 *   __COVER     - instruction(s) to cover the current register frame
 *   SAVE_IFS    - accepted for interface parity but not referenced in this
 *                 body; the IFS value is instead loaded from the Xen shared
 *                 area (XSI_PRECOVER_IFS) below -- NOTE(review): confirm
 *   EXTRA       - extra save work, run near the end of the macro
 *   WORKAROUND  - hook executed right after psr is saved
 *
 * Predicates established for the rest of the entry path:
 *   pUStk/pKStk   - interruption came from user / kernel stack
 *   p15           - psr.i was set (interrupts were enabled)
 *   pSys/pNonSys  - initialized here to "not a system call"
 *
 * NOTE(review): r31 is stored into the frame below but is set up by the
 * caller before this macro runs -- presumably the saved predicates
 * ("mov r31=pr" at the call site); verify against callers.
 */
36#define XEN_DO_SAVE_MIN(__COVER,SAVE_IFS,EXTRA,WORKAROUND) \
37 mov r16=IA64_KR(CURRENT); /* r16 = current task pointer */ \
38 mov r27=ar.rsc; /* save RSE configuration */ \
39 mov r20=r1; /* save caller's gp (r1) */ \
40 mov r25=ar.unat; /* save NaT collection bits */ \
41 MOV_FROM_IPSR(p0,r29); /* r29 = interrupted psr (paravirt read) */ \
42 MOV_FROM_IIP(r28); /* r28 = interrupted ip (paravirt read) */ \
43 mov r21=ar.fpsr; /* save FP status register */ \
44 mov r26=ar.pfs; /* save previous function state */ \
45 __COVER; /* caller hook: cover current register frame */ \
46 adds r16=IA64_TASK_THREAD_ON_USTACK_OFFSET,r16; \
47 ;; \
48 ld1 r17=[r16]; /* r17 = current->thread.on_ustack */ \
49 st1 [r16]=r0; /* clear the flag: we run in the kernel now */ \
50 adds r1=-IA64_TASK_THREAD_ON_USTACK_OFFSET,r16 /* r1 = current task */ \
51 \
52 ;; \
53 invala; /* invalidate ALAT */ \
54 \
55 cmp.eq pKStk,pUStk=r0,r17; /* flag clear => was on kernel stack */ \
56 ;; \
57(pUStk) mov ar.rsc=0; /* user entry: RSE to enforced lazy mode */ \
58 ;; \
59(pUStk) mov.m r24=ar.rnat; /* save RSE NaT bits */ \
60(pUStk) addl r22=IA64_RBS_OFFSET,r1; /* r22 = kernel RBS base */ \
61(pKStk) mov r1=sp; /* kernel entry: stay on current stack */ \
62 ;; \
63(pUStk) lfetch.fault.excl.nt1 [r22]; /* prefetch new backing store */ \
64(pUStk) addl r1=IA64_STK_OFFSET-IA64_PT_REGS_SIZE,r1; /* pt_regs at stack top */ \
65(pUStk) mov r23=ar.bspstore; /* save user backing store pointer */ \
66 ;; \
67(pUStk) mov ar.bspstore=r22; /* switch RSE to kernel backing store */ \
68(pKStk) addl r1=-IA64_PT_REGS_SIZE,r1; /* carve pt_regs on current stack */ \
69 ;; \
70(pUStk) mov r18=ar.bsp; /* for dirty-partition size (line 97) */ \
71(pUStk) mov ar.rsc=0x3; /* re-enable RSE (eager mode) */ \
72 adds r17=2*L1_CACHE_BYTES,r1; /* prefetch cursor over pt_regs */ \
73 adds r16=PT(CR_IPSR),r1; \
74 ;; \
75 lfetch.fault.excl.nt1 [r17],L1_CACHE_BYTES; \
76 st8 [r16]=r29; /* save cr.ipsr */ \
77 ;; \
78 lfetch.fault.excl.nt1 [r17]; \
79 tbit.nz p15,p0=r29,IA64_PSR_I_BIT; /* p15 = interrupts were enabled */ \
80 mov r29=b0 \
81 ;; \
82 WORKAROUND; /* caller hook */ \
83 adds r16=PT(R8),r1; /* even store cursor into pt_regs */ \
84 adds r17=PT(R9),r1; /* odd store cursor into pt_regs */ \
85(pKStk) mov r18=r0; /* kernel entry: dirty partition size is 0 */ \
86 ;; \
87.mem.offset 0,0; st8.spill [r16]=r8,16; \
88.mem.offset 8,0; st8.spill [r17]=r9,16; \
89 ;; \
90.mem.offset 0,0; st8.spill [r16]=r10,24; \
91 movl r8=XSI_PRECOVER_IFS; /* Xen shared area: pre-cover IFS field */ \
92.mem.offset 8,0; st8.spill [r17]=r11,24; \
93 ;; \
94 /* IFS comes from Xen instead of cr.ifs; SAVE_IFS hook is unused */ \
95 \
96 ld8 r30=[r8]; /* r30 = IFS value from Xen */ \
97(pUStk) sub r18=r18,r22; /* r18 = dirty partition size in bytes */ \
98 st8 [r16]=r28,16; /* save cr.iip */ \
99 ;; \
100 st8 [r17]=r30,16; /* save cr.ifs */ \
101 mov r8=ar.ccv; \
102 mov r9=ar.csd; \
103 mov r10=ar.ssd; \
104 movl r11=FPSR_DEFAULT; /* fpsr value for the kernel */ \
105 ;; \
106 st8 [r16]=r25,16; /* save ar.unat */ \
107 st8 [r17]=r26,16; /* save ar.pfs */ \
108 shl r18=r18,16; /* dirty size into ar.rsc.loadrs position */ \
109 ;; \
110 st8 [r16]=r27,16; /* save ar.rsc */ \
111(pUStk) st8 [r17]=r24,16; /* save ar.rnat (user entry only) */ \
112(pKStk) adds r17=16,r17; /* kernel entry: skip rnat slot */ \
113 ;; \
114(pUStk) st8 [r16]=r23,16; /* save ar.bspstore (user entry only) */ \
115 st8 [r17]=r31,16; /* save predicates (set by caller) */ \
116(pKStk) adds r16=16,r16; /* kernel entry: skip bspstore slot */ \
117 ;; \
118 st8 [r16]=r29,16; /* save b0 (moved to r29 at line 80) */ \
119 st8 [r17]=r18,16; /* save ar.rsc.loadrs value */ \
120 cmp.eq pNonSys,pSys=r0,r0 /* default: not a system call */ \
121 ;; \
122.mem.offset 0,0; st8.spill [r16]=r20,16; /* save caller's r1 (gp) */ \
123.mem.offset 8,0; st8.spill [r17]=r12,16; /* save caller's sp */ \
124 adds r12=-16,r1; /* new sp: 16-byte scratch below pt_regs */ \
125 ;; \
126.mem.offset 0,0; st8.spill [r16]=r13,16; \
127.mem.offset 8,0; st8.spill [r17]=r21,16; /* save ar.fpsr */ \
128 mov r13=IA64_KR(CURRENT); /* r13 = current task pointer */ \
129 ;; \
130.mem.offset 0,0; st8.spill [r16]=r15,16; \
131.mem.offset 8,0; st8.spill [r17]=r14,16; \
132 ;; \
133.mem.offset 0,0; st8.spill [r16]=r2,16; \
134.mem.offset 8,0; st8.spill [r17]=r3,16; \
135 XEN_ACCOUNT_GET_STAMP /* optional entry timestamp (see above) */ \
136 adds r2=IA64_PT_REGS_R16_OFFSET,r1; /* r2 = &pt_regs->r16 */ \
137 ;; \
138 EXTRA; /* caller hook: additional save work */ \
139 movl r1=__gp; /* switch to the kernel's global pointer */ \
140 ;; \
141 ACCOUNT_SYS_ENTER /* CPU time accounting hook */ \
142 BSW_1(r3,r14); /* switch to bank-1 registers */ \
143 ;;
144