1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20#ifndef ARM_CPU_H
21#define ARM_CPU_H
22
23#include "kvm-consts.h"
24#include "hw/registerfields.h"
25
26#if defined(TARGET_AARCH64)
27
28# define TARGET_LONG_BITS 64
29#else
30# define TARGET_LONG_BITS 32
31#endif
32
33
34#define TCG_GUEST_DEFAULT_MO (0)
35
36#define CPUArchState struct CPUARMState
37
38#include "qemu-common.h"
39#include "cpu-qom.h"
40#include "exec/cpu-defs.h"
41
42#include "fpu/softfloat.h"
43
44#define EXCP_UDEF 1
45#define EXCP_SWI 2
46#define EXCP_PREFETCH_ABORT 3
47#define EXCP_DATA_ABORT 4
48#define EXCP_IRQ 5
49#define EXCP_FIQ 6
50#define EXCP_BKPT 7
51#define EXCP_EXCEPTION_EXIT 8
52#define EXCP_KERNEL_TRAP 9
53#define EXCP_HVC 11
54#define EXCP_HYP_TRAP 12
55#define EXCP_SMC 13
56#define EXCP_VIRQ 14
57#define EXCP_VFIQ 15
58#define EXCP_SEMIHOST 16
59#define EXCP_NOCP 17
60#define EXCP_INVSTATE 18
61
62
63#define ARMV7M_EXCP_RESET 1
64#define ARMV7M_EXCP_NMI 2
65#define ARMV7M_EXCP_HARD 3
66#define ARMV7M_EXCP_MEM 4
67#define ARMV7M_EXCP_BUS 5
68#define ARMV7M_EXCP_USAGE 6
69#define ARMV7M_EXCP_SVC 11
70#define ARMV7M_EXCP_DEBUG 12
71#define ARMV7M_EXCP_PENDSV 14
72#define ARMV7M_EXCP_SYSTICK 15
73
74
75#define CPU_INTERRUPT_FIQ CPU_INTERRUPT_TGT_EXT_1
76#define CPU_INTERRUPT_VIRQ CPU_INTERRUPT_TGT_EXT_2
77#define CPU_INTERRUPT_VFIQ CPU_INTERRUPT_TGT_EXT_3
78
79
80
81
82
83
84
85#ifdef HOST_WORDS_BIGENDIAN
86#define offsetoflow32(S, M) (offsetof(S, M) + sizeof(uint32_t))
87#define offsetofhigh32(S, M) offsetof(S, M)
88#else
89#define offsetoflow32(S, M) offsetof(S, M)
90#define offsetofhigh32(S, M) (offsetof(S, M) + sizeof(uint32_t))
91#endif
92
93
94#define ARM_CPU_IRQ 0
95#define ARM_CPU_FIQ 1
96#define ARM_CPU_VIRQ 2
97#define ARM_CPU_VFIQ 3
98
99#define NB_MMU_MODES 7
100
101
102
103
104#define TARGET_INSN_START_EXTRA_WORDS 2
105
106
107
108
109
110
111#define ARM_INSN_START_WORD2_MASK ((1 << 26) - 1)
112#define ARM_INSN_START_WORD2_SHIFT 14
113
114
115
116
117
118
119
120
121
122
/* State of one generic-timer comparator (used per GTIMER_* index below). */
typedef struct ARMGenericTimer {
    uint64_t cval; /* Timer CompareValue register */
    uint64_t ctl;  /* Timer Control register */
} ARMGenericTimer;
127
128#define GTIMER_PHYS 0
129#define GTIMER_VIRT 1
130#define GTIMER_HYP 2
131#define GTIMER_SEC 3
132#define NUM_GTIMERS 4
133
/*
 * Translation Control Register state: the raw register value plus
 * cached masks derived from it for translation-table base selection.
 */
typedef struct {
    uint64_t raw_tcr;   /* raw register value as written by the guest */
    uint32_t mask;      /* cached mask for selecting TTBR0 vs TTBR1 */
    uint32_t base_mask; /* cached mask applied to the table base address */
} TCR;
139
typedef struct CPUARMState {
    /* Regs for current mode (AArch32).  */
    uint32_t regs[16];

    /*
     * Regs for A64 mode.  xregs[31] is used as SP when it appears in an
     * instruction encoding; pc is the program counter.
     */
    uint64_t xregs[32];
    uint64_t pc;

    /*
     * PSTATE is cached here with the NZCV and DAIF bits held separately
     * (see the NF/ZF/CF/VF and daif fields below); use pstate_read() and
     * pstate_write() rather than accessing this directly.
     * 'aarch64' is non-zero when the CPU is in AArch64 state.
     */
    uint32_t pstate;
    uint32_t aarch64;

    /*
     * Cached AArch32 CPSR, excluding the execution-state bits that are
     * kept in the separate flag fields; use cpsr_read()/cpsr_write().
     */
    uint32_t uncached_cpsr;
    uint32_t spsr;

    /* Banked registers, indexed by register bank.  */
    uint64_t banked_spsr[8];
    uint32_t banked_r13[8];
    uint32_t banked_r14[8];

    /* These hold r8-r12 for the non-current mode (USR vs FIQ banking).  */
    uint32_t usr_regs[5];
    uint32_t fiq_regs[5];

    /*
     * Cached condition flags.  CF is bit 0; NF and VF are interpreted as
     * bit 31; ZF is "set" when the field is zero (see pstate_read()).
     */
    uint32_t CF;
    uint32_t VF;
    uint32_t NF;
    uint32_t ZF;
    uint32_t QF;             /* sticky saturation flag */
    uint32_t GE;             /* cpsr[19:16] GE bits */
    uint32_t thumb;          /* non-zero when executing Thumb code */
    uint32_t condexec_bits;  /* IT-block state bits */
    uint64_t daif;           /* exception mask bits, in PSTATE positions */

    uint64_t elr_el[4];      /* AArch64 exception link registers  */
    uint64_t sp_el[4];       /* AArch64 banked stack pointers */

    /*
     * System control coprocessor (cp15) state.  Registers banked between
     * Secure and Non-secure are stored in anonymous unions so they can be
     * accessed either as the AArch64 *_el[] array or as the individual
     * AArch32 *_s/*_ns views; the _unused_* padding keeps the two views
     * aligned.
     */
    struct {
        uint32_t c0_cpuid;
        union { /* Cache size selection */
            struct {
                uint64_t _unused_csselr0;
                uint64_t csselr_ns;
                uint64_t _unused_csselr1;
                uint64_t csselr_s;
            };
            uint64_t csselr_el[4];
        };
        union { /* System control register */
            struct {
                uint64_t _unused_sctlr;
                uint64_t sctlr_ns;
                uint64_t hsctlr;
                uint64_t sctlr_s;
            };
            uint64_t sctlr_el[4];
        };
        uint64_t cpacr_el1; /* Architectural feature access control */
        uint64_t cptr_el[4]; /* Trap register for EL2/EL3 */
        uint32_t c1_xscaleauxcr; /* XScale auxiliary control register */
        uint64_t sder; /* Secure debug enable register */
        uint32_t nsacr; /* Non-secure access control register */
        union { /* MMU translation table base 0 */
            struct {
                uint64_t _unused_ttbr0_0;
                uint64_t ttbr0_ns;
                uint64_t _unused_ttbr0_1;
                uint64_t ttbr0_s;
            };
            uint64_t ttbr0_el[4];
        };
        union { /* MMU translation table base 1 */
            struct {
                uint64_t _unused_ttbr1_0;
                uint64_t ttbr1_ns;
                uint64_t _unused_ttbr1_1;
                uint64_t ttbr1_s;
            };
            uint64_t ttbr1_el[4];
        };
        uint64_t vttbr_el2; /* Virtualization translation table base */
        /* MMU translation table base control */
        TCR tcr_el[4];
        TCR vtcr_el2; /* Virtualization translation control */
        uint32_t c2_data; /* MPU data cacheable bits */
        uint32_t c2_insn; /* MPU instruction cacheable bits */
        union { /* MMU domain access control / MPU write buffer control */
            struct {
                uint64_t dacr_ns;
                uint64_t dacr_s;
            };
            struct {
                uint64_t dacr32_el2;
            };
        };
        uint32_t pmsav5_data_ap; /* PMSAv5 MPU data access permissions */
        uint32_t pmsav5_insn_ap; /* PMSAv5 MPU insn access permissions */
        uint64_t hcr_el2; /* Hypervisor configuration register */
        uint64_t scr_el3; /* Secure configuration register */
        union { /* Fault status registers */
            struct {
                uint64_t ifsr_ns;
                uint64_t ifsr_s;
            };
            struct {
                uint64_t ifsr32_el2;
            };
        };
        union {
            struct {
                uint64_t _unused_dfsr;
                uint64_t dfsr_ns;
                uint64_t hsr;
                uint64_t dfsr_s;
            };
            uint64_t esr_el[4];
        };
        uint32_t c6_region[8]; /* MPU region access control */
        union { /* Fault address registers */
            struct {
                uint64_t _unused_far0;
#ifdef HOST_WORDS_BIGENDIAN
                uint32_t ifar_ns;
                uint32_t dfar_ns;
                uint32_t ifar_s;
                uint32_t dfar_s;
#else
                uint32_t dfar_ns;
                uint32_t ifar_ns;
                uint32_t dfar_s;
                uint32_t ifar_s;
#endif
                uint64_t _unused_far3;
            };
            uint64_t far_el[4];
        };
        uint64_t hpfar_el2; /* Hypervisor IPA fault address */
        uint64_t hstr_el2; /* Hypervisor system trap register */
        union { /* Translation result (PAR) */
            struct {
                uint64_t _unused_par_0;
                uint64_t par_ns;
                uint64_t _unused_par_1;
                uint64_t par_s;
            };
            uint64_t par_el[4];
        };

        uint32_t c9_insn; /* Cache lockdown registers */
        uint32_t c9_data;
        uint64_t c9_pmcr; /* performance monitor control register */
        uint64_t c9_pmcnten; /* perf monitor counter enables */
        uint32_t c9_pmovsr; /* perf monitor overflow status */
        uint32_t c9_pmuserenr; /* perf monitor user enable */
        uint64_t c9_pmselr; /* perf monitor counter selection */
        uint64_t c9_pminten; /* perf monitor interrupt enables */
        union { /* Memory attribute redirection */
            struct {
#ifdef HOST_WORDS_BIGENDIAN
                uint64_t _unused_mair_0;
                uint32_t mair1_ns;
                uint32_t mair0_ns;
                uint64_t _unused_mair_1;
                uint32_t mair1_s;
                uint32_t mair0_s;
#else
                uint64_t _unused_mair_0;
                uint32_t mair0_ns;
                uint32_t mair1_ns;
                uint64_t _unused_mair_1;
                uint32_t mair0_s;
                uint32_t mair1_s;
#endif
            };
            uint64_t mair_el[4];
        };
        union { /* vector base address register */
            struct {
                uint64_t _unused_vbar;
                uint64_t vbar_ns;
                uint64_t hvbar;
                uint64_t vbar_s;
            };
            uint64_t vbar_el[4];
        };
        uint32_t mvbar; /* (monitor) vector base address register */
        struct { /* FCSE PID (not banked by EL, only by security state) */
            uint32_t fcseidr_ns;
            uint32_t fcseidr_s;
        };
        union { /* Context ID */
            struct {
                uint64_t _unused_contextidr_0;
                uint64_t contextidr_ns;
                uint64_t _unused_contextidr_1;
                uint64_t contextidr_s;
            };
            uint64_t contextidr_el[4];
        };
        union { /* User RW thread register (and EL-banked variants) */
            struct {
                uint64_t tpidrurw_ns;
                uint64_t tpidrprw_ns;
                uint64_t htpidr;
                uint64_t _tpidr_el3;
            };
            uint64_t tpidr_el[4];
        };
        /* The secure banks of the thread registers don't map anywhere */
        uint64_t tpidrurw_s;
        uint64_t tpidrprw_s;
        uint64_t tpidruro_s;

        union { /* User RO thread register */
            uint64_t tpidruro_ns;
            uint64_t tpidrro_el[1];
        };
        uint64_t c14_cntfrq; /* Counter Frequency register */
        uint64_t c14_cntkctl; /* Timer Control register */
        uint32_t cnthctl_el2; /* Counter/Timer Hyp Control register */
        uint64_t cntvoff_el2; /* Counter Virtual Offset register */
        ARMGenericTimer c14_timer[NUM_GTIMERS];
        uint32_t c15_cpar; /* XScale Coprocessor Access Register */
        uint32_t c15_ticonfig; /* TI925T configuration byte */
        uint32_t c15_i_max; /* Maximum D-cache dirty line index */
        uint32_t c15_i_min; /* Minimum D-cache dirty line index */
        uint32_t c15_threadid; /* TI debugger thread-ID */
        uint32_t c15_config_base_address; /* SCU base address */
        uint32_t c15_diagnostic; /* diagnostic register */
        uint32_t c15_power_diagnostic;
        uint32_t c15_power_control; /* power control */
        uint64_t dbgbvr[16]; /* breakpoint value registers */
        uint64_t dbgbcr[16]; /* breakpoint control registers */
        uint64_t dbgwvr[16]; /* watchpoint value registers */
        uint64_t dbgwcr[16]; /* watchpoint control registers */
        uint64_t mdscr_el1;
        uint64_t oslsr_el1; /* OS Lock Status */
        uint64_t mdcr_el2;
        uint64_t mdcr_el3;
        /* cycle counter state (c15 is the legacy cp15 naming) */
        uint64_t c15_ccnt;
        uint64_t pmccfiltr_el0; /* Performance Monitor Filter Register */
        uint64_t vpidr_el2; /* Virtualization Processor ID Register */
        uint64_t vmpidr_el2; /* Virtualization Multiprocessor ID Register */
    } cp15;

    /* M profile (ARMv7-M and later) system register state.  */
    struct {
        uint32_t other_sp;  /* the SP of the currently-inactive bank */
        uint32_t vecbase;   /* vector table base */
        uint32_t basepri;
        uint32_t control;
        uint32_t ccr;       /* Configuration and Control */
        uint32_t cfsr;      /* Configurable Fault Status */
        uint32_t hfsr;      /* HardFault Status */
        uint32_t dfsr;      /* Debug Fault Status Register */
        uint32_t mmfar;     /* MemManage Fault Address */
        uint32_t bfar;      /* BusFault Address */
        unsigned mpu_ctrl;  /* MPU_CTRL */
        int exception;      /* currently-active exception number, 0 if none */
    } v7m;

    /*
     * Information associated with an exception about to be taken:
     * filled in by the code raising the exception and consumed by
     * the exception-entry code.
     */
    struct {
        uint32_t syndrome;  /* AArch64 format syndrome register */
        uint32_t fsr;       /* AArch32 format fault status register info */
        uint64_t vaddress;  /* virtual addr associated with exception, if any */
        uint32_t target_el; /* EL the exception should be targeted for */
    } exception;

    /* Thumb-2 EE state.  */
    uint32_t teecr;
    uint32_t teehbr;

    /* VFP coprocessor state.  */
    struct {
        /*
         * VFP/Neon register file.  Consecutive pairs of float64 elements
         * also serve as the quad-word Neon registers.
         */
        float64 regs[64];

        uint32_t xregs[16]; /* VFP system registers (FPSID, FPSCR, ...) */

        /* Cached FPSCR vector length/stride fields, decoded for speed. */
        int vec_len;
        int vec_stride;

        /* scratch space when Tn are not sufficient.  */
        uint32_t scratch[8];

        /*
         * fp_status is the "normal" fp status; standard_fp_status is
         * used for Neon "standard FPSCR value" operations (flush-to-zero,
         * default NaN behaviour etc.).
         */
        float_status fp_status;
        float_status standard_fp_status;
    } vfp;
    /* Load/store-exclusive monitor state.  */
    uint64_t exclusive_addr;
    uint64_t exclusive_val;
    uint64_t exclusive_high;

    /* iwMMXt coprocessor state.  */
    struct {
        uint64_t regs[16];
        uint64_t val;

        uint32_t cregs[16];
    } iwmmxt;

#if defined(CONFIG_USER_ONLY)
    /* For usermode syscall translation.  */
    int eabi;
#endif

    struct CPUBreakpoint *cpu_breakpoint[16];
    struct CPUWatchpoint *cpu_watchpoint[16];

    /*
     * Fields up to this point are cleared by an ARM CPU reset;
     * fields after it are preserved across reset.
     */
    struct {} end_reset_fields;

    CPU_COMMON

    /* Internal CPU feature flags (ARM_FEATURE_* bit positions).  */
    uint64_t features;

    /* PMSAv7 MPU state.  */
    struct {
        uint32_t *drbar;
        uint32_t *drsr;
        uint32_t *dracr;
        uint32_t rnr;
    } pmsav7;

    void *nvic;       /* pointer to the NVIC device, M profile only */
    const struct arm_boot_info *boot_info;
    /* Store GICv3CPUState to access from this struct */
    void *gicv3state;
} CPUARMState;
528
529
530
531
532
533
/* Callback type invoked when the CPU changes exception level.  */
typedef void ARMELChangeHook(ARMCPU *cpu, void *opaque);
535
536
537
538
/*
 * PSCI power state of a CPU.  ON_PENDING means a CPU_ON call has been
 * issued but the target CPU has not yet started running.  (Values are
 * fixed: they are externally visible state.)
 */
typedef enum ARMPSCIState {
    PSCI_ON = 0,
    PSCI_OFF = 1,
    PSCI_ON_PENDING = 2
} ARMPSCIState;
544
545
546
547
548
549
550
/* An ARM CPU object: QOM CPUState plus the architectural state above.  */
struct ARMCPU {
    /*< private >*/
    CPUState parent_obj;
    /*< public >*/

    CPUARMState env;

    /* Coprocessor information: hash table mapping register id to info */
    GHashTable *cp_regs;

    /*
     * For the KVM "sync cp regs to/from kernel" interface: arrays of
     * register indexes and values kept in the same order the kernel
     * reports them, plus separate staging arrays used during inbound
     * migration before the values are validated.
     */
    uint64_t *cpreg_indexes;
    /* Values of the registers (cpreg_indexes[i]'s value is cpreg_values[i]) */
    uint64_t *cpreg_values;
    /* Length of the indexes, values arrays */
    int32_t cpreg_array_len;
    /* These are used only for migration: incoming data arrives in
     * these fields and is sanity checked before being copied into the
     * working arrays above.
     */
    uint64_t *cpreg_vmstate_indexes;
    uint64_t *cpreg_vmstate_values;
    int32_t cpreg_vmstate_array_len;

    /* Generic timers and their output IRQ lines */
    QEMUTimer *gt_timer[NUM_GTIMERS];

    qemu_irq gt_timer_outputs[NUM_GTIMERS];
    /* GPIO output for GICv3 maintenance interrupt signal */
    qemu_irq gicv3_maintenance_interrupt;

    /* MemoryRegion to use for secure physical accesses */
    MemoryRegion *secure_memory;

    /* 'compatible' string for this CPU for Linux device trees */
    const char *dtb_compatible;

    /* PSCI version for this CPU
     * Bits[31:16] = Major Version
     * Bits[15:0] = Minor Version
     */
    uint32_t psci_version;

    /* Should CPU start in PSCI powered-off state? */
    bool start_powered_off;

    /* Current power state, access guarded by BQL */
    ARMPSCIState power_state;

    /* CPU has virtualization extension */
    bool has_el2;
    /* CPU has security extension */
    bool has_el3;
    /* CPU has PMU (Performance Monitor Unit) */
    bool has_pmu;

    /* CPU has memory protection unit */
    bool has_mpu;
    /* PMSAv7 MPU number of supported regions */
    uint32_t pmsav7_dregion;

    /* PSCI conduit used to invoke PSCI methods
     * 0 - disabled, 1 - smc, 2 - hvc
     */
    uint32_t psci_conduit;

    /* KVM init features for this CPU */
    uint32_t kvm_target;

    /* KVM init features for this CPU */
    uint32_t kvm_init_features[7];

    /* Uniprocessor system with MP extensions */
    bool mp_is_up;

    /*
     * The instance init functions for implementation-specific subclasses
     * set these fields to specify the implementation-dependent values of
     * various constant registers and reset values of non-constant
     * registers.  Field names generally match the official register names.
     */
    uint32_t midr;
    uint32_t revidr;
    uint32_t reset_fpsid;
    uint32_t mvfr0;
    uint32_t mvfr1;
    uint32_t mvfr2;
    uint32_t ctr;
    uint32_t reset_sctlr;
    uint32_t id_pfr0;
    uint32_t id_pfr1;
    uint32_t id_dfr0;
    uint32_t pmceid0;
    uint32_t pmceid1;
    uint32_t id_afr0;
    uint32_t id_mmfr0;
    uint32_t id_mmfr1;
    uint32_t id_mmfr2;
    uint32_t id_mmfr3;
    uint32_t id_mmfr4;
    uint32_t id_isar0;
    uint32_t id_isar1;
    uint32_t id_isar2;
    uint32_t id_isar3;
    uint32_t id_isar4;
    uint32_t id_isar5;
    uint64_t id_aa64pfr0;
    uint64_t id_aa64pfr1;
    uint64_t id_aa64dfr0;
    uint64_t id_aa64dfr1;
    uint64_t id_aa64afr0;
    uint64_t id_aa64afr1;
    uint64_t id_aa64isar0;
    uint64_t id_aa64isar1;
    uint64_t id_aa64mmfr0;
    uint64_t id_aa64mmfr1;
    uint32_t dbgdidr;
    uint32_t clidr;
    uint64_t mp_affinity; /* MP ID without feature bits */
    /* The elements of this array are the CCSIDR values for each cache,
     * in the order L1DCache, L1ICache, L2DCache, L2ICache, etc.
     */
    uint32_t ccsidr[16];
    uint64_t reset_cbar;
    uint32_t reset_auxcr;
    bool reset_hivecs;
    /* DCZ blocksize, in log_2(words), ie low 4 bits of DCZID_EL0 */
    uint32_t dcz_blocksize;
    uint64_t rvbar;

    /* Configurable aspects of GIC cpu interface (which is part of the CPU) */
    int gic_num_lrs; /* number of list registers */
    int gic_vpribits; /* number of virtual priority bits */
    int gic_vprebits; /* number of virtual preemption bits */

    /* Whether the cfgend input is high (i.e. this CPU should reset into
     * big-endian mode).  This setting isn't used directly: instead it modifies
     * the reset_sctlr value to have SCTLR_B or SCTLR_EE set, depending on the
     * architecture version.
     */
    bool cfgend;

    ARMELChangeHook *el_change_hook;
    void *el_change_hook_opaque;

    int32_t node_id; /* NUMA node this CPU belongs to */

    /* Used to synchronize KVM and QEMU in-kernel device levels */
    uint8_t device_irq_level;
};
712
/* Map a CPUARMState pointer back to the ARMCPU object that contains it. */
static inline ARMCPU *arm_env_get_cpu(CPUARMState *env)
{
    return container_of(env, ARMCPU, env);
}
717
718uint64_t arm_cpu_mp_affinity(int idx, uint8_t clustersz);
719
720#define ENV_GET_CPU(e) CPU(arm_env_get_cpu(e))
721
722#define ENV_OFFSET offsetof(ARMCPU, env)
723
724#ifndef CONFIG_USER_ONLY
725extern const struct VMStateDescription vmstate_arm_cpu;
726#endif
727
728void arm_cpu_do_interrupt(CPUState *cpu);
729void arm_v7m_cpu_do_interrupt(CPUState *cpu);
730bool arm_cpu_exec_interrupt(CPUState *cpu, int int_req);
731
732void arm_cpu_dump_state(CPUState *cs, FILE *f, fprintf_function cpu_fprintf,
733 int flags);
734
735hwaddr arm_cpu_get_phys_page_attrs_debug(CPUState *cpu, vaddr addr,
736 MemTxAttrs *attrs);
737
738int arm_cpu_gdb_read_register(CPUState *cpu, uint8_t *buf, int reg);
739int arm_cpu_gdb_write_register(CPUState *cpu, uint8_t *buf, int reg);
740
741int arm_cpu_write_elf64_note(WriteCoreDumpFunction f, CPUState *cs,
742 int cpuid, void *opaque);
743int arm_cpu_write_elf32_note(WriteCoreDumpFunction f, CPUState *cs,
744 int cpuid, void *opaque);
745
746#ifdef TARGET_AARCH64
747int aarch64_cpu_gdb_read_register(CPUState *cpu, uint8_t *buf, int reg);
748int aarch64_cpu_gdb_write_register(CPUState *cpu, uint8_t *buf, int reg);
749#endif
750
751ARMCPU *cpu_arm_init(const char *cpu_model);
752target_ulong do_arm_semihosting(CPUARMState *env);
753void aarch64_sync_32_to_64(CPUARMState *env);
754void aarch64_sync_64_to_32(CPUARMState *env);
755
756static inline bool is_a64(CPUARMState *env)
757{
758 return env->aarch64;
759}
760
761
762
763
764int cpu_arm_signal_handler(int host_signum, void *pinfo,
765 void *puc);
766
767
768
769
770
771
772
773
774
775
776void pmccntr_sync(CPUARMState *env);
777
778
779
780
781
782
783
784#define SCTLR_M (1U << 0)
785#define SCTLR_A (1U << 1)
786#define SCTLR_C (1U << 2)
787#define SCTLR_W (1U << 3)
788#define SCTLR_SA (1U << 3)
789#define SCTLR_P (1U << 4)
790#define SCTLR_SA0 (1U << 4)
791#define SCTLR_D (1U << 5)
792#define SCTLR_CP15BEN (1U << 5)
793#define SCTLR_L (1U << 6)
794#define SCTLR_B (1U << 7)
795#define SCTLR_ITD (1U << 7)
796#define SCTLR_S (1U << 8)
797#define SCTLR_SED (1U << 8)
798#define SCTLR_R (1U << 9)
799#define SCTLR_UMA (1U << 9)
800#define SCTLR_F (1U << 10)
801#define SCTLR_SW (1U << 10)
802#define SCTLR_Z (1U << 11)
803#define SCTLR_I (1U << 12)
804#define SCTLR_V (1U << 13)
805#define SCTLR_RR (1U << 14)
806#define SCTLR_DZE (1U << 14)
807#define SCTLR_L4 (1U << 15)
808#define SCTLR_UCT (1U << 15)
809#define SCTLR_DT (1U << 16)
810#define SCTLR_nTWI (1U << 16)
811#define SCTLR_HA (1U << 17)
812#define SCTLR_BR (1U << 17)
813#define SCTLR_IT (1U << 18)
814#define SCTLR_nTWE (1U << 18)
815#define SCTLR_WXN (1U << 19)
816#define SCTLR_ST (1U << 20)
817#define SCTLR_UWXN (1U << 20)
818#define SCTLR_FI (1U << 21)
819#define SCTLR_U (1U << 22)
820#define SCTLR_XP (1U << 23)
821#define SCTLR_VE (1U << 24)
822#define SCTLR_E0E (1U << 24)
823#define SCTLR_EE (1U << 25)
824#define SCTLR_L2 (1U << 26)
825#define SCTLR_UCI (1U << 26)
826#define SCTLR_NMFI (1U << 27)
827#define SCTLR_TRE (1U << 28)
828#define SCTLR_AFE (1U << 29)
829#define SCTLR_TE (1U << 30)
830
831#define CPTR_TCPAC (1U << 31)
832#define CPTR_TTA (1U << 20)
833#define CPTR_TFP (1U << 10)
834
835#define MDCR_EPMAD (1U << 21)
836#define MDCR_EDAD (1U << 20)
837#define MDCR_SPME (1U << 17)
838#define MDCR_SDD (1U << 16)
839#define MDCR_SPD (3U << 14)
840#define MDCR_TDRA (1U << 11)
841#define MDCR_TDOSA (1U << 10)
842#define MDCR_TDA (1U << 9)
843#define MDCR_TDE (1U << 8)
844#define MDCR_HPME (1U << 7)
845#define MDCR_TPM (1U << 6)
846#define MDCR_TPMCR (1U << 5)
847
848
849#define SDCR_VALID_MASK (MDCR_EPMAD | MDCR_EDAD | MDCR_SPME | MDCR_SPD)
850
851#define CPSR_M (0x1fU)
852#define CPSR_T (1U << 5)
853#define CPSR_F (1U << 6)
854#define CPSR_I (1U << 7)
855#define CPSR_A (1U << 8)
856#define CPSR_E (1U << 9)
857#define CPSR_IT_2_7 (0xfc00U)
858#define CPSR_GE (0xfU << 16)
859#define CPSR_IL (1U << 20)
860
861
862
863
864
865#define CPSR_RESERVED (0x7U << 21)
866#define CPSR_J (1U << 24)
867#define CPSR_IT_0_1 (3U << 25)
868#define CPSR_Q (1U << 27)
869#define CPSR_V (1U << 28)
870#define CPSR_C (1U << 29)
871#define CPSR_Z (1U << 30)
872#define CPSR_N (1U << 31)
873#define CPSR_NZCV (CPSR_N | CPSR_Z | CPSR_C | CPSR_V)
874#define CPSR_AIF (CPSR_A | CPSR_I | CPSR_F)
875
876#define CPSR_IT (CPSR_IT_0_1 | CPSR_IT_2_7)
877#define CACHED_CPSR_BITS (CPSR_T | CPSR_AIF | CPSR_GE | CPSR_IT | CPSR_Q \
878 | CPSR_NZCV)
879
880#define CPSR_USER (CPSR_NZCV | CPSR_Q | CPSR_GE)
881
882#define CPSR_EXEC (CPSR_T | CPSR_IT | CPSR_J | CPSR_IL)
883
884#define CPSR_ERET_MASK (~CPSR_RESERVED)
885
886#define TTBCR_N (7U << 0)
887#define TTBCR_T0SZ (7U << 0)
888#define TTBCR_PD0 (1U << 4)
889#define TTBCR_PD1 (1U << 5)
890#define TTBCR_EPD0 (1U << 7)
891#define TTBCR_IRGN0 (3U << 8)
892#define TTBCR_ORGN0 (3U << 10)
893#define TTBCR_SH0 (3U << 12)
894#define TTBCR_T1SZ (3U << 16)
895#define TTBCR_A1 (1U << 22)
896#define TTBCR_EPD1 (1U << 23)
897#define TTBCR_IRGN1 (3U << 24)
898#define TTBCR_ORGN1 (3U << 26)
899#define TTBCR_SH1 (1U << 28)
900#define TTBCR_EAE (1U << 31)
901
902
903
904
905
906#define PSTATE_SP (1U)
907#define PSTATE_M (0xFU)
908#define PSTATE_nRW (1U << 4)
909#define PSTATE_F (1U << 6)
910#define PSTATE_I (1U << 7)
911#define PSTATE_A (1U << 8)
912#define PSTATE_D (1U << 9)
913#define PSTATE_IL (1U << 20)
914#define PSTATE_SS (1U << 21)
915#define PSTATE_V (1U << 28)
916#define PSTATE_C (1U << 29)
917#define PSTATE_Z (1U << 30)
918#define PSTATE_N (1U << 31)
919#define PSTATE_NZCV (PSTATE_N | PSTATE_Z | PSTATE_C | PSTATE_V)
920#define PSTATE_DAIF (PSTATE_D | PSTATE_A | PSTATE_I | PSTATE_F)
921#define CACHED_PSTATE_BITS (PSTATE_NZCV | PSTATE_DAIF)
922
923#define PSTATE_MODE_EL3h 13
924#define PSTATE_MODE_EL3t 12
925#define PSTATE_MODE_EL2h 9
926#define PSTATE_MODE_EL2t 8
927#define PSTATE_MODE_EL1h 5
928#define PSTATE_MODE_EL1t 4
929#define PSTATE_MODE_EL0t 0
930
931
/*
 * Build a PSTATE mode field (see the PSTATE_MODE_* values above) from an
 * exception level and whether the EL's dedicated stack pointer is in use
 * ("handler"/h form) or the EL0 stack pointer ("thread"/t form).
 */
static inline unsigned int aarch64_pstate_mode(unsigned int el, bool handler)
{
    unsigned int mode = el << 2;

    if (handler) {
        mode |= 1;
    }
    return mode;
}
936
937
938
939
940
941static inline uint32_t pstate_read(CPUARMState *env)
942{
943 int ZF;
944
945 ZF = (env->ZF == 0);
946 return (env->NF & 0x80000000) | (ZF << 30)
947 | (env->CF << 29) | ((env->VF & 0x80000000) >> 3)
948 | env->pstate | env->daif;
949}
950
951static inline void pstate_write(CPUARMState *env, uint32_t val)
952{
953 env->ZF = (~val) & PSTATE_Z;
954 env->NF = val;
955 env->CF = (val >> 29) & 1;
956 env->VF = (val << 3) & 0x80000000;
957 env->daif = val & PSTATE_DAIF;
958 env->pstate = val & ~CACHED_PSTATE_BITS;
959}
960
961
962uint32_t cpsr_read(CPUARMState *env);
963
/* Distinguishes the origin of a CPSR write, for cpsr_write() below. */
typedef enum CPSRWriteType {
    CPSRWriteByInstr = 0,         /* from guest MSR or CPS */
    CPSRWriteExceptionReturn = 1, /* from guest exception return insn */
    CPSRWriteRaw = 2,             /* trust values, no reg bank switch */
    CPSRWriteByGDBStub = 3,       /* from the GDB stub */
} CPSRWriteType;
970
971
972void cpsr_write(CPUARMState *env, uint32_t val, uint32_t mask,
973 CPSRWriteType write_type);
974
975
976static inline uint32_t xpsr_read(CPUARMState *env)
977{
978 int ZF;
979 ZF = (env->ZF == 0);
980 return (env->NF & 0x80000000) | (ZF << 30)
981 | (env->CF << 29) | ((env->VF & 0x80000000) >> 3) | (env->QF << 27)
982 | (env->thumb << 24) | ((env->condexec_bits & 3) << 25)
983 | ((env->condexec_bits & 0xfc) << 8)
984 | env->v7m.exception;
985}
986
987
988static inline void xpsr_write(CPUARMState *env, uint32_t val, uint32_t mask)
989{
990 if (mask & CPSR_NZCV) {
991 env->ZF = (~val) & CPSR_Z;
992 env->NF = val;
993 env->CF = (val >> 29) & 1;
994 env->VF = (val << 3) & 0x80000000;
995 }
996 if (mask & CPSR_Q)
997 env->QF = ((val & CPSR_Q) != 0);
998 if (mask & (1 << 24))
999 env->thumb = ((val & (1 << 24)) != 0);
1000 if (mask & CPSR_IT_0_1) {
1001 env->condexec_bits &= ~3;
1002 env->condexec_bits |= (val >> 25) & 3;
1003 }
1004 if (mask & CPSR_IT_2_7) {
1005 env->condexec_bits &= 3;
1006 env->condexec_bits |= (val >> 8) & 0xfc;
1007 }
1008 if (mask & 0x1ff) {
1009 env->v7m.exception = val & 0x1ff;
1010 }
1011}
1012
1013#define HCR_VM (1ULL << 0)
1014#define HCR_SWIO (1ULL << 1)
1015#define HCR_PTW (1ULL << 2)
1016#define HCR_FMO (1ULL << 3)
1017#define HCR_IMO (1ULL << 4)
1018#define HCR_AMO (1ULL << 5)
1019#define HCR_VF (1ULL << 6)
1020#define HCR_VI (1ULL << 7)
1021#define HCR_VSE (1ULL << 8)
1022#define HCR_FB (1ULL << 9)
1023#define HCR_BSU_MASK (3ULL << 10)
1024#define HCR_DC (1ULL << 12)
1025#define HCR_TWI (1ULL << 13)
1026#define HCR_TWE (1ULL << 14)
1027#define HCR_TID0 (1ULL << 15)
1028#define HCR_TID1 (1ULL << 16)
1029#define HCR_TID2 (1ULL << 17)
1030#define HCR_TID3 (1ULL << 18)
1031#define HCR_TSC (1ULL << 19)
1032#define HCR_TIDCP (1ULL << 20)
1033#define HCR_TACR (1ULL << 21)
1034#define HCR_TSW (1ULL << 22)
1035#define HCR_TPC (1ULL << 23)
1036#define HCR_TPU (1ULL << 24)
1037#define HCR_TTLB (1ULL << 25)
1038#define HCR_TVM (1ULL << 26)
1039#define HCR_TGE (1ULL << 27)
1040#define HCR_TDZ (1ULL << 28)
1041#define HCR_HCD (1ULL << 29)
1042#define HCR_TRVM (1ULL << 30)
1043#define HCR_RW (1ULL << 31)
1044#define HCR_CD (1ULL << 32)
1045#define HCR_ID (1ULL << 33)
1046#define HCR_MASK ((1ULL << 34) - 1)
1047
1048#define SCR_NS (1U << 0)
1049#define SCR_IRQ (1U << 1)
1050#define SCR_FIQ (1U << 2)
1051#define SCR_EA (1U << 3)
1052#define SCR_FW (1U << 4)
1053#define SCR_AW (1U << 5)
1054#define SCR_NET (1U << 6)
1055#define SCR_SMD (1U << 7)
1056#define SCR_HCE (1U << 8)
1057#define SCR_SIF (1U << 9)
1058#define SCR_RW (1U << 10)
1059#define SCR_ST (1U << 11)
1060#define SCR_TWI (1U << 12)
1061#define SCR_TWE (1U << 13)
1062#define SCR_AARCH32_MASK (0x3fff & ~(SCR_RW | SCR_ST))
1063#define SCR_AARCH64_MASK (0x3fff & ~SCR_NET)
1064
1065
1066uint32_t vfp_get_fpscr(CPUARMState *env);
1067void vfp_set_fpscr(CPUARMState *env, uint32_t val);
1068
1069
1070
1071
1072
1073#define FPSR_MASK 0xf800009f
1074#define FPCR_MASK 0x07f79f00
1075static inline uint32_t vfp_get_fpsr(CPUARMState *env)
1076{
1077 return vfp_get_fpscr(env) & FPSR_MASK;
1078}
1079
1080static inline void vfp_set_fpsr(CPUARMState *env, uint32_t val)
1081{
1082 uint32_t new_fpscr = (vfp_get_fpscr(env) & ~FPSR_MASK) | (val & FPSR_MASK);
1083 vfp_set_fpscr(env, new_fpscr);
1084}
1085
1086static inline uint32_t vfp_get_fpcr(CPUARMState *env)
1087{
1088 return vfp_get_fpscr(env) & FPCR_MASK;
1089}
1090
1091static inline void vfp_set_fpcr(CPUARMState *env, uint32_t val)
1092{
1093 uint32_t new_fpscr = (vfp_get_fpscr(env) & ~FPCR_MASK) | (val & FPCR_MASK);
1094 vfp_set_fpscr(env, new_fpscr);
1095}
1096
/* AArch32 CPSR.M mode field encodings (the low 5 bits of the CPSR). */
enum arm_cpu_mode {
  ARM_CPU_MODE_USR = 0x10,
  ARM_CPU_MODE_FIQ = 0x11,
  ARM_CPU_MODE_IRQ = 0x12,
  ARM_CPU_MODE_SVC = 0x13,
  ARM_CPU_MODE_MON = 0x16,
  ARM_CPU_MODE_ABT = 0x17,
  ARM_CPU_MODE_HYP = 0x1a,
  ARM_CPU_MODE_UND = 0x1b,
  ARM_CPU_MODE_SYS = 0x1f
};
1108
1109
1110#define ARM_VFP_FPSID 0
1111#define ARM_VFP_FPSCR 1
1112#define ARM_VFP_MVFR2 5
1113#define ARM_VFP_MVFR1 6
1114#define ARM_VFP_MVFR0 7
1115#define ARM_VFP_FPEXC 8
1116#define ARM_VFP_FPINST 9
1117#define ARM_VFP_FPINST2 10
1118
1119
1120#define ARM_IWMMXT_wCID 0
1121#define ARM_IWMMXT_wCon 1
1122#define ARM_IWMMXT_wCSSF 2
1123#define ARM_IWMMXT_wCASF 3
1124#define ARM_IWMMXT_wCGR0 8
1125#define ARM_IWMMXT_wCGR1 9
1126#define ARM_IWMMXT_wCGR2 10
1127#define ARM_IWMMXT_wCGR3 11
1128
1129
1130FIELD(V7M_CCR, NONBASETHRDENA, 0, 1)
1131FIELD(V7M_CCR, USERSETMPEND, 1, 1)
1132FIELD(V7M_CCR, UNALIGN_TRP, 3, 1)
1133FIELD(V7M_CCR, DIV_0_TRP, 4, 1)
1134FIELD(V7M_CCR, BFHFNMIGN, 8, 1)
1135FIELD(V7M_CCR, STKALIGN, 9, 1)
1136FIELD(V7M_CCR, DC, 16, 1)
1137FIELD(V7M_CCR, IC, 17, 1)
1138
1139
1140FIELD(V7M_CFSR, IACCVIOL, 0, 1)
1141FIELD(V7M_CFSR, DACCVIOL, 1, 1)
1142FIELD(V7M_CFSR, MUNSTKERR, 3, 1)
1143FIELD(V7M_CFSR, MSTKERR, 4, 1)
1144FIELD(V7M_CFSR, MLSPERR, 5, 1)
1145FIELD(V7M_CFSR, MMARVALID, 7, 1)
1146
1147
1148FIELD(V7M_CFSR, IBUSERR, 8 + 0, 1)
1149FIELD(V7M_CFSR, PRECISERR, 8 + 1, 1)
1150FIELD(V7M_CFSR, IMPRECISERR, 8 + 2, 1)
1151FIELD(V7M_CFSR, UNSTKERR, 8 + 3, 1)
1152FIELD(V7M_CFSR, STKERR, 8 + 4, 1)
1153FIELD(V7M_CFSR, LSPERR, 8 + 5, 1)
1154FIELD(V7M_CFSR, BFARVALID, 8 + 7, 1)
1155
1156
1157FIELD(V7M_CFSR, UNDEFINSTR, 16 + 0, 1)
1158FIELD(V7M_CFSR, INVSTATE, 16 + 1, 1)
1159FIELD(V7M_CFSR, INVPC, 16 + 2, 1)
1160FIELD(V7M_CFSR, NOCP, 16 + 3, 1)
1161FIELD(V7M_CFSR, UNALIGNED, 16 + 8, 1)
1162FIELD(V7M_CFSR, DIVBYZERO, 16 + 9, 1)
1163
1164
1165FIELD(V7M_HFSR, VECTTBL, 1, 1)
1166FIELD(V7M_HFSR, FORCED, 30, 1)
1167FIELD(V7M_HFSR, DEBUGEVT, 31, 1)
1168
1169
1170FIELD(V7M_DFSR, HALTED, 0, 1)
1171FIELD(V7M_DFSR, BKPT, 1, 1)
1172FIELD(V7M_DFSR, DWTTRAP, 2, 1)
1173FIELD(V7M_DFSR, VCATCH, 3, 1)
1174FIELD(V7M_DFSR, EXTERNAL, 4, 1)
1175
1176
1177FIELD(V7M_MPU_CTRL, ENABLE, 0, 1)
1178FIELD(V7M_MPU_CTRL, HFNMIENA, 1, 1)
1179FIELD(V7M_MPU_CTRL, PRIVDEFENA, 2, 1)
1180
1181
1182
1183
1184
/*
 * Bit positions for the CPUARMState.features word; tested via
 * arm_feature() below.  Do not reorder: the positions are part of
 * migrated state.
 */
enum arm_features {
    ARM_FEATURE_VFP,
    ARM_FEATURE_AUXCR,  /* ARM1026 Auxiliary control register.  */
    ARM_FEATURE_XSCALE, /* Intel XScale extensions.  */
    ARM_FEATURE_IWMMXT, /* Intel iwMMXt extension.  */
    ARM_FEATURE_V6,
    ARM_FEATURE_V6K,
    ARM_FEATURE_V7,
    ARM_FEATURE_THUMB2,
    ARM_FEATURE_PMSA,   /* no MMU; may have Memory Protection Unit */
    ARM_FEATURE_VFP3,
    ARM_FEATURE_VFP_FP16,
    ARM_FEATURE_NEON,
    ARM_FEATURE_THUMB_DIV, /* divide supported in Thumb encoding */
    ARM_FEATURE_M, /* Microcontroller profile.  */
    ARM_FEATURE_OMAPCP, /* OMAP specific CP15 ops handling.  */
    ARM_FEATURE_THUMB2EE,
    ARM_FEATURE_V7MP,    /* v7 Multiprocessing Extensions */
    ARM_FEATURE_V4T,
    ARM_FEATURE_V5,
    ARM_FEATURE_STRONGARM,
    ARM_FEATURE_VAPA, /* cp15 VA to PA lookups */
    ARM_FEATURE_ARM_DIV, /* divide supported in ARM encoding */
    ARM_FEATURE_VFP4, /* VFPv4 (implies that NEON is v2) */
    ARM_FEATURE_GENERIC_TIMER,
    ARM_FEATURE_MVFR, /* Media and VFP Feature Registers 0 and 1 */
    ARM_FEATURE_DUMMY_C15_REGS, /* RAZ/WI all of cp15 crn=15 */
    ARM_FEATURE_CACHE_TEST_CLEAN, /* 926/1026 style test-and-clean ops */
    ARM_FEATURE_CACHE_DIRTY_REG, /* 1136/1176 cache dirty status register */
    ARM_FEATURE_CACHE_BLOCK_OPS, /* v6 optional cache block operations */
    ARM_FEATURE_MPIDR, /* has cp15 MPIDR */
    ARM_FEATURE_PXN, /* has Privileged Execute Never bit */
    ARM_FEATURE_LPAE, /* has Large Physical Address Extension */
    ARM_FEATURE_V8,
    ARM_FEATURE_AARCH64, /* supports 64 bit mode */
    ARM_FEATURE_V8_AES, /* implements AES part of v8 Crypto Extensions */
    ARM_FEATURE_CBAR, /* has cp15 CBAR */
    ARM_FEATURE_CRC, /* ARMv8 CRC instructions */
    ARM_FEATURE_CBAR_RO, /* has cp15 CBAR and it is read-only */
    ARM_FEATURE_EL2, /* has EL2 Virtualization support */
    ARM_FEATURE_EL3, /* has EL3 Secure monitor support */
    ARM_FEATURE_V8_SHA1, /* implements SHA1 part of v8 Crypto Extensions */
    ARM_FEATURE_V8_SHA256, /* implements SHA256 part of v8 Crypto Extensions */
    ARM_FEATURE_V8_PMULL, /* implements PMULL part of v8 Crypto Extensions */
    ARM_FEATURE_THUMB_DSP, /* DSP insns supported in the Thumb encodings */
    ARM_FEATURE_PMU, /* has PMU support */
    ARM_FEATURE_VBAR, /* has cp15 VBAR */
};
1233
1234static inline int arm_feature(CPUARMState *env, int feature)
1235{
1236 return (env->features & (1ULL << feature)) != 0;
1237}
1238
1239#if !defined(CONFIG_USER_ONLY)
1240
1241
1242
1243
1244
1245
1246static inline bool arm_is_secure_below_el3(CPUARMState *env)
1247{
1248 if (arm_feature(env, ARM_FEATURE_EL3)) {
1249 return !(env->cp15.scr_el3 & SCR_NS);
1250 } else {
1251
1252
1253
1254 return false;
1255 }
1256}
1257
1258
1259static inline bool arm_is_el3_or_mon(CPUARMState *env)
1260{
1261 if (arm_feature(env, ARM_FEATURE_EL3)) {
1262 if (is_a64(env) && extract32(env->pstate, 2, 2) == 3) {
1263
1264 return true;
1265 } else if (!is_a64(env) &&
1266 (env->uncached_cpsr & CPSR_M) == ARM_CPU_MODE_MON) {
1267
1268 return true;
1269 }
1270 }
1271 return false;
1272}
1273
1274
1275static inline bool arm_is_secure(CPUARMState *env)
1276{
1277 if (arm_is_el3_or_mon(env)) {
1278 return true;
1279 }
1280 return arm_is_secure_below_el3(env);
1281}
1282
1283#else
/* CONFIG_USER_ONLY: user-mode emulation is always non-secure. */
static inline bool arm_is_secure_below_el3(CPUARMState *env)
{
    return false;
}
1288
/* CONFIG_USER_ONLY: user-mode emulation is always non-secure. */
static inline bool arm_is_secure(CPUARMState *env)
{
    return false;
}
1293#endif
1294
1295
/*
 * Return true if the given exception level runs in AArch64 state.
 * The register width of an EL is constrained by the width of all higher
 * ELs, so we start from the CPU's overall AArch64 capability and narrow
 * it down through SCR_EL3.RW (for EL2/EL1) and HCR_EL2.RW (for EL1).
 */
static inline bool arm_el_is_aa64(CPUARMState *env, int el)
{
    /* Only EL1..EL3 have a meaningful register width here. */
    assert(el >= 1 && el <= 3);
    bool aa64 = arm_feature(env, ARM_FEATURE_AARCH64);

    /* The highest implemented EL always runs in the CPU's native width. */
    if (el == 3) {
        return aa64;
    }

    if (arm_feature(env, ARM_FEATURE_EL3)) {
        /* SCR_EL3.RW clear forces lower ELs to AArch32. */
        aa64 = aa64 && (env->cp15.scr_el3 & SCR_RW);
    }

    if (el == 2) {
        return aa64;
    }

    if (arm_feature(env, ARM_FEATURE_EL2) && !arm_is_secure_below_el3(env)) {
        /* HCR_EL2.RW clear forces EL1/EL0 to AArch32. */
        aa64 = aa64 && (env->cp15.hcr_el2 & HCR_RW);
    }

    return aa64;
}
1326
1327
1328
1329
1330
1331
1332
1333
1334static inline bool access_secure_reg(CPUARMState *env)
1335{
1336 bool ret = (arm_feature(env, ARM_FEATURE_EL3) &&
1337 !arm_el_is_aa64(env, 3) &&
1338 !(env->cp15.scr_el3 & SCR_NS));
1339
1340 return ret;
1341}
1342
1343
/*
 * Access the secure or non-secure bank of a banked AArch32 cp15
 * register, selected by the _secure flag.
 */
#define A32_BANKED_REG_GET(_env, _regname, _secure)    \
    ((_secure) ? (_env)->cp15._regname##_s : (_env)->cp15._regname##_ns)

#define A32_BANKED_REG_SET(_env, _regname, _secure, _val)   \
    do {                                                \
        if (_secure) {                                   \
            (_env)->cp15._regname##_s = (_val);            \
        } else {                                        \
            (_env)->cp15._regname##_ns = (_val);           \
        }                                               \
    } while (0)

/*
 * As above, but pick the bank implied by the current security state:
 * the secure bank is used only when we are secure AND EL3 is AArch32
 * (an AArch64 EL3 uses the _EL register view instead).
 */
#define A32_BANKED_CURRENT_REG_GET(_env, _regname)        \
    A32_BANKED_REG_GET((_env), _regname,                \
                       (arm_is_secure(_env) && !arm_el_is_aa64((_env), 3)))

#define A32_BANKED_CURRENT_REG_SET(_env, _regname, _val)                       \
    A32_BANKED_REG_SET((_env), _regname,                                    \
                       (arm_is_secure(_env) && !arm_el_is_aa64((_env), 3)), \
                       (_val))
1369
1370void arm_cpu_list(FILE *f, fprintf_function cpu_fprintf);
1371uint32_t arm_phys_excp_target_el(CPUState *cs, uint32_t excp_idx,
1372 uint32_t cur_el, bool secure);
1373
1374
1375#ifndef CONFIG_USER_ONLY
1376bool armv7m_nvic_can_take_pending_exception(void *opaque);
1377#else
1378static inline bool armv7m_nvic_can_take_pending_exception(void *opaque)
1379{
1380 return true;
1381}
1382#endif
1383void armv7m_nvic_set_pending(void *opaque, int irq);
1384void armv7m_nvic_acknowledge_irq(void *opaque);
1385
1386
1387
1388
1389
1390
1391
1392
1393
1394
1395int armv7m_nvic_complete_irq(void *opaque, int irq);
1396
1397
1398
1399
1400
1401
1402
1403
1404
1405
1406
1407
1408
1409
1410
1411
1412
1413
1414
1415
1416
1417
1418
1419
1420
1421
1422
1423#define CP_REG_AA64_SHIFT 28
1424#define CP_REG_AA64_MASK (1 << CP_REG_AA64_SHIFT)
1425
1426
1427
1428
1429
1430#define CP_REG_NS_SHIFT 29
1431#define CP_REG_NS_MASK (1 << CP_REG_NS_SHIFT)
1432
1433#define ENCODE_CP_REG(cp, is64, ns, crn, crm, opc1, opc2) \
1434 ((ns) << CP_REG_NS_SHIFT | ((cp) << 16) | ((is64) << 15) | \
1435 ((crn) << 11) | ((crm) << 7) | ((opc1) << 3) | (opc2))
1436
1437#define ENCODE_AA64_CP_REG(cp, crn, crm, op0, op1, op2) \
1438 (CP_REG_AA64_MASK | \
1439 ((cp) << CP_REG_ARM_COPROC_SHIFT) | \
1440 ((op0) << CP_REG_ARM64_SYSREG_OP0_SHIFT) | \
1441 ((op1) << CP_REG_ARM64_SYSREG_OP1_SHIFT) | \
1442 ((crn) << CP_REG_ARM64_SYSREG_CRN_SHIFT) | \
1443 ((crm) << CP_REG_ARM64_SYSREG_CRM_SHIFT) | \
1444 ((op2) << CP_REG_ARM64_SYSREG_OP2_SHIFT))
1445
1446
1447
1448
1449static inline uint32_t kvm_to_cpreg_id(uint64_t kvmid)
1450{
1451 uint32_t cpregid = kvmid;
1452 if ((kvmid & CP_REG_ARCH_MASK) == CP_REG_ARM64) {
1453 cpregid |= CP_REG_AA64_MASK;
1454 } else {
1455 if ((kvmid & CP_REG_SIZE_MASK) == CP_REG_SIZE_U64) {
1456 cpregid |= (1 << 15);
1457 }
1458
1459
1460
1461
1462 cpregid |= 1 << CP_REG_NS_SHIFT;
1463 }
1464 return cpregid;
1465}
1466
1467
1468
1469
1470static inline uint64_t cpreg_to_kvm_id(uint32_t cpregid)
1471{
1472 uint64_t kvmid;
1473
1474 if (cpregid & CP_REG_AA64_MASK) {
1475 kvmid = cpregid & ~CP_REG_AA64_MASK;
1476 kvmid |= CP_REG_SIZE_U64 | CP_REG_ARM64;
1477 } else {
1478 kvmid = cpregid & ~(1 << 15);
1479 if (cpregid & (1 << 15)) {
1480 kvmid |= CP_REG_SIZE_U64 | CP_REG_ARM;
1481 } else {
1482 kvmid |= CP_REG_SIZE_U32 | CP_REG_ARM;
1483 }
1484 }
1485 return kvmid;
1486}
1487
1488
1489
1490
1491
1492
1493
1494
1495
1496
1497
1498
1499
1500
1501
1502
1503
1504
1505
1506
1507
1508
1509
1510#define ARM_CP_SPECIAL 1
1511#define ARM_CP_CONST 2
1512#define ARM_CP_64BIT 4
1513#define ARM_CP_SUPPRESS_TB_END 8
1514#define ARM_CP_OVERRIDE 16
1515#define ARM_CP_ALIAS 32
1516#define ARM_CP_IO 64
1517#define ARM_CP_NO_RAW 128
1518#define ARM_CP_NOP (ARM_CP_SPECIAL | (1 << 8))
1519#define ARM_CP_WFI (ARM_CP_SPECIAL | (2 << 8))
1520#define ARM_CP_NZCV (ARM_CP_SPECIAL | (3 << 8))
1521#define ARM_CP_CURRENTEL (ARM_CP_SPECIAL | (4 << 8))
1522#define ARM_CP_DC_ZVA (ARM_CP_SPECIAL | (5 << 8))
1523#define ARM_LAST_SPECIAL ARM_CP_DC_ZVA
1524
1525#define ARM_CP_SENTINEL 0xffff
1526
1527#define ARM_CP_FLAG_MASK 0xff
1528
1529
1530
1531
1532
1533
1534
1535
1536
1537
/* Which execution state(s) a coprocessor register definition applies to. */
enum {
    ARM_CP_STATE_AA32 = 0,
    ARM_CP_STATE_AA64 = 1,
    ARM_CP_STATE_BOTH = 2,
};
1543
1544
1545
1546
1547
1548
1549
1550
1551
1552
1553
/* Security state(s) a banked cp register instance belongs to. */
enum {
    ARM_CP_SECSTATE_S =   (1 << 0), /* Secure state */
    ARM_CP_SECSTATE_NS =  (1 << 1), /* Non-secure state */
};
1558
1559
1560
1561
1562
1563static inline bool cptype_valid(int cptype)
1564{
1565 return ((cptype & ~ARM_CP_FLAG_MASK) == 0)
1566 || ((cptype & ARM_CP_SPECIAL) &&
1567 ((cptype & ~ARM_CP_FLAG_MASK) <= ARM_LAST_SPECIAL));
1568}
1569
1570
1571
1572
1573
1574
1575
1576
1577
1578
1579
1580
1581
1582
1583
1584
1585
1586
/*
 * Access-rights bitmask for an ARMCPRegInfo 'access' field: one
 * read bit and one write bit per privilege level PL0..PL3. Each
 * PLn_R/PLn_W also implies access from all higher levels, so e.g.
 * PL0_R means "readable from PL0 and everything above it".
 * cp_access_ok() indexes this mask as (current_el * 2) + isread.
 */
#define PL3_R 0x80
#define PL3_W 0x40
#define PL2_R (0x20 | PL3_R)
#define PL2_W (0x10 | PL3_W)
#define PL1_R (0x08 | PL2_R)
#define PL1_W (0x04 | PL2_W)
#define PL0_R (0x02 | PL1_R)
#define PL0_W (0x01 | PL1_W)

/* Read-write convenience combinations */
#define PL3_RW (PL3_R | PL3_W)
#define PL2_RW (PL2_R | PL2_W)
#define PL1_RW (PL1_R | PL1_W)
#define PL0_RW (PL0_R | PL0_W)
1600
1601
1602static inline int arm_highest_el(CPUARMState *env)
1603{
1604 if (arm_feature(env, ARM_FEATURE_EL3)) {
1605 return 3;
1606 }
1607 if (arm_feature(env, ARM_FEATURE_EL2)) {
1608 return 2;
1609 }
1610 return 1;
1611}
1612
1613
1614
1615
1616static inline int arm_current_el(CPUARMState *env)
1617{
1618 if (arm_feature(env, ARM_FEATURE_M)) {
1619 return !((env->v7m.exception == 0) && (env->v7m.control & 1));
1620 }
1621
1622 if (is_a64(env)) {
1623 return extract32(env->pstate, 2, 2);
1624 }
1625
1626 switch (env->uncached_cpsr & 0x1f) {
1627 case ARM_CPU_MODE_USR:
1628 return 0;
1629 case ARM_CPU_MODE_HYP:
1630 return 2;
1631 case ARM_CPU_MODE_MON:
1632 return 3;
1633 default:
1634 if (arm_is_secure(env) && !arm_el_is_aa64(env, 3)) {
1635
1636
1637
1638 return 3;
1639 }
1640
1641 return 1;
1642 }
1643}
1644
typedef struct ARMCPRegInfo ARMCPRegInfo;

/* Result of an access check performed by an ARMCPRegInfo accessfn */
typedef enum CPAccessResult {
    /* Access is permitted */
    CP_ACCESS_OK = 0,
    /*
     * Access fails and should be trapped to the usual target EL
     * for the register being accessed.
     */
    CP_ACCESS_TRAP = 1,
    /*
     * As CP_ACCESS_TRAP, but reported with an "uncategorized"
     * syndrome rather than one describing the register access.
     */
    CP_ACCESS_TRAP_UNCATEGORIZED = 2,
    /* Traps targeting a specific EL rather than the usual one */
    CP_ACCESS_TRAP_EL2 = 3,
    CP_ACCESS_TRAP_EL3 = 4,
    /* Uncategorized traps targeting a specific EL */
    CP_ACCESS_TRAP_UNCATEGORIZED_EL2 = 5,
    CP_ACCESS_TRAP_UNCATEGORIZED_EL3 = 6,
    /* Traps reported as FP-access faults to a specific EL */
    CP_ACCESS_TRAP_FP_EL2 = 7,
    CP_ACCESS_TRAP_FP_EL3 = 8,
} CPAccessResult;

/*
 * Accessor function signatures for ARMCPRegInfo. The 'opaque' argument
 * is the reginfo itself; its opaque pointer member carries user data.
 */
typedef uint64_t CPReadFn(CPUARMState *env, const ARMCPRegInfo *opaque);
typedef void CPWriteFn(CPUARMState *env, const ARMCPRegInfo *opaque,
                       uint64_t value);
/* Access-permission check hook; isread distinguishes read vs write */
typedef CPAccessResult CPAccessFn(CPUARMState *env,
                                  const ARMCPRegInfo *opaque,
                                  bool isread);
/* Hook run on CPU reset to reinitialize the register's state */
typedef void CPResetFn(CPUARMState *env, const ARMCPRegInfo *opaque);

/* Wildcard value for the cp/crn/crm/opc* fields of an ARMCPRegInfo */
#define CP_ANY 0xff
1689
1690
/* Definition of one coprocessor / system register */
struct ARMCPRegInfo {
    /* Name of register (useful mainly for debugging, need not be unique) */
    const char *name;
    /*
     * Register location. For AArch32 registers this is the
     * (cp, crn, crm, opc1, opc2) encoding; for AArch64 registers it is
     * the (opc0, opc1, crn, crm, opc2) encoding.
     * NOTE(review): CP_ANY (0xff) appears intended as a wildcard for
     * these fields — the matching logic is elsewhere; confirm there.
     */
    uint8_t cp;
    uint8_t crn;
    uint8_t crm;
    uint8_t opc0;
    uint8_t opc1;
    uint8_t opc2;
    /* Execution state in which this register is visible: ARM_CP_STATE_* */
    int state;
    /* Register type: ARM_CP_* flags, possibly with a special value */
    int type;
    /* Access rights: PL*_[RW] mask, tested by cp_access_ok() */
    int access;
    /* Security state: ARM_CP_SECSTATE_* bits */
    int secure;
    /*
     * Opaque pointer passed back to the read/write/reset functions;
     * set via the *_with_opaque() registration variants.
     */
    void *opaque;
    /* Value this register resets to (when no resetfn overrides it) */
    uint64_t resetvalue;
    /*
     * Offset of the field in CPUARMState holding this register's
     * state, for registers read/written directly rather than via
     * readfn/writefn (see CPREG_FIELD32/CPREG_FIELD64).
     */
    ptrdiff_t fieldoffset;
    /*
     * Offsets of the secure (index 0) and non-secure (index 1) banked
     * copies of the register's state in CPUARMState, for registers
     * that are banked by security state.
     */
    ptrdiff_t bank_fieldoffsets[2];
    /*
     * Function to check whether a guest access is permitted; if not
     * provided, access is allowed whenever the 'access' mask permits.
     */
    CPAccessFn *accessfn;
    /*
     * Function to read the register's value; if not provided, the
     * value is read directly from the fieldoffset state field.
     */
    CPReadFn *readfn;
    /*
     * Function to write the register; if not provided, the value is
     * written directly to the fieldoffset state field.
     */
    CPWriteFn *writefn;
    /*
     * "Raw" read function used for state syncing (e.g. migration/KVM)
     * rather than guest accesses; falls back to readfn/fieldoffset
     * handling when not provided.
     */
    CPReadFn *raw_readfn;
    /*
     * "Raw" write function, the state-sync counterpart of raw_readfn;
     * bypasses the side effects a guest write would have.
     */
    CPWriteFn *raw_writefn;
    /*
     * Function to reset the register; if not provided, the state field
     * is reset to resetvalue.
     */
    CPResetFn *resetfn;
};
1790
1791
1792
1793
/*
 * Access the CPUARMState field backing a register, via the reginfo's
 * fieldoffset, as a 32-bit or 64-bit lvalue respectively.
 */
#define CPREG_FIELD32(env, ri) \
    (*(uint32_t *)((char *)(env) + (ri)->fieldoffset))
#define CPREG_FIELD64(env, ri) \
    (*(uint64_t *)((char *)(env) + (ri)->fieldoffset))

/* Terminator entry for arrays of ARMCPRegInfo */
#define REGINFO_SENTINEL { .type = ARM_CP_SENTINEL }
1800
1801void define_arm_cp_regs_with_opaque(ARMCPU *cpu,
1802 const ARMCPRegInfo *regs, void *opaque);
1803void define_one_arm_cp_reg_with_opaque(ARMCPU *cpu,
1804 const ARMCPRegInfo *regs, void *opaque);
1805static inline void define_arm_cp_regs(ARMCPU *cpu, const ARMCPRegInfo *regs)
1806{
1807 define_arm_cp_regs_with_opaque(cpu, regs, 0);
1808}
1809static inline void define_one_arm_cp_reg(ARMCPU *cpu, const ARMCPRegInfo *regs)
1810{
1811 define_one_arm_cp_reg_with_opaque(cpu, regs, 0);
1812}
1813const ARMCPRegInfo *get_arm_cp_reginfo(GHashTable *cpregs, uint32_t encoded_cp);
1814
1815
/* CPWriteFn that does nothing: for registers where writes are ignored */
void arm_cp_write_ignore(CPUARMState *env, const ARMCPRegInfo *ri,
                         uint64_t value);
/* CPReadFn that always returns 0: for registers that read as zero */
uint64_t arm_cp_read_zero(CPUARMState *env, const ARMCPRegInfo *ri);
/*
 * CPResetFn that does nothing: for registers whose reset is handled
 * elsewhere (e.g. by the reset of another register they alias).
 */
void arm_cp_reset_ignore(CPUARMState *env, const ARMCPRegInfo *opaque);
1825
1826
1827
1828
1829static inline bool cpreg_field_is_64bit(const ARMCPRegInfo *ri)
1830{
1831 return (ri->state == ARM_CP_STATE_AA64) || (ri->type & ARM_CP_64BIT);
1832}
1833
1834static inline bool cp_access_ok(int current_el,
1835 const ARMCPRegInfo *ri, int isread)
1836{
1837 return (ri->access >> ((current_el * 2) + isread)) & 1;
1838}
1839
1840
/* Read a register's value via its raw accessor (or state field) */
uint64_t read_raw_cp_reg(CPUARMState *env, const ARMCPRegInfo *ri);

/*
 * Copy register values from the cpreg list into CPUARMState.
 * Returns false if any value could not be written (e.g. a constant
 * register whose listed value disagrees).
 * NOTE(review): exact failure semantics live in the implementation —
 * confirm there before relying on them.
 */
bool write_list_to_cpustate(ARMCPU *cpu);

/*
 * Copy register values from CPUARMState into the cpreg list.
 * Returns false if any register could not be synced.
 */
bool write_cpustate_to_list(ARMCPU *cpu);

/* CPUID (MIDR) values for the TI915T/TI925T OMAP cores */
#define ARM_CPUID_TI915T 0x54029152
#define ARM_CPUID_TI925T 0x54029252

#if defined(CONFIG_USER_ONLY)
#define TARGET_PAGE_BITS 12
#else
/*
 * Softmmu builds use a variable page size, since ARMv7 and later
 * architecturally support more than one translation granule;
 * 1KB (bits = 10) is the minimum supported.
 */
#define TARGET_PAGE_BITS_VARY
#define TARGET_PAGE_BITS_MIN 10
#endif

#if defined(TARGET_AARCH64)
# define TARGET_PHYS_ADDR_SPACE_BITS 48
# define TARGET_VIRT_ADDR_SPACE_BITS 64
#else
# define TARGET_PHYS_ADDR_SPACE_BITS 40
# define TARGET_VIRT_ADDR_SPACE_BITS 32
#endif
1895
/*
 * Return true if the exception excp_idx (one of EXCP_FIQ/IRQ/VFIQ/VIRQ),
 * which targets target_el, is currently unmasked and may be taken.
 * Combines the PSTATE I/F mask bits with the cases where routing to a
 * higher EL overrides the local masking.
 */
static inline bool arm_excp_unmasked(CPUState *cs, unsigned int excp_idx,
                                     unsigned int target_el)
{
    CPUARMState *env = cs->env_ptr;
    unsigned int cur_el = arm_current_el(env);
    bool secure = arm_is_secure(env);
    bool pstate_unmasked;
    int8_t unmasked = 0;

    /*
     * An exception targeting an EL lower than the current one is
     * never taken.
     */
    if (cur_el > target_el) {
        return false;
    }

    switch (excp_idx) {
    case EXCP_FIQ:
        pstate_unmasked = !(env->daif & PSTATE_F);
        break;

    case EXCP_IRQ:
        pstate_unmasked = !(env->daif & PSTATE_I);
        break;

    case EXCP_VFIQ:
        if (secure || !(env->cp15.hcr_el2 & HCR_FMO)) {
            /* Virtual FIQ cannot be taken: not in NS state or HCR.FMO clear */
            return false;
        }
        return !(env->daif & PSTATE_F);
    case EXCP_VIRQ:
        if (secure || !(env->cp15.hcr_el2 & HCR_IMO)) {
            /* Virtual IRQ cannot be taken: not in NS state or HCR.IMO clear */
            return false;
        }
        return !(env->daif & PSTATE_I);
    default:
        g_assert_not_reached();
    }

    /*
     * If the exception is routed to a higher EL (other than EL1),
     * it may be taken regardless of the PSTATE mask bits, depending
     * on whether EL3/EL2 use AArch64 and on the SCR/HCR routing bits.
     */
    if ((target_el > cur_el) && (target_el != 1)) {
        /* Exception is routed to a higher EL */
        if (arm_feature(env, ARM_FEATURE_AARCH64)) {
            /*
             * With an AArch64 EL3/EL2, routing to EL3 always unmasks,
             * and routing to EL2 unmasks from non-secure state.
             * NOTE(review): this tests the CPU *feature*, not whether
             * the target EL is currently AArch64 — confirm intended.
             */
            if (target_el == 3 || !secure) {
                unmasked = 1;
            }
        } else {
            /*
             * AArch32 EL3/EL2: derive the effective routing from the
             * SCR/HCR bits for the exception type.
             */
            bool hcr, scr;

            switch (excp_idx) {
            case EXCP_FIQ:
                /*
                 * FIQ routing: HCR.FMO routes to EL2, SCR.FIQ to EL3;
                 * with SCR.FW set and HCR.FMO clear, FIQs stay
                 * maskable despite SCR.FIQ.
                 */
                hcr = (env->cp15.hcr_el2 & HCR_FMO);
                scr = (env->cp15.scr_el3 & SCR_FIQ);

                /*
                 * When FIQs are only routed to EL3 they can be masked
                 * if SCR.FW allows the mask bit to be writable in
                 * non-secure state and EL2 is not routing them.
                 */
                scr = scr && !((env->cp15.scr_el3 & SCR_FW) && !hcr);
                break;
            case EXCP_IRQ:
                /*
                 * IRQ routing: HCR.IMO routes to EL2. IRQs routed to
                 * EL3 (SCR.IRQ) do not override the mask here, hence
                 * scr is forced false.
                 */
                hcr = (env->cp15.hcr_el2 & HCR_IMO);
                scr = false;
                break;
            default:
                g_assert_not_reached();
            }

            /* Routed to a higher EL from non-secure state: unmasked */
            if ((scr || hcr) && !secure) {
                unmasked = 1;
            }
        }
    }

    /*
     * The exception is taken if it is not masked by PSTATE or the
     * routing rules above override the masking.
     */
    return unmasked || pstate_unmasked;
}
2003
/* Standard QEMU per-target glue macros */
#define cpu_init(cpu_model) CPU(cpu_arm_init(cpu_model))

#define cpu_signal_handler cpu_arm_signal_handler
#define cpu_list arm_cpu_list
2008
2009
2010
2011
2012
2013
2014
2015
2016
2017
2018
2019
2020
2021
2022
2023
2024
2025
2026
2027
2028
2029
2030
2031
2032
2033
2034
2035
2036
2037
2038
2039
2040
2041
2042
2043
2044
2045
2046
2047
2048
2049
2050
2051
2052
2053
2054
2055
2056
2057
2058
2059
2060
2061
2062
2063
2064
2065
2066
2067
2068
2069
2070
2071
2072
2073
2074
2075
2076
2077
2078
2079
2080
2081
/*
 * ARM MMU index encoding: the low 3 bits are the core (TLB) index,
 * the upper bits identify the index's "type" — A profile, M profile,
 * or a NOTLB index that does not correspond to a real TLB.
 */
#define ARM_MMU_IDX_A 0x10
#define ARM_MMU_IDX_NOTLB 0x20
#define ARM_MMU_IDX_M 0x40

#define ARM_MMU_IDX_TYPE_MASK (~0x7)
#define ARM_MMU_IDX_COREIDX_MASK 0x7

/*
 * MMU translation regimes. The S12NSE* indices are the combined
 * stage 1+2 non-secure EL0/EL1 regimes; S1SE* are secure stage 1;
 * S2NS is stage 2; MUser/MPriv/MNegPri are the M-profile regimes.
 */
typedef enum ARMMMUIdx {
    ARMMMUIdx_S12NSE0 = 0 | ARM_MMU_IDX_A,
    ARMMMUIdx_S12NSE1 = 1 | ARM_MMU_IDX_A,
    ARMMMUIdx_S1E2 = 2 | ARM_MMU_IDX_A,
    ARMMMUIdx_S1E3 = 3 | ARM_MMU_IDX_A,
    ARMMMUIdx_S1SE0 = 4 | ARM_MMU_IDX_A,
    ARMMMUIdx_S1SE1 = 5 | ARM_MMU_IDX_A,
    ARMMMUIdx_S2NS = 6 | ARM_MMU_IDX_A,
    ARMMMUIdx_MUser = 0 | ARM_MMU_IDX_M,
    ARMMMUIdx_MPriv = 1 | ARM_MMU_IDX_M,
    ARMMMUIdx_MNegPri = 2 | ARM_MMU_IDX_M,
    /*
     * Indices that do not have their own TLB: the stage-1-only
     * views of the non-secure EL0/EL1 regimes.
     */
    ARMMMUIdx_S1NSE0 = 0 | ARM_MMU_IDX_NOTLB,
    ARMMMUIdx_S1NSE1 = 1 | ARM_MMU_IDX_NOTLB,
} ARMMMUIdx;

/*
 * Bit macros for the core TLB indices, for use with TLB flush
 * operations that take a bitmask of indices.
 */
typedef enum ARMMMUIdxBit {
    ARMMMUIdxBit_S12NSE0 = 1 << 0,
    ARMMMUIdxBit_S12NSE1 = 1 << 1,
    ARMMMUIdxBit_S1E2 = 1 << 2,
    ARMMMUIdxBit_S1E3 = 1 << 3,
    ARMMMUIdxBit_S1SE0 = 1 << 4,
    ARMMMUIdxBit_S1SE1 = 1 << 5,
    ARMMMUIdxBit_S2NS = 1 << 6,
    ARMMMUIdxBit_MUser = 1 << 0,
    ARMMMUIdxBit_MPriv = 1 << 1,
    ARMMMUIdxBit_MNegPri = 1 << 2,
} ARMMMUIdxBit;

/* Core MMU index used for user-mode accesses */
#define MMU_USER_IDX 0
2124
2125static inline int arm_to_core_mmu_idx(ARMMMUIdx mmu_idx)
2126{
2127 return mmu_idx & ARM_MMU_IDX_COREIDX_MASK;
2128}
2129
2130static inline ARMMMUIdx core_to_arm_mmu_idx(CPUARMState *env, int mmu_idx)
2131{
2132 if (arm_feature(env, ARM_FEATURE_M)) {
2133 return mmu_idx | ARM_MMU_IDX_M;
2134 } else {
2135 return mmu_idx | ARM_MMU_IDX_A;
2136 }
2137}
2138
2139
2140static inline int arm_mmu_idx_to_el(ARMMMUIdx mmu_idx)
2141{
2142 switch (mmu_idx & ARM_MMU_IDX_TYPE_MASK) {
2143 case ARM_MMU_IDX_A:
2144 return mmu_idx & 3;
2145 case ARM_MMU_IDX_M:
2146 return mmu_idx == ARMMMUIdx_MUser ? 0 : 1;
2147 default:
2148 g_assert_not_reached();
2149 }
2150}
2151
2152
2153static inline int cpu_mmu_index(CPUARMState *env, bool ifetch)
2154{
2155 int el = arm_current_el(env);
2156
2157 if (arm_feature(env, ARM_FEATURE_M)) {
2158 ARMMMUIdx mmu_idx = el == 0 ? ARMMMUIdx_MUser : ARMMMUIdx_MPriv;
2159
2160
2161
2162
2163 if ((env->v7m.exception > 0 && env->v7m.exception <= 3)
2164 || env->daif & PSTATE_F) {
2165 return arm_to_core_mmu_idx(ARMMMUIdx_MNegPri);
2166 }
2167
2168 return arm_to_core_mmu_idx(mmu_idx);
2169 }
2170
2171 if (el < 2 && arm_is_secure_below_el3(env)) {
2172 return arm_to_core_mmu_idx(ARMMMUIdx_S1SE0 + el);
2173 }
2174 return el;
2175}
2176
2177
/* Indices into the CPU's address-space array: non-secure and secure */
typedef enum ARMASIdx {
    ARMASIdx_NS = 0,
    ARMASIdx_S = 1,
} ARMASIdx;
2182
2183
2184static inline int arm_debug_target_el(CPUARMState *env)
2185{
2186 bool secure = arm_is_secure(env);
2187 bool route_to_el2 = false;
2188
2189 if (arm_feature(env, ARM_FEATURE_EL2) && !secure) {
2190 route_to_el2 = env->cp15.hcr_el2 & HCR_TGE ||
2191 env->cp15.mdcr_el2 & (1 << 8);
2192 }
2193
2194 if (route_to_el2) {
2195 return 2;
2196 } else if (arm_feature(env, ARM_FEATURE_EL3) &&
2197 !arm_el_is_aa64(env, 3) && secure) {
2198 return 3;
2199 } else {
2200 return 1;
2201 }
2202}
2203
2204static inline bool aa64_generate_debug_exceptions(CPUARMState *env)
2205{
2206 if (arm_is_secure(env)) {
2207
2208 if (extract32(env->cp15.mdcr_el3, 16, 1) != 0
2209 || arm_current_el(env) == 3) {
2210 return false;
2211 }
2212 }
2213
2214 if (arm_current_el(env) == arm_debug_target_el(env)) {
2215 if ((extract32(env->cp15.mdscr_el1, 13, 1) == 0)
2216 || (env->daif & PSTATE_D)) {
2217 return false;
2218 }
2219 }
2220 return true;
2221}
2222
2223static inline bool aa32_generate_debug_exceptions(CPUARMState *env)
2224{
2225 int el = arm_current_el(env);
2226
2227 if (el == 0 && arm_el_is_aa64(env, 1)) {
2228 return aa64_generate_debug_exceptions(env);
2229 }
2230
2231 if (arm_is_secure(env)) {
2232 int spd;
2233
2234 if (el == 0 && (env->cp15.sder & 1)) {
2235
2236
2237
2238
2239 return true;
2240 }
2241
2242 spd = extract32(env->cp15.mdcr_el3, 14, 2);
2243 switch (spd) {
2244 case 1:
2245
2246 case 0:
2247
2248
2249
2250
2251
2252 return true;
2253 case 2:
2254 return false;
2255 case 3:
2256 return true;
2257 }
2258 }
2259
2260 return el != 2;
2261}
2262
2263
2264
2265
2266
2267
2268
2269
2270
2271
2272
2273
2274
2275
2276
2277static inline bool arm_generate_debug_exceptions(CPUARMState *env)
2278{
2279 if (env->aarch64) {
2280 return aa64_generate_debug_exceptions(env);
2281 } else {
2282 return aa32_generate_debug_exceptions(env);
2283 }
2284}
2285
2286
2287
2288
2289static inline bool arm_singlestep_active(CPUARMState *env)
2290{
2291 return extract32(env->cp15.mdscr_el1, 0, 1)
2292 && arm_el_is_aa64(env, arm_debug_target_el(env))
2293 && arm_generate_debug_exceptions(env);
2294}
2295
/*
 * Return true if the SCTLR.B bit (legacy BE32 big-endian mode) is in
 * effect. Only meaningful before ARMv7; softmmu builds ignore the bit
 * on v7+ CPUs, while user-only builds honour it unconditionally.
 */
static inline bool arm_sctlr_b(CPUARMState *env)
{
    return
        /*
         * SCTLR.B is RAZ/WI from v7 onwards in system emulation,
         * but linux-user relies on it for legacy BE32 binaries.
         */
#ifndef CONFIG_USER_ONLY
        !arm_feature(env, ARM_FEATURE_V7) &&
#endif
        (env->cp15.sctlr_el[1] & SCTLR_B) != 0;
}
2308
2309
/* Return true if data accesses by the CPU are currently big-endian. */
static inline bool arm_cpu_data_is_big_endian(CPUARMState *env)
{
    int cur_el;

    /* AArch32: CPSR.E selects the data endianness... */
    if (!is_a64(env)) {
        return
#ifdef CONFIG_USER_ONLY
            /*
             * ...and for user-only, legacy SCTLR.B (BE32) also forces
             * big-endian data. In system emulation BE32 is handled
             * elsewhere, so only CPSR.E is consulted.
             */
            arm_sctlr_b(env) ||
#endif
                ((env->uncached_cpsr & CPSR_E) ? 1 : 0);
    }

    cur_el = arm_current_el(env);

    /* AArch64: SCTLR_EL1.E0E for EL0, SCTLR_ELx.EE otherwise */
    if (cur_el == 0) {
        return (env->cp15.sctlr_el[1] & SCTLR_E0E) != 0;
    }

    return (env->cp15.sctlr_el[cur_el] & SCTLR_EE) != 0;
}
2342
2343#include "exec/cpu-all.h"
2344
2345
2346
2347
2348
2349
/*
 * Bit usage in the TB flags field: the bits below are used in tb->flags.
 * Bits 31..24 and flag accessors are shared between states; the
 * "A32"-only flags occupy the low bits; TBI0/TBI1 live in cs_base
 * (hence the separate 0/1 shifts).
 */
/* Bit 31: set when the CPU is in AArch64 state */
#define ARM_TBFLAG_AARCH64_STATE_SHIFT 31
#define ARM_TBFLAG_AARCH64_STATE_MASK  (1U << ARM_TBFLAG_AARCH64_STATE_SHIFT)
/* Bits 30..28: core MMU index */
#define ARM_TBFLAG_MMUIDX_SHIFT 28
#define ARM_TBFLAG_MMUIDX_MASK (0x7 << ARM_TBFLAG_MMUIDX_SHIFT)
/* Bits 27, 26: single-step state (see cpu_get_tb_cpu_state) */
#define ARM_TBFLAG_SS_ACTIVE_SHIFT 27
#define ARM_TBFLAG_SS_ACTIVE_MASK (1 << ARM_TBFLAG_SS_ACTIVE_SHIFT)
#define ARM_TBFLAG_PSTATE_SS_SHIFT 26
#define ARM_TBFLAG_PSTATE_SS_MASK (1 << ARM_TBFLAG_PSTATE_SS_SHIFT)
/* Bits 25..24: EL that FP/SIMD accesses trap to (0 = no trap) */
#define ARM_TBFLAG_FPEXC_EL_SHIFT 24
#define ARM_TBFLAG_FPEXC_EL_MASK (0x3 << ARM_TBFLAG_FPEXC_EL_SHIFT)

/* Flags below are meaningful only when not in AArch64 state */
#define ARM_TBFLAG_THUMB_SHIFT 0
#define ARM_TBFLAG_THUMB_MASK (1 << ARM_TBFLAG_THUMB_SHIFT)
#define ARM_TBFLAG_VECLEN_SHIFT 1
#define ARM_TBFLAG_VECLEN_MASK (0x7 << ARM_TBFLAG_VECLEN_SHIFT)
#define ARM_TBFLAG_VECSTRIDE_SHIFT 4
#define ARM_TBFLAG_VECSTRIDE_MASK (0x3 << ARM_TBFLAG_VECSTRIDE_SHIFT)
#define ARM_TBFLAG_VFPEN_SHIFT 7
#define ARM_TBFLAG_VFPEN_MASK (1 << ARM_TBFLAG_VFPEN_SHIFT)
/* Bits 15..8: IT-block/conditional-execution state */
#define ARM_TBFLAG_CONDEXEC_SHIFT 8
#define ARM_TBFLAG_CONDEXEC_MASK (0xff << ARM_TBFLAG_CONDEXEC_SHIFT)
#define ARM_TBFLAG_SCTLR_B_SHIFT 16
#define ARM_TBFLAG_SCTLR_B_MASK (1 << ARM_TBFLAG_SCTLR_B_SHIFT)

/* XScale CPAR coprocessor-access bits (only bits [1:0] are stored) */
#define ARM_TBFLAG_XSCALE_CPAR_SHIFT 17
#define ARM_TBFLAG_XSCALE_CPAR_MASK (3 << ARM_TBFLAG_XSCALE_CPAR_SHIFT)

/*
 * Security-state and endianness flags used by the translator
 */
#define ARM_TBFLAG_NS_SHIFT 19
#define ARM_TBFLAG_NS_MASK (1 << ARM_TBFLAG_NS_SHIFT)
#define ARM_TBFLAG_BE_DATA_SHIFT 20
#define ARM_TBFLAG_BE_DATA_MASK (1 << ARM_TBFLAG_BE_DATA_SHIFT)
/* M-profile "in exception handler" flag */
#define ARM_TBFLAG_HANDLER_SHIFT 21
#define ARM_TBFLAG_HANDLER_MASK (1 << ARM_TBFLAG_HANDLER_SHIFT)

/* Top-byte-ignore bits (AArch64); stored via the TBI0/TBI1 shifts */
#define ARM_TBFLAG_TBI0_SHIFT 0
#define ARM_TBFLAG_TBI0_MASK (0x1ull << ARM_TBFLAG_TBI0_SHIFT)
#define ARM_TBFLAG_TBI1_SHIFT 1
#define ARM_TBFLAG_TBI1_MASK (0x1ull << ARM_TBFLAG_TBI1_SHIFT)

/* Extraction helpers: pull each field out of a flags word F */
#define ARM_TBFLAG_AARCH64_STATE(F) \
    (((F) & ARM_TBFLAG_AARCH64_STATE_MASK) >> ARM_TBFLAG_AARCH64_STATE_SHIFT)
#define ARM_TBFLAG_MMUIDX(F) \
    (((F) & ARM_TBFLAG_MMUIDX_MASK) >> ARM_TBFLAG_MMUIDX_SHIFT)
#define ARM_TBFLAG_SS_ACTIVE(F) \
    (((F) & ARM_TBFLAG_SS_ACTIVE_MASK) >> ARM_TBFLAG_SS_ACTIVE_SHIFT)
#define ARM_TBFLAG_PSTATE_SS(F) \
    (((F) & ARM_TBFLAG_PSTATE_SS_MASK) >> ARM_TBFLAG_PSTATE_SS_SHIFT)
#define ARM_TBFLAG_FPEXC_EL(F) \
    (((F) & ARM_TBFLAG_FPEXC_EL_MASK) >> ARM_TBFLAG_FPEXC_EL_SHIFT)
#define ARM_TBFLAG_THUMB(F) \
    (((F) & ARM_TBFLAG_THUMB_MASK) >> ARM_TBFLAG_THUMB_SHIFT)
#define ARM_TBFLAG_VECLEN(F) \
    (((F) & ARM_TBFLAG_VECLEN_MASK) >> ARM_TBFLAG_VECLEN_SHIFT)
#define ARM_TBFLAG_VECSTRIDE(F) \
    (((F) & ARM_TBFLAG_VECSTRIDE_MASK) >> ARM_TBFLAG_VECSTRIDE_SHIFT)
#define ARM_TBFLAG_VFPEN(F) \
    (((F) & ARM_TBFLAG_VFPEN_MASK) >> ARM_TBFLAG_VFPEN_SHIFT)
#define ARM_TBFLAG_CONDEXEC(F) \
    (((F) & ARM_TBFLAG_CONDEXEC_MASK) >> ARM_TBFLAG_CONDEXEC_SHIFT)
#define ARM_TBFLAG_SCTLR_B(F) \
    (((F) & ARM_TBFLAG_SCTLR_B_MASK) >> ARM_TBFLAG_SCTLR_B_SHIFT)
#define ARM_TBFLAG_XSCALE_CPAR(F) \
    (((F) & ARM_TBFLAG_XSCALE_CPAR_MASK) >> ARM_TBFLAG_XSCALE_CPAR_SHIFT)
#define ARM_TBFLAG_NS(F) \
    (((F) & ARM_TBFLAG_NS_MASK) >> ARM_TBFLAG_NS_SHIFT)
#define ARM_TBFLAG_BE_DATA(F) \
    (((F) & ARM_TBFLAG_BE_DATA_MASK) >> ARM_TBFLAG_BE_DATA_SHIFT)
#define ARM_TBFLAG_HANDLER(F) \
    (((F) & ARM_TBFLAG_HANDLER_MASK) >> ARM_TBFLAG_HANDLER_SHIFT)
#define ARM_TBFLAG_TBI0(F) \
    (((F) & ARM_TBFLAG_TBI0_MASK) >> ARM_TBFLAG_TBI0_SHIFT)
#define ARM_TBFLAG_TBI1(F) \
    (((F) & ARM_TBFLAG_TBI1_MASK) >> ARM_TBFLAG_TBI1_SHIFT)
2433
/*
 * Return true if instruction fetches need byte-swapping relative to
 * the host's view of guest memory.
 */
static inline bool bswap_code(bool sctlr_b)
{
#ifdef CONFIG_USER_ONLY
    /*
     * User-only: guest memory is host-byte-order, so swap when the
     * target build endianness XOR SCTLR.B (BE32) says code bytes are
     * stored opposite to how the translator expects to read them.
     */
    return
#ifdef TARGET_WORDS_BIGENDIAN
        1 ^
#endif
        sctlr_b;
#else
    /*
     * System emulation: endianness is handled by the memory subsystem,
     * so the translator never needs to swap code bytes here.
     */
    return 0;
#endif
}
2453
2454
2455
2456
/*
 * Return the exception level to which FP/SIMD-disabled exceptions
 * should be routed, or 0 if FP is enabled at the current EL.
 */
static inline int fp_exception_el(CPUARMState *env)
{
    int fpen;
    int cur_el = arm_current_el(env);

    /* Pre-v6 CPUs have no CPACR, so FP is never trapped here. */
    if (!arm_feature(env, ARM_FEATURE_V6)) {
        return 0;
    }

    /*
     * CPACR_EL1 (CPACR.cp10 on AArch32) bits [21:20] control FP/SIMD
     * access from EL0/EL1:
     *   0b00/0b10: trapped at EL0 and EL1
     *   0b01:      trapped at EL0 only
     *   0b11:      not trapped
     */
    fpen = extract32(env->cp15.cpacr_el1, 20, 2);
    switch (fpen) {
    case 0:
    case 2:
        if (cur_el == 0 || cur_el == 1) {
            /* With a secure AArch32 EL3, the trap is taken to EL3. */
            if (arm_is_secure(env) && !arm_el_is_aa64(env, 3)) {
                return 3;
            }
            return 1;
        }
        if (cur_el == 3 && !is_a64(env)) {
            /* Secure AArch32 PL1 also honours the CPACR trap. */
            return 3;
        }
        break;
    case 1:
        if (cur_el == 0) {
            return 1;
        }
        break;
    case 3:
        break;
    }

    /*
     * CPTR_EL2 bit 10 (TFP) traps non-secure FP accesses from EL0-EL2
     * to EL2.
     */
    if (cur_el <= 2 && extract32(env->cp15.cptr_el[2], 10, 1)
        && !arm_is_secure_below_el3(env)) {
        /* Trap all FP ops to EL2 */
        return 2;
    }

    /* CPTR_EL3 bit 10 (TFP) traps all FP accesses to EL3 */
    if (extract32(env->cp15.cptr_el[3], 10, 1)) {
        /* Trap all FP ops to EL3 */
        return 3;
    }

    return 0;
}
2518
#ifdef CONFIG_USER_ONLY
/*
 * User-only: return true if guest data accesses need byte-swapping
 * relative to the host; the target build endianness XORs with the
 * CPU's current data endianness.
 */
static inline bool arm_cpu_bswap_data(CPUARMState *env)
{
    return
#ifdef TARGET_WORDS_BIGENDIAN
       1 ^
#endif
       arm_cpu_data_is_big_endian(env);
}
#endif
2529
#ifndef CONFIG_USER_ONLY
/*
 * Return the TBI ("top byte ignore") setting for addresses with bit 55
 * clear (tbi0) / set (tbi1) in the given translation regime. Defined
 * in the target code for system emulation.
 */
uint32_t arm_regime_tbi0(CPUARMState *env, ARMMMUIdx mmu_idx);

/* See arm_regime_tbi0(); this is the bit-55-set half of the pair. */
uint32_t arm_regime_tbi1(CPUARMState *env, ARMMMUIdx mmu_idx);
#else
/* User-only builds never enable top-byte-ignore */
static inline uint32_t arm_regime_tbi0(CPUARMState *env, ARMMMUIdx mmu_idx)
{
    return 0;
}

static inline uint32_t arm_regime_tbi1(CPUARMState *env, ARMMMUIdx mmu_idx)
{
    return 0;
}
#endif
2564
/*
 * Fill in *pc, *cs_base and *flags with the CPU state the translator
 * needs to generate (and later look up) a TB for the current state.
 * See the ARM_TBFLAG_* definitions above for the flag layout.
 */
static inline void cpu_get_tb_cpu_state(CPUARMState *env, target_ulong *pc,
                                        target_ulong *cs_base, uint32_t *flags)
{
    ARMMMUIdx mmu_idx = core_to_arm_mmu_idx(env, cpu_mmu_index(env, false));
    if (is_a64(env)) {
        *pc = env->pc;
        *flags = ARM_TBFLAG_AARCH64_STATE_MASK;
        /* TBI bits for the current regime (written into *flags here) */
        *flags |= (arm_regime_tbi0(env, mmu_idx) << ARM_TBFLAG_TBI0_SHIFT);
        *flags |= (arm_regime_tbi1(env, mmu_idx) << ARM_TBFLAG_TBI1_SHIFT);
    } else {
        *pc = env->regs[15];
        *flags = (env->thumb << ARM_TBFLAG_THUMB_SHIFT)
            | (env->vfp.vec_len << ARM_TBFLAG_VECLEN_SHIFT)
            | (env->vfp.vec_stride << ARM_TBFLAG_VECSTRIDE_SHIFT)
            | (env->condexec_bits << ARM_TBFLAG_CONDEXEC_SHIFT)
            | (arm_sctlr_b(env) << ARM_TBFLAG_SCTLR_B_SHIFT);
        if (!(access_secure_reg(env))) {
            *flags |= ARM_TBFLAG_NS_MASK;
        }
        /* VFP enabled if FPEXC.EN is set, or always when EL1 is AArch64 */
        if (env->vfp.xregs[ARM_VFP_FPEXC] & (1 << 30)
            || arm_el_is_aa64(env, 1)) {
            *flags |= ARM_TBFLAG_VFPEN_MASK;
        }
        /* XScale CPAR bits [1:0] */
        *flags |= (extract32(env->cp15.c15_cpar, 0, 2)
                   << ARM_TBFLAG_XSCALE_CPAR_SHIFT);
    }

    *flags |= (arm_to_core_mmu_idx(mmu_idx) << ARM_TBFLAG_MMUIDX_SHIFT);

    /*
     * Single-step state: SS_ACTIVE says single-step is enabled;
     * PSTATE_SS is the per-instruction "step pending" bit, held in
     * pstate (AArch64) or uncached_cpsr (AArch32).
     */
    if (arm_singlestep_active(env)) {
        *flags |= ARM_TBFLAG_SS_ACTIVE_MASK;
        if (is_a64(env)) {
            if (env->pstate & PSTATE_SS) {
                *flags |= ARM_TBFLAG_PSTATE_SS_MASK;
            }
        } else {
            if (env->uncached_cpsr & PSTATE_SS) {
                *flags |= ARM_TBFLAG_PSTATE_SS_MASK;
            }
        }
    }
    if (arm_cpu_data_is_big_endian(env)) {
        *flags |= ARM_TBFLAG_BE_DATA_MASK;
    }
    *flags |= fp_exception_el(env) << ARM_TBFLAG_FPEXC_EL_SHIFT;

    /*
     * NOTE(review): v7m.exception is consulted without checking
     * ARM_FEATURE_M first; presumably it is always 0 on A/R-profile
     * CPUs — confirm that invariant holds.
     */
    if (env->v7m.exception != 0) {
        *flags |= ARM_TBFLAG_HANDLER_MASK;
    }

    *cs_base = 0;
}
2625
/* How the guest invokes PSCI firmware calls: not at all, via SMC, or via HVC */
enum {
    QEMU_PSCI_CONDUIT_DISABLED = 0,
    QEMU_PSCI_CONDUIT_SMC = 1,
    QEMU_PSCI_CONDUIT_HVC = 2,
};
2631
#ifndef CONFIG_USER_ONLY
/* Map a transaction's memory attributes to the CPU address-space index */
static inline int arm_asidx_from_attrs(CPUState *cs, MemTxAttrs attrs)
{
    return attrs.secure ? ARMASIdx_S : ARMASIdx_NS;
}

/*
 * Return the AddressSpace to use for a memory access with the given
 * transaction attributes (secure vs non-secure).
 */
static inline AddressSpace *arm_addressspace(CPUState *cs, MemTxAttrs attrs)
{
    return cpu_get_address_space(cs, arm_asidx_from_attrs(cs, attrs));
}
#endif
2648
2649
2650
2651
2652
2653
2654
2655
2656
2657
2658
2659
/*
 * Register a hook to be called when the CPU's exception level changes;
 * 'opaque' is stored on the CPU and passed back to the hook.
 */
void arm_register_el_change_hook(ARMCPU *cpu, ARMELChangeHook *hook,
                                 void *opaque);

/*
 * Return the opaque pointer registered with the EL-change hook
 * (NULL-ness/validity depends on whether a hook was registered).
 */
static inline void *arm_get_el_change_hook_opaque(ARMCPU *cpu)
{
    return cpu->el_change_hook_opaque;
}
2672
2673#endif
2674