#include <linux/compiler.h>
#include <linux/irqchip/arm-gic-v3.h>
#include <linux/kvm_host.h>

#include <asm/kvm_emulate.h>
#include <asm/kvm_hyp.h>
#include <asm/kvm_mmu.h>
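/*
 * ICH_VTR_EL2 layout: ListRegs (bits [4:0]) holds the number of
 * implemented list registers minus one, PREbits (bits [28:26]) the
 * number of virtual preemption bits minus one. 5, 6 or 7 preemption
 * bits translate into 1, 2 or 4 active-priority registers per group.
 */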
#define vtr_to_max_lr_idx(v)		((v) & 0xf)
#define vtr_to_nr_pre_bits(v)		((((u32)(v) >> 26) & 7) + 1)
#define vtr_to_nr_apr_regs(v)		(1 << (vtr_to_nr_pre_bits(v) - 5))
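/*
 * The ICH_LR<n>_EL2 registers are individual system registers: they
 * cannot be indexed at runtime, hence the switch-based accessors.
 */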
static u64 __gic_v3_get_lr(unsigned int lr)
{
	switch (lr & 0xf) {
	case 0:		return read_gicreg(ICH_LR0_EL2);
	case 1:		return read_gicreg(ICH_LR1_EL2);
	case 2:		return read_gicreg(ICH_LR2_EL2);
	case 3:		return read_gicreg(ICH_LR3_EL2);
	case 4:		return read_gicreg(ICH_LR4_EL2);
	case 5:		return read_gicreg(ICH_LR5_EL2);
	case 6:		return read_gicreg(ICH_LR6_EL2);
	case 7:		return read_gicreg(ICH_LR7_EL2);
	case 8:		return read_gicreg(ICH_LR8_EL2);
	case 9:		return read_gicreg(ICH_LR9_EL2);
	case 10:	return read_gicreg(ICH_LR10_EL2);
	case 11:	return read_gicreg(ICH_LR11_EL2);
	case 12:	return read_gicreg(ICH_LR12_EL2);
	case 13:	return read_gicreg(ICH_LR13_EL2);
	case 14:	return read_gicreg(ICH_LR14_EL2);
	case 15:	return read_gicreg(ICH_LR15_EL2);
	}

	unreachable();
}
static void __gic_v3_set_lr(u64 val, int lr)
{
	switch (lr & 0xf) {
	case 0:		write_gicreg(val, ICH_LR0_EL2);	break;
	case 1:		write_gicreg(val, ICH_LR1_EL2);	break;
	case 2:		write_gicreg(val, ICH_LR2_EL2);	break;
	case 3:		write_gicreg(val, ICH_LR3_EL2);	break;
	case 4:		write_gicreg(val, ICH_LR4_EL2);	break;
	case 5:		write_gicreg(val, ICH_LR5_EL2);	break;
	case 6:		write_gicreg(val, ICH_LR6_EL2);	break;
	case 7:		write_gicreg(val, ICH_LR7_EL2);	break;
	case 8:		write_gicreg(val, ICH_LR8_EL2);	break;
	case 9:		write_gicreg(val, ICH_LR9_EL2);	break;
	case 10:	write_gicreg(val, ICH_LR10_EL2); break;
	case 11:	write_gicreg(val, ICH_LR11_EL2); break;
	case 12:	write_gicreg(val, ICH_LR12_EL2); break;
	case 13:	write_gicreg(val, ICH_LR13_EL2); break;
	case 14:	write_gicreg(val, ICH_LR14_EL2); break;
	case 15:	write_gicreg(val, ICH_LR15_EL2); break;
	}
}
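/*
 * ICH_AP0R<n>_EL2/ICH_AP1R<n>_EL2 hold the Group-0/Group-1 active
 * priority bits; how many of them exist follows from PREbits above.
 */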
static void __vgic_v3_write_ap0rn(u32 val, int n)
{
	switch (n) {
	case 0:	write_gicreg(val, ICH_AP0R0_EL2); break;
	case 1:	write_gicreg(val, ICH_AP0R1_EL2); break;
	case 2:	write_gicreg(val, ICH_AP0R2_EL2); break;
	case 3:	write_gicreg(val, ICH_AP0R3_EL2); break;
	}
}

static void __vgic_v3_write_ap1rn(u32 val, int n)
{
	switch (n) {
	case 0:	write_gicreg(val, ICH_AP1R0_EL2); break;
	case 1:	write_gicreg(val, ICH_AP1R1_EL2); break;
	case 2:	write_gicreg(val, ICH_AP1R2_EL2); break;
	case 3:	write_gicreg(val, ICH_AP1R3_EL2); break;
	}
}

static u32 __vgic_v3_read_ap0rn(int n)
{
	switch (n) {
	case 0:	return read_gicreg(ICH_AP0R0_EL2);
	case 1:	return read_gicreg(ICH_AP0R1_EL2);
	case 2:	return read_gicreg(ICH_AP0R2_EL2);
	case 3:	return read_gicreg(ICH_AP0R3_EL2);
	}

	unreachable();
}

static u32 __vgic_v3_read_ap1rn(int n)
{
	switch (n) {
	case 0:	return read_gicreg(ICH_AP1R0_EL2);
	case 1:	return read_gicreg(ICH_AP1R1_EL2);
	case 2:	return read_gicreg(ICH_AP1R2_EL2);
	case 3:	return read_gicreg(ICH_AP1R3_EL2);
	}

	unreachable();
}
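/*
 * vcpu exit path: drain the list registers into the shadow state.
 * ICH_ELRSR_EL2 flags the LRs that have become empty, for which only
 * the state bits need clearing, saving a full LR read.
 */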
void __vgic_v3_save_state(struct vgic_v3_cpu_if *cpu_if)
{
	u64 used_lrs = cpu_if->used_lrs;

	/* Make MMIO stores to the GIC visible to the sysreg interface */
	if (used_lrs || !has_vhe()) {
		if (!cpu_if->vgic_sre) {
			dsb(sy);
			isb();
		}
	}

	if (used_lrs || cpu_if->its_vpe.its_vm) {
		int i;
		u32 elrsr;

		elrsr = read_gicreg(ICH_ELRSR_EL2);

		write_gicreg(cpu_if->vgic_hcr & ~ICH_HCR_EN, ICH_HCR_EL2);

		for (i = 0; i < used_lrs; i++) {
			if (elrsr & (1 << i))
				cpu_if->vgic_lr[i] &= ~ICH_LR_STATE;
			else
				cpu_if->vgic_lr[i] = __gic_v3_get_lr(i);

			__gic_v3_set_lr(0, i);
		}
	}
}
void __vgic_v3_restore_state(struct vgic_v3_cpu_if *cpu_if)
{
	u64 used_lrs = cpu_if->used_lrs;
	int i;

	if (used_lrs || cpu_if->its_vpe.its_vm) {
		write_gicreg(cpu_if->vgic_hcr, ICH_HCR_EL2);

		for (i = 0; i < used_lrs; i++)
			__gic_v3_set_lr(cpu_if->vgic_lr[i], i);
	}

	/* Make the LR (and non-VHE VMCR) writes visible before the guest runs */
	if (used_lrs || !has_vhe()) {
		if (!cpu_if->vgic_sre) {
			isb();
			dsb(sy);
		}
	}
}
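/*
 * __vgic_v3_activate_traps(): when the guest is denied the sysreg
 * view of the GIC (!cpu_if->vgic_sre), ICC_SRE_EL1 must read as 0
 * before ICH_VMCR_EL2 is programmed, since VFIQEn is RES1 whenever
 * ICC_SRE_EL1.SRE is 1 and a Group-0 interrupt would then be taken
 * as a FIQ by the guest.
 */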
	write_gicreg(0, ICC_SRE_EL1);
	isb();	/* the SRE write must complete before VMCR_EL2 is touched */
	write_gicreg(cpu_if->vgic_vmcr, ICH_VMCR_EL2);

	/* Prevent the guest from touching the GIC system registers */
	write_gicreg(read_gicreg(ICC_SRE_EL2) & ~ICC_SRE_EL2_ENABLE,
		     ICC_SRE_EL2);

	/* Trapping requires ICH_HCR_EL2 to be live even with no pending IRQs */
	write_gicreg(cpu_if->vgic_hcr, ICH_HCR_EL2);
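/*
 * __vgic_v3_deactivate_traps(): hand the CPU interface back to the
 * host and latch the guest's view of ICH_VMCR_EL2 for the next run.
 */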
	cpu_if->vgic_vmcr = read_gicreg(ICH_VMCR_EL2);

	val = read_gicreg(ICC_SRE_EL2);
	write_gicreg(val | ICC_SRE_EL2_ENABLE, ICC_SRE_EL2);

	/* Make sure ENABLE is set at EL2 before setting SRE at EL1 */
	isb();
	write_gicreg(1, ICC_SRE_EL1);

	/* If we were only trapping, turn the vGIC off again */
	write_gicreg(0, ICH_HCR_EL2);
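/*
 * __vgic_v3_save_aprs()/__vgic_v3_restore_aprs(): how many
 * ICH_APmR<n>_EL2 registers need transferring follows from PREbits,
 * so both switches below fall through from the widest configuration.
 */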
	/* __vgic_v3_save_aprs() */
	val = read_gicreg(ICH_VTR_EL2);
	nr_pre_bits = vtr_to_nr_pre_bits(val);

	switch (nr_pre_bits) {
	/* case 7/6/default: save vgic_ap0r[3..0] via __vgic_v3_read_ap0rn() */
	}

	switch (nr_pre_bits) {
	/* case 7/6/default: save vgic_ap1r[3..0] via __vgic_v3_read_ap1rn() */
	}
	/* __vgic_v3_restore_aprs() */
	val = read_gicreg(ICH_VTR_EL2);
	nr_pre_bits = vtr_to_nr_pre_bits(val);

	switch (nr_pre_bits) {
	/* case 7/6/default: restore vgic_ap0r[3..0] via __vgic_v3_write_ap0rn() */
	}

	switch (nr_pre_bits) {
	/* case 7/6/default: restore vgic_ap1r[3..0] via __vgic_v3_write_ap1rn() */
	}
	/* __vgic_v3_init_lrs(): start with every implemented LR cleared */
	for (i = 0; i <= max_lr_idx; i++)
		__gic_v3_set_lr(0, i);
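/*
 * __vgic_v3_get_gic_config(): probe whether ICC_SRE_EL1.SRE can be
 * cleared, which indicates that the CPU also offers the GICv2
 * compatible memory-mapped interface. Bit 63 of the return value
 * reports that, the low bits carry ICH_VTR_EL2. The probe must run
 * with interrupts masked and HCR_EL2.{AMO,FMO,IMO} all set for the
 * SRE write to be allowed to take effect.
 */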
	u64 val, sre = read_gicreg(ICC_SRE_EL1);
	unsigned long flags = 0;

	/* nVHE already runs with interrupts masked at EL2 */
	if (has_vhe())
		flags = local_daif_save();

	/* All interrupt overrides must be set for the SRE write to stick */
	sysreg_clear_set(hcr_el2, 0, HCR_AMO | HCR_FMO | HCR_IMO);
	isb();

	write_gicreg(0, ICC_SRE_EL1);
	isb();

	val = read_gicreg(ICC_SRE_EL1);

	write_gicreg(sre, ICC_SRE_EL1);
	isb();

	sysreg_clear_set(hcr_el2, HCR_AMO | HCR_FMO | HCR_IMO, 0);
	isb();

	if (has_vhe())
		local_daif_restore(flags);

	val = (val & ICC_SRE_EL1_SRE) ? 0 : (1ULL << 63);
	val |= read_gicreg(ICH_VTR_EL2);

	return val;
u64 __vgic_v3_read_vmcr(void)
{
	return read_gicreg(ICH_VMCR_EL2);
}

void __vgic_v3_write_vmcr(u32 vmcr)
{
	write_gicreg(vmcr, ICH_VMCR_EL2);
}
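/*
 * What follows is the emulation of the ICC_*_EL1 registers, used
 * when guest accesses to the CPU interface are trapped.
 */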
static int __vgic_v3_get_group(struct kvm_vcpu *vcpu)
{
	u64 esr = kvm_vcpu_get_esr(vcpu);
	u8 crm = (esr & ESR_ELx_SYS64_ISS_CRM_MASK) >> ESR_ELx_SYS64_ISS_CRM_SHIFT;

	/* The Group-0 accessors (ICC_*0_EL1) all live in CRm == 8 */
	return crm != 8;
}
#define GICv3_IDLE_PRIORITY	0xff	/* running priority when nothing is active */
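/*
 * __vgic_v3_highest_priority_lr(): scan the used LRs for the highest
 * priority pending and enabled interrupt, returning its index and
 * its content via *lr_val.
 */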
static int __vgic_v3_highest_priority_lr(struct kvm_vcpu *vcpu, u32 vmcr,
					 u64 *lr_val)
{
	unsigned int used_lrs = vcpu->arch.vgic_cpu.vgic_v3.used_lrs;
	u8 priority = GICv3_IDLE_PRIORITY;
	int i, lr = -1;

	for (i = 0; i < used_lrs; i++) {
		u64 val = __gic_v3_get_lr(i);
		u8 lr_prio = (val & ICH_LR_PRIORITY_MASK) >> ICH_LR_PRIORITY_SHIFT;

		/* Not pending in the state field? */
		if ((val & ICH_LR_STATE) != ICH_LR_PENDING_BIT)
			continue;

		/* Group-0 interrupt, but Group-0 disabled? */
		if (!(val & ICH_LR_GROUP) && !(vmcr & ICH_VMCR_ENG0_MASK))
			continue;

		/* Group-1 interrupt, but Group-1 disabled? */
		if ((val & ICH_LR_GROUP) && !(vmcr & ICH_VMCR_ENG1_MASK))
			continue;

		/* Not the highest priority seen so far? */
		if (lr_prio >= priority)
			continue;

		/* This is a candidate */
		priority = lr_prio;
		*lr_val = val;
		lr = i;
	}

	if (lr == -1)
		*lr_val = ICC_IAR1_EL1_SPURIOUS;

	return lr;
}
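/* __vgic_v3_find_active_lr(): locate the LR holding intid in the active state */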
static int __vgic_v3_find_active_lr(struct kvm_vcpu *vcpu, int intid,
				    u64 *lr_val)
{
	unsigned int used_lrs = vcpu->arch.vgic_cpu.vgic_v3.used_lrs;
	int i;

	for (i = 0; i < used_lrs; i++) {
		u64 val = __gic_v3_get_lr(i);

		if ((val & ICH_LR_VIRTUAL_ID_MASK) == intid &&
		    (val & ICH_LR_ACTIVE_BIT)) {
			*lr_val = val;
			return i;
		}
	}

	*lr_val = ICC_IAR1_EL1_SPURIOUS;
	return -1;
}
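/*
 * Active-priority bookkeeping: each ICH_APmR<n>_EL2 is a 32-bit
 * bitmap of active priority groups, scanned 32 bits at a time and
 * rescaled to an 8-bit priority by the minimum BPR.
 */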
	/* __vgic_v3_get_highest_active_priority() */
	for (i = 0; i < nr_apr_regs; i++) {
static unsigned int __vgic_v3_get_bpr0(u32 vmcr)
{
	return (vmcr & ICH_VMCR_BPR0_MASK) >> ICH_VMCR_BPR0_SHIFT;
}

static unsigned int __vgic_v3_get_bpr1(u32 vmcr)
{
	unsigned int bpr;

	if (vmcr & ICH_VMCR_CBPR_MASK) {
		/* CBPR: BPR1 tracks BPR0 + 1, saturated at 7 */
		bpr = __vgic_v3_get_bpr0(vmcr);
		if (bpr < 7)
			bpr++;
	} else {
		bpr = (vmcr & ICH_VMCR_BPR1_MASK) >> ICH_VMCR_BPR1_SHIFT;
	}

	return bpr;
}

static u8 __vgic_v3_pri_to_pre(u8 pri, u32 vmcr, int grp)
{
	unsigned int bpr;

	if (!grp)
		bpr = __vgic_v3_get_bpr0(vmcr) + 1;
	else
		bpr = __vgic_v3_get_bpr1(vmcr);

	/* Keep only the group-priority bits selected by the BPR */
	return pri & (GENMASK(7, 0) << bpr);
}
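/*
 * __vgic_v3_clear_highest_active_priority(): the lowest set bit
 * across AP0Rn|AP1Rn is the highest active priority; clear it and
 * return the corresponding 8-bit priority.
 */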
	/* __vgic_v3_clear_highest_active_priority() */
	for (i = 0; i < nr_apr_regs; i++) {
		u32 ap0, ap1;
		int c0, c1;

		ap0 = __vgic_v3_read_ap0rn(i);
		ap1 = __vgic_v3_read_ap1rn(i);
		if (!ap0 && !ap1) {
			hap += 32;
			continue;
		}

		c0 = ap0 ? __ffs(ap0) : 32;
		c1 = ap1 ? __ffs(ap1) : 32;
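/*
 * __vgic_v3_read_iar(): acknowledge the highest priority pending
 * interrupt, provided it matches the access group, beats PMR and
 * actually preempts the running priority; it then becomes active in
 * both the APRs and its LR.
 */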
	if (grp != !!(lr_val & ICH_LR_GROUP))
		goto spurious;

	pmr = (vmcr & ICH_VMCR_PMR_MASK) >> ICH_VMCR_PMR_SHIFT;
	lr_prio = (lr_val & ICH_LR_PRIORITY_MASK) >> ICH_LR_PRIORITY_SHIFT;
	if (pmr <= lr_prio)
		goto spurious;

	if (__vgic_v3_get_highest_active_priority() <= __vgic_v3_pri_to_pre(lr_prio, vmcr, grp))
		goto spurious;

	lr_val &= ~ICH_LR_STATE;
	lr_val |= ICH_LR_ACTIVE_BIT;
	__gic_v3_set_lr(lr_val, lr);
	__vgic_v3_set_active_priority(lr_prio, vmcr, grp);
	vcpu_set_reg(vcpu, rt, lr_val & ICH_LR_VIRTUAL_ID_MASK);
	return;

spurious:
	vcpu_set_reg(vcpu, rt, ICC_IAR1_EL1_SPURIOUS);
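/*
 * Deactivation: clear the active bit in the LR and, for HW-mapped
 * interrupts, propagate it to the physical distributor via ICC_DIR.
 */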
static void __vgic_v3_clear_active_lr(int lr, u64 lr_val)
{
	lr_val &= ~ICH_LR_ACTIVE_BIT;
	if (lr_val & ICH_LR_HW) {
		u32 pid;

		pid = (lr_val & ICH_LR_PHYS_ID_MASK) >> ICH_LR_PHYS_ID_SHIFT;
		gic_write_dir(pid);
	}

	__gic_v3_set_lr(lr_val, lr);
}
static void __vgic_v3_bump_eoicount(void)
{
	u32 hcr;

	/* Signal an EOI that matched no LR via ICH_HCR_EL2.EOIcount */
	hcr = read_gicreg(ICH_HCR_EL2);
	hcr += 1 << ICH_HCR_EOIcount_SHIFT;
	write_gicreg(hcr, ICH_HCR_EL2);
}
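/*
 * ICC_DIR_EL1: with EOImode == 1, EOIR only drops the priority and
 * DIR performs the deactivation. LPIs have no active state to clear.
 */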
	u32 vid = vcpu_get_reg(vcpu, rt);

	/* EOImode == 0: deactivation is still coupled to ICC_EOIR* */
	if (!(vmcr & ICH_VMCR_EOIM_MASK))
		return;
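/*
 * ICC_EOIR{0,1}_EL1: always drop the running priority; deactivate
 * only if EOImode == 0 and the EOI matches the group and priority
 * that was actually acknowledged.
 */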
	u32 vid = vcpu_get_reg(vcpu, rt);
	u64 lr_val;
	u8 lr_prio, act_prio;
	int lr, grp;

	/* EOImode == 1 and not an LPI, nothing to be done here */
	if ((vmcr & ICH_VMCR_EOIM_MASK) && !(vid >= VGIC_MIN_LPI))
		return;

	lr_prio = (lr_val & ICH_LR_PRIORITY_MASK) >> ICH_LR_PRIORITY_SHIFT;

	/* If priorities or group do not match, the guest lies */
	if (grp != !!(lr_val & ICH_LR_GROUP) ||
	    __vgic_v3_pri_to_pre(lr_prio, vmcr, grp) != act_prio)
		return;
	/* __vgic_v3_read_igrpen0() */
	vcpu_set_reg(vcpu, rt, !!(vmcr & ICH_VMCR_ENG0_MASK));

	/* __vgic_v3_read_igrpen1() */
	vcpu_set_reg(vcpu, rt, !!(vmcr & ICH_VMCR_ENG1_MASK));
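/*
 * Group enable writes fold bit 0 of Rt back into the ENG0/ENG1 bits
 * of the shadow VMCR, which is then written out in one go.
 */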
static void __vgic_v3_write_igrpen0(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
{
	u64 val = vcpu_get_reg(vcpu, rt);

	if (val & 1)
		vmcr |= ICH_VMCR_ENG0_MASK;
	else
		vmcr &= ~ICH_VMCR_ENG0_MASK;

	__vgic_v3_write_vmcr(vmcr);
}

static void __vgic_v3_write_igrpen1(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
{
	u64 val = vcpu_get_reg(vcpu, rt);

	if (val & 1)
		vmcr |= ICH_VMCR_ENG1_MASK;
	else
		vmcr &= ~ICH_VMCR_ENG1_MASK;

	__vgic_v3_write_vmcr(vmcr);
}
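/*
 * BPR writes are clamped: the virtual BPR cannot go below
 * 8 - PREbits (__vgic_v3_bpr_min()), and BPR1 ignores writes while
 * CBPR makes it track BPR0.
 */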
static void __vgic_v3_write_bpr0(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
{
	u64 val = vcpu_get_reg(vcpu, rt);
	u8 bpr_min = __vgic_v3_bpr_min() - 1;

	/* Enforce BPR limiting */
	if (val < bpr_min)
		val = bpr_min;

	val <<= ICH_VMCR_BPR0_SHIFT;
	val &= ICH_VMCR_BPR0_MASK;
	vmcr &= ~ICH_VMCR_BPR0_MASK;
	vmcr |= val;

	__vgic_v3_write_vmcr(vmcr);
}
static void __vgic_v3_write_bpr1(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
{
	u64 val = vcpu_get_reg(vcpu, rt);
	u8 bpr_min = __vgic_v3_bpr_min();

	/* BPR1 is not writable while CBPR is set */
	if (vmcr & ICH_VMCR_CBPR_MASK)
		return;

	/* Enforce BPR limiting */
	if (val < bpr_min)
		val = bpr_min;

	val <<= ICH_VMCR_BPR1_SHIFT;
	val &= ICH_VMCR_BPR1_MASK;
	vmcr &= ~ICH_VMCR_BPR1_MASK;
	vmcr |= val;

	__vgic_v3_write_vmcr(vmcr);
}
static void __vgic_v3_read_apxrn(struct kvm_vcpu *vcpu, int rt, int n)
{
	u32 val;

	if (!__vgic_v3_get_group(vcpu))
		val = __vgic_v3_read_ap0rn(n);
	else
		val = __vgic_v3_read_ap1rn(n);

	vcpu_set_reg(vcpu, rt, val);
}

static void __vgic_v3_write_apxrn(struct kvm_vcpu *vcpu, int rt, int n)
{
	u32 val = vcpu_get_reg(vcpu, rt);

	if (!__vgic_v3_get_group(vcpu))
		__vgic_v3_write_ap0rn(val, n);
	else
		__vgic_v3_write_ap1rn(val, n);
}
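/*
 * ICC_HPPIR{0,1}_EL1: like IAR, but purely a query; a highest
 * priority pending interrupt of the wrong group reads as spurious.
 */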
	lr_grp = !!(lr_val & ICH_LR_GROUP);
	if (lr_grp != grp)
		lr_val = ICC_IAR1_EL1_SPURIOUS;

spurious:
	vcpu_set_reg(vcpu, rt, lr_val & ICH_LR_VIRTUAL_ID_MASK);
	/* __vgic_v3_read_pmr() */
	vmcr &= ICH_VMCR_PMR_MASK;
	vmcr >>= ICH_VMCR_PMR_SHIFT;
	vcpu_set_reg(vcpu, rt, vmcr);
	u32 val = vcpu_get_reg(vcpu, rt);

	val <<= ICH_VMCR_PMR_SHIFT;
	val &= ICH_VMCR_PMR_MASK;
	vmcr &= ~ICH_VMCR_PMR_MASK;
	vmcr |= val;

	write_gicreg(vmcr, ICH_VMCR_EL2);
	/* __vgic_v3_read_rpr(): the running priority is the highest active one */
	vcpu_set_reg(vcpu, rt, val);
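/*
 * ICC_CTLR_EL1 is synthesized: PRIbits, IDbits, SEIS and A3V come
 * from ICH_VTR_EL2, EOImode and CBPR from the shadow VMCR.
 */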
static void __vgic_v3_read_ctlr(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
{
	u32 vtr, val;

	vtr = read_gicreg(ICH_VTR_EL2);
	/* PRIbits */
	val = ((vtr >> 29) & 7) << ICC_CTLR_EL1_PRI_BITS_SHIFT;
	/* IDbits */
	val |= ((vtr >> 23) & 7) << ICC_CTLR_EL1_ID_BITS_SHIFT;
	/* SEIS */
	if (kvm_vgic_global_state.ich_vtr_el2 & ICH_VTR_SEIS_MASK)
		val |= BIT(ICC_CTLR_EL1_SEIS_SHIFT);
	/* A3V */
	val |= ((vtr >> 21) & 1) << ICC_CTLR_EL1_A3V_SHIFT;
	/* EOImode */
	val |= ((vmcr & ICH_VMCR_EOIM_MASK) >> ICH_VMCR_EOIM_SHIFT) <<
		ICC_CTLR_EL1_EOImode_SHIFT;
	/* CBPR */
	val |= (vmcr & ICH_VMCR_CBPR_MASK) >> ICH_VMCR_CBPR_SHIFT;

	vcpu_set_reg(vcpu, rt, val);
}
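/* Only CBPR and EOImode are writable through the emulated ICC_CTLR_EL1 */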
static void __vgic_v3_write_ctlr(struct kvm_vcpu *vcpu, u32 vmcr, int rt)
{
	u32 val = vcpu_get_reg(vcpu, rt);

	if (val & ICC_CTLR_EL1_CBPR_MASK)
		vmcr |= ICH_VMCR_CBPR_MASK;
	else
		vmcr &= ~ICH_VMCR_CBPR_MASK;

	if (val & ICC_CTLR_EL1_EOImode_MASK)
		vmcr |= ICH_VMCR_EOIM_MASK;
	else
		vmcr &= ~ICH_VMCR_EOIM_MASK;

	write_gicreg(vmcr, ICH_VMCR_EL2);
}
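/*
 * __vgic_v3_perform_cpuif_access(): emulate one trapped ICC_*_EL1
 * access. The ESR is decoded into a 64-bit sysreg encoding (via the
 * cp15 mapping for AArch32 guests), accesses of the wrong direction
 * are rejected, and the handler runs with the current VMCR and Rt.
 * Returns 1 if the access was handled, 0 otherwise.
 */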
int __vgic_v3_perform_cpuif_access(struct kvm_vcpu *vcpu)
{
	int rt;
	u64 esr;
	u32 vmcr;
	void (*fn)(struct kvm_vcpu *, u32, int);
	bool is_read;
	u32 sysreg;

	esr = kvm_vcpu_get_esr(vcpu);
	if (vcpu_mode_is_32bit(vcpu)) {
		if (!kvm_condition_valid(vcpu)) {
			__kvm_skip_instr(vcpu);
			return 1;
		}

		sysreg = esr_cp15_to_sysreg(esr);
	} else {
		sysreg = esr_sys64_to_sysreg(esr);
	}

	is_read = (esr & ESR_ELx_SYS64_ISS_DIR_MASK) == ESR_ELx_SYS64_ISS_DIR_READ;
	switch (sysreg) {
	case SYS_ICC_IAR0_EL1:
	case SYS_ICC_IAR1_EL1:
		if (unlikely(!is_read))
			return 0;
		fn = __vgic_v3_read_iar;
		break;
	case SYS_ICC_EOIR0_EL1:
	case SYS_ICC_EOIR1_EL1:
		if (unlikely(is_read))
			return 0;
		fn = __vgic_v3_write_eoir;
		break;
	case SYS_ICC_IGRPEN1_EL1:
		if (is_read)
			fn = __vgic_v3_read_igrpen1;
		else
			fn = __vgic_v3_write_igrpen1;
		break;
	case SYS_ICC_BPR1_EL1:
		if (is_read)
			fn = __vgic_v3_read_bpr1;
		else
			fn = __vgic_v3_write_bpr1;
		break;
	case SYS_ICC_AP0Rn_EL1(0):
	case SYS_ICC_AP1Rn_EL1(0):
		if (is_read)
			fn = __vgic_v3_read_apxr0;
		else
			fn = __vgic_v3_write_apxr0;
		break;
	case SYS_ICC_AP0Rn_EL1(1):
	case SYS_ICC_AP1Rn_EL1(1):
		if (is_read)
			fn = __vgic_v3_read_apxr1;
		else
			fn = __vgic_v3_write_apxr1;
		break;
	case SYS_ICC_AP0Rn_EL1(2):
	case SYS_ICC_AP1Rn_EL1(2):
		if (is_read)
			fn = __vgic_v3_read_apxr2;
		else
			fn = __vgic_v3_write_apxr2;
		break;
	case SYS_ICC_AP0Rn_EL1(3):
	case SYS_ICC_AP1Rn_EL1(3):
		if (is_read)
			fn = __vgic_v3_read_apxr3;
		else
			fn = __vgic_v3_write_apxr3;
		break;
	case SYS_ICC_HPPIR0_EL1:
	case SYS_ICC_HPPIR1_EL1:
		if (unlikely(!is_read))
			return 0;
		fn = __vgic_v3_read_hppir;
		break;
	case SYS_ICC_IGRPEN0_EL1:
		if (is_read)
			fn = __vgic_v3_read_igrpen0;
		else
			fn = __vgic_v3_write_igrpen0;
		break;
	case SYS_ICC_BPR0_EL1:
		if (is_read)
			fn = __vgic_v3_read_bpr0;
		else
			fn = __vgic_v3_write_bpr0;
		break;
	case SYS_ICC_DIR_EL1:
		if (unlikely(is_read))
			return 0;
		fn = __vgic_v3_write_dir;
		break;
	case SYS_ICC_RPR_EL1:
		if (unlikely(!is_read))
			return 0;
		fn = __vgic_v3_read_rpr;
		break;
	case SYS_ICC_CTLR_EL1:
		if (is_read)
			fn = __vgic_v3_read_ctlr;
		else
			fn = __vgic_v3_write_ctlr;
		break;
	case SYS_ICC_PMR_EL1:
		if (is_read)
			fn = __vgic_v3_read_pmr;
		else
			fn = __vgic_v3_write_pmr;
		break;
	default:
		return 0;
	}
	vmcr = __vgic_v3_read_vmcr();
	rt = kvm_vcpu_sys_get_rt(vcpu);
	fn(vcpu, vmcr, rt);

	__kvm_skip_instr(vcpu);

	return 1;
}