/*
 * Copyright (C) 2012-2015 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
18 #include <linux/compiler.h>
19 #include <linux/irqchip/arm-gic.h>
20 #include <linux/kvm_host.h>
22 #include <asm/kvm_hyp.h>
/*
 * Resolve the per-host vgic parameter block: the new VGIC exposes it as
 * kvm_vgic_global_state, the legacy one as vgic_v2_params.  Only one of
 * the two declarations may be live, hence the #ifdef/#else/#endif pair
 * (the original text was missing #else/#endif, making both externs and
 * the macro active at once).
 */
#ifdef CONFIG_KVM_NEW_VGIC
extern struct vgic_global kvm_vgic_global_state;
#define vgic_v2_params kvm_vgic_global_state
#else
extern struct vgic_params vgic_v2_params;
#endif
31 static void __hyp_text
save_maint_int_state(struct kvm_vcpu
*vcpu
,
34 struct vgic_v2_cpu_if
*cpu_if
= &vcpu
->arch
.vgic_cpu
.vgic_v2
;
35 int nr_lr
= (kern_hyp_va(&vgic_v2_params
))->nr_lr
;
40 expect_mi
= !!(cpu_if
->vgic_hcr
& GICH_HCR_UIE
);
42 for (i
= 0; i
< nr_lr
; i
++) {
43 if (!(vcpu
->arch
.vgic_cpu
.live_lrs
& (1UL << i
)))
46 expect_mi
|= (!(cpu_if
->vgic_lr
[i
] & GICH_LR_HW
) &&
47 (cpu_if
->vgic_lr
[i
] & GICH_LR_EOI
));
51 cpu_if
->vgic_misr
= readl_relaxed(base
+ GICH_MISR
);
53 if (cpu_if
->vgic_misr
& GICH_MISR_EOI
) {
54 eisr0
= readl_relaxed(base
+ GICH_EISR0
);
55 if (unlikely(nr_lr
> 32))
56 eisr1
= readl_relaxed(base
+ GICH_EISR1
);
63 cpu_if
->vgic_misr
= 0;
67 #ifdef CONFIG_CPU_BIG_ENDIAN
68 cpu_if
->vgic_eisr
= ((u64
)eisr0
<< 32) | eisr1
;
70 cpu_if
->vgic_eisr
= ((u64
)eisr1
<< 32) | eisr0
;
74 static void __hyp_text
save_elrsr(struct kvm_vcpu
*vcpu
, void __iomem
*base
)
76 struct vgic_v2_cpu_if
*cpu_if
= &vcpu
->arch
.vgic_cpu
.vgic_v2
;
77 int nr_lr
= (kern_hyp_va(&vgic_v2_params
))->nr_lr
;
80 elrsr0
= readl_relaxed(base
+ GICH_ELRSR0
);
81 if (unlikely(nr_lr
> 32))
82 elrsr1
= readl_relaxed(base
+ GICH_ELRSR1
);
86 #ifdef CONFIG_CPU_BIG_ENDIAN
87 cpu_if
->vgic_elrsr
= ((u64
)elrsr0
<< 32) | elrsr1
;
89 cpu_if
->vgic_elrsr
= ((u64
)elrsr1
<< 32) | elrsr0
;
93 static void __hyp_text
save_lrs(struct kvm_vcpu
*vcpu
, void __iomem
*base
)
95 struct vgic_v2_cpu_if
*cpu_if
= &vcpu
->arch
.vgic_cpu
.vgic_v2
;
96 int nr_lr
= (kern_hyp_va(&vgic_v2_params
))->nr_lr
;
99 for (i
= 0; i
< nr_lr
; i
++) {
100 if (!(vcpu
->arch
.vgic_cpu
.live_lrs
& (1UL << i
)))
103 if (cpu_if
->vgic_elrsr
& (1UL << i
))
104 cpu_if
->vgic_lr
[i
] &= ~GICH_LR_STATE
;
106 cpu_if
->vgic_lr
[i
] = readl_relaxed(base
+ GICH_LR0
+ (i
* 4));
108 writel_relaxed(0, base
+ GICH_LR0
+ (i
* 4));
112 /* vcpu is already in the HYP VA space */
113 void __hyp_text
__vgic_v2_save_state(struct kvm_vcpu
*vcpu
)
115 struct kvm
*kvm
= kern_hyp_va(vcpu
->kvm
);
116 struct vgic_v2_cpu_if
*cpu_if
= &vcpu
->arch
.vgic_cpu
.vgic_v2
;
117 struct vgic_dist
*vgic
= &kvm
->arch
.vgic
;
118 void __iomem
*base
= kern_hyp_va(vgic
->vctrl_base
);
123 cpu_if
->vgic_vmcr
= readl_relaxed(base
+ GICH_VMCR
);
125 if (vcpu
->arch
.vgic_cpu
.live_lrs
) {
126 cpu_if
->vgic_apr
= readl_relaxed(base
+ GICH_APR
);
128 save_maint_int_state(vcpu
, base
);
129 save_elrsr(vcpu
, base
);
130 save_lrs(vcpu
, base
);
132 writel_relaxed(0, base
+ GICH_HCR
);
134 vcpu
->arch
.vgic_cpu
.live_lrs
= 0;
136 cpu_if
->vgic_eisr
= 0;
137 cpu_if
->vgic_elrsr
= ~0UL;
138 cpu_if
->vgic_misr
= 0;
139 cpu_if
->vgic_apr
= 0;
143 /* vcpu is already in the HYP VA space */
144 void __hyp_text
__vgic_v2_restore_state(struct kvm_vcpu
*vcpu
)
146 struct kvm
*kvm
= kern_hyp_va(vcpu
->kvm
);
147 struct vgic_v2_cpu_if
*cpu_if
= &vcpu
->arch
.vgic_cpu
.vgic_v2
;
148 struct vgic_dist
*vgic
= &kvm
->arch
.vgic
;
149 void __iomem
*base
= kern_hyp_va(vgic
->vctrl_base
);
150 int nr_lr
= (kern_hyp_va(&vgic_v2_params
))->nr_lr
;
158 for (i
= 0; i
< nr_lr
; i
++)
159 if (cpu_if
->vgic_lr
[i
] & GICH_LR_STATE
)
160 live_lrs
|= 1UL << i
;
163 writel_relaxed(cpu_if
->vgic_hcr
, base
+ GICH_HCR
);
164 writel_relaxed(cpu_if
->vgic_apr
, base
+ GICH_APR
);
165 for (i
= 0; i
< nr_lr
; i
++) {
166 if (!(live_lrs
& (1UL << i
)))
169 writel_relaxed(cpu_if
->vgic_lr
[i
],
170 base
+ GICH_LR0
+ (i
* 4));
174 writel_relaxed(cpu_if
->vgic_vmcr
, base
+ GICH_VMCR
);
175 vcpu
->arch
.vgic_cpu
.live_lrs
= live_lrs
;