/*
 * Copyright (C) 2012-2015 - ARM Ltd
 * Author: Marc Zyngier <marc.zyngier@arm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/compiler.h>
#include <linux/irqchip/arm-gic.h>
#include <linux/kvm_host.h>

#include <asm/kvm_hyp.h>

/*
 * Two vgic implementations co-exist during the transition period; both
 * export the number of implemented list registers (nr_lr), which is all
 * this file needs.  Alias the old name onto the new global so the code
 * below compiles either way.
 */
#ifdef CONFIG_KVM_NEW_VGIC
extern struct vgic_global kvm_vgic_global_state;
#define vgic_v2_params kvm_vgic_global_state
#else
extern struct vgic_params vgic_v2_params;
#endif
c813bb17 MZ |
31 | static void __hyp_text save_maint_int_state(struct kvm_vcpu *vcpu, |
32 | void __iomem *base) | |
33 | { | |
34 | struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2; | |
2db4c104 | 35 | int nr_lr = (kern_hyp_va(&vgic_v2_params))->nr_lr; |
c813bb17 MZ |
36 | u32 eisr0, eisr1; |
37 | int i; | |
38 | bool expect_mi; | |
39 | ||
40 | expect_mi = !!(cpu_if->vgic_hcr & GICH_HCR_UIE); | |
41 | ||
42 | for (i = 0; i < nr_lr; i++) { | |
43 | if (!(vcpu->arch.vgic_cpu.live_lrs & (1UL << i))) | |
44 | continue; | |
45 | ||
46 | expect_mi |= (!(cpu_if->vgic_lr[i] & GICH_LR_HW) && | |
47 | (cpu_if->vgic_lr[i] & GICH_LR_EOI)); | |
48 | } | |
49 | ||
50 | if (expect_mi) { | |
51 | cpu_if->vgic_misr = readl_relaxed(base + GICH_MISR); | |
52 | ||
53 | if (cpu_if->vgic_misr & GICH_MISR_EOI) { | |
54 | eisr0 = readl_relaxed(base + GICH_EISR0); | |
55 | if (unlikely(nr_lr > 32)) | |
56 | eisr1 = readl_relaxed(base + GICH_EISR1); | |
57 | else | |
58 | eisr1 = 0; | |
59 | } else { | |
60 | eisr0 = eisr1 = 0; | |
61 | } | |
62 | } else { | |
63 | cpu_if->vgic_misr = 0; | |
64 | eisr0 = eisr1 = 0; | |
65 | } | |
66 | ||
67 | #ifdef CONFIG_CPU_BIG_ENDIAN | |
68 | cpu_if->vgic_eisr = ((u64)eisr0 << 32) | eisr1; | |
69 | #else | |
70 | cpu_if->vgic_eisr = ((u64)eisr1 << 32) | eisr0; | |
71 | #endif | |
72 | } | |
73 | ||
2a1044f8 MZ |
74 | static void __hyp_text save_elrsr(struct kvm_vcpu *vcpu, void __iomem *base) |
75 | { | |
76 | struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2; | |
2db4c104 | 77 | int nr_lr = (kern_hyp_va(&vgic_v2_params))->nr_lr; |
2a1044f8 MZ |
78 | u32 elrsr0, elrsr1; |
79 | ||
80 | elrsr0 = readl_relaxed(base + GICH_ELRSR0); | |
81 | if (unlikely(nr_lr > 32)) | |
82 | elrsr1 = readl_relaxed(base + GICH_ELRSR1); | |
83 | else | |
84 | elrsr1 = 0; | |
85 | ||
86 | #ifdef CONFIG_CPU_BIG_ENDIAN | |
87 | cpu_if->vgic_elrsr = ((u64)elrsr0 << 32) | elrsr1; | |
88 | #else | |
89 | cpu_if->vgic_elrsr = ((u64)elrsr1 << 32) | elrsr0; | |
90 | #endif | |
91 | } | |
92 | ||
f8cfbce1 MZ |
93 | static void __hyp_text save_lrs(struct kvm_vcpu *vcpu, void __iomem *base) |
94 | { | |
95 | struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2; | |
2db4c104 | 96 | int nr_lr = (kern_hyp_va(&vgic_v2_params))->nr_lr; |
f8cfbce1 MZ |
97 | int i; |
98 | ||
99 | for (i = 0; i < nr_lr; i++) { | |
100 | if (!(vcpu->arch.vgic_cpu.live_lrs & (1UL << i))) | |
101 | continue; | |
102 | ||
4d3afc9b | 103 | if (cpu_if->vgic_elrsr & (1UL << i)) |
f8cfbce1 | 104 | cpu_if->vgic_lr[i] &= ~GICH_LR_STATE; |
4d3afc9b CD |
105 | else |
106 | cpu_if->vgic_lr[i] = readl_relaxed(base + GICH_LR0 + (i * 4)); | |
f8cfbce1 | 107 | |
cc1daf0b | 108 | writel_relaxed(0, base + GICH_LR0 + (i * 4)); |
f8cfbce1 MZ |
109 | } |
110 | } | |
111 | ||
06282fd2 MZ |
112 | /* vcpu is already in the HYP VA space */ |
113 | void __hyp_text __vgic_v2_save_state(struct kvm_vcpu *vcpu) | |
114 | { | |
115 | struct kvm *kvm = kern_hyp_va(vcpu->kvm); | |
116 | struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2; | |
117 | struct vgic_dist *vgic = &kvm->arch.vgic; | |
118 | void __iomem *base = kern_hyp_va(vgic->vctrl_base); | |
06282fd2 MZ |
119 | |
120 | if (!base) | |
121 | return; | |
122 | ||
06282fd2 | 123 | cpu_if->vgic_vmcr = readl_relaxed(base + GICH_VMCR); |
59f00ff9 MZ |
124 | |
125 | if (vcpu->arch.vgic_cpu.live_lrs) { | |
2a1044f8 | 126 | cpu_if->vgic_apr = readl_relaxed(base + GICH_APR); |
06282fd2 | 127 | |
c813bb17 | 128 | save_maint_int_state(vcpu, base); |
2a1044f8 | 129 | save_elrsr(vcpu, base); |
f8cfbce1 | 130 | save_lrs(vcpu, base); |
06282fd2 | 131 | |
59f00ff9 MZ |
132 | writel_relaxed(0, base + GICH_HCR); |
133 | ||
134 | vcpu->arch.vgic_cpu.live_lrs = 0; | |
135 | } else { | |
136 | cpu_if->vgic_eisr = 0; | |
137 | cpu_if->vgic_elrsr = ~0UL; | |
138 | cpu_if->vgic_misr = 0; | |
139 | cpu_if->vgic_apr = 0; | |
140 | } | |
06282fd2 MZ |
141 | } |
142 | ||
143 | /* vcpu is already in the HYP VA space */ | |
144 | void __hyp_text __vgic_v2_restore_state(struct kvm_vcpu *vcpu) | |
145 | { | |
146 | struct kvm *kvm = kern_hyp_va(vcpu->kvm); | |
147 | struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2; | |
148 | struct vgic_dist *vgic = &kvm->arch.vgic; | |
149 | void __iomem *base = kern_hyp_va(vgic->vctrl_base); | |
2db4c104 CD |
150 | int nr_lr = (kern_hyp_va(&vgic_v2_params))->nr_lr; |
151 | int i; | |
59f00ff9 | 152 | u64 live_lrs = 0; |
06282fd2 MZ |
153 | |
154 | if (!base) | |
155 | return; | |
156 | ||
59f00ff9 | 157 | |
06282fd2 | 158 | for (i = 0; i < nr_lr; i++) |
59f00ff9 MZ |
159 | if (cpu_if->vgic_lr[i] & GICH_LR_STATE) |
160 | live_lrs |= 1UL << i; | |
161 | ||
162 | if (live_lrs) { | |
163 | writel_relaxed(cpu_if->vgic_hcr, base + GICH_HCR); | |
164 | writel_relaxed(cpu_if->vgic_apr, base + GICH_APR); | |
165 | for (i = 0; i < nr_lr; i++) { | |
cc1daf0b MZ |
166 | if (!(live_lrs & (1UL << i))) |
167 | continue; | |
59f00ff9 | 168 | |
cc1daf0b MZ |
169 | writel_relaxed(cpu_if->vgic_lr[i], |
170 | base + GICH_LR0 + (i * 4)); | |
59f00ff9 MZ |
171 | } |
172 | } | |
173 | ||
174 | writel_relaxed(cpu_if->vgic_vmcr, base + GICH_VMCR); | |
175 | vcpu->arch.vgic_cpu.live_lrs = live_lrs; | |
06282fd2 | 176 | } |