#include <linux/kvm_host.h>
#include <linux/random.h>
#include <linux/memblock.h>
#include <asm/alternative.h>
#include <asm/debug-monitors.h>
#include <asm/insn.h>
#include <asm/kvm_mmu.h>
#include <asm/memory.h>

/*
 * The LSB of the HYP VA tag
 */
static u8 tag_lsb;
/*
 * The HYP VA tag value with the region bit
 */
static u64 tag_val;
static u64 va_mask;

/* hyp VA -> PA offset: PA == hyp VA + hyp_physvirt_offset */
s64 __ro_after_init hyp_physvirt_offset;
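/*
 * Runtime computation of the hyp VA layout: kvm_compute_layout() picks
 * the (optionally randomized) tag that turns a kernel linear-map VA
 * into a hyp VA, and the alternative_cb callbacks below patch the
 * instruction slots reserved in the hyp code with that conversion.
 */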
/*
 * Compute a hyp VA using the same transformation as the kern_hyp_va()
 * macro, for use before the alternatives have been applied.
 */
static u64 __early_kern_hyp_va(u64 addr)
{
	addr &= va_mask;
	addr |= tag_val << tag_lsb;
	return addr;
}

/*
 * Store the hyp VA <-> PA offset into a hyp-owned variable.
 */
static void init_hyp_physvirt_offset(void)
{
	u64 kern_va, hyp_va;

	/* Compute the offset from the hyp VA and PA of a random symbol. */
	kern_va = (u64)lm_alias(__hyp_text_start);
	hyp_va = __early_kern_hyp_va(kern_va);
	hyp_physvirt_offset = (s64)__pa(kern_va) - (s64)hyp_va;
}
/*
 * We want to generate a hyp VA with the following format (with V ==
 * vabits_actual):
 *
 *  63 ... V |     V-1    | V-2 .. tag_lsb | tag_lsb - 1 .. 0
 *  ---------------------------------------------------------
 * | 0000000 | hyp_va_msb |   random tag   |  kern linear VA |
 *           |--------- tag_val -----------|----- va_mask ---|
 *
 * which does not conflict with the idmap regions.
 */
__init void kvm_compute_layout(void)
{
	phys_addr_t idmap_addr = __pa_symbol(__hyp_idmap_text_start);
	u64 hyp_va_msb;

	/* Where is my RAM region? */
	hyp_va_msb  = idmap_addr & BIT(vabits_actual - 1);
	hyp_va_msb ^= BIT(vabits_actual - 1);

	tag_lsb = fls64((u64)phys_to_virt(memblock_start_of_DRAM()) ^
			(u64)(high_memory - 1));

	va_mask = GENMASK_ULL(tag_lsb - 1, 0);
	tag_val = hyp_va_msb;

	if (IS_ENABLED(CONFIG_RANDOMIZE_BASE) && tag_lsb != (vabits_actual - 1)) {
		/* We have some free bits to insert a random tag. */
		tag_val |= get_random_long() & GENMASK_ULL(vabits_actual - 2, tag_lsb);
	}
	tag_val >>= tag_lsb;

	init_hyp_physvirt_offset();
}
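/*
 * Worked example (numbers are illustrative, not from any particular
 * platform): with vabits_actual == 48 and a linear map whose first and
 * last addresses differ only in bits [38:0], fls64() of their XOR
 * yields tag_lsb == 39. va_mask then covers bits [38:0], hyp_va_msb is
 * the complement of bit 47 of the idmap address (keeping the hyp VA
 * range disjoint from the idmap), and with CONFIG_RANDOMIZE_BASE the
 * random tag occupies bits [46:39].
 */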
/*
 * The .hyp.reloc ELF section contains a list of kimg positions of
 * 32-bit values, fixed up at boot to contain their hyp VA equivalent.
 */
__init void kvm_apply_hyp_relocations(void)
{
	int32_t *rel;
	int32_t *begin = (int32_t *)__hyp_reloc_begin;
	int32_t *end = (int32_t *)__hyp_reloc_end;

	for (rel = begin; rel < end; ++rel) {
		uintptr_t *ptr, kimg_va;

		/*
		 * Each entry contains a 32-bit relative offset from itself
		 * to a kimg VA position.
		 */
		ptr = (uintptr_t *)lm_alias((char *)rel + *rel);

		/* Read the kimg VA value at the relocation address. */
		kimg_va = *ptr;

		/* Convert to hyp VA and store back to the relocation address. */
		*ptr = __early_kern_hyp_va((uintptr_t)kimg_va);
	}
}
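/*
 * kvm_update_va_mask() rewrites the five-instruction slot reserved by
 * the kern_hyp_va assembler macro under alternative_cb:
 *
 *	and reg, reg, #va_mask			// keep the linear-map bits
 *	ror reg, reg, #tag_lsb			// rotate them out of the way
 *	add reg, reg, #(tag_val & 0xfff)	// insert tag bits [11:0]
 *	add reg, reg, #(tag_val >> 12), lsl #12	// insert tag bits [23:12]
 *	ror reg, reg, #(64 - tag_lsb)		// rotate everything back
 *
 * compute_instruction() generates instruction n of that sequence for
 * the register pair actually encoded at the patch site.
 */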
static u32 compute_instruction(int n, u32 rd, u32 rn)
{
	u32 insn = AARCH64_BREAK_FAULT;

	switch (n) {
	case 0:
		insn = aarch64_insn_gen_logical_immediate(AARCH64_INSN_LOGIC_AND,
							  AARCH64_INSN_VARIANT_64BIT,
							  rn, rd, va_mask);
		break;

	case 1:
		/* ROR is a variant of EXTR with Rm = Rn */
		insn = aarch64_insn_gen_extr(AARCH64_INSN_VARIANT_64BIT,
					     rn, rn, rd,
					     tag_lsb);
		break;

	case 2:
		insn = aarch64_insn_gen_add_sub_imm(rd, rn,
						    tag_val & GENMASK(11, 0),
						    AARCH64_INSN_VARIANT_64BIT,
						    AARCH64_INSN_ADSB_ADD);
		break;

	case 3:
		insn = aarch64_insn_gen_add_sub_imm(rd, rn,
						    tag_val & GENMASK(23, 12),
						    AARCH64_INSN_VARIANT_64BIT,
						    AARCH64_INSN_ADSB_ADD);
		break;

	case 4:
		/* ROR is a variant of EXTR with Rm = Rn */
		insn = aarch64_insn_gen_extr(AARCH64_INSN_VARIANT_64BIT,
					     rn, rn, rd,
					     64 - tag_lsb);
		break;
	}

	return insn;
}
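/*
 * Note the split between cases 2 and 3 above: once shifted down by
 * tag_lsb, tag_val may be wider than a single 12-bit ADD immediate,
 * so the tag is inserted with two ADDs covering bits [11:0] and
 * [23:12] respectively.
 */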
void __init kvm_update_va_mask(struct alt_instr *alt,
			       __le32 *origptr, __le32 *updptr, int nr_inst)
{
	int i;

	BUG_ON(nr_inst != 5);

	for (i = 0; i < nr_inst; i++) {
		u32 rd, rn, insn, oinsn;

		/*
		 * VHE doesn't need any address translation: NOP everything.
		 *
		 * Alternatively, if the tag is zero (because the layout
		 * dictates it and we don't have any spare bits in the
		 * address), NOP everything after masking the kernel VA.
		 */
		if (cpus_have_cap(ARM64_HAS_VIRT_HOST_EXTN) || (!tag_val && i > 0)) {
			updptr[i] = cpu_to_le32(aarch64_insn_gen_nop());
			continue;
		}

		oinsn = le32_to_cpu(origptr[i]);
		rd = aarch64_insn_decode_register(AARCH64_INSN_REGTYPE_RD, oinsn);
		rn = aarch64_insn_decode_register(AARCH64_INSN_REGTYPE_RN, oinsn);

		insn = compute_instruction(i, rd, rn);
		BUG_ON(insn == AARCH64_BREAK_FAULT);

		updptr[i] = cpu_to_le32(insn);
	}
}
void kvm_patch_vector_branch(struct alt_instr *alt,
			     __le32 *origptr, __le32 *updptr, int nr_inst)
{
	u64 addr;
	u32 insn;

	BUG_ON(nr_inst != 4);

	if (!cpus_have_cap(ARM64_SPECTRE_V3A) ||
	    WARN_ON_ONCE(cpus_have_cap(ARM64_HAS_VIRT_HOST_EXTN)))
		return;

	/*
	 * Compute HYP VA by using the same computation as kern_hyp_va().
	 */
	addr = __early_kern_hyp_va((u64)kvm_ksym_ref(__kvm_hyp_vector));

	/* Use PC[10:7] to branch to the same vector in KVM */
	addr |= ((u64)origptr & GENMASK_ULL(10, 7));

	/*
	 * Branch over the preamble in order to avoid the initial store on
	 * the stack (which we already perform in the hardening vectors).
	 */
	addr += KVM_VECTOR_PREAMBLE;

	/* movz x0, #(addr & 0xffff) */
	insn = aarch64_insn_gen_movewide(AARCH64_INSN_REG_0, (u16)addr, 0,
					 AARCH64_INSN_VARIANT_64BIT,
					 AARCH64_INSN_MOVEWIDE_ZERO);
	*updptr++ = cpu_to_le32(insn);

	/* movk x0, #((addr >> 16) & 0xffff), lsl #16 */
	insn = aarch64_insn_gen_movewide(AARCH64_INSN_REG_0, (u16)(addr >> 16), 16,
					 AARCH64_INSN_VARIANT_64BIT,
					 AARCH64_INSN_MOVEWIDE_KEEP);
	*updptr++ = cpu_to_le32(insn);

	/* movk x0, #((addr >> 32) & 0xffff), lsl #32 */
	insn = aarch64_insn_gen_movewide(AARCH64_INSN_REG_0, (u16)(addr >> 32), 32,
					 AARCH64_INSN_VARIANT_64BIT,
					 AARCH64_INSN_MOVEWIDE_KEEP);
	*updptr++ = cpu_to_le32(insn);

	/* br x0 */
	insn = aarch64_insn_gen_branch_reg(AARCH64_INSN_REG_0,
					   AARCH64_INSN_BRANCH_NOLINK);
	*updptr++ = cpu_to_le32(insn);
}
static void generate_mov_q(u64 val, __le32 *origptr, __le32 *updptr, int nr_inst)
{
	u32 insn, oinsn, rd;

	BUG_ON(nr_inst != 4);

	/* Compute target register */
	oinsn = le32_to_cpu(*origptr);
	rd = aarch64_insn_decode_register(AARCH64_INSN_REGTYPE_RD, oinsn);

	/* movz rd, #(val & 0xffff) */
	insn = aarch64_insn_gen_movewide(rd, (u16)val, 0,
					 AARCH64_INSN_VARIANT_64BIT,
					 AARCH64_INSN_MOVEWIDE_ZERO);
	*updptr++ = cpu_to_le32(insn);

	/* movk rd, #((val >> 16) & 0xffff), lsl #16 */
	insn = aarch64_insn_gen_movewide(rd, (u16)(val >> 16), 16,
					 AARCH64_INSN_VARIANT_64BIT,
					 AARCH64_INSN_MOVEWIDE_KEEP);
	*updptr++ = cpu_to_le32(insn);

	/* movk rd, #((val >> 32) & 0xffff), lsl #32 */
	insn = aarch64_insn_gen_movewide(rd, (u16)(val >> 32), 32,
					 AARCH64_INSN_VARIANT_64BIT,
					 AARCH64_INSN_MOVEWIDE_KEEP);
	*updptr++ = cpu_to_le32(insn);

	/* movk rd, #((val >> 48) & 0xffff), lsl #48 */
	insn = aarch64_insn_gen_movewide(rd, (u16)(val >> 48), 48,
					 AARCH64_INSN_VARIANT_64BIT,
					 AARCH64_INSN_MOVEWIDE_KEEP);
	*updptr++ = cpu_to_le32(insn);
}
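/*
 * generate_mov_q() is shared by the two callbacks below, which patch
 * their four-instruction alternative slots with a constant computed at
 * boot: the kernel image VA offset, and the system-wide sanitised
 * value of CTR_EL0.
 */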
void kvm_get_kimage_voffset(struct alt_instr *alt,
			    __le32 *origptr, __le32 *updptr, int nr_inst)
{
	generate_mov_q(kimage_voffset, origptr, updptr, nr_inst);
}

void kvm_compute_final_ctr_el0(struct alt_instr *alt,
			       __le32 *origptr, __le32 *updptr, int nr_inst)
{
	generate_mov_q(read_sanitised_ftr_reg(SYS_CTR_EL0),
		       origptr, updptr, nr_inst);
}