
Searched refs:vgic_cpu (Results 1 – 16 of 16) sorted by relevance

/linux-6.1.9/arch/arm64/kvm/vgic/
vgic.c
      96  return &vcpu->arch.vgic_cpu.private_irqs[intid];  in vgic_get_irq()
     152  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_flush_pending_lpis() (local)
     156  raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);  in vgic_flush_pending_lpis()
     158  list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {  in vgic_flush_pending_lpis()
     168  raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);  in vgic_flush_pending_lpis()
     299  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_sort_ap_list() (local)
     301  lockdep_assert_held(&vgic_cpu->ap_list_lock);  in vgic_sort_ap_list()
     303  list_sort(NULL, &vgic_cpu->ap_list_head, vgic_irq_cmp);  in vgic_sort_ap_list()
     379  raw_spin_lock_irqsave(&vcpu->arch.vgic_cpu.ap_list_lock, flags);  in vgic_queue_irq_unlock()
     396  raw_spin_unlock_irqrestore(&vcpu->arch.vgic_cpu.ap_list_lock,  in vgic_queue_irq_unlock()
    [all …]
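
Context for the vgic.c hits: the per-VCPU vgic_cpu carries the "AP list" of interrupts that are active or pending on that VCPU, and every hit above either takes ap_list_lock or walks/sorts ap_list_head under it. A minimal sketch of that pattern, assuming only the field names shown in the hits (example_walk_ap_list() is an illustrative name, not a kernel symbol):

    #include <linux/kvm_host.h>

    /* Sketch only: mirrors the ap_list access pattern in the hits above. */
    static void example_walk_ap_list(struct kvm_vcpu *vcpu)
    {
        struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
        struct vgic_irq *irq, *tmp;
        unsigned long flags;

        /* All reads and writes of the AP list happen under ap_list_lock. */
        raw_spin_lock_irqsave(&vgic_cpu->ap_list_lock, flags);

        /* The _safe iterator tolerates entries being unlinked while walking. */
        list_for_each_entry_safe(irq, tmp, &vgic_cpu->ap_list_head, ap_list) {
            /* inspect irq, or drop it from the AP list, here */
        }

        raw_spin_unlock_irqrestore(&vgic_cpu->ap_list_lock, flags);
    }
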
vgic-mmio-v3.c
     240  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_lpis_enabled() (local)
     242  return atomic_read(&vgic_cpu->ctlr) == GICR_CTLR_ENABLE_LPIS;  in vgic_lpis_enabled()
     248  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_mmio_read_v3r_ctlr() (local)
     251  val = atomic_read(&vgic_cpu->ctlr);  in vgic_mmio_read_v3r_ctlr()
     262  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_mmio_write_v3r_ctlr() (local)
     273  ctlr = atomic_cmpxchg_acquire(&vgic_cpu->ctlr,  in vgic_mmio_write_v3r_ctlr()
     281  atomic_set_release(&vgic_cpu->ctlr, 0);  in vgic_mmio_write_v3r_ctlr()
     283  ctlr = atomic_cmpxchg_acquire(&vgic_cpu->ctlr, 0,  in vgic_mmio_write_v3r_ctlr()
     295  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_mmio_vcpu_rdist_is_last() (local)
     296  struct vgic_redist_region *iter, *rdreg = vgic_cpu->rdreg;  in vgic_mmio_vcpu_rdist_is_last()
    [all …]
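
These hits are the redistributor GICR_CTLR emulation: vgic_cpu->ctlr is an atomic_t holding the guest's ENABLE_LPIS/RWP bits, read with atomic_read() and flipped with atomic_cmpxchg_acquire()/atomic_set_release() so that concurrent redistributor writes resolve cleanly. A hedged sketch of the read check and the enable-side transition (the example_* helpers are illustrative; the flushing and error paths of the real write handler are omitted):

    #include <linux/kvm_host.h>
    #include <linux/irqchip/arm-gic-v3.h>

    /* Sketch only: condensed from the GICR_CTLR handling shown above. */
    static bool example_lpis_enabled(struct kvm_vcpu *vcpu)
    {
        /* Exactly ENABLE_LPIS stored (RWP clear) means LPI delivery is live. */
        return atomic_read(&vcpu->arch.vgic_cpu.ctlr) == GICR_CTLR_ENABLE_LPIS;
    }

    static void example_enable_lpis(struct kvm_vcpu *vcpu)
    {
        struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;

        /* Only the writer that wins the 0 -> ENABLE_LPIS transition proceeds;
         * a concurrent enable sees a non-zero old value and backs off. */
        if (atomic_cmpxchg_acquire(&vgic_cpu->ctlr, 0, GICR_CTLR_ENABLE_LPIS) != 0)
            return;

        /* ... scan the guest pending table and enable delivery here ... */
    }
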
vgic-init.c
     188  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in kvm_vgic_vcpu_init() (local)
     193  vgic_cpu->rd_iodev.base_addr = VGIC_ADDR_UNDEF;  in kvm_vgic_vcpu_init()
     195  INIT_LIST_HEAD(&vgic_cpu->ap_list_head);  in kvm_vgic_vcpu_init()
     196  raw_spin_lock_init(&vgic_cpu->ap_list_lock);  in kvm_vgic_vcpu_init()
     197  atomic_set(&vgic_cpu->vgic_v3.its_vpe.vlpi_count, 0);  in kvm_vgic_vcpu_init()
     204  struct vgic_irq *irq = &vgic_cpu->private_irqs[i];  in kvm_vgic_vcpu_init()
     279  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_init() (local)
     282  struct vgic_irq *irq = &vgic_cpu->private_irqs[i];  in vgic_init()
     364  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in kvm_vgic_vcpu_destroy() (local)
     372  INIT_LIST_HEAD(&vgic_cpu->ap_list_head);  in kvm_vgic_vcpu_destroy()
    [all …]
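
The vgic-init.c hits show the per-VCPU setup: the redistributor MMIO base starts out undefined, the AP list head and its raw spinlock are initialized, the GICv4 vLPI count is zeroed, and the 32 private interrupts (SGIs and PPIs) embedded in vgic_cpu->private_irqs[] are initialized one by one. A minimal sketch of that order, assuming the fields shown above (example_vgic_vcpu_setup() is an illustrative name):

    #include <linux/kvm_host.h>

    /* Sketch only: the per-VCPU init steps visible in kvm_vgic_vcpu_init() above. */
    static void example_vgic_vcpu_setup(struct kvm_vcpu *vcpu)
    {
        struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;
        int i;

        /* No redistributor MMIO frame assigned yet. */
        vgic_cpu->rd_iodev.base_addr = VGIC_ADDR_UNDEF;

        INIT_LIST_HEAD(&vgic_cpu->ap_list_head);
        raw_spin_lock_init(&vgic_cpu->ap_list_lock);
        atomic_set(&vgic_cpu->vgic_v3.its_vpe.vlpi_count, 0);

        /* The private interrupts (SGIs and PPIs) are embedded in vgic_cpu itself. */
        for (i = 0; i < VGIC_NR_PRIVATE_IRQS; i++) {
            struct vgic_irq *irq = &vgic_cpu->private_irqs[i];

            irq->intid = i;
            /* ... per-IRQ defaults (lock, config, target vcpu) in the real code ... */
        }
    }
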
vgic-v2.c
      31  struct vgic_v2_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v2;  in vgic_v2_set_underflow()
      51  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_v2_fold_lr_state() (local)
      52  struct vgic_v2_cpu_if *cpuif = &vgic_cpu->vgic_v2;  in vgic_v2_fold_lr_state()
      59  for (lr = 0; lr < vgic_cpu->vgic_v2.used_lrs; lr++) {  in vgic_v2_fold_lr_state()
     197  vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = val;  in vgic_v2_populate_lr()
     202  vcpu->arch.vgic_cpu.vgic_v2.vgic_lr[lr] = 0;  in vgic_v2_clear_lr()
     207  struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;  in vgic_v2_set_vmcr()
     234  struct vgic_v2_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v2;  in vgic_v2_get_vmcr()
     267  vcpu->arch.vgic_cpu.vgic_v2.vgic_vmcr = 0;  in vgic_v2_enable()
     270  vcpu->arch.vgic_cpu.vgic_v2.vgic_hcr = GICH_HCR_EN;  in vgic_v2_enable()
    [all …]
vgic-v3.c
      23  struct vgic_v3_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v3;  in vgic_v3_set_underflow()
      36  struct vgic_cpu *vgic_cpu = &vcpu->arch.vgic_cpu;  in vgic_v3_fold_lr_state() (local)
      37  struct vgic_v3_cpu_if *cpuif = &vgic_cpu->vgic_v3;  in vgic_v3_fold_lr_state()
     185  vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = val;  in vgic_v3_populate_lr()
     190  vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = 0;  in vgic_v3_clear_lr()
     195  struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;  in vgic_v3_set_vmcr()
     225  struct vgic_v3_cpu_if *cpu_if = &vcpu->arch.vgic_cpu.vgic_v3;  in vgic_v3_get_vmcr()
     261  struct vgic_v3_cpu_if *vgic_v3 = &vcpu->arch.vgic_cpu.vgic_v3;  in vgic_v3_enable()
     280  vcpu->arch.vgic_cpu.pendbaser = INITIAL_PENDBASER_VALUE;  in vgic_v3_enable()
     285  vcpu->arch.vgic_cpu.num_id_bits = (kvm_vgic_global_state.ich_vtr_el2 &  in vgic_v3_enable()
    [all …]
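
The vgic-v2.c and vgic-v3.c hits show the two hardware backends keeping their CPU-interface shadow state in the matching sub-struct of vgic_cpu (vgic_v2 or vgic_v3): list registers are populated/cleared in vgic_lr[] before a guest run and folded back afterwards, and the VMCR is read and written through the same struct. A hedged sketch of that list-register shape, written for the v3 side (the example_* names are illustrative and the fold body is elided):

    #include <linux/kvm_host.h>

    /* Sketch only: the shadow list-register handling suggested by the hits above. */
    static void example_populate_lr(struct kvm_vcpu *vcpu, int lr, u64 val)
    {
        /* Shadow copy; the hyp code later loads it into the hardware LR. */
        vcpu->arch.vgic_cpu.vgic_v3.vgic_lr[lr] = val;
    }

    static void example_fold_lr_state(struct kvm_vcpu *vcpu)
    {
        struct vgic_v3_cpu_if *cpuif = &vcpu->arch.vgic_cpu.vgic_v3;
        int lr;

        /* Only the LRs that were loaded for this run need syncing back. */
        for (lr = 0; lr < cpuif->used_lrs; lr++) {
            u64 val = cpuif->vgic_lr[lr];

            /* ... fold pending/active bits back into the software model ... */
            (void)val;
        }
        cpuif->used_lrs = 0;
    }
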
vgic-v4.c
      98  raw_spin_lock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock);  in vgic_v4_doorbell_handler()
      99  vcpu->arch.vgic_cpu.vgic_v3.its_vpe.pending_last = true;  in vgic_v4_doorbell_handler()
     100  raw_spin_unlock(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vpe_lock);  in vgic_v4_doorbell_handler()
     117  struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;  in vgic_v4_enable_vsgis()
     214  struct its_vpe *vpe = &irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe;  in vgic_v4_get_vlpi_state()
     262  dist->its_vm.vpes[i] = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;  in vgic_v4_init()
     339  struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;  in vgic_v4_put()
     349  struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;  in vgic_v4_load()
     382  struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;  in vgic_v4_commit()
     442  .vpe = &irq->target_vcpu->arch.vgic_cpu.vgic_v3.its_vpe,  in kvm_vgic_v4_set_forwarding()
    [all …]
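
Every vgic-v4.c hit reaches through vgic_cpu.vgic_v3.its_vpe, the GICv4 virtual PE that backs this VCPU for direct vLPI injection; the doorbell handler takes vpe_lock, marks pending_last and kicks the VCPU. A hedged sketch of that doorbell pattern (example_vpe_doorbell() is illustrative; the real handler is an interrupt handler with extra GICv4.0/4.1 handling):

    #include <linux/kvm_host.h>

    /* Sketch only: the doorbell pattern from vgic_v4_doorbell_handler() above. */
    static void example_vpe_doorbell(struct kvm_vcpu *vcpu)
    {
        struct its_vpe *vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;

        /* Serialize against the vPE becoming (non-)resident in vgic_v4_load/put. */
        raw_spin_lock(&vpe->vpe_lock);
        vpe->pending_last = true;       /* a vLPI may be pending for this vPE */
        raw_spin_unlock(&vpe->vpe_lock);

        kvm_vcpu_kick(vcpu);            /* have the VCPU re-evaluate its pending state */
    }
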
vgic-mmio-v2.c
     373  return vcpu->arch.vgic_cpu.vgic_v2.vgic_apr;  in vgic_mmio_read_apr()
     375  struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;  in vgic_mmio_read_apr()
     399  vcpu->arch.vgic_cpu.vgic_v2.vgic_apr = val;  in vgic_mmio_write_apr()
     401  struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;  in vgic_mmio_write_apr()
vgic.h
     267  struct vgic_cpu *cpu_if = &vcpu->arch.vgic_cpu;  in vgic_v3_max_apr_idx()
vgic-its.c
     372  map.vpe = &vcpu->arch.vgic_cpu.vgic_v3.its_vpe;  in update_affinity()
     430  gpa_t pendbase = GICR_PENDBASER_ADDRESS(vcpu->arch.vgic_cpu.pendbaser);  in its_sync_lpi_pending_table()
    1360  if (vcpu->arch.vgic_cpu.vgic_v3.its_vpe.its_vm)  in vgic_its_invall()
    1361  its_invall_vpe(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe);  in vgic_its_invall()
    1853  if (!(vcpu->arch.vgic_cpu.pendbaser & GICR_PENDBASER_PTZ))  in vgic_enable_lpis()
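
In vgic-its.c the per-VCPU state supplies the guest's LPI pending-table base (vgic_cpu.pendbaser) and, with GICv4, the its_vpe used when mapping a vLPI; the GICR_PENDBASER_PTZ test at line 1853 lets the pending-table scan be skipped when the guest declared the table zeroed. A hedged sketch of reading that base, assuming the macros shown above (example_lpi_pending_table() is illustrative, and returning 0 for the PTZ case is a shortcut for "nothing to scan"):

    #include <linux/kvm_host.h>
    #include <linux/irqchip/arm-gic-v3.h>

    /* Sketch only: deriving the guest LPI pending-table address per the hits above. */
    static gpa_t example_lpi_pending_table(struct kvm_vcpu *vcpu)
    {
        u64 pendbaser = vcpu->arch.vgic_cpu.pendbaser;

        /* PTZ set means the guest guarantees the table is already zero. */
        if (pendbaser & GICR_PENDBASER_PTZ)
            return 0;

        /* Physical-address field of GICR_PENDBASER. */
        return GICR_PENDBASER_ADDRESS(pendbaser);
    }
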
/linux-6.1.9/arch/arm64/kvm/
vgic-sys-reg-v3.c
      17  struct vgic_cpu *vgic_v3_cpu = &vcpu->arch.vgic_cpu;  in set_gic_ctlr()
      62  struct vgic_cpu *vgic_v3_cpu = &vcpu->arch.vgic_cpu;  in get_gic_ctlr()
     210  struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;  in set_apr_reg()
     220  struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;  in get_apr_reg()
     293  struct vgic_v3_cpu_if *vgicv3 = &vcpu->arch.vgic_cpu.vgic_v3;  in get_gic_sre()
sys_regs.c
     249  p->regval = vcpu->arch.vgic_cpu.vgic_v3.vgic_sre;  in access_gic_sre()
/linux-6.1.9/arch/arm64/kvm/hyp/nvhe/
switch.c
     115  __vgic_v3_save_state(&vcpu->arch.vgic_cpu.vgic_v3);  in __hyp_vgic_save_state()
     116  __vgic_v3_deactivate_traps(&vcpu->arch.vgic_cpu.vgic_v3);  in __hyp_vgic_save_state()
     124  __vgic_v3_activate_traps(&vcpu->arch.vgic_cpu.vgic_v3);  in __hyp_vgic_restore_state()
     125  __vgic_v3_restore_state(&vcpu->arch.vgic_cpu.vgic_v3);  in __hyp_vgic_restore_state()
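
The nVHE hits show the ordering around a guest run: on the save side the GICv3 CPU-interface state is saved before the guest's trap configuration is torn down, and the restore side mirrors it, activating traps before reloading the state, all on &vcpu->arch.vgic_cpu.vgic_v3. A minimal sketch of that pairing (same hyp calls as the hits; the example_* wrappers are illustrative and the surrounding GICv3 checks of the real code are omitted):

    #include <linux/kvm_host.h>
    #include <asm/kvm_hyp.h>

    /* Sketch only: the save/restore pairing used by the nVHE switch code above. */
    static void example_hyp_vgic_save(struct kvm_vcpu *vcpu)
    {
        /* Save the CPU-interface state, then undo the trap setup used for the guest. */
        __vgic_v3_save_state(&vcpu->arch.vgic_cpu.vgic_v3);
        __vgic_v3_deactivate_traps(&vcpu->arch.vgic_cpu.vgic_v3);
    }

    static void example_hyp_vgic_restore(struct kvm_vcpu *vcpu)
    {
        /* Mirror image on entry: set up the traps first, then reload the state. */
        __vgic_v3_activate_traps(&vcpu->arch.vgic_cpu.vgic_v3);
        __vgic_v3_restore_state(&vcpu->arch.vgic_cpu.vgic_v3);
    }
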
/linux-6.1.9/include/kvm/
arm_vgic.h
     324  struct vgic_cpu {  (struct definition)
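
The arm_vgic.h hit is the definition itself. For orientation, an abridged view of struct vgic_cpu limited to the members that actually appear in these results; this is not the complete definition, see include/kvm/arm_vgic.h:324 for the real one:

    /* Abridged sketch: only the members referenced in the results above. */
    struct vgic_cpu {
        union {                                   /* CPU-interface shadow state */
            struct vgic_v2_cpu_if vgic_v2;
            struct vgic_v3_cpu_if vgic_v3;        /* also carries its_vpe for GICv4 */
        };

        struct vgic_irq private_irqs[VGIC_NR_PRIVATE_IRQS];  /* SGIs and PPIs */

        raw_spinlock_t   ap_list_lock;            /* protects ap_list_head */
        struct list_head ap_list_head;            /* active/pending IRQs for this VCPU */

        struct vgic_io_device      rd_iodev;      /* redistributor MMIO frame */
        struct vgic_redist_region *rdreg;         /* region this redistributor lives in */

        u64      pendbaser;                       /* guest GICR_PENDBASER */
        atomic_t ctlr;                            /* guest GICR_CTLR (ENABLE_LPIS/RWP) */
        u32      num_id_bits;                     /* cached guest interrupt-ID bits */
        /* ... further members omitted ... */
    };
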
/linux-6.1.9/arch/arm64/include/asm/
kvm_host.h
     383  struct vgic_cpu vgic_cpu;  (member)
kvm_emulate.h
     107  if (atomic_read(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vlpi_count) ||  in vcpu_clear_wfx_traps()
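
The kvm_host.h hit anchors everything else in this list: struct vgic_cpu is embedded by value in the arm64 per-VCPU architecture state, which is why every other hit spells the access as vcpu->arch.vgic_cpu. The kvm_emulate.h hit is one such consumer, checking the mapped-vLPI count before clearing WFx traps. A minimal sketch of that access path (example_has_mapped_vlpis() is an illustrative name):

    #include <linux/kvm_host.h>

    /* Sketch only: the field-access chain used throughout the hits above. */
    static bool example_has_mapped_vlpis(struct kvm_vcpu *vcpu)
    {
        /* vgic_cpu is a by-value member of vcpu->arch (kvm_host.h:383),
         * so the whole chain is plain member access with no extra pointer hops. */
        return atomic_read(&vcpu->arch.vgic_cpu.vgic_v3.its_vpe.vlpi_count) != 0;
    }
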
/linux-6.1.9/arch/arm64/kvm/hyp/
vgic-v3-sr.c
     487  unsigned int used_lrs = vcpu->arch.vgic_cpu.vgic_v3.used_lrs;  in __vgic_v3_highest_priority_lr()
     526  unsigned int used_lrs = vcpu->arch.vgic_cpu.vgic_v3.used_lrs;  in __vgic_v3_find_active_lr()