Lines matching references to kvm

29 static int update_lpi_config(struct kvm *kvm, struct vgic_irq *irq,
39 static struct vgic_irq *vgic_add_lpi(struct kvm *kvm, u32 intid, in vgic_add_lpi() argument
42 struct vgic_dist *dist = &kvm->arch.vgic; in vgic_add_lpi()
43 struct vgic_irq *irq = vgic_get_irq(kvm, NULL, intid), *oldirq; in vgic_add_lpi()
103 ret = update_lpi_config(kvm, irq, NULL, false); in vgic_add_lpi()
105 vgic_put_irq(kvm, irq); in vgic_add_lpi()
109 ret = vgic_v3_lpi_sync_pending_status(kvm, irq); in vgic_add_lpi()
111 vgic_put_irq(kvm, irq); in vgic_add_lpi()
280 static int update_lpi_config(struct kvm *kvm, struct vgic_irq *irq, in update_lpi_config() argument
283 u64 propbase = GICR_PROPBASER_ADDRESS(kvm->arch.vgic.propbaser); in update_lpi_config()
288 ret = kvm_read_guest_lock(kvm, propbase + irq->intid - GIC_LPI_OFFSET, in update_lpi_config()
301 vgic_queue_irq_unlock(kvm, irq, flags); in update_lpi_config()
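
The update_lpi_config() references above read one byte per LPI from the guest's property table, at GICR_PROPBASER_ADDRESS(propbaser) + intid - GIC_LPI_OFFSET, and requeue the interrupt when it is enabled. Below is a minimal stand-alone sketch of that addressing and of the GICv3 configuration-byte layout (priority in bits [7:2], enable in bit 0); the macro names mirror the kernel's but the program is only an illustrative model, not kernel code.

/* Model of the update_lpi_config() read path: one config byte per LPI,
 * located propbase + (intid - GIC_LPI_OFFSET) into the property table. */
#include <stdint.h>
#include <stdio.h>

#define GIC_LPI_OFFSET         8192u          /* first LPI INTID */
#define LPI_PROP_ENABLE_BIT(p) ((p) & 0x01)   /* bit 0: enable */
#define LPI_PROP_PRIORITY(p)   ((p) & 0xfc)   /* bits [7:2]: priority */

static uint64_t lpi_prop_addr(uint64_t propbase, uint32_t intid)
{
	return propbase + (intid - GIC_LPI_OFFSET);
}

int main(void)
{
	uint8_t prop = 0xa1;        /* example configuration byte */
	uint32_t intid = 8193;      /* second LPI */

	printf("config byte for INTID %u lives at 0x%llx\n", intid,
	       (unsigned long long)lpi_prop_addr(0x80000000ull, intid));
	printf("enabled=%d priority=0x%02x\n",
	       LPI_PROP_ENABLE_BIT(prop) ? 1 : 0, LPI_PROP_PRIORITY(prop));
	return 0;
}
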
319 int vgic_copy_lpi_list(struct kvm *kvm, struct kvm_vcpu *vcpu, u32 **intid_ptr) in vgic_copy_lpi_list() argument
321 struct vgic_dist *dist = &kvm->arch.vgic; in vgic_copy_lpi_list()
387 static void update_affinity_ite(struct kvm *kvm, struct its_ite *ite) in update_affinity_ite() argument
394 vcpu = kvm_get_vcpu(kvm, ite->collection->target_addr); in update_affinity_ite()
402 static void update_affinity_collection(struct kvm *kvm, struct vgic_its *its, in update_affinity_collection() argument
412 update_affinity_ite(kvm, ite); in update_affinity_collection()
439 nr_irqs = vgic_copy_lpi_list(vcpu->kvm, vcpu, &intids); in its_sync_lpi_pending_table()
454 ret = kvm_read_guest_lock(vcpu->kvm, in its_sync_lpi_pending_table()
464 irq = vgic_get_irq(vcpu->kvm, NULL, intids[i]); in its_sync_lpi_pending_table()
467 vgic_queue_irq_unlock(vcpu->kvm, irq, flags); in its_sync_lpi_pending_table()
468 vgic_put_irq(vcpu->kvm, irq); in its_sync_lpi_pending_table()
476 static unsigned long vgic_mmio_read_its_typer(struct kvm *kvm, in vgic_mmio_read_its_typer() argument
498 static unsigned long vgic_mmio_read_its_iidr(struct kvm *kvm, in vgic_mmio_read_its_iidr() argument
509 static int vgic_mmio_uaccess_write_its_iidr(struct kvm *kvm, in vgic_mmio_uaccess_write_its_iidr() argument
521 static unsigned long vgic_mmio_read_its_idregs(struct kvm *kvm, in vgic_mmio_read_its_idregs() argument
579 static struct vgic_irq *vgic_its_check_cache(struct kvm *kvm, phys_addr_t db, in vgic_its_check_cache() argument
582 struct vgic_dist *dist = &kvm->arch.vgic; in vgic_its_check_cache()
593 static void vgic_its_cache_translation(struct kvm *kvm, struct vgic_its *its, in vgic_its_cache_translation() argument
597 struct vgic_dist *dist = &kvm->arch.vgic; in vgic_its_cache_translation()
630 __vgic_put_lpi_locked(kvm, cte->irq); in vgic_its_cache_translation()
646 void vgic_its_invalidate_cache(struct kvm *kvm) in vgic_its_invalidate_cache() argument
648 struct vgic_dist *dist = &kvm->arch.vgic; in vgic_its_invalidate_cache()
662 __vgic_put_lpi_locked(kvm, cte->irq); in vgic_its_invalidate_cache()
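
vgic_its_check_cache() and vgic_its_cache_translation() above maintain a translation cache keyed by (doorbell address, devid, eventid), and vgic_lpi_translation_cache_init() further down sizes it per online vCPU. The sketch below is a hypothetical flat-array model of the lookup only; the kernel keeps the entries on an LRU list and holds references on the cached interrupts, which this model omits.

/* Flat-array stand-in for the LPI translation cache lookup. */
#include <stdint.h>
#include <stddef.h>
#include <stdio.h>

struct xlat_entry {
	uint64_t db;        /* doorbell (translation register) address */
	uint32_t devid;
	uint32_t eventid;
	uint32_t intid;     /* resolved LPI INTID, 0 = unused slot */
};

static uint32_t cache_lookup(const struct xlat_entry *c, size_t n,
			     uint64_t db, uint32_t devid, uint32_t eventid)
{
	for (size_t i = 0; i < n; i++)
		if (c[i].intid && c[i].db == db &&
		    c[i].devid == devid && c[i].eventid == eventid)
			return c[i].intid;
	return 0;           /* miss: caller falls back to the full table walk */
}

int main(void)
{
	struct xlat_entry cache[4] = {
		{ .db = 0x8020000, .devid = 3, .eventid = 7, .intid = 8200 },
	};

	printf("hit -> %u, miss -> %u\n",
	       cache_lookup(cache, 4, 0x8020000, 3, 7),
	       cache_lookup(cache, 4, 0x8020000, 3, 8));
	return 0;
}
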
669 int vgic_its_resolve_lpi(struct kvm *kvm, struct vgic_its *its, in vgic_its_resolve_lpi() argument
682 vcpu = kvm_get_vcpu(kvm, ite->collection->target_addr); in vgic_its_resolve_lpi()
689 vgic_its_cache_translation(kvm, its, devid, eventid, ite->irq); in vgic_its_resolve_lpi()
695 struct vgic_its *vgic_msi_to_its(struct kvm *kvm, struct kvm_msi *msi) in vgic_msi_to_its() argument
701 if (!vgic_has_its(kvm)) in vgic_msi_to_its()
709 kvm_io_dev = kvm_io_bus_get_dev(kvm, KVM_MMIO_BUS, address); in vgic_msi_to_its()
730 static int vgic_its_trigger_msi(struct kvm *kvm, struct vgic_its *its, in vgic_its_trigger_msi() argument
737 err = vgic_its_resolve_lpi(kvm, its, devid, eventid, &irq); in vgic_its_trigger_msi()
747 vgic_queue_irq_unlock(kvm, irq, flags); in vgic_its_trigger_msi()
752 int vgic_its_inject_cached_translation(struct kvm *kvm, struct kvm_msi *msi) in vgic_its_inject_cached_translation() argument
759 irq = vgic_its_check_cache(kvm, db, msi->devid, msi->data); in vgic_its_inject_cached_translation()
765 vgic_queue_irq_unlock(kvm, irq, flags); in vgic_its_inject_cached_translation()
776 int vgic_its_inject_msi(struct kvm *kvm, struct kvm_msi *msi) in vgic_its_inject_msi() argument
781 if (!vgic_its_inject_cached_translation(kvm, msi)) in vgic_its_inject_msi()
784 its = vgic_msi_to_its(kvm, msi); in vgic_its_inject_msi()
789 ret = vgic_its_trigger_msi(kvm, its, msi->devid, msi->data); in vgic_its_inject_msi()
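
The vgic_its_inject_msi() references above show the injection order: try vgic_its_inject_cached_translation() first, and only fall back to vgic_msi_to_its() plus vgic_its_trigger_msi() on a cache miss. Below is a stand-alone model of that control flow using hypothetical stand-in types and helpers, not the kernel's signatures.

/* Order of operations modelled on vgic_its_inject_msi(). */
#include <stdint.h>
#include <stdio.h>

struct msi { uint64_t db; uint32_t devid; uint32_t data; };

/* stands in for vgic_its_inject_cached_translation(): 0 on hit, -1 on miss */
static int try_cached(const struct msi *m) { (void)m; return -1; }

/* stands in for vgic_msi_to_its() + vgic_its_trigger_msi() */
static int slow_translate_and_inject(const struct msi *m)
{
	printf("walking ITS tables for devid=%u eventid=%u\n",
	       m->devid, m->data);
	return 0;
}

static int inject_msi(const struct msi *m)
{
	if (!try_cached(m))
		return 0;               /* fast path: cache hit */
	return slow_translate_and_inject(m);
}

int main(void)
{
	struct msi m = { .db = 0x8020000ull, .devid = 3, .data = 7 };
	return inject_msi(&m);
}
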
807 static void its_free_ite(struct kvm *kvm, struct its_ite *ite) in its_free_ite() argument
816 vgic_put_irq(kvm, ite->irq); in its_free_ite()
841 static int vgic_its_cmd_handle_discard(struct kvm *kvm, struct vgic_its *its, in vgic_its_cmd_handle_discard() argument
855 vgic_its_invalidate_cache(kvm); in vgic_its_cmd_handle_discard()
857 its_free_ite(kvm, ite); in vgic_its_cmd_handle_discard()
868 static int vgic_its_cmd_handle_movi(struct kvm *kvm, struct vgic_its *its, in vgic_its_cmd_handle_movi() argument
890 vcpu = kvm_get_vcpu(kvm, collection->target_addr); in vgic_its_cmd_handle_movi()
892 vgic_its_invalidate_cache(kvm); in vgic_its_cmd_handle_movi()
950 if (kvm_read_guest_lock(its->dev->kvm, in vgic_its_check_id()
973 idx = srcu_read_lock(&its->dev->kvm->srcu); in vgic_its_check_id()
974 ret = kvm_is_visible_gfn(its->dev->kvm, gfn); in vgic_its_check_id()
975 srcu_read_unlock(&its->dev->kvm->srcu, idx); in vgic_its_check_id()
1047 static int vgic_its_cmd_handle_mapi(struct kvm *kvm, struct vgic_its *its, in vgic_its_cmd_handle_mapi() argument
1072 lpi_nr >= max_lpis_propbaser(kvm->arch.vgic.propbaser)) in vgic_its_cmd_handle_mapi()
1095 vcpu = kvm_get_vcpu(kvm, collection->target_addr); in vgic_its_cmd_handle_mapi()
1097 irq = vgic_add_lpi(kvm, lpi_nr, vcpu); in vgic_its_cmd_handle_mapi()
1101 its_free_ite(kvm, ite); in vgic_its_cmd_handle_mapi()
1110 static void vgic_its_free_device(struct kvm *kvm, struct its_device *device) in vgic_its_free_device() argument
1120 its_free_ite(kvm, ite); in vgic_its_free_device()
1122 vgic_its_invalidate_cache(kvm); in vgic_its_free_device()
1129 static void vgic_its_free_device_list(struct kvm *kvm, struct vgic_its *its) in vgic_its_free_device_list() argument
1134 vgic_its_free_device(kvm, cur); in vgic_its_free_device_list()
1138 static void vgic_its_free_collection_list(struct kvm *kvm, struct vgic_its *its) in vgic_its_free_collection_list() argument
1170 static int vgic_its_cmd_handle_mapd(struct kvm *kvm, struct vgic_its *its, in vgic_its_cmd_handle_mapd() argument
1193 vgic_its_free_device(kvm, device); in vgic_its_cmd_handle_mapd()
1212 static int vgic_its_cmd_handle_mapc(struct kvm *kvm, struct vgic_its *its, in vgic_its_cmd_handle_mapc() argument
1224 if (target_addr >= atomic_read(&kvm->online_vcpus)) in vgic_its_cmd_handle_mapc()
1229 vgic_its_invalidate_cache(kvm); in vgic_its_cmd_handle_mapc()
1243 update_affinity_collection(kvm, its, collection); in vgic_its_cmd_handle_mapc()
1254 static int vgic_its_cmd_handle_clear(struct kvm *kvm, struct vgic_its *its, in vgic_its_cmd_handle_clear() argument
1279 static int vgic_its_cmd_handle_inv(struct kvm *kvm, struct vgic_its *its, in vgic_its_cmd_handle_inv() argument
1291 return update_lpi_config(kvm, ite->irq, NULL, true); in vgic_its_cmd_handle_inv()
1302 static int vgic_its_cmd_handle_invall(struct kvm *kvm, struct vgic_its *its, in vgic_its_cmd_handle_invall() argument
1316 vcpu = kvm_get_vcpu(kvm, collection->target_addr); in vgic_its_cmd_handle_invall()
1318 irq_count = vgic_copy_lpi_list(kvm, vcpu, &intids); in vgic_its_cmd_handle_invall()
1323 irq = vgic_get_irq(kvm, NULL, intids[i]); in vgic_its_cmd_handle_invall()
1326 update_lpi_config(kvm, irq, vcpu, false); in vgic_its_cmd_handle_invall()
1327 vgic_put_irq(kvm, irq); in vgic_its_cmd_handle_invall()
1346 static int vgic_its_cmd_handle_movall(struct kvm *kvm, struct vgic_its *its, in vgic_its_cmd_handle_movall() argument
1356 if (target1_addr >= atomic_read(&kvm->online_vcpus) || in vgic_its_cmd_handle_movall()
1357 target2_addr >= atomic_read(&kvm->online_vcpus)) in vgic_its_cmd_handle_movall()
1363 vcpu1 = kvm_get_vcpu(kvm, target1_addr); in vgic_its_cmd_handle_movall()
1364 vcpu2 = kvm_get_vcpu(kvm, target2_addr); in vgic_its_cmd_handle_movall()
1366 irq_count = vgic_copy_lpi_list(kvm, vcpu1, &intids); in vgic_its_cmd_handle_movall()
1371 irq = vgic_get_irq(kvm, NULL, intids[i]); in vgic_its_cmd_handle_movall()
1375 vgic_put_irq(kvm, irq); in vgic_its_cmd_handle_movall()
1378 vgic_its_invalidate_cache(kvm); in vgic_its_cmd_handle_movall()
1388 static int vgic_its_cmd_handle_int(struct kvm *kvm, struct vgic_its *its, in vgic_its_cmd_handle_int() argument
1394 return vgic_its_trigger_msi(kvm, its, msi_devid, msi_data); in vgic_its_cmd_handle_int()
1401 static int vgic_its_handle_command(struct kvm *kvm, struct vgic_its *its, in vgic_its_handle_command() argument
1409 ret = vgic_its_cmd_handle_mapd(kvm, its, its_cmd); in vgic_its_handle_command()
1412 ret = vgic_its_cmd_handle_mapc(kvm, its, its_cmd); in vgic_its_handle_command()
1415 ret = vgic_its_cmd_handle_mapi(kvm, its, its_cmd); in vgic_its_handle_command()
1418 ret = vgic_its_cmd_handle_mapi(kvm, its, its_cmd); in vgic_its_handle_command()
1421 ret = vgic_its_cmd_handle_movi(kvm, its, its_cmd); in vgic_its_handle_command()
1424 ret = vgic_its_cmd_handle_discard(kvm, its, its_cmd); in vgic_its_handle_command()
1427 ret = vgic_its_cmd_handle_clear(kvm, its, its_cmd); in vgic_its_handle_command()
1430 ret = vgic_its_cmd_handle_movall(kvm, its, its_cmd); in vgic_its_handle_command()
1433 ret = vgic_its_cmd_handle_int(kvm, its, its_cmd); in vgic_its_handle_command()
1436 ret = vgic_its_cmd_handle_inv(kvm, its, its_cmd); in vgic_its_handle_command()
1439 ret = vgic_its_cmd_handle_invall(kvm, its, its_cmd); in vgic_its_handle_command()
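
vgic_its_handle_command() above dispatches on the ITS command opcode, which sits in bits [7:0] of the first of the command's four 64-bit words; note that MAPI and MAPTI share one handler. The decoder below is a stand-alone sketch: the ITS_CMD_* values follow the GICv3 command encoding, but the macro and function names are illustrative rather than the kernel's.

/* Minimal decoder for the ITS command opcode (DW0 bits [7:0]). */
#include <stdint.h>
#include <stdio.h>

#define ITS_CMD_MOVI     0x01
#define ITS_CMD_INT      0x03
#define ITS_CMD_CLEAR    0x04
#define ITS_CMD_MAPD     0x08
#define ITS_CMD_MAPC     0x09
#define ITS_CMD_MAPTI    0x0a
#define ITS_CMD_MAPI     0x0b
#define ITS_CMD_INV      0x0c
#define ITS_CMD_INVALL   0x0d
#define ITS_CMD_MOVALL   0x0e
#define ITS_CMD_DISCARD  0x0f

static const char *decode(const uint64_t cmd[4])
{
	switch (cmd[0] & 0xff) {        /* opcode: DW0 bits [7:0] */
	case ITS_CMD_MAPD:    return "MAPD";
	case ITS_CMD_MAPC:    return "MAPC";
	case ITS_CMD_MAPTI:             /* MAPTI and MAPI share a handler */
	case ITS_CMD_MAPI:    return "MAPI/MAPTI";
	case ITS_CMD_MOVI:    return "MOVI";
	case ITS_CMD_DISCARD: return "DISCARD";
	case ITS_CMD_CLEAR:   return "CLEAR";
	case ITS_CMD_MOVALL:  return "MOVALL";
	case ITS_CMD_INT:     return "INT";
	case ITS_CMD_INV:     return "INV";
	case ITS_CMD_INVALL:  return "INVALL";
	default:              return "unhandled";
	}
}

int main(void)
{
	uint64_t cmd[4] = { ITS_CMD_MAPTI, 0, 0, 0 };   /* example command */
	printf("%s\n", decode(cmd));
	return 0;
}
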
1487 static unsigned long vgic_mmio_read_its_cbaser(struct kvm *kvm, in vgic_mmio_read_its_cbaser() argument
1494 static void vgic_mmio_write_its_cbaser(struct kvm *kvm, struct vgic_its *its, in vgic_mmio_write_its_cbaser() argument
1519 static void vgic_its_process_commands(struct kvm *kvm, struct vgic_its *its) in vgic_its_process_commands() argument
1531 int ret = kvm_read_guest_lock(kvm, cbaser + its->creadr, in vgic_its_process_commands()
1541 vgic_its_handle_command(kvm, its, cmd_buf); in vgic_its_process_commands()
1555 static void vgic_mmio_write_its_cwriter(struct kvm *kvm, struct vgic_its *its, in vgic_mmio_write_its_cwriter() argument
1574 vgic_its_process_commands(kvm, its); in vgic_mmio_write_its_cwriter()
1579 static unsigned long vgic_mmio_read_its_cwriter(struct kvm *kvm, in vgic_mmio_read_its_cwriter() argument
1586 static unsigned long vgic_mmio_read_its_creadr(struct kvm *kvm, in vgic_mmio_read_its_creadr() argument
1593 static int vgic_mmio_uaccess_write_its_creadr(struct kvm *kvm, in vgic_mmio_uaccess_write_its_creadr() argument
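
The CBASER/CWRITER/CREADR handlers above drive vgic_its_process_commands(), which consumes 32-byte commands from the guest command queue until CREADR catches up with CWRITER, wrapping at the queue size encoded in GITS_CBASER.Size (bits [7:0], in 4KiB pages minus one). The sketch below models only that ring walk; the per-command guest-memory read and handling are reduced to a comment, and all names are illustrative.

/* Ring walk modelled on vgic_its_process_commands(). */
#include <stdint.h>
#include <stdio.h>

#define ITS_CMD_SIZE            32u
#define CMD_BUFFER_SIZE(cbaser) ((((cbaser) & 0xffu) + 1) << 12)

static uint64_t process_commands(uint64_t cbaser, uint64_t creadr,
				 uint64_t cwriter)
{
	uint64_t size = CMD_BUFFER_SIZE(cbaser);

	while (creadr != cwriter) {
		/* the kernel reads one 32-byte command at
		 * CBASER address + creadr here and dispatches it */
		creadr = (creadr + ITS_CMD_SIZE) % size;
	}
	return creadr;                  /* new GITS_CREADR value */
}

int main(void)
{
	uint64_t cbaser = 0;            /* Size = 0 -> one 4KiB page */
	printf("creadr after run: %llu\n",
	       (unsigned long long)process_commands(cbaser, 4064, 32));
	return 0;
}
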
1621 static unsigned long vgic_mmio_read_its_baser(struct kvm *kvm, in vgic_mmio_read_its_baser() argument
1643 static void vgic_mmio_write_its_baser(struct kvm *kvm, in vgic_mmio_write_its_baser() argument
1687 vgic_its_free_device_list(kvm, its); in vgic_mmio_write_its_baser()
1690 vgic_its_free_collection_list(kvm, its); in vgic_mmio_write_its_baser()
1697 static unsigned long vgic_mmio_read_its_ctlr(struct kvm *vcpu, in vgic_mmio_read_its_ctlr()
1713 static void vgic_mmio_write_its_ctlr(struct kvm *kvm, struct vgic_its *its, in vgic_mmio_write_its_ctlr() argument
1731 vgic_its_invalidate_cache(kvm); in vgic_mmio_write_its_ctlr()
1737 vgic_its_process_commands(kvm, its); in vgic_mmio_write_its_ctlr()
1762 static void its_mmio_write_wi(struct kvm *kvm, struct vgic_its *its, in its_mmio_write_wi() argument
1804 static int vgic_register_its_iodev(struct kvm *kvm, struct vgic_its *its, in vgic_register_its_iodev() argument
1810 mutex_lock(&kvm->slots_lock); in vgic_register_its_iodev()
1824 ret = kvm_io_bus_register_dev(kvm, KVM_MMIO_BUS, iodev->base_addr, in vgic_register_its_iodev()
1827 mutex_unlock(&kvm->slots_lock); in vgic_register_its_iodev()
1835 void vgic_lpi_translation_cache_init(struct kvm *kvm) in vgic_lpi_translation_cache_init() argument
1837 struct vgic_dist *dist = &kvm->arch.vgic; in vgic_lpi_translation_cache_init()
1844 sz = atomic_read(&kvm->online_vcpus) * LPI_DEFAULT_PCPU_CACHE_SIZE; in vgic_lpi_translation_cache_init()
1859 void vgic_lpi_translation_cache_destroy(struct kvm *kvm) in vgic_lpi_translation_cache_destroy() argument
1861 struct vgic_dist *dist = &kvm->arch.vgic; in vgic_lpi_translation_cache_destroy()
1864 vgic_its_invalidate_cache(kvm); in vgic_lpi_translation_cache_destroy()
1895 if (vgic_initialized(dev->kvm)) { in vgic_its_create()
1896 int ret = vgic_v4_init(dev->kvm); in vgic_its_create()
1902 vgic_lpi_translation_cache_init(dev->kvm); in vgic_its_create()
1913 dev->kvm->arch.vgic.msis_require_devid = true; in vgic_its_create()
1914 dev->kvm->arch.vgic.has_its = true; in vgic_its_create()
1922 dev->kvm->arch.vgic.propbaser = INITIAL_PROPBASER_VALUE; in vgic_its_create()
1931 struct kvm *kvm = kvm_dev->kvm; in vgic_its_destroy() local
1936 vgic_its_free_device_list(kvm, its); in vgic_its_destroy()
1937 vgic_its_free_collection_list(kvm, its); in vgic_its_destroy()
1993 mutex_lock(&dev->kvm->lock); in vgic_its_attr_regs_access()
2008 if (!lock_all_vcpus(dev->kvm)) { in vgic_its_attr_regs_access()
2019 ret = region->uaccess_its_write(dev->kvm, its, addr, in vgic_its_attr_regs_access()
2022 region->its_write(dev->kvm, its, addr, len, *reg); in vgic_its_attr_regs_access()
2024 *reg = region->its_read(dev->kvm, its, addr, len); in vgic_its_attr_regs_access()
2026 unlock_all_vcpus(dev->kvm); in vgic_its_attr_regs_access()
2028 mutex_unlock(&dev->kvm->lock); in vgic_its_attr_regs_access()
2090 struct kvm *kvm = its->dev->kvm; in scan_its_table() local
2103 ret = kvm_read_guest_lock(kvm, gpa, entry, esz); in scan_its_table()
2128 struct kvm *kvm = its->dev->kvm; in vgic_its_save_ite() local
2137 return kvm_write_guest_lock(kvm, gpa, &val, ite_esz); in vgic_its_save_ite()
2151 struct kvm *kvm = its->dev->kvm; in vgic_its_restore_ite() local
2186 vcpu = kvm_get_vcpu(kvm, collection->target_addr); in vgic_its_restore_ite()
2188 irq = vgic_add_lpi(kvm, lpi_id, vcpu); in vgic_its_restore_ite()
2273 struct kvm *kvm = its->dev->kvm; in vgic_its_save_dte() local
2284 return kvm_write_guest_lock(kvm, ptr, &val, dte_esz); in vgic_its_save_dte()
2328 vgic_its_free_device(its->dev->kvm, dev); in vgic_its_restore_dte()
2464 return kvm_write_guest_lock(its->dev->kvm, gpa, &val, esz); in vgic_its_save_cte()
2470 struct kvm *kvm = its->dev->kvm; in vgic_its_restore_cte() local
2476 ret = kvm_read_guest_lock(kvm, gpa, &val, esz); in vgic_its_restore_cte()
2487 target_addr >= atomic_read(&kvm->online_vcpus)) in vgic_its_restore_cte()
2536 ret = kvm_write_guest_lock(its->dev->kvm, gpa, &val, cte_esz); in vgic_its_save_collection_table()
2622 static void vgic_its_reset(struct kvm *kvm, struct vgic_its *its) in vgic_its_reset() argument
2631 vgic_its_free_device_list(kvm, its); in vgic_its_reset()
2632 vgic_its_free_collection_list(kvm, its); in vgic_its_reset()
2663 static int vgic_its_ctrl(struct kvm *kvm, struct vgic_its *its, u64 attr) in vgic_its_ctrl() argument
2671 mutex_lock(&kvm->lock); in vgic_its_ctrl()
2674 if (!lock_all_vcpus(kvm)) { in vgic_its_ctrl()
2676 mutex_unlock(&kvm->lock); in vgic_its_ctrl()
2682 vgic_its_reset(kvm, its); in vgic_its_ctrl()
2692 unlock_all_vcpus(kvm); in vgic_its_ctrl()
2694 mutex_unlock(&kvm->lock); in vgic_its_ctrl()
2716 ret = vgic_check_ioaddr(dev->kvm, &its->vgic_its_base, in vgic_its_set_attr()
2721 return vgic_register_its_iodev(dev->kvm, its, addr); in vgic_its_set_attr()
2724 return vgic_its_ctrl(dev->kvm, its, attr->attr); in vgic_its_set_attr()