Lines matching refs:vdev

Each entry below gives the source line number, the matching line of code, and the enclosing function; lines where vdev is declared are tagged "argument" or "local". Judging by the function names, the file is drivers/vfio/pci/vfio_pci_core.c, the VFIO PCI core driver.
90 static void vfio_pci_probe_mmaps(struct vfio_pci_core_device *vdev) in vfio_pci_probe_mmaps() argument
99 res = &vdev->pdev->resource[bar]; in vfio_pci_probe_mmaps()
116 vdev->bar_mmap_supported[bar] = true; in vfio_pci_probe_mmaps()
141 &vdev->dummy_resources_list); in vfio_pci_probe_mmaps()
142 vdev->bar_mmap_supported[bar] = true; in vfio_pci_probe_mmaps()
154 vdev->bar_mmap_supported[bar] = false; in vfio_pci_probe_mmaps()
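The probe loop above decides per BAR whether mmap can be offered. A minimal sketch of the eligibility test, assuming the usual PCI resource helpers; this is an illustration only, and the dummy-resource branch at lines 141-142 additionally rescues sub-page BARs when the trailing space can be claimed:

    #include <linux/pci.h>

    /*
     * Sketch of the test behind bar_mmap_supported[]: only an MMIO BAR
     * that starts page-aligned and spans at least a page can be handed
     * straight to userspace via mmap.
     */
    static bool example_bar_mmappable(struct pci_dev *pdev, int bar)
    {
            struct resource *res = &pdev->resource[bar];

            return (res->flags & IORESOURCE_MEM) &&
                   !(res->start & ~PAGE_MASK) &&
                   resource_size(res) >= PAGE_SIZE;
    }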
194 static void vfio_pci_probe_power_state(struct vfio_pci_core_device *vdev) in vfio_pci_probe_power_state() argument
196 struct pci_dev *pdev = vdev->pdev; in vfio_pci_probe_power_state()
204 vdev->needs_pm_restore = !(pmcsr & PCI_PM_CTRL_NO_SOFT_RESET); in vfio_pci_probe_power_state()
214 int vfio_pci_set_power_state(struct vfio_pci_core_device *vdev, pci_power_t state) in vfio_pci_set_power_state() argument
216 struct pci_dev *pdev = vdev->pdev; in vfio_pci_set_power_state()
220 if (vdev->needs_pm_restore) { in vfio_pci_set_power_state()
247 kfree(vdev->pm_save); in vfio_pci_set_power_state()
248 vdev->pm_save = pci_store_saved_state(pdev); in vfio_pci_set_power_state()
250 pci_load_and_free_saved_state(pdev, &vdev->pm_save); in vfio_pci_set_power_state()
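The needs_pm_restore path pairs two PCI-core helpers: state is captured before the device enters D3hot (where a NoSoftRst- device loses its configuration, per the PCI_PM_CTRL_NO_SOFT_RESET probe above) and re-applied on the way back to D0. A hedged sketch of that pairing; the example_* helpers are illustrations, not functions from the driver, and the real vfio_pci_set_power_state() folds both directions into one function:

    #include <linux/pci.h>
    #include <linux/slab.h>

    static int example_enter_d3hot(struct pci_dev *pdev,
                                   struct pci_saved_state **pm_save)
    {
            int ret;

            pci_save_state(pdev);           /* snapshot into pdev */
            ret = pci_set_power_state(pdev, PCI_D3hot);
            if (ret)
                    return ret;

            kfree(*pm_save);                /* drop any stale copy */
            *pm_save = pci_store_saved_state(pdev); /* detach heap copy */
            return 0;
    }

    static int example_return_to_d0(struct pci_dev *pdev,
                                    struct pci_saved_state **pm_save)
    {
            int ret = pci_set_power_state(pdev, PCI_D0);

            if (!ret && *pm_save) {
                    /* re-attach the copy (freeing it), then restore */
                    pci_load_and_free_saved_state(pdev, pm_save);
                    pci_restore_state(pdev);
            }
            return ret;
    }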
258 int vfio_pci_core_enable(struct vfio_pci_core_device *vdev) in vfio_pci_core_enable() argument
260 struct pci_dev *pdev = vdev->pdev; in vfio_pci_core_enable()
265 vfio_pci_set_power_state(vdev, PCI_D0); in vfio_pci_core_enable()
281 vdev->reset_works = !ret; in vfio_pci_core_enable()
283 vdev->pci_saved_state = pci_store_saved_state(pdev); in vfio_pci_core_enable()
284 if (!vdev->pci_saved_state) in vfio_pci_core_enable()
290 vdev->nointx = true; in vfio_pci_core_enable()
293 vdev->pci_2_3 = pci_intx_mask_supported(pdev); in vfio_pci_core_enable()
297 if (vdev->pci_2_3 && (cmd & PCI_COMMAND_INTX_DISABLE)) { in vfio_pci_core_enable()
302 ret = vfio_config_init(vdev); in vfio_pci_core_enable()
304 kfree(vdev->pci_saved_state); in vfio_pci_core_enable()
305 vdev->pci_saved_state = NULL; in vfio_pci_core_enable()
318 vdev->msix_bar = table & PCI_MSIX_TABLE_BIR; in vfio_pci_core_enable()
319 vdev->msix_offset = table & PCI_MSIX_TABLE_OFFSET; in vfio_pci_core_enable()
320 vdev->msix_size = ((flags & PCI_MSIX_FLAGS_QSIZE) + 1) * 16; in vfio_pci_core_enable()
322 vdev->msix_bar = 0xFF; in vfio_pci_core_enable()
325 vdev->has_vga = true; in vfio_pci_core_enable()
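The msix_bar/msix_offset/msix_size fields at lines 318-320 come straight from the MSI-X capability. A sketch of those config-space reads, assuming pdev->msix_cap has already been found non-zero; each MSI-X table entry is 16 bytes, and the Table Size field is N-1 encoded, which is where the "+ 1" comes from:

    #include <linux/pci.h>

    static void example_read_msix_geometry(struct pci_dev *pdev,
                                           u8 *bar, u32 *offset, u32 *size)
    {
            u16 flags;
            u32 table;

            pci_read_config_word(pdev, pdev->msix_cap + PCI_MSIX_FLAGS, &flags);
            pci_read_config_dword(pdev, pdev->msix_cap + PCI_MSIX_TABLE, &table);

            *bar = table & PCI_MSIX_TABLE_BIR;       /* BAR holding the table */
            *offset = table & PCI_MSIX_TABLE_OFFSET; /* byte offset within it */
            *size = ((flags & PCI_MSIX_FLAGS_QSIZE) + 1) * 16; /* table bytes */
    }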
332 void vfio_pci_core_disable(struct vfio_pci_core_device *vdev) in vfio_pci_core_disable() argument
334 struct pci_dev *pdev = vdev->pdev; in vfio_pci_core_disable()
340 lockdep_assert_held(&vdev->vdev.dev_set->lock); in vfio_pci_core_disable()
351 vfio_pci_set_power_state(vdev, PCI_D0); in vfio_pci_core_disable()
356 vfio_pci_set_irqs_ioctl(vdev, VFIO_IRQ_SET_DATA_NONE | in vfio_pci_core_disable()
358 vdev->irq_type, 0, 0, NULL); in vfio_pci_core_disable()
362 &vdev->ioeventfds_list, next) { in vfio_pci_core_disable()
367 vdev->ioeventfds_nr = 0; in vfio_pci_core_disable()
369 vdev->virq_disabled = false; in vfio_pci_core_disable()
371 for (i = 0; i < vdev->num_regions; i++) in vfio_pci_core_disable()
372 vdev->region[i].ops->release(vdev, &vdev->region[i]); in vfio_pci_core_disable()
374 vdev->num_regions = 0; in vfio_pci_core_disable()
375 kfree(vdev->region); in vfio_pci_core_disable()
376 vdev->region = NULL; /* don't krealloc a freed pointer */ in vfio_pci_core_disable()
378 vfio_config_free(vdev); in vfio_pci_core_disable()
382 if (!vdev->barmap[bar]) in vfio_pci_core_disable()
384 pci_iounmap(pdev, vdev->barmap[bar]); in vfio_pci_core_disable()
386 vdev->barmap[bar] = NULL; in vfio_pci_core_disable()
390 &vdev->dummy_resources_list, res_next) { in vfio_pci_core_disable()
396 vdev->needs_reset = true; in vfio_pci_core_disable()
404 if (pci_load_and_free_saved_state(pdev, &vdev->pci_saved_state)) { in vfio_pci_core_disable()
407 if (!vdev->reset_works) in vfio_pci_core_disable()
426 if (vdev->reset_works && pci_dev_trylock(pdev)) { in vfio_pci_core_disable()
428 vdev->needs_reset = false; in vfio_pci_core_disable()
436 if (!vfio_pci_dev_set_try_reset(vdev->vdev.dev_set) && !disable_idle_d3) in vfio_pci_core_disable()
437 vfio_pci_set_power_state(vdev, PCI_D3hot); in vfio_pci_core_disable()
443 struct vfio_pci_core_device *vdev = in vfio_pci_core_close_device() local
444 container_of(core_vdev, struct vfio_pci_core_device, vdev); in vfio_pci_core_close_device()
446 if (vdev->sriov_pf_core_dev) { in vfio_pci_core_close_device()
447 mutex_lock(&vdev->sriov_pf_core_dev->vf_token->lock); in vfio_pci_core_close_device()
448 WARN_ON(!vdev->sriov_pf_core_dev->vf_token->users); in vfio_pci_core_close_device()
449 vdev->sriov_pf_core_dev->vf_token->users--; in vfio_pci_core_close_device()
450 mutex_unlock(&vdev->sriov_pf_core_dev->vf_token->lock); in vfio_pci_core_close_device()
452 vfio_spapr_pci_eeh_release(vdev->pdev); in vfio_pci_core_close_device()
453 vfio_pci_core_disable(vdev); in vfio_pci_core_close_device()
455 mutex_lock(&vdev->igate); in vfio_pci_core_close_device()
456 if (vdev->err_trigger) { in vfio_pci_core_close_device()
457 eventfd_ctx_put(vdev->err_trigger); in vfio_pci_core_close_device()
458 vdev->err_trigger = NULL; in vfio_pci_core_close_device()
460 if (vdev->req_trigger) { in vfio_pci_core_close_device()
461 eventfd_ctx_put(vdev->req_trigger); in vfio_pci_core_close_device()
462 vdev->req_trigger = NULL; in vfio_pci_core_close_device()
464 mutex_unlock(&vdev->igate); in vfio_pci_core_close_device()
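Here, and in every other vfio_device_ops callback in this listing, the driver-private structure is recovered from the embedded struct vfio_device with container_of(). A minimal sketch of that idiom; example_close() is a hypothetical callback, and vfio_pci_core_disable() is the exported core helper seen at line 453:

    #include <linux/vfio_pci_core.h>

    static void example_close(struct vfio_device *core_vdev)
    {
            /* core_vdev points at the vdev member embedded inside the
             * wrapper; container_of() walks back to the wrapper itself. */
            struct vfio_pci_core_device *vdev =
                    container_of(core_vdev, struct vfio_pci_core_device, vdev);

            vfio_pci_core_disable(vdev);
    }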
468 void vfio_pci_core_finish_enable(struct vfio_pci_core_device *vdev) in vfio_pci_core_finish_enable() argument
470 vfio_pci_probe_mmaps(vdev); in vfio_pci_core_finish_enable()
471 vfio_spapr_pci_eeh_open(vdev->pdev); in vfio_pci_core_finish_enable()
473 if (vdev->sriov_pf_core_dev) { in vfio_pci_core_finish_enable()
474 mutex_lock(&vdev->sriov_pf_core_dev->vf_token->lock); in vfio_pci_core_finish_enable()
475 vdev->sriov_pf_core_dev->vf_token->users++; in vfio_pci_core_finish_enable()
476 mutex_unlock(&vdev->sriov_pf_core_dev->vf_token->lock); in vfio_pci_core_finish_enable()
481 static int vfio_pci_get_irq_count(struct vfio_pci_core_device *vdev, int irq_type) in vfio_pci_get_irq_count() argument
487 vdev->nointx || vdev->pdev->is_virtfn) in vfio_pci_get_irq_count()
490 pci_read_config_byte(vdev->pdev, PCI_INTERRUPT_PIN, &pin); in vfio_pci_get_irq_count()
497 pos = vdev->pdev->msi_cap; in vfio_pci_get_irq_count()
499 pci_read_config_word(vdev->pdev, in vfio_pci_get_irq_count()
507 pos = vdev->pdev->msix_cap; in vfio_pci_get_irq_count()
509 pci_read_config_word(vdev->pdev, in vfio_pci_get_irq_count()
515 if (pci_is_pcie(vdev->pdev)) in vfio_pci_get_irq_count()
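The MSI and MSI-X branches of vfio_pci_get_irq_count() read the vector count directly from the capability registers. A sketch of the two decodings, assuming kernel context; the PCI core also offers pci_msi_vec_count()/pci_msix_vec_count() for the same information:

    #include <linux/pci.h>

    static int example_msi_vec_count(struct pci_dev *pdev)
    {
            u16 flags;

            if (!pdev->msi_cap)
                    return 0;
            pci_read_config_word(pdev, pdev->msi_cap + PCI_MSI_FLAGS, &flags);
            /* Multiple Message Capable is a power-of-two encoding */
            return 1 << ((flags & PCI_MSI_FLAGS_QMASK) >> 1);
    }

    static int example_msix_vec_count(struct pci_dev *pdev)
    {
            u16 flags;

            if (!pdev->msix_cap)
                    return 0;
            pci_read_config_word(pdev, pdev->msix_cap + PCI_MSIX_FLAGS, &flags);
            /* Table Size is N-1 encoded */
            return (flags & PCI_MSIX_FLAGS_QSIZE) + 1;
    }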
602 static int msix_mmappable_cap(struct vfio_pci_core_device *vdev, in msix_mmappable_cap() argument
613 int vfio_pci_register_dev_region(struct vfio_pci_core_device *vdev, in vfio_pci_register_dev_region() argument
620 region = krealloc(vdev->region, in vfio_pci_register_dev_region()
621 (vdev->num_regions + 1) * sizeof(*region), in vfio_pci_register_dev_region()
626 vdev->region = region; in vfio_pci_register_dev_region()
627 vdev->region[vdev->num_regions].type = type; in vfio_pci_register_dev_region()
628 vdev->region[vdev->num_regions].subtype = subtype; in vfio_pci_register_dev_region()
629 vdev->region[vdev->num_regions].ops = ops; in vfio_pci_register_dev_region()
630 vdev->region[vdev->num_regions].size = size; in vfio_pci_register_dev_region()
631 vdev->region[vdev->num_regions].flags = flags; in vfio_pci_register_dev_region()
632 vdev->region[vdev->num_regions].data = data; in vfio_pci_register_dev_region()
634 vdev->num_regions++; in vfio_pci_register_dev_region()
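vfio_pci_register_dev_region() appends to a heap array with krealloc(); the result is assigned through a temporary so the existing array is not leaked when the allocation fails, and the NULL assignment at line 376 keeps a later krealloc() from acting on a freed pointer. A generic sketch of the append, with struct foo standing in for struct vfio_pci_region:

    #include <linux/slab.h>

    struct foo { int type; };       /* stand-in element type */

    static int example_append(struct foo **arr, unsigned int *nr,
                              const struct foo *elem)
    {
            struct foo *tmp;

            tmp = krealloc(*arr, (*nr + 1) * sizeof(*tmp), GFP_KERNEL);
            if (!tmp)
                    return -ENOMEM; /* *arr is still valid here */

            *arr = tmp;
            tmp[(*nr)++] = *elem;
            return 0;
    }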
643 struct vfio_pci_core_device *vdev = in vfio_pci_core_ioctl() local
644 container_of(core_vdev, struct vfio_pci_core_device, vdev); in vfio_pci_core_ioctl()
671 if (vdev->reset_works) in vfio_pci_core_ioctl()
674 info.num_regions = VFIO_PCI_NUM_REGIONS + vdev->num_regions; in vfio_pci_core_ioctl()
677 ret = vfio_pci_info_zdev_add_caps(vdev, &caps); in vfio_pci_core_ioctl()
679 pci_warn(vdev->pdev, "Failed to setup zPCI info capabilities\n"); in vfio_pci_core_ioctl()
705 struct pci_dev *pdev = vdev->pdev; in vfio_pci_core_ioctl()
735 if (vdev->bar_mmap_supported[info.index]) { in vfio_pci_core_ioctl()
737 if (info.index == vdev->msix_bar) { in vfio_pci_core_ioctl()
738 ret = msix_mmappable_cap(vdev, &caps); in vfio_pci_core_ioctl()
769 cmd = vfio_pci_memory_lock_and_enable(vdev); in vfio_pci_core_ioctl()
777 vfio_pci_memory_unlock_and_restore(vdev, cmd); in vfio_pci_core_ioctl()
782 if (!vdev->has_vga) in vfio_pci_core_ioctl()
798 VFIO_PCI_NUM_REGIONS + vdev->num_regions) in vfio_pci_core_ioctl()
802 vdev->num_regions); in vfio_pci_core_ioctl()
807 info.size = vdev->region[i].size; in vfio_pci_core_ioctl()
808 info.flags = vdev->region[i].flags; in vfio_pci_core_ioctl()
810 cap_type.type = vdev->region[i].type; in vfio_pci_core_ioctl()
811 cap_type.subtype = vdev->region[i].subtype; in vfio_pci_core_ioctl()
818 if (vdev->region[i].ops->add_capability) { in vfio_pci_core_ioctl()
819 ret = vdev->region[i].ops->add_capability(vdev, in vfio_pci_core_ioctl()
820 &vdev->region[i], &caps); in vfio_pci_core_ioctl()
865 if (pci_is_pcie(vdev->pdev)) in vfio_pci_core_ioctl()
874 info.count = vfio_pci_get_irq_count(vdev, info.index); in vfio_pci_core_ioctl()
896 max = vfio_pci_get_irq_count(vdev, hdr.index); in vfio_pci_core_ioctl()
910 mutex_lock(&vdev->igate); in vfio_pci_core_ioctl()
912 ret = vfio_pci_set_irqs_ioctl(vdev, hdr.flags, hdr.index, in vfio_pci_core_ioctl()
915 mutex_unlock(&vdev->igate); in vfio_pci_core_ioctl()
923 if (!vdev->reset_works) in vfio_pci_core_ioctl()
926 vfio_pci_zap_and_down_write_memory_lock(vdev); in vfio_pci_core_ioctl()
938 vfio_pci_set_power_state(vdev, PCI_D0); in vfio_pci_core_ioctl()
940 ret = pci_try_reset_function(vdev->pdev); in vfio_pci_core_ioctl()
941 up_write(&vdev->memory_lock); in vfio_pci_core_ioctl()
963 if (!pci_probe_reset_slot(vdev->pdev->slot)) in vfio_pci_core_ioctl()
965 else if (pci_probe_reset_bus(vdev->pdev->bus)) in vfio_pci_core_ioctl()
969 ret = vfio_pci_for_each_slot_or_bus(vdev->pdev, in vfio_pci_core_ioctl()
993 ret = vfio_pci_for_each_slot_or_bus(vdev->pdev, in vfio_pci_core_ioctl()
1035 if (!pci_probe_reset_slot(vdev->pdev->slot)) in vfio_pci_core_ioctl()
1037 else if (pci_probe_reset_bus(vdev->pdev->bus)) in vfio_pci_core_ioctl()
1046 ret = vfio_pci_for_each_slot_or_bus(vdev->pdev, in vfio_pci_core_ioctl()
1103 ret = vfio_pci_dev_set_hot_reset(vdev->vdev.dev_set, &info); in vfio_pci_core_ioctl()
1131 return vfio_pci_ioeventfd(vdev, ioeventfd.offset, in vfio_pci_core_ioctl()
1160 if (!vdev->vf_token) in vfio_pci_core_ioctl()
1184 mutex_lock(&vdev->vf_token->lock); in vfio_pci_core_ioctl()
1185 uuid_copy(&vdev->vf_token->uuid, &uuid); in vfio_pci_core_ioctl()
1186 mutex_unlock(&vdev->vf_token->lock); in vfio_pci_core_ioctl()
1198 static ssize_t vfio_pci_rw(struct vfio_pci_core_device *vdev, char __user *buf, in vfio_pci_rw() argument
1203 if (index >= VFIO_PCI_NUM_REGIONS + vdev->num_regions) in vfio_pci_rw()
1208 return vfio_pci_config_rw(vdev, buf, count, ppos, iswrite); in vfio_pci_rw()
1213 return vfio_pci_bar_rw(vdev, buf, count, ppos, false); in vfio_pci_rw()
1216 return vfio_pci_bar_rw(vdev, buf, count, ppos, iswrite); in vfio_pci_rw()
1219 return vfio_pci_vga_rw(vdev, buf, count, ppos, iswrite); in vfio_pci_rw()
1222 return vdev->region[index].ops->rw(vdev, buf, in vfio_pci_rw()
1232 struct vfio_pci_core_device *vdev = in vfio_pci_core_read() local
1233 container_of(core_vdev, struct vfio_pci_core_device, vdev); in vfio_pci_core_read()
1238 return vfio_pci_rw(vdev, buf, count, ppos, false); in vfio_pci_core_read()
1245 struct vfio_pci_core_device *vdev = in vfio_pci_core_write() local
1246 container_of(core_vdev, struct vfio_pci_core_device, vdev); in vfio_pci_core_write()
1251 return vfio_pci_rw(vdev, (char __user *)buf, count, ppos, true); in vfio_pci_core_write()
1256 static int vfio_pci_zap_and_vma_lock(struct vfio_pci_core_device *vdev, bool try) in vfio_pci_zap_and_vma_lock() argument
1287 if (!mutex_trylock(&vdev->vma_lock)) in vfio_pci_zap_and_vma_lock()
1290 mutex_lock(&vdev->vma_lock); in vfio_pci_zap_and_vma_lock()
1292 while (!list_empty(&vdev->vma_list)) { in vfio_pci_zap_and_vma_lock()
1293 mmap_vma = list_first_entry(&vdev->vma_list, in vfio_pci_zap_and_vma_lock()
1306 mutex_unlock(&vdev->vma_lock); in vfio_pci_zap_and_vma_lock()
1317 if (!mutex_trylock(&vdev->vma_lock)) { in vfio_pci_zap_and_vma_lock()
1323 mutex_lock(&vdev->vma_lock); in vfio_pci_zap_and_vma_lock()
1326 &vdev->vma_list, vma_next) { in vfio_pci_zap_and_vma_lock()
1338 mutex_unlock(&vdev->vma_lock); in vfio_pci_zap_and_vma_lock()
1344 void vfio_pci_zap_and_down_write_memory_lock(struct vfio_pci_core_device *vdev) in vfio_pci_zap_and_down_write_memory_lock() argument
1346 vfio_pci_zap_and_vma_lock(vdev, false); in vfio_pci_zap_and_down_write_memory_lock()
1347 down_write(&vdev->memory_lock); in vfio_pci_zap_and_down_write_memory_lock()
1348 mutex_unlock(&vdev->vma_lock); in vfio_pci_zap_and_down_write_memory_lock()
1351 u16 vfio_pci_memory_lock_and_enable(struct vfio_pci_core_device *vdev) in vfio_pci_memory_lock_and_enable() argument
1355 down_write(&vdev->memory_lock); in vfio_pci_memory_lock_and_enable()
1356 pci_read_config_word(vdev->pdev, PCI_COMMAND, &cmd); in vfio_pci_memory_lock_and_enable()
1358 pci_write_config_word(vdev->pdev, PCI_COMMAND, in vfio_pci_memory_lock_and_enable()
1364 void vfio_pci_memory_unlock_and_restore(struct vfio_pci_core_device *vdev, u16 cmd) in vfio_pci_memory_unlock_and_restore() argument
1366 pci_write_config_word(vdev->pdev, PCI_COMMAND, cmd); in vfio_pci_memory_unlock_and_restore()
1367 up_write(&vdev->memory_lock); in vfio_pci_memory_unlock_and_restore()
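The two helpers above form a bracket: the first takes memory_lock for write, reads COMMAND, and forces memory decode on, returning the original value; the second writes that value back and drops the lock. A sketch of the usage pattern, the same shape as the ioctl call sites at lines 769 and 777, assuming the core's header exposes the pair as it does in this kernel era:

    static void example_guarded_bar_access(struct vfio_pci_core_device *vdev)
    {
            u16 cmd = vfio_pci_memory_lock_and_enable(vdev);

            /* ... BAR memory may be touched safely here ... */

            vfio_pci_memory_unlock_and_restore(vdev, cmd);
    }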
1371 static int __vfio_pci_add_vma(struct vfio_pci_core_device *vdev, in __vfio_pci_add_vma() argument
1381 list_add(&mmap_vma->vma_next, &vdev->vma_list); in __vfio_pci_add_vma()
1397 struct vfio_pci_core_device *vdev = vma->vm_private_data; in vfio_pci_mmap_close() local
1400 mutex_lock(&vdev->vma_lock); in vfio_pci_mmap_close()
1401 list_for_each_entry(mmap_vma, &vdev->vma_list, vma_next) { in vfio_pci_mmap_close()
1408 mutex_unlock(&vdev->vma_lock); in vfio_pci_mmap_close()
1414 struct vfio_pci_core_device *vdev = vma->vm_private_data; in vfio_pci_mmap_fault() local
1418 mutex_lock(&vdev->vma_lock); in vfio_pci_mmap_fault()
1419 down_read(&vdev->memory_lock); in vfio_pci_mmap_fault()
1421 if (!__vfio_pci_memory_enabled(vdev)) { in vfio_pci_mmap_fault()
1432 list_for_each_entry(mmap_vma, &vdev->vma_list, vma_next) { in vfio_pci_mmap_fault()
1445 if (__vfio_pci_add_vma(vdev, vma)) { in vfio_pci_mmap_fault()
1451 up_read(&vdev->memory_lock); in vfio_pci_mmap_fault()
1452 mutex_unlock(&vdev->vma_lock); in vfio_pci_mmap_fault()
1464 struct vfio_pci_core_device *vdev = in vfio_pci_core_mmap() local
1465 container_of(core_vdev, struct vfio_pci_core_device, vdev); in vfio_pci_core_mmap()
1466 struct pci_dev *pdev = vdev->pdev; in vfio_pci_core_mmap()
1473 if (index >= VFIO_PCI_NUM_REGIONS + vdev->num_regions) in vfio_pci_core_mmap()
1481 struct vfio_pci_region *region = vdev->region + regnum; in vfio_pci_core_mmap()
1485 return region->ops->mmap(vdev, region, vma); in vfio_pci_core_mmap()
1490 if (!vdev->bar_mmap_supported[index]) in vfio_pci_core_mmap()
1506 if (!vdev->barmap[index]) { in vfio_pci_core_mmap()
1512 vdev->barmap[index] = pci_iomap(pdev, index, 0); in vfio_pci_core_mmap()
1513 if (!vdev->barmap[index]) { in vfio_pci_core_mmap()
1519 vma->vm_private_data = vdev; in vfio_pci_core_mmap()
1536 struct vfio_pci_core_device *vdev = in vfio_pci_core_request() local
1537 container_of(core_vdev, struct vfio_pci_core_device, vdev); in vfio_pci_core_request()
1538 struct pci_dev *pdev = vdev->pdev; in vfio_pci_core_request()
1540 mutex_lock(&vdev->igate); in vfio_pci_core_request()
1542 if (vdev->req_trigger) { in vfio_pci_core_request()
1547 eventfd_signal(vdev->req_trigger, 1); in vfio_pci_core_request()
1553 mutex_unlock(&vdev->igate); in vfio_pci_core_request()
1557 static int vfio_pci_validate_vf_token(struct vfio_pci_core_device *vdev, in vfio_pci_validate_vf_token() argument
1585 if (vdev->pdev->is_virtfn) { in vfio_pci_validate_vf_token()
1586 struct vfio_pci_core_device *pf_vdev = vdev->sriov_pf_core_dev; in vfio_pci_validate_vf_token()
1593 pci_info_ratelimited(vdev->pdev, in vfio_pci_validate_vf_token()
1599 pci_info_ratelimited(vdev->pdev, in vfio_pci_validate_vf_token()
1609 pci_info_ratelimited(vdev->pdev, in vfio_pci_validate_vf_token()
1613 } else if (vdev->vf_token) { in vfio_pci_validate_vf_token()
1614 mutex_lock(&vdev->vf_token->lock); in vfio_pci_validate_vf_token()
1615 if (vdev->vf_token->users) { in vfio_pci_validate_vf_token()
1617 mutex_unlock(&vdev->vf_token->lock); in vfio_pci_validate_vf_token()
1618 pci_info_ratelimited(vdev->pdev, in vfio_pci_validate_vf_token()
1623 if (!uuid_equal(uuid, &vdev->vf_token->uuid)) { in vfio_pci_validate_vf_token()
1624 mutex_unlock(&vdev->vf_token->lock); in vfio_pci_validate_vf_token()
1625 pci_info_ratelimited(vdev->pdev, in vfio_pci_validate_vf_token()
1630 uuid_copy(&vdev->vf_token->uuid, uuid); in vfio_pci_validate_vf_token()
1633 mutex_unlock(&vdev->vf_token->lock); in vfio_pci_validate_vf_token()
1635 pci_info_ratelimited(vdev->pdev, in vfio_pci_validate_vf_token()
1647 struct vfio_pci_core_device *vdev = in vfio_pci_core_match() local
1648 container_of(core_vdev, struct vfio_pci_core_device, vdev); in vfio_pci_core_match()
1653 if (strncmp(pci_name(vdev->pdev), buf, strlen(pci_name(vdev->pdev)))) in vfio_pci_core_match()
1656 if (strlen(buf) > strlen(pci_name(vdev->pdev))) { in vfio_pci_core_match()
1657 buf += strlen(pci_name(vdev->pdev)); in vfio_pci_core_match()
1688 ret = vfio_pci_validate_vf_token(vdev, vf_token, &uuid); in vfio_pci_core_match()
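vfio_pci_core_match() accepts either the bare device name or the name followed by a "vf_token=<uuid>" option, which it parses into the uuid handed to vfio_pci_validate_vf_token() at line 1688. A sketch of the token parsing, assuming uuid_parse() from <linux/uuid.h>; the real code additionally skips whitespace and rejects unknown options and repeated tokens:

    #include <linux/string.h>
    #include <linux/uuid.h>

    /* Illustrative parser for a single "vf_token=<uuid>" fragment. */
    static int example_parse_vf_token(const char *opt, uuid_t *uuid)
    {
            if (strncmp(opt, "vf_token=", 9))
                    return -EINVAL;

            return uuid_parse(opt + 9, uuid); /* 36-char canonical UUID */
    }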
1699 struct vfio_pci_core_device *vdev = container_of(nb, in vfio_pci_bus_notifier() local
1706 pdev->is_virtfn && physfn == vdev->pdev) { in vfio_pci_bus_notifier()
1707 pci_info(vdev->pdev, "Captured SR-IOV VF %s driver_override\n", in vfio_pci_bus_notifier()
1710 vdev->vdev.ops->name); in vfio_pci_bus_notifier()
1712 pdev->is_virtfn && physfn == vdev->pdev) { in vfio_pci_bus_notifier()
1715 if (drv && drv != pci_dev_driver(vdev->pdev)) in vfio_pci_bus_notifier()
1716 pci_warn(vdev->pdev, in vfio_pci_bus_notifier()
1719 pci_dev_driver(vdev->pdev)->name); in vfio_pci_bus_notifier()
1725 static int vfio_pci_vf_init(struct vfio_pci_core_device *vdev) in vfio_pci_vf_init() argument
1727 struct pci_dev *pdev = vdev->pdev; in vfio_pci_vf_init()
1739 physfn = pci_physfn(vdev->pdev); in vfio_pci_vf_init()
1743 vdev->sriov_pf_core_dev = cur; in vfio_pci_vf_init()
1755 vdev->vf_token = kzalloc(sizeof(*vdev->vf_token), GFP_KERNEL); in vfio_pci_vf_init()
1756 if (!vdev->vf_token) in vfio_pci_vf_init()
1759 mutex_init(&vdev->vf_token->lock); in vfio_pci_vf_init()
1760 uuid_gen(&vdev->vf_token->uuid); in vfio_pci_vf_init()
1762 vdev->nb.notifier_call = vfio_pci_bus_notifier; in vfio_pci_vf_init()
1763 ret = bus_register_notifier(&pci_bus_type, &vdev->nb); in vfio_pci_vf_init()
1765 kfree(vdev->vf_token); in vfio_pci_vf_init()
1771 static void vfio_pci_vf_uninit(struct vfio_pci_core_device *vdev) in vfio_pci_vf_uninit() argument
1773 if (!vdev->vf_token) in vfio_pci_vf_uninit()
1776 bus_unregister_notifier(&pci_bus_type, &vdev->nb); in vfio_pci_vf_uninit()
1777 WARN_ON(vdev->vf_token->users); in vfio_pci_vf_uninit()
1778 mutex_destroy(&vdev->vf_token->lock); in vfio_pci_vf_uninit()
1779 kfree(vdev->vf_token); in vfio_pci_vf_uninit()
1782 static int vfio_pci_vga_init(struct vfio_pci_core_device *vdev) in vfio_pci_vga_init() argument
1784 struct pci_dev *pdev = vdev->pdev; in vfio_pci_vga_init()
1797 static void vfio_pci_vga_uninit(struct vfio_pci_core_device *vdev) in vfio_pci_vga_uninit() argument
1799 struct pci_dev *pdev = vdev->pdev; in vfio_pci_vga_uninit()
1809 void vfio_pci_core_init_device(struct vfio_pci_core_device *vdev, in vfio_pci_core_init_device() argument
1813 vfio_init_group_dev(&vdev->vdev, &pdev->dev, vfio_pci_ops); in vfio_pci_core_init_device()
1814 vdev->pdev = pdev; in vfio_pci_core_init_device()
1815 vdev->irq_type = VFIO_PCI_NUM_IRQS; in vfio_pci_core_init_device()
1816 mutex_init(&vdev->igate); in vfio_pci_core_init_device()
1817 spin_lock_init(&vdev->irqlock); in vfio_pci_core_init_device()
1818 mutex_init(&vdev->ioeventfds_lock); in vfio_pci_core_init_device()
1819 INIT_LIST_HEAD(&vdev->dummy_resources_list); in vfio_pci_core_init_device()
1820 INIT_LIST_HEAD(&vdev->ioeventfds_list); in vfio_pci_core_init_device()
1821 mutex_init(&vdev->vma_lock); in vfio_pci_core_init_device()
1822 INIT_LIST_HEAD(&vdev->vma_list); in vfio_pci_core_init_device()
1823 INIT_LIST_HEAD(&vdev->sriov_pfs_item); in vfio_pci_core_init_device()
1824 init_rwsem(&vdev->memory_lock); in vfio_pci_core_init_device()
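vfio_pci_core_init_device() plus vfio_pci_core_register_device() is the pairing a driver built on this core uses at probe time, with the uninit/unregister counterparts on the error and removal paths. A sketch modeled on the in-tree vfio-pci driver of this era; vfio_pci_ops stands for the driver's vfio_device_ops and is assumed to be defined elsewhere:

    #include <linux/pci.h>
    #include <linux/slab.h>
    #include <linux/vfio_pci_core.h>

    static int example_probe(struct pci_dev *pdev,
                             const struct pci_device_id *id)
    {
            struct vfio_pci_core_device *vdev;
            int ret;

            vdev = kzalloc(sizeof(*vdev), GFP_KERNEL);
            if (!vdev)
                    return -ENOMEM;

            vfio_pci_core_init_device(vdev, pdev, &vfio_pci_ops);

            pci_set_drvdata(pdev, vdev);
            ret = vfio_pci_core_register_device(vdev);
            if (ret) {
                    vfio_pci_core_uninit_device(vdev);
                    kfree(vdev);
            }
            return ret;
    }

    static void example_remove(struct pci_dev *pdev)
    {
            struct vfio_pci_core_device *vdev = pci_get_drvdata(pdev);

            vfio_pci_core_unregister_device(vdev);
            vfio_pci_core_uninit_device(vdev);
            kfree(vdev);
    }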
1828 void vfio_pci_core_uninit_device(struct vfio_pci_core_device *vdev) in vfio_pci_core_uninit_device() argument
1830 mutex_destroy(&vdev->igate); in vfio_pci_core_uninit_device()
1831 mutex_destroy(&vdev->ioeventfds_lock); in vfio_pci_core_uninit_device()
1832 mutex_destroy(&vdev->vma_lock); in vfio_pci_core_uninit_device()
1833 vfio_uninit_group_dev(&vdev->vdev); in vfio_pci_core_uninit_device()
1834 kfree(vdev->region); in vfio_pci_core_uninit_device()
1835 kfree(vdev->pm_save); in vfio_pci_core_uninit_device()
1839 int vfio_pci_core_register_device(struct vfio_pci_core_device *vdev) in vfio_pci_core_register_device() argument
1841 struct pci_dev *pdev = vdev->pdev; in vfio_pci_core_register_device()
1866 ret = vfio_assign_device_set(&vdev->vdev, vdev); in vfio_pci_core_register_device()
1868 ret = vfio_assign_device_set(&vdev->vdev, pdev->slot); in vfio_pci_core_register_device()
1874 ret = vfio_assign_device_set(&vdev->vdev, pdev->bus); in vfio_pci_core_register_device()
1879 ret = vfio_pci_vf_init(vdev); in vfio_pci_core_register_device()
1882 ret = vfio_pci_vga_init(vdev); in vfio_pci_core_register_device()
1886 vfio_pci_probe_power_state(vdev); in vfio_pci_core_register_device()
1898 vfio_pci_set_power_state(vdev, PCI_D0); in vfio_pci_core_register_device()
1899 vfio_pci_set_power_state(vdev, PCI_D3hot); in vfio_pci_core_register_device()
1902 ret = vfio_register_group_dev(&vdev->vdev); in vfio_pci_core_register_device()
1909 vfio_pci_set_power_state(vdev, PCI_D0); in vfio_pci_core_register_device()
1911 vfio_pci_vf_uninit(vdev); in vfio_pci_core_register_device()
1918 void vfio_pci_core_unregister_device(struct vfio_pci_core_device *vdev) in vfio_pci_core_unregister_device() argument
1920 struct pci_dev *pdev = vdev->pdev; in vfio_pci_core_unregister_device()
1924 vfio_unregister_group_dev(&vdev->vdev); in vfio_pci_core_unregister_device()
1926 vfio_pci_vf_uninit(vdev); in vfio_pci_core_unregister_device()
1927 vfio_pci_vga_uninit(vdev); in vfio_pci_core_unregister_device()
1932 vfio_pci_set_power_state(vdev, PCI_D0); in vfio_pci_core_unregister_device()
1939 struct vfio_pci_core_device *vdev; in vfio_pci_aer_err_detected() local
1946 vdev = container_of(device, struct vfio_pci_core_device, vdev); in vfio_pci_aer_err_detected()
1948 mutex_lock(&vdev->igate); in vfio_pci_aer_err_detected()
1950 if (vdev->err_trigger) in vfio_pci_aer_err_detected()
1951 eventfd_signal(vdev->err_trigger, 1); in vfio_pci_aer_err_detected()
1953 mutex_unlock(&vdev->igate); in vfio_pci_aer_err_detected()
1962 struct vfio_pci_core_device *vdev; in vfio_pci_core_sriov_configure() local
1972 vdev = container_of(device, struct vfio_pci_core_device, vdev); in vfio_pci_core_sriov_configure()
1982 if (!list_empty(&vdev->sriov_pfs_item)) { in vfio_pci_core_sriov_configure()
1986 list_add_tail(&vdev->sriov_pfs_item, &vfio_pci_sriov_pfs); in vfio_pci_core_sriov_configure()
1999 list_del_init(&vdev->sriov_pfs_item); in vfio_pci_core_sriov_configure()
2013 static bool vfio_dev_in_groups(struct vfio_pci_core_device *vdev, in vfio_dev_in_groups() argument
2019 if (groups->groups[i] == vdev->vdev.group) in vfio_dev_in_groups()
2059 vdev.dev_set_list)->pdev; in vfio_pci_dev_set_resettable()
2090 vdev.dev_set_list); in vfio_pci_dev_set_hot_reset()
2098 list_for_each_entry(cur_vma, &dev_set->device_list, vdev.dev_set_list) { in vfio_pci_dev_set_hot_reset()
2119 list_for_each_entry(cur_mem, &dev_set->device_list, vdev.dev_set_list) { in vfio_pci_dev_set_hot_reset()
2137 list_for_each_entry(cur, &dev_set->device_list, vdev.dev_set_list) in vfio_pci_dev_set_hot_reset()
2143 list_for_each_entry(cur, &dev_set->device_list, vdev.dev_set_list) { in vfio_pci_dev_set_hot_reset()
2163 list_for_each_entry(cur, &dev_set->device_list, vdev.dev_set_list) { in vfio_pci_dev_set_needs_reset()
2165 if (cur->vdev.open_count) in vfio_pci_dev_set_needs_reset()
2202 list_for_each_entry(cur, &dev_set->device_list, vdev.dev_set_list) in vfio_pci_dev_set_try_reset()
2209 list_for_each_entry(cur, &dev_set->device_list, vdev.dev_set_list) { in vfio_pci_dev_set_try_reset()