Lines matching "map-to-dma-channel" (drivers/pci/endpoint/functions/pci-epf-test.c)

1 // SPDX-License-Identifier: GPL-2.0
3 * Test driver for PCI endpoint functionality
19 #include <linux/pci-epc.h>
20 #include <linux/pci-epf.h>
21 #include <linux/pci-ep-msi.h>
112 epf_test->transfer_status = in pci_epf_test_dma_callback()
113 dmaengine_tx_status(epf_test->transfer_chan, in pci_epf_test_dma_callback()
114 epf_test->transfer_cookie, &state); in pci_epf_test_dma_callback()
115 if (epf_test->transfer_status == DMA_COMPLETE || in pci_epf_test_dma_callback()
116 epf_test->transfer_status == DMA_ERROR) in pci_epf_test_dma_callback()
117 complete(&epf_test->transfer_complete); in pci_epf_test_dma_callback()
121 * pci_epf_test_data_transfer() - Function that uses dmaengine API to transfer
125 * address given by pci_epc_mem_alloc_addr or DMA mapping APIs.
127 * address given by pci_epc_mem_alloc_addr or DMA mapping APIs.
130 * @dir: DMA transfer direction
132 * Function that uses dmaengine API to transfer data between PCIe EP and remote
134 * by pci_epc_mem_alloc_addr or the one obtained using DMA mapping APIs.
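
The kernel-doc above describes the generic dmaengine flow: prepare a descriptor, register a completion callback, submit it, issue pending work, and wait. A minimal self-contained sketch of that sequence for a memcpy-capable channel follows; it is not part of pci-epf-test.c, and the example_* names are hypothetical.

#include <linux/completion.h>
#include <linux/dmaengine.h>

static void example_dma_done(void *arg)
{
	complete(arg);
}

static int example_dma_memcpy(struct dma_chan *chan, dma_addr_t dst,
			      dma_addr_t src, size_t len)
{
	DECLARE_COMPLETION_ONSTACK(done);
	struct dma_async_tx_descriptor *tx;
	dma_cookie_t cookie;

	/* Build a descriptor for a memcpy of @len bytes between DMA addresses */
	tx = dmaengine_prep_dma_memcpy(chan, dst, src, len,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!tx)
		return -EIO;

	/* Signal @done from the engine's completion callback */
	tx->callback = example_dma_done;
	tx->callback_param = &done;

	cookie = dmaengine_submit(tx);
	if (dma_submit_error(cookie))
		return -EIO;

	/* Start the queued descriptor and wait for its completion */
	dma_async_issue_pending(chan);
	wait_for_completion(&done);

	return dma_async_is_tx_complete(chan, cookie, NULL, NULL) == DMA_COMPLETE ?
	       0 : -EIO;
}
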
144 epf_test->dma_chan_tx : epf_test->dma_chan_rx; in pci_epf_test_data_transfer()
147 struct pci_epf *epf = epf_test->epf; in pci_epf_test_data_transfer()
150 struct device *dev = &epf->dev; in pci_epf_test_data_transfer()
154 dev_err(dev, "Invalid DMA memcpy channel\n"); in pci_epf_test_data_transfer()
155 return -EINVAL; in pci_epf_test_data_transfer()
158 if (epf_test->dma_private) { in pci_epf_test_data_transfer()
166 dev_err(dev, "DMA slave config fail\n"); in pci_epf_test_data_transfer()
167 return -EIO; in pci_epf_test_data_transfer()
177 dev_err(dev, "Failed to prepare DMA memcpy\n"); in pci_epf_test_data_transfer()
178 return -EIO; in pci_epf_test_data_transfer()
181 reinit_completion(&epf_test->transfer_complete); in pci_epf_test_data_transfer()
182 epf_test->transfer_chan = chan; in pci_epf_test_data_transfer()
183 tx->callback = pci_epf_test_dma_callback; in pci_epf_test_data_transfer()
184 tx->callback_param = epf_test; in pci_epf_test_data_transfer()
185 epf_test->transfer_cookie = dmaengine_submit(tx); in pci_epf_test_data_transfer()
187 ret = dma_submit_error(epf_test->transfer_cookie); in pci_epf_test_data_transfer()
189 dev_err(dev, "Failed to do DMA tx_submit %d\n", ret); in pci_epf_test_data_transfer()
194 ret = wait_for_completion_interruptible(&epf_test->transfer_complete); in pci_epf_test_data_transfer()
196 dev_err(dev, "DMA wait_for_completion interrupted\n"); in pci_epf_test_data_transfer()
200 if (epf_test->transfer_status == DMA_ERROR) { in pci_epf_test_data_transfer()
201 dev_err(dev, "DMA transfer failed\n"); in pci_epf_test_data_transfer()
202 ret = -EIO; in pci_epf_test_data_transfer()
224 return chan->device->dev == filter->dev in epf_dma_filter_fn()
225 && (filter->dma_mask & caps.directions); in epf_dma_filter_fn()
229 * pci_epf_test_init_dma_chan() - Function to initialize EPF test DMA channel
232 * Function to initialize EPF test DMA channel.
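
The fragments below show the two paths this function takes: it first tries to obtain device-private DMA_SLAVE rx/tx channels matched by epf_dma_filter_fn, and otherwise falls back to a single generic memcpy channel. A condensed sketch of that request pattern, using only the standard dmaengine API (the example_* name is hypothetical):

#include <linux/dmaengine.h>

static struct dma_chan *example_request_chan(dma_filter_fn filter,
					     void *filter_param)
{
	struct dma_chan *chan;
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	/* Device-specific channel, e.g. the EPC's embedded DMA controller */
	chan = dma_request_channel(mask, filter, filter_param);
	if (chan)
		return chan;

	/* Generic fallback; returns an ERR_PTR on failure */
	dma_cap_zero(mask);
	dma_cap_set(DMA_MEMCPY, mask);
	return dma_request_chan_by_mask(&mask);
}
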
236 struct pci_epf *epf = epf_test->epf; in pci_epf_test_init_dma_chan()
237 struct device *dev = &epf->dev; in pci_epf_test_init_dma_chan()
243 filter.dev = epf->epc->dev.parent; in pci_epf_test_init_dma_chan()
250 dev_info(dev, "Failed to get private DMA rx channel. Falling back to generic one\n"); in pci_epf_test_init_dma_chan()
254 epf_test->dma_chan_rx = dma_chan; in pci_epf_test_init_dma_chan()
260 dev_info(dev, "Failed to get private DMA tx channel. Falling back to generic one\n"); in pci_epf_test_init_dma_chan()
264 epf_test->dma_chan_tx = dma_chan; in pci_epf_test_init_dma_chan()
265 epf_test->dma_private = true; in pci_epf_test_init_dma_chan()
267 init_completion(&epf_test->transfer_complete); in pci_epf_test_init_dma_chan()
272 dma_release_channel(epf_test->dma_chan_rx); in pci_epf_test_init_dma_chan()
273 epf_test->dma_chan_rx = NULL; in pci_epf_test_init_dma_chan()
282 if (ret != -EPROBE_DEFER) in pci_epf_test_init_dma_chan()
283 dev_err(dev, "Failed to get DMA channel\n"); in pci_epf_test_init_dma_chan()
286 init_completion(&epf_test->transfer_complete); in pci_epf_test_init_dma_chan()
288 epf_test->dma_chan_tx = epf_test->dma_chan_rx = dma_chan; in pci_epf_test_init_dma_chan()
294 * pci_epf_test_clean_dma_chan() - Function to cleanup EPF test DMA channel
297 * Helper to cleanup EPF test DMA channel.
301 if (!epf_test->dma_supported) in pci_epf_test_clean_dma_chan()
304 if (epf_test->dma_chan_tx) { in pci_epf_test_clean_dma_chan()
305 dma_release_channel(epf_test->dma_chan_tx); in pci_epf_test_clean_dma_chan()
306 if (epf_test->dma_chan_tx == epf_test->dma_chan_rx) { in pci_epf_test_clean_dma_chan()
307 epf_test->dma_chan_tx = NULL; in pci_epf_test_clean_dma_chan()
308 epf_test->dma_chan_rx = NULL; in pci_epf_test_clean_dma_chan()
311 epf_test->dma_chan_tx = NULL; in pci_epf_test_clean_dma_chan()
314 if (epf_test->dma_chan_rx) { in pci_epf_test_clean_dma_chan()
315 dma_release_channel(epf_test->dma_chan_rx); in pci_epf_test_clean_dma_chan()
316 epf_test->dma_chan_rx = NULL; in pci_epf_test_clean_dma_chan()
323 struct timespec64 *end, bool dma) in pci_epf_test_print_rate() argument
333 dev_info(&epf_test->epf->dev, in pci_epf_test_print_rate()
334 "%s => Size: %llu B, DMA: %s, Time: %llu.%09u s, Rate: %llu KB/s\n", in pci_epf_test_print_rate()
335 op, size, dma ? "YES" : "NO", in pci_epf_test_print_rate()
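
The elided body of pci_epf_test_print_rate() derives the KB/s figure printed above from the byte count and the start/end timestamps. A sketch of that arithmetic, assuming 1 KB = 1000 bytes (the example_* name is hypothetical):

#include <linux/math64.h>
#include <linux/time64.h>

static u64 example_rate_kbps(u64 size, struct timespec64 *start,
			     struct timespec64 *end)
{
	struct timespec64 ts = timespec64_sub(*end, *start);
	u64 ns = timespec64_to_ns(&ts);

	/* bytes * 1e9 / (ns * 1000) = kilobytes per second */
	return ns ? div64_u64(size * NSEC_PER_SEC, ns * 1000) : 0;
}
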
344 struct pci_epf *epf = epf_test->epf; in pci_epf_test_copy()
345 struct pci_epc *epc = epf->epc; in pci_epf_test_copy()
346 struct device *dev = &epf->dev; in pci_epf_test_copy()
348 u64 src_addr = le64_to_cpu(reg->src_addr); in pci_epf_test_copy()
349 u64 dst_addr = le64_to_cpu(reg->dst_addr); in pci_epf_test_copy()
352 u32 flags = le32_to_cpu(reg->flags); in pci_epf_test_copy()
356 orig_size = copy_size = le32_to_cpu(reg->size); in pci_epf_test_copy()
359 if (!dma_has_cap(DMA_MEMCPY, epf_test->dma_chan_tx->device->cap_mask)) { in pci_epf_test_copy()
360 dev_err(dev, "DMA controller doesn't support MEMCPY\n"); in pci_epf_test_copy()
361 ret = -EINVAL; in pci_epf_test_copy()
367 ret = -ENOMEM; in pci_epf_test_copy()
374 ret = pci_epc_mem_map(epc, epf->func_no, epf->vfunc_no, in pci_epf_test_copy()
377 dev_err(dev, "Failed to map source address\n"); in pci_epf_test_copy()
382 ret = pci_epc_mem_map(epf->epc, epf->func_no, epf->vfunc_no, in pci_epf_test_copy()
385 dev_err(dev, "Failed to map destination address\n"); in pci_epf_test_copy()
387 pci_epc_mem_unmap(epc, epf->func_no, epf->vfunc_no, in pci_epf_test_copy()
410 copy_size -= map_size; in pci_epf_test_copy()
414 pci_epc_mem_unmap(epc, epf->func_no, epf->vfunc_no, &dst_map); in pci_epf_test_copy()
415 pci_epc_mem_unmap(epc, epf->func_no, epf->vfunc_no, &src_map); in pci_epf_test_copy()
424 pci_epc_mem_unmap(epc, epf->func_no, epf->vfunc_no, &dst_map); in pci_epf_test_copy()
425 pci_epc_mem_unmap(epc, epf->func_no, epf->vfunc_no, &src_map); in pci_epf_test_copy()
436 reg->status = cpu_to_le32(status); in pci_epf_test_copy()
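
pci_epf_test_copy(), _read() and _write() all follow the same chunking pattern: pci_epc_mem_map() may map less of the host address range than requested and reports the mapped length in map.pci_size, so each handler loops, advancing the address and shrinking the remaining size by that amount. A condensed sketch of the CPU-copy variant of that loop (example_* is hypothetical):

#include <linux/io.h>
#include <linux/pci-epc.h>
#include <linux/pci-epf.h>

static int example_chunked_read(struct pci_epc *epc, struct pci_epf *epf,
				u64 pci_addr, void *buf, size_t size)
{
	struct pci_epc_map map;
	int ret;

	while (size) {
		ret = pci_epc_mem_map(epc, epf->func_no, epf->vfunc_no,
				      pci_addr, size, &map);
		if (ret)
			return ret;

		/* CPU copy of this chunk; a DMA path would use map.phys_addr */
		memcpy_fromio(buf, map.virt_addr, map.pci_size);

		pci_epc_mem_unmap(epc, epf->func_no, epf->vfunc_no, &map);

		pci_addr += map.pci_size;
		buf += map.pci_size;
		size -= map.pci_size;
	}

	return 0;
}
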
445 struct pci_epc_map map; in pci_epf_test_read() local
448 struct pci_epf *epf = epf_test->epf; in pci_epf_test_read()
449 struct pci_epc *epc = epf->epc; in pci_epf_test_read()
450 struct device *dev = &epf->dev; in pci_epf_test_read()
451 struct device *dma_dev = epf->epc->dev.parent; in pci_epf_test_read()
452 u64 src_addr = le64_to_cpu(reg->src_addr); in pci_epf_test_read()
455 u32 flags = le32_to_cpu(reg->flags); in pci_epf_test_read()
456 u32 checksum = le32_to_cpu(reg->checksum); in pci_epf_test_read()
459 orig_size = src_size = le32_to_cpu(reg->size); in pci_epf_test_read()
463 ret = -ENOMEM; in pci_epf_test_read()
469 ret = pci_epc_mem_map(epc, epf->func_no, epf->vfunc_no, in pci_epf_test_read()
470 src_addr, src_size, &map); in pci_epf_test_read()
472 dev_err(dev, "Failed to map address\n"); in pci_epf_test_read()
477 map_size = map.pci_size; in pci_epf_test_read()
483 "Failed to map destination buffer addr\n"); in pci_epf_test_read()
484 ret = -ENOMEM; in pci_epf_test_read()
490 dst_phys_addr, map.phys_addr, in pci_epf_test_read()
503 memcpy_fromio(buf, map.virt_addr, map_size); in pci_epf_test_read()
507 src_size -= map_size; in pci_epf_test_read()
511 pci_epc_mem_unmap(epc, epf->func_no, epf->vfunc_no, &map); in pci_epf_test_read()
520 ret = -EIO; in pci_epf_test_read()
524 pci_epc_mem_unmap(epc, epf->func_no, epf->vfunc_no, &map); in pci_epf_test_read()
534 reg->status = cpu_to_le32(status); in pci_epf_test_read()
542 struct pci_epc_map map; in pci_epf_test_write() local
545 struct pci_epf *epf = epf_test->epf; in pci_epf_test_write()
546 struct pci_epc *epc = epf->epc; in pci_epf_test_write()
547 struct device *dev = &epf->dev; in pci_epf_test_write()
548 struct device *dma_dev = epf->epc->dev.parent; in pci_epf_test_write()
549 u64 dst_addr = le64_to_cpu(reg->dst_addr); in pci_epf_test_write()
552 u32 flags = le32_to_cpu(reg->flags); in pci_epf_test_write()
555 orig_size = dst_size = le32_to_cpu(reg->size); in pci_epf_test_write()
559 ret = -ENOMEM; in pci_epf_test_write()
563 reg->checksum = cpu_to_le32(crc32_le(~0, dst_buf, dst_size)); in pci_epf_test_write()
567 ret = pci_epc_mem_map(epc, epf->func_no, epf->vfunc_no, in pci_epf_test_write()
568 dst_addr, dst_size, &map); in pci_epf_test_write()
570 dev_err(dev, "Failed to map address\n"); in pci_epf_test_write()
575 map_size = map.pci_size; in pci_epf_test_write()
581 "Failed to map source buffer addr\n"); in pci_epf_test_write()
582 ret = -ENOMEM; in pci_epf_test_write()
589 map.phys_addr, src_phys_addr, in pci_epf_test_write()
603 memcpy_toio(map.virt_addr, buf, map_size); in pci_epf_test_write()
607 dst_size -= map_size; in pci_epf_test_write()
611 pci_epc_mem_unmap(epc, epf->func_no, epf->vfunc_no, &map); in pci_epf_test_write()
619 * wait 1ms in order for the write to complete. Without this delay L3 in pci_epf_test_write()
626 pci_epc_mem_unmap(epc, epf->func_no, epf->vfunc_no, &map); in pci_epf_test_write()
636 reg->status = cpu_to_le32(status); in pci_epf_test_write()
642 struct pci_epf *epf = epf_test->epf; in pci_epf_test_raise_irq()
643 struct device *dev = &epf->dev; in pci_epf_test_raise_irq()
644 struct pci_epc *epc = epf->epc; in pci_epf_test_raise_irq()
645 u32 status = le32_to_cpu(reg->status); in pci_epf_test_raise_irq()
646 u32 irq_number = le32_to_cpu(reg->irq_number); in pci_epf_test_raise_irq()
647 u32 irq_type = le32_to_cpu(reg->irq_type); in pci_epf_test_raise_irq()
651 * Set the status before raising the IRQ to ensure that the host sees in pci_epf_test_raise_irq()
655 WRITE_ONCE(reg->status, cpu_to_le32(status)); in pci_epf_test_raise_irq()
659 pci_epc_raise_irq(epc, epf->func_no, epf->vfunc_no, in pci_epf_test_raise_irq()
663 count = pci_epc_get_msi(epc, epf->func_no, epf->vfunc_no); in pci_epf_test_raise_irq()
669 pci_epc_raise_irq(epc, epf->func_no, epf->vfunc_no, in pci_epf_test_raise_irq()
673 count = pci_epc_get_msix(epc, epf->func_no, epf->vfunc_no); in pci_epf_test_raise_irq()
675 dev_err(dev, "Invalid MSI-X IRQ number %d / %d\n", in pci_epf_test_raise_irq()
679 pci_epc_raise_irq(epc, epf->func_no, epf->vfunc_no, in pci_epf_test_raise_irq()
683 dev_err(dev, "Failed to raise IRQ, unknown type\n"); in pci_epf_test_raise_irq()
691 enum pci_barno test_reg_bar = epf_test->test_reg_bar; in pci_epf_test_doorbell_handler()
692 struct pci_epf_test_reg *reg = epf_test->reg[test_reg_bar]; in pci_epf_test_doorbell_handler()
693 u32 status = le32_to_cpu(reg->status); in pci_epf_test_doorbell_handler()
696 reg->status = cpu_to_le32(status); in pci_epf_test_doorbell_handler()
704 struct pci_epf_test_reg *reg = epf_test->reg[epf_test->test_reg_bar]; in pci_epf_test_doorbell_cleanup()
705 struct pci_epf *epf = epf_test->epf; in pci_epf_test_doorbell_cleanup()
707 free_irq(epf->db_msg[0].virq, epf_test); in pci_epf_test_doorbell_cleanup()
708 reg->doorbell_bar = cpu_to_le32(NO_BAR); in pci_epf_test_doorbell_cleanup()
716 u32 status = le32_to_cpu(reg->status); in pci_epf_test_enable_doorbell()
717 struct pci_epf *epf = epf_test->epf; in pci_epf_test_enable_doorbell()
718 struct pci_epc *epc = epf->epc; in pci_epf_test_enable_doorbell()
728 msg = &epf->db_msg[0].msg; in pci_epf_test_enable_doorbell()
729 bar = pci_epc_get_next_free_bar(epf_test->epc_features, epf_test->test_reg_bar + 1); in pci_epf_test_enable_doorbell()
733 ret = request_irq(epf->db_msg[0].virq, pci_epf_test_doorbell_handler, 0, in pci_epf_test_enable_doorbell()
734 "pci-ep-test-doorbell", epf_test); in pci_epf_test_enable_doorbell()
736 dev_err(&epf->dev, in pci_epf_test_enable_doorbell()
737 "Failed to request doorbell IRQ: %d\n", in pci_epf_test_enable_doorbell()
738 epf->db_msg[0].virq); in pci_epf_test_enable_doorbell()
742 reg->doorbell_data = cpu_to_le32(msg->data); in pci_epf_test_enable_doorbell()
743 reg->doorbell_bar = cpu_to_le32(bar); in pci_epf_test_enable_doorbell()
745 msg = &epf->db_msg[0].msg; in pci_epf_test_enable_doorbell()
746 ret = pci_epf_align_inbound_addr(epf, bar, ((u64)msg->address_hi << 32) | msg->address_lo, in pci_epf_test_enable_doorbell()
747 &epf_test->db_bar.phys_addr, &offset); in pci_epf_test_enable_doorbell()
752 reg->doorbell_offset = cpu_to_le32(offset); in pci_epf_test_enable_doorbell()
754 epf_test->db_bar.barno = bar; in pci_epf_test_enable_doorbell()
755 epf_test->db_bar.size = epf->bar[bar].size; in pci_epf_test_enable_doorbell()
756 epf_test->db_bar.flags = epf->bar[bar].flags; in pci_epf_test_enable_doorbell()
758 ret = pci_epc_set_bar(epc, epf->func_no, epf->vfunc_no, &epf_test->db_bar); in pci_epf_test_enable_doorbell()
763 reg->status = cpu_to_le32(status); in pci_epf_test_enable_doorbell()
770 reg->status = cpu_to_le32(status); in pci_epf_test_enable_doorbell()
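
The doorbell registers published above (doorbell_bar, doorbell_offset, doorbell_data) tell the host where and what to write to trigger the endpoint's MSI-backed doorbell handler. A hypothetical host-side sketch, not taken from the pci_endpoint_test host driver:

#include <linux/io.h>
#include <linux/pci.h>

static void example_ring_doorbell(struct pci_dev *pdev, int bar,
				  u32 offset, u32 data)
{
	void __iomem *base = pci_iomap(pdev, bar, 0);

	if (!base)
		return;

	/* The write lands in the BAR region backing the EP's MSI doorbell */
	writel(data, base + offset);
	pci_iounmap(pdev, base);
}
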
776 enum pci_barno bar = le32_to_cpu(reg->doorbell_bar); in pci_epf_test_disable_doorbell()
777 u32 status = le32_to_cpu(reg->status); in pci_epf_test_disable_doorbell()
778 struct pci_epf *epf = epf_test->epf; in pci_epf_test_disable_doorbell()
779 struct pci_epc *epc = epf->epc; in pci_epf_test_disable_doorbell()
789 * to point to the address stored in epf_test->db_bar.phys_addr, i.e., in pci_epf_test_disable_doorbell()
793 * translation to point to the memory allocated for the BAR. in pci_epf_test_disable_doorbell()
795 ret = pci_epc_set_bar(epc, epf->func_no, epf->vfunc_no, &epf->bar[bar]); in pci_epf_test_disable_doorbell()
800 reg->status = cpu_to_le32(status); in pci_epf_test_disable_doorbell()
806 reg->status = cpu_to_le32(status); in pci_epf_test_disable_doorbell()
814 struct pci_epf *epf = epf_test->epf; in pci_epf_test_cmd_handler()
815 struct device *dev = &epf->dev; in pci_epf_test_cmd_handler()
816 enum pci_barno test_reg_bar = epf_test->test_reg_bar; in pci_epf_test_cmd_handler()
817 struct pci_epf_test_reg *reg = epf_test->reg[test_reg_bar]; in pci_epf_test_cmd_handler()
818 u32 irq_type = le32_to_cpu(reg->irq_type); in pci_epf_test_cmd_handler()
820 command = le32_to_cpu(READ_ONCE(reg->command)); in pci_epf_test_cmd_handler()
824 WRITE_ONCE(reg->command, 0); in pci_epf_test_cmd_handler()
825 WRITE_ONCE(reg->status, 0); in pci_epf_test_cmd_handler()
827 if ((le32_to_cpu(READ_ONCE(reg->flags)) & FLAG_USE_DMA) && in pci_epf_test_cmd_handler()
828 !epf_test->dma_supported) { in pci_epf_test_cmd_handler()
829 dev_err(dev, "Cannot transfer data using DMA\n"); in pci_epf_test_cmd_handler()
834 dev_err(dev, "Failed to detect IRQ type\n"); in pci_epf_test_cmd_handler()
870 queue_delayed_work(kpcitest_workqueue, &epf_test->cmd_handler, in pci_epf_test_cmd_handler()
877 struct pci_epc *epc = epf->epc; in pci_epf_test_set_bar()
878 struct device *dev = &epf->dev; in pci_epf_test_set_bar()
880 enum pci_barno test_reg_bar = epf_test->test_reg_bar; in pci_epf_test_set_bar()
883 if (!epf_test->reg[bar]) in pci_epf_test_set_bar()
886 ret = pci_epc_set_bar(epc, epf->func_no, epf->vfunc_no, in pci_epf_test_set_bar()
887 &epf->bar[bar]); in pci_epf_test_set_bar()
889 pci_epf_free_space(epf, epf_test->reg[bar], bar, in pci_epf_test_set_bar()
891 epf_test->reg[bar] = NULL; in pci_epf_test_set_bar()
892 dev_err(dev, "Failed to set BAR%d\n", bar); in pci_epf_test_set_bar()
904 struct pci_epc *epc = epf->epc; in pci_epf_test_clear_bar()
908 if (!epf_test->reg[bar]) in pci_epf_test_clear_bar()
911 pci_epc_clear_bar(epc, epf->func_no, epf->vfunc_no, in pci_epf_test_clear_bar()
912 &epf->bar[bar]); in pci_epf_test_clear_bar()
919 enum pci_barno test_reg_bar = epf_test->test_reg_bar; in pci_epf_test_set_capabilities()
920 struct pci_epf_test_reg *reg = epf_test->reg[test_reg_bar]; in pci_epf_test_set_capabilities()
921 struct pci_epc *epc = epf->epc; in pci_epf_test_set_capabilities()
924 if (epc->ops->align_addr) in pci_epf_test_set_capabilities()
927 if (epf_test->epc_features->msi_capable) in pci_epf_test_set_capabilities()
930 if (epf_test->epc_features->msix_capable) in pci_epf_test_set_capabilities()
933 if (epf_test->epc_features->intx_capable) in pci_epf_test_set_capabilities()
936 reg->caps = cpu_to_le32(caps); in pci_epf_test_set_capabilities()
942 struct pci_epf_header *header = epf->header; in pci_epf_test_epc_init()
943 const struct pci_epc_features *epc_features = epf_test->epc_features; in pci_epf_test_epc_init()
944 struct pci_epc *epc = epf->epc; in pci_epf_test_epc_init()
945 struct device *dev = &epf->dev; in pci_epf_test_epc_init()
949 epf_test->dma_supported = true; in pci_epf_test_epc_init()
953 epf_test->dma_supported = false; in pci_epf_test_epc_init()
955 if (epf->vfunc_no <= 1) { in pci_epf_test_epc_init()
956 ret = pci_epc_write_header(epc, epf->func_no, epf->vfunc_no, header); in pci_epf_test_epc_init()
969 if (epc_features->msi_capable) { in pci_epf_test_epc_init()
970 ret = pci_epc_set_msi(epc, epf->func_no, epf->vfunc_no, in pci_epf_test_epc_init()
971 epf->msi_interrupts); in pci_epf_test_epc_init()
978 if (epc_features->msix_capable) { in pci_epf_test_epc_init()
979 ret = pci_epc_set_msix(epc, epf->func_no, epf->vfunc_no, in pci_epf_test_epc_init()
980 epf->msix_interrupts, in pci_epf_test_epc_init()
981 epf_test->test_reg_bar, in pci_epf_test_epc_init()
982 epf_test->msix_table_offset); in pci_epf_test_epc_init()
984 dev_err(dev, "MSI-X configuration failed\n"); in pci_epf_test_epc_init()
989 linkup_notifier = epc_features->linkup_notifier; in pci_epf_test_epc_init()
991 queue_work(kpcitest_workqueue, &epf_test->cmd_handler.work); in pci_epf_test_epc_init()
1000 cancel_delayed_work_sync(&epf_test->cmd_handler); in pci_epf_test_epc_deinit()
1009 queue_delayed_work(kpcitest_workqueue, &epf_test->cmd_handler, in pci_epf_test_link_up()
1019 cancel_delayed_work_sync(&epf_test->cmd_handler); in pci_epf_test_link_down()
1034 struct device *dev = &epf->dev; in pci_epf_test_alloc_space()
1039 enum pci_barno test_reg_bar = epf_test->test_reg_bar; in pci_epf_test_alloc_space()
1041 const struct pci_epc_features *epc_features = epf_test->epc_features; in pci_epf_test_alloc_space()
1046 if (epc_features->msix_capable) { in pci_epf_test_alloc_space()
1047 msix_table_size = PCI_MSIX_ENTRY_SIZE * epf->msix_interrupts; in pci_epf_test_alloc_space()
1048 epf_test->msix_table_offset = test_reg_bar_size; in pci_epf_test_alloc_space()
1049 /* Align to QWORD or 8 Bytes */ in pci_epf_test_alloc_space()
1050 pba_size = ALIGN(DIV_ROUND_UP(epf->msix_interrupts, 8), 8); in pci_epf_test_alloc_space()
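
As a worked example of the sizing above: with 32 MSI-X vectors the table takes 32 * PCI_MSIX_ENTRY_SIZE = 32 * 16 = 512 bytes, and the pending-bit array needs one bit per vector, DIV_ROUND_UP(32, 8) = 4 bytes, which ALIGN() rounds up to 8 bytes (QWORD alignment).
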
1057 dev_err(dev, "Failed to allocate register space\n"); in pci_epf_test_alloc_space()
1058 return -ENOMEM; in pci_epf_test_alloc_space()
1060 epf_test->reg[test_reg_bar] = base; in pci_epf_test_alloc_space()
1070 if (epc_features->bar[bar].type == BAR_FIXED) in pci_epf_test_alloc_space()
1071 test_reg_size = epc_features->bar[bar].fixed_size; in pci_epf_test_alloc_space()
1078 dev_err(dev, "Failed to allocate space for BAR%d\n", in pci_epf_test_alloc_space()
1080 epf_test->reg[bar] = base; in pci_epf_test_alloc_space()
1092 if (!epf_test->reg[bar]) in pci_epf_test_free_space()
1095 pci_epf_free_space(epf, epf_test->reg[bar], bar, in pci_epf_test_free_space()
1097 epf_test->reg[bar] = NULL; in pci_epf_test_free_space()
1107 struct pci_epc *epc = epf->epc; in pci_epf_test_bind()
1110 return -EINVAL; in pci_epf_test_bind()
1112 epc_features = pci_epc_get_features(epc, epf->func_no, epf->vfunc_no); in pci_epf_test_bind()
1114 dev_err(&epf->dev, "epc_features not implemented\n"); in pci_epf_test_bind()
1115 return -EOPNOTSUPP; in pci_epf_test_bind()
1120 return -EINVAL; in pci_epf_test_bind()
1122 epf_test->test_reg_bar = test_reg_bar; in pci_epf_test_bind()
1123 epf_test->epc_features = epc_features; in pci_epf_test_bind()
1135 struct pci_epc *epc = epf->epc; in pci_epf_test_unbind()
1137 cancel_delayed_work_sync(&epf_test->cmd_handler); in pci_epf_test_unbind()
1138 if (epc->init_complete) { in pci_epf_test_unbind()
1156 struct device *dev = &epf->dev; in pci_epf_test_probe()
1160 return -ENOMEM; in pci_epf_test_probe()
1162 epf->header = &test_header; in pci_epf_test_probe()
1163 epf_test->epf = epf; in pci_epf_test_probe()
1165 INIT_DELAYED_WORK(&epf_test->cmd_handler, pci_epf_test_cmd_handler); in pci_epf_test_probe()
1167 epf->event_ops = &pci_epf_test_event_ops; in pci_epf_test_probe()
1193 pr_err("Failed to allocate the kpcitest work queue\n"); in pci_epf_test_init()
1194 return -ENOMEM; in pci_epf_test_init()
1200 pr_err("Failed to register pci epf test driver --> %d\n", ret); in pci_epf_test_init()