/linux/drivers/gpu/drm/nouveau/nvkm/subdev/instmem/
base.c:

     74: nvkm_instobj_dtor(struct nvkm_instmem *imem, struct nvkm_instobj *iobj)
     76:         spin_lock(&imem->lock);
     78:         spin_unlock(&imem->lock);
     83:                   struct nvkm_instmem *imem, struct nvkm_instobj *iobj)
     87:         spin_lock(&imem->lock);
     88:         list_add_tail(&iobj->head, &imem->list);
     89:         spin_unlock(&imem->lock);
     96:         struct nvkm_instmem *imem = device->imem;
     99:         if (!imem->func->memory_wrap)
    102:         ret = imem->func->memory_wrap(imem, memory, pmemory);
    [all …]

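The base.c hits show how nvkm tracks every live instance object on a per-instmem list: nvkm_instobj_ctor() appends under imem->lock, and nvkm_instobj_dtor() takes the same lock on the way out. A minimal sketch of that pattern with simplified stand-in types, not the real nvkm structures; the unlink inside the dtor sits on an elided line, so the list_del() below is an assumption:

    #include <linux/list.h>
    #include <linux/spinlock.h>

    struct instmem {
            spinlock_t lock;           /* protects 'list' */
            struct list_head list;     /* every live instance object */
    };

    struct instobj {
            struct list_head head;     /* entry in instmem::list */
    };

    static void instmem_init(struct instmem *imem)
    {
            spin_lock_init(&imem->lock);
            INIT_LIST_HEAD(&imem->list);
    }

    /* Constructor tail: start tracking the new object. */
    static void instobj_ctor(struct instmem *imem, struct instobj *iobj)
    {
            spin_lock(&imem->lock);
            list_add_tail(&iobj->head, &imem->list);
            spin_unlock(&imem->lock);
    }

    /* Destructor head: stop tracking it under the same lock. */
    static void instobj_dtor(struct instmem *imem, struct instobj *iobj)
    {
            spin_lock(&imem->lock);
            list_del(&iobj->head);
            spin_unlock(&imem->lock);
    }

Keeping the list membership inside ctor/dtor means any walker that holds the lock sees a consistent set of objects, which is what the r535.c suspend path further down relies on.
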
gk20a.c:

     54:         struct gk20a_instmem *imem;
    145:         struct gk20a_instmem *imem = obj->base.imem;
    151:         imem->vaddr_use -= nvkm_memory_size(&obj->base.base.memory);
    152:         nvkm_debug(&imem->base.subdev, "vaddr used: %x/%x\n", imem->vaddr_use,
    153:                    imem->vaddr_max);
    160: gk20a_instmem_vaddr_gc(struct gk20a_instmem *imem, const u64 size)
    162:         while (imem->vaddr_use + size > imem->vaddr_max) {
    164:                 if (list_empty(&imem->vaddr_lru))
    168:                         list_first_entry(&imem->vaddr_lru,
    177:         struct gk20a_instmem *imem = node->imem;
    [all …]

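gk20a_instmem_vaddr_gc() bounds how much instance memory stays CPU-mapped at once: while the running total plus the new request exceeds vaddr_max, it evicts the least recently used mapping from vaddr_lru and debits vaddr_use. A sketch of that eviction loop with hypothetical simplified types; the real code also unmaps the evicted object, which is only a comment here:

    #include <linux/list.h>
    #include <linux/types.h>

    struct mapping {
            struct list_head lru;      /* entry in vmem::vaddr_lru */
            u64 size;                  /* bytes of CPU mapping held */
    };

    struct vmem {
            struct list_head vaddr_lru;  /* least recently used first */
            u64 vaddr_use;               /* bytes currently mapped */
            u64 vaddr_max;               /* mapping budget */
    };

    /* Evict LRU mappings until 'size' more bytes fit under the budget. */
    static void vaddr_gc(struct vmem *mem, u64 size)
    {
            while (mem->vaddr_use + size > mem->vaddr_max) {
                    struct mapping *obj;

                    if (list_empty(&mem->vaddr_lru))
                            break;  /* nothing left to reclaim */

                    obj = list_first_entry(&mem->vaddr_lru,
                                           struct mapping, lru);
                    list_del(&obj->lru);
                    mem->vaddr_use -= obj->size;  /* unmap would happen here */
            }
    }
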
nv40.c:

     43:         struct nv40_instmem *imem;
     51:         iowrite32_native(data, iobj->imem->iomem + iobj->node->offset + offset);
     58:         return ioread32_native(iobj->imem->iomem + iobj->node->offset + offset);
     77:         return iobj->imem->iomem + iobj->node->offset;
    102:         mutex_lock(&iobj->imem->base.mutex);
    103:         nvkm_mm_free(&iobj->imem->heap, &iobj->node);
    104:         mutex_unlock(&iobj->imem->base.mutex);
    105:         nvkm_instobj_dtor(&iobj->imem->base, &iobj->base);
    123:         struct nv40_instmem *imem = nv40_instmem(base);
    131:         nvkm_instobj_ctor(&nv40_instobj_func, &imem->base, &iobj->base);
    [all …]

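nv40 keeps its whole instance heap permanently ioremapped, so the per-object accessors reduce to offset arithmetic into that mapping (iomem + node->offset + register offset). A sketch of accessors in that style, assuming an already-mapped base; plain ioread32()/iowrite32() stand in for the byte-order-aware _native variants used in the snippet:

    #include <linux/io.h>
    #include <linux/types.h>

    struct inst_obj {
            void __iomem *iomem;   /* base of the permanently mapped heap */
            u32 offset;            /* this object's start within the heap */
    };

    static void inst_wr32(struct inst_obj *obj, u32 reg, u32 data)
    {
            /* MMIO write at heap base + object offset + register offset. */
            iowrite32(data, obj->iomem + obj->offset + reg);
    }

    static u32 inst_rd32(struct inst_obj *obj, u32 reg)
    {
            return ioread32(obj->iomem + obj->offset + reg);
    }
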
nv04.c:

     42:         struct nv04_instmem *imem;
     50:         struct nvkm_device *device = iobj->imem->base.subdev.device;
     58:         struct nvkm_device *device = iobj->imem->base.subdev.device;
     77:         struct nvkm_device *device = iobj->imem->base.subdev.device;
    103:         mutex_lock(&iobj->imem->base.mutex);
    104:         nvkm_mm_free(&iobj->imem->heap, &iobj->node);
    105:         mutex_unlock(&iobj->imem->base.mutex);
    106:         nvkm_instobj_dtor(&iobj->imem->base, &iobj->base);
    124:         struct nv04_instmem *imem = nv04_instmem(base);
    132:         nvkm_instobj_ctor(&nv04_instobj_func, &imem->base, &iobj->base);
    [all …]

nv50.c:

     48:         struct nv50_instmem *imem;
     60:         struct nv50_instmem *imem = iobj->imem;
     61:         struct nvkm_device *device = imem->base.subdev.device;
     66:         spin_lock_irqsave(&imem->base.lock, flags);
     67:         if (unlikely(imem->addr != base)) {
     69:                 imem->addr = base;
     72:         spin_unlock_irqrestore(&imem->base.lock, flags);
     79:         struct nv50_instmem *imem = iobj->imem;
     80:         struct nvkm_device *device = imem->base.subdev.device;
     86:         spin_lock_irqsave(&imem->base.lock, flags);
    [all …]

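The nv50 "slow" accessors reach instance memory through a single banked window: under an IRQ-safe spinlock they retarget the window only when the cached base (imem->addr) differs from the bank being accessed, then go through the aperture. A sketch of that cache-the-bank pattern; the register accessors and the two register offsets below are hypothetical stand-ins, not the real nv50 layout:

    #include <linux/spinlock.h>
    #include <linux/types.h>

    struct win_mem {
            spinlock_t lock;   /* serializes window accesses */
            u64 addr;          /* base currently selected in the window */
    };

    /* Hypothetical MMIO accessors standing in for nvkm_rd32()/nvkm_wr32(). */
    u32 hw_rd32(u32 reg);
    void hw_wr32(u32 reg, u32 data);

    #define WIN_SELECT 0x001700   /* hypothetical window-select register */
    #define WIN_DATA   0x700000   /* hypothetical window aperture */

    static u32 win_rd32_slow(struct win_mem *mem, u64 offset)
    {
            u64 base = offset & ~0xffffULL;  /* 64KiB window granularity */
            unsigned long flags;
            u32 data;

            spin_lock_irqsave(&mem->lock, flags);
            if (unlikely(mem->addr != base)) {
                    /* Retarget the window only when the bank changes. */
                    hw_wr32(WIN_SELECT, (u32)(base >> 16));
                    mem->addr = base;
            }
            data = hw_rd32(WIN_DATA + (u32)(offset & 0xffff));
            spin_unlock_irqrestore(&mem->lock, flags);
            return data;
    }

Caching the selected bank makes sequential accesses cost one aperture read each instead of a select-plus-read pair.
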
r535.c:

    210: r535_instmem_resume(struct nvkm_instmem *imem)
    213:         if (imem->rm.fbsr_valid) {
    214:                 nvkm_gsp_sg_free(imem->subdev.device, &imem->rm.fbsr);
    215:                 imem->rm.fbsr_valid = false;
    220: r535_instmem_suspend(struct nvkm_instmem *imem)
    222:         struct nvkm_subdev *subdev = &imem->subdev;
    235:         list_for_each_entry(iobj, &imem->list, head) {
    242:         list_for_each_entry(iobj, &imem->boot, head) {
    265:         ret = nvkm_gsp_sg(gsp->subdev.device, fbsr.size, &imem->rm.fbsr);
    274:         ret = fbsr_init(&fbsr, &imem->rm.fbsr, items_size);
    [all …]

/linux/drivers/net/ethernet/intel/ice/
ice_parser_rt.c:

    115:                  struct ice_imem_item *imem)
    123:         if (imem->b_kb.tsr_ctrl)
    126:                 key[idd] = imem->b_kb.prio;
    200:                  struct ice_imem_item *imem)
    203:         rt->pg_key.next_proto = ice_pk_build(rt, &imem->np_kb);
    207:         if (imem->pg_kb.flag0_ena)
    208:                 rt->pg_key.flag0 = ice_flag_get(rt, imem->pg_kb.flag0_idx);
    209:         if (imem->pg_kb.flag1_ena)
    210:                 rt->pg_key.flag1 = ice_flag_get(rt, imem->pg_kb.flag1_idx);
    211:         if (imem->pg_kb.flag2_ena)
    [all …]

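ice_imem_pgk_init() builds the parse-graph key from an imem item: each flag is copied only when its key-builder enable bit is set, using the per-flag index to pull the bit out of the runtime state. A sketch of that conditional key build with hypothetical simplified types; flag_get() stands in for ice_flag_get():

    #include <linux/types.h>

    struct flag_kb {
            bool flag0_ena, flag1_ena, flag2_ena;
            u8 flag0_idx, flag1_idx, flag2_idx;
    };

    struct pg_key {
            u8 flag0, flag1, flag2;
    };

    /* Hypothetical: pull one flag bit out of a packed flag word. */
    static u8 flag_get(u64 flags, u8 idx)
    {
            return (flags >> idx) & 1;
    }

    /* Copy only the key-builder-enabled flags into the parse-graph key. */
    static void pgk_init(struct pg_key *key, const struct flag_kb *kb, u64 flags)
    {
            if (kb->flag0_ena)
                    key->flag0 = flag_get(flags, kb->flag0_idx);
            if (kb->flag1_ena)
                    key->flag1 = flag_get(flags, kb->flag1_idx);
            if (kb->flag2_ena)
                    key->flag2 = flag_get(flags, kb->flag2_idx);
    }
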
/linux/drivers/gpu/drm/nouveau/nvkm/engine/fifo/
nv40.c:

     41:         struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc;
    114:         struct nvkm_instmem *imem = fifo->engine.subdev.device->imem;
    119:         hash = nvkm_ramht_insert(imem->ramht, eobj, chan->id, 4, eobj->handle, context);
    129:         struct nvkm_memory *ramfc = device->imem->ramfc;
    185:         struct nvkm_instmem *imem = device->imem;
    186:         struct nvkm_ramht *ramht = imem->ramht;
    187:         struct nvkm_memory *ramro = imem->ramro;
    188:         struct nvkm_memory *ramfc = imem->ramfc;

nv17.c:

     40:         struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc;
     96:         struct nvkm_instmem *imem = device->imem;
     97:         struct nvkm_ramht *ramht = imem->ramht;
     98:         struct nvkm_memory *ramro = imem->ramro;
     99:         struct nvkm_memory *ramfc = imem->ramfc;

nv04.c:

     45:         struct nvkm_memory *fctx = device->imem->ramfc;
    105:         struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc;
    118:         struct nvkm_memory *ramfc = chan->cgrp->runl->fifo->engine.subdev.device->imem->ramfc;
    184:         struct nvkm_instmem *imem = fifo->engine.subdev.device->imem;
    187:         nvkm_ramht_remove(imem->ramht, hash);
    195:         struct nvkm_instmem *imem = fifo->engine.subdev.device->imem;
    200:         hash = nvkm_ramht_insert(imem->ramht, eobj, chan->id, 4, eobj->handle, context);
    469:         struct nvkm_instmem *imem = device->imem;
    470:         struct nvkm_ramht *ramht = imem->ramht;
    471:         struct nvkm_memory *ramro = imem->ramro;
    [all …]

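Across these fifo files, engine objects are published to the hardware through the RAMHT hash table kept on the instmem: nvkm_ramht_insert() hashes the object handle together with the channel id to pick a slot, and nvkm_ramht_remove() drops it again by the returned hash. A sketch of a RAMHT-style folding hash of the kind such tables use, simplified for illustration; the real slot layout and collision probing live in nvkm/core/ramht.c:

    #include <linux/types.h>

    /*
     * Fold the 32-bit handle down XOR-wise in 'bits'-wide chunks, then
     * mix in the channel id so different channels spread across slots.
     */
    static u32 ramht_hash(u32 bits, int chid, u32 handle)
    {
            u32 hash = 0;

            while (handle) {
                    hash ^= handle & ((1 << bits) - 1);
                    handle >>= bits;
            }

            return hash ^ (chid << (bits - 4));
    }

The insert path then probes forward from that slot for a free entry, so colliding handles still coexist in the table.
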
/linux/drivers/gpu/drm/nouveau/nvkm/engine/device/
base.c:

     65:         .imem = { 0x00000001, nv04_instmem_new },
     86:         .imem = { 0x00000001, nv04_instmem_new },
    108:         .imem = { 0x00000001, nv04_instmem_new },
    128:         .imem = { 0x00000001, nv04_instmem_new },
    150:         .imem = { 0x00000001, nv04_instmem_new },
    172:         .imem = { 0x00000001, nv04_instmem_new },
    194:         .imem = { 0x00000001, nv04_instmem_new },
    216:         .imem = { 0x00000001, nv04_instmem_new },
    238:         .imem = { 0x00000001, nv04_instmem_new },
    260:         .imem = { 0x00000001, nv04_instmem_new },
    [all …]

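Every per-chipset table in device/base.c declares its subdevs as { instance bitmask, constructor } pairs, so .imem = { 0x00000001, nv04_instmem_new } reads as "one instmem instance, built by nv04_instmem_new()". A sketch of that declarative wiring; all names and types here are hypothetical stand-ins, including the stub constructor:

    #include <linux/types.h>

    struct gpu;   /* hypothetical device handle */

    /* One subdev slot: which instances exist and how to build them. */
    struct subdev_entry {
            u32 inst;                   /* bitmask of valid instances */
            int (*ctor)(struct gpu *);  /* per-instance constructor */
    };

    struct chipset_spec {
            const char *name;
            struct subdev_entry imem;
    };

    static int instmem_ctor_stub(struct gpu *gpu) { return 0; }

    /* Designated initializers keep each chipset's wiring declarative:
     * bit 0 set means a single instance, built by the named constructor. */
    static const struct chipset_spec nv04_spec = {
            .name = "nv04",
            .imem = { 0x00000001, instmem_ctor_stub },
    };
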
user.c:

     78:         struct nvkm_instmem *imem = device->imem;
    162:         if (imem && args->v0.ram_size > 0)
    163:                 args->v0.ram_user = args->v0.ram_user - imem->reserved;

/linux/drivers/remoteproc/
qcom_pil_info.c:

     33:         struct resource imem;
     45:         ret = of_address_to_resource(np, 0, &imem);
     50:         base = ioremap(imem.start, resource_size(&imem));
     56:         memset_io(base, 0, resource_size(&imem));
     59:         _reloc.num_entries = (u32)resource_size(&imem) / PIL_RELOC_ENTRY_SIZE;

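qcom_pil_info_init() resolves the PIL relocation region from the device tree, maps it, clears it, and sizes the entry table from the resource length. A sketch of that init sequence under the same calls; ENTRY_SIZE and the function name are hypothetical stand-ins for PIL_RELOC_ENTRY_SIZE and the real init path:

    #include <linux/errno.h>
    #include <linux/io.h>
    #include <linux/ioport.h>
    #include <linux/of.h>
    #include <linux/of_address.h>

    #define ENTRY_SIZE 24   /* hypothetical bytes per relocation entry */

    static int pil_info_map(struct device_node *np, void __iomem **out,
                            u32 *num_entries)
    {
            struct resource imem;
            void __iomem *base;
            int ret;

            /* Translate reg[0] of the node into a CPU-physical resource. */
            ret = of_address_to_resource(np, 0, &imem);
            if (ret < 0)
                    return ret;

            base = ioremap(imem.start, resource_size(&imem));
            if (!base)
                    return -ENOMEM;

            memset_io(base, 0, resource_size(&imem));  /* start clean */
            *num_entries = (u32)resource_size(&imem) / ENTRY_SIZE;
            *out = base;
            return 0;
    }
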
/linux/drivers/net/wwan/iosm/
iosm_ipc_mux.c:

     13:         channel_id = ipc_imem_channel_alloc(ipc_mux->imem, ipc_mux->instance_id,
     25:         ipc_mux->channel = ipc_imem_channel_open(ipc_mux->imem, channel_id,
    187:         ipc_imem_channel_close(ipc_mux->imem, ipc_mux->channel_id);
    223:         ipc_imem_td_update_timer_suspend(ipc_mux->imem, true);
    227:         ipc_imem_td_update_timer_suspend(ipc_mux->imem, false);
    236:         ipc_imem_td_update_timer_suspend(ipc_mux->imem, true);
    240:         ipc_imem_td_update_timer_suspend(ipc_mux->imem, false);
    279:                  struct iosm_imem *imem)
    295:         ipc_mux->pcie = imem->pcie;
    296:         ipc_mux->imem = imem;
    [all …]

iosm_ipc_protocol.c:

     17:         int index = ipc_protocol_msg_prep(ipc_protocol->imem, msg_type,
     25:         ipc_protocol_msg_hp_update(ipc_protocol->imem);
     83:         index = ipc_task_queue_send_task(ipc_protocol->imem,
    100:                 ipc_task_queue_send_task(ipc_protocol->imem,
    182:         ipc_task_queue_send_task(ipc_protocol->imem,
    235:         ipc_protocol->imem = ipc_imem;

iosm_ipc_pcie.c:

     43:         ipc_imem_cleanup(ipc_pcie->imem);
     53:         kfree(ipc_pcie->imem);
    314:         ipc_pcie->imem = ipc_imem_init(ipc_pcie, pci->device,
    316:         if (!ipc_pcie->imem) {
    355:         ipc_imem_pm_s2idle_sleep(ipc_pcie->imem, true);
    366:         ipc_imem_pm_s2idle_sleep(ipc_pcie->imem, false);
    381:         ipc_imem_pm_suspend(ipc_pcie->imem);
    392:         ipc_imem_pm_resume(ipc_pcie->imem);

iosm_ipc_task_queue.c:

    140: int ipc_task_queue_send_task(struct iosm_imem *imem,
    157:         ret = ipc_task_queue_add_task(imem, arg, copy, func,
    160:                 dev_err(imem->ipc_task->dev,

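ipc_task_queue_send_task() is the public entry point of the task queue: it hands the caller's message to ipc_task_queue_add_task() for execution on the IPC tasklet and logs via dev_err() when queueing fails. A sketch of the usual shape of such a wrapper, with hypothetical types; the real signature, copy semantics, and ownership rules are in iosm_ipc_task_queue.c:

    #include <linux/errno.h>
    #include <linux/slab.h>
    #include <linux/types.h>

    struct tq;   /* hypothetical queue handle */

    /* Assumed to exist: enqueue one task for the worker to run. */
    int tq_add_task(struct tq *tq, void *copy, size_t size,
                    int (*func)(struct tq *tq, void *msg, size_t size));

    /*
     * Duplicate the caller's message so it outlives the call, then queue
     * it; the queue side is expected to kfree() the copy after 'func'
     * has run.
     */
    static int tq_send_task(struct tq *tq, const void *msg, size_t size,
                            int (*func)(struct tq *tq, void *msg, size_t size))
    {
            void *copy = NULL;

            if (msg && size) {
                    copy = kmemdup(msg, size, GFP_KERNEL);
                    if (!copy)
                            return -ENOMEM;
            }

            return tq_add_task(tq, copy, size, func);
    }
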
iosm_ipc_mux_codec.c:

     20:         ipc_imem_ul_send(ipc_mux->imem);
     28:         int ret = ipc_task_queue_send_task(ipc_mux->imem, ipc_mux_tq_cmd_send,
     49:                 ipc_uevent_send(ipc_mux->imem->dev, UEVENT_MDM_TIMEOUT);
    252:         adb_timer = &ipc_mux->imem->adb_timer;
    900:         ul_data_pend = ipc_imem_ul_write_td(ipc_mux->imem);
    904:                 ipc_imem_td_update_timer_start(ipc_mux->imem);
   1003:         (void)ipc_imem_ul_write_td(ipc_mux->imem);
   1161:         (void)ipc_imem_ul_write_td(ipc_mux->imem);
   1491:         ipc_imem_adb_timer_start(ipc_mux->imem);
   1494:         ipc_imem_td_update_timer_start(ipc_mux->imem);
    [all …]

iosm_ipc_task_queue.h:

     92: int ipc_task_queue_send_task(struct iosm_imem *imem,

iosm_ipc_pcie.h:

     69:         struct iosm_imem *imem;

iosm_ipc_protocol.h:

    122:         struct iosm_imem *imem;

/linux/drivers/gpu/drm/nouveau/nvkm/engine/mpeg/
nv40.c:

     33:         struct nvkm_instmem *imem = device->imem;
     37:         u32 dma0 = nvkm_instmem_rd32(imem, inst + 0);
     38:         u32 dma1 = nvkm_instmem_rd32(imem, inst + 4);
     39:         u32 dma2 = nvkm_instmem_rd32(imem, inst + 8);

/linux/drivers/gpu/drm/nouveau/nvkm/core/
memory.c:

    141:         struct nvkm_instmem *imem = device->imem;
    146:         if (unlikely(!imem))
    159:         ret = nvkm_instobj_new(imem, size, align, zero, preserve, &memory);

/linux/drivers/memory/
brcmstb_dpfe.c:

    184:         void __iomem *imem;
    571:         u32 __iomem *imem = priv->imem;
    590:                 sum += readl_relaxed(imem + i);
    626:         const u32 *dmem, *imem;
    676:         imem = fw_blob;
    683:         ret = __write_firmware(priv->imem, imem, imem_size, is_big_endian);
    884:         priv->imem = devm_platform_ioremap_resource_byname(pdev, "dpfe-imem");
    885:         if (IS_ERR(priv->imem)) {

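__verify_fw_checksum() reads the downloaded firmware back out of the mapped IMEM word by word and accumulates a 32-bit sum (line 590). A sketch of that verification loop; the word count and expected-checksum handling are assumptions, simplified from the driver:

    #include <linux/io.h>
    #include <linux/types.h>

    /*
     * Re-read 'words' 32-bit values from the device's IMEM and check
     * the running sum against the expected checksum.
     */
    static bool fw_checksum_ok(u32 __iomem *imem, unsigned int words,
                               u32 expected)
    {
            u32 sum = 0;
            unsigned int i;

            for (i = 0; i < words; i++)
                    sum += readl_relaxed(imem + i);  /* relaxed: data only */

            return sum == expected;
    }

Relaxed reads are enough here because the loop carries no ordering requirement against other MMIO, only the final comparison matters.
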
/linux/Documentation/devicetree/bindings/dma/
st_fdma.txt:

     15: - reg-names : Must contain "slimcore", "dmem", "peripherals", "imem" entries
     31:         reg-names = "slimcore", "dmem", "peripherals", "imem";