Lines Matching defs:fw_obj
1091 * @fw_obj: Pointer to FW object containing the object to clean up.
1101 pvr_fw_structure_cleanup(struct pvr_device *pvr_dev, u32 type, struct pvr_fw_object *fw_obj,
1125 pvr_fw_object_get_fw_addr_offset(fw_obj, offset,
1129 pvr_fw_object_get_fw_addr_offset(fw_obj, offset,
1133 pvr_fw_object_get_fw_addr_offset(fw_obj, offset,
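The three matches above all come from pvr_fw_structure_cleanup() and perform the same step for different cleanup types: resolving the FW-visible address of the structure inside @fw_obj before the cleanup request is sent. A minimal sketch of that shared step, assuming a caller that already holds fw_obj and offset (the cleanup request structure and its fields do not appear in the matches and are left out):

    u32 fw_addr;

    /* FW-visible address of the structure at @offset inside the object. */
    pvr_fw_object_get_fw_addr_offset(fw_obj, offset, &fw_addr);

    /* fw_addr is then written into the type-specific field of the request. */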
1164 * @fw_obj: FW object to map.
1170 * * -%EINVAL if @fw_obj is already mapped but has no references, or
1174 pvr_fw_object_fw_map(struct pvr_device *pvr_dev, struct pvr_fw_object *fw_obj, u64 dev_addr)
1176 struct pvr_gem_object *pvr_obj = fw_obj->gem;
1184 if (drm_mm_node_allocated(&fw_obj->fw_mm_node)) {
1194 err = drm_mm_insert_node_in_range(&fw_dev->fw_mm, &fw_obj->fw_mm_node,
1202 fw_obj->fw_mm_node.start = dev_addr;
1203 fw_obj->fw_mm_node.size = gem_obj->size;
1204 err = drm_mm_reserve_node(&fw_dev->fw_mm, &fw_obj->fw_mm_node);
1212 err = fw_dev->defs->vm_map(pvr_dev, fw_obj);
1216 fw_obj->fw_addr_offset = (u32)(fw_obj->fw_mm_node.start - fw_dev->fw_mm_base);
1222 drm_mm_remove_node(&fw_obj->fw_mm_node);
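Taken together, the pvr_fw_object_fw_map() matches show two placement modes: with dev_addr == 0 the allocator picks a slot in the FW heap, while a non-zero dev_addr reserves that exact address; either way fw_addr_offset becomes the node start relative to fw_mm_base. A minimal sketch of that placement logic, assuming the field semantics visible above (range bounds, alignment and the backend vm_map step are simplified placeholders, not the driver's actual values):

    #include <drm/drm_mm.h>
    #include <linux/limits.h>

    static int fw_heap_place_sketch(struct drm_mm *fw_mm, u64 fw_mm_base,
                                    struct drm_mm_node *node, u64 size,
                                    u64 dev_addr, u32 *fw_addr_offset_out)
    {
        int err;

        if (!dev_addr) {
            /* Let the allocator pick any free slot in the FW heap. */
            err = drm_mm_insert_node_in_range(fw_mm, node, size, 0, 0,
                                              0, U64_MAX, DRM_MM_INSERT_BEST);
        } else {
            /* The caller requires a fixed FW device address. */
            node->start = dev_addr;
            node->size = size;
            err = drm_mm_reserve_node(fw_mm, node);
        }
        if (err)
            return err;

        /* The FW address offset is relative to the base of the FW heap. */
        *fw_addr_offset_out = (u32)(node->start - fw_mm_base);
        return 0;
    }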
1232 * @fw_obj: FW object to unmap.
1239 pvr_fw_object_fw_unmap(struct pvr_fw_object *fw_obj)
1241 struct pvr_gem_object *pvr_obj = fw_obj->gem;
1246 fw_dev->defs->vm_unmap(pvr_dev, fw_obj);
1250 if (!drm_mm_node_allocated(&fw_obj->fw_mm_node)) {
1255 drm_mm_remove_node(&fw_obj->fw_mm_node);
1268 struct pvr_fw_object *fw_obj;
1275 fw_obj = kzalloc(sizeof(*fw_obj), GFP_KERNEL);
1276 if (!fw_obj)
1279 INIT_LIST_HEAD(&fw_obj->node);
1280 fw_obj->init = init;
1281 fw_obj->init_priv = init_priv;
1283 fw_obj->gem = pvr_gem_object_create(pvr_dev, size, flags);
1284 if (IS_ERR(fw_obj->gem)) {
1285 err = PTR_ERR(fw_obj->gem);
1286 fw_obj->gem = NULL;
1290 err = pvr_fw_object_fw_map(pvr_dev, fw_obj, dev_addr);
1294 cpu_ptr = pvr_fw_object_vmap(fw_obj);
1300 *fw_obj_out = fw_obj;
1302 if (fw_obj->init)
1303 fw_obj->init(cpu_ptr, fw_obj->init_priv);
1306 list_add_tail(&fw_obj->node, &pvr_dev->fw_dev.fw_objs.list);
1312 pvr_fw_object_destroy(fw_obj);
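The creation path above accepts an optional init callback plus an opaque init_priv pointer, maps the new object, runs the callback on the CPU mapping, and only then adds the object to fw_dev.fw_objs.list; the error path visible at line 1312 hands the partially built object to pvr_fw_object_destroy(). A minimal sketch of the callback contract, using hypothetical names example_init/example_priv (only the two-argument shape, cpu_ptr plus init_priv, comes from the matches):

    struct example_priv {
        u32 flags;  /* Whatever the caller needs to seed the object with. */
    };

    /* Runs once at creation, and again from the reset path further down. */
    static void example_init(void *cpu_ptr, void *priv)
    {
        struct example_priv *args = priv;
        u32 *data = cpu_ptr;

        data[0] = args->flags;  /* Fill in the FW-visible contents. */
    }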
1424 * @fw_obj: Pointer to object to destroy.
1426 void pvr_fw_object_destroy(struct pvr_fw_object *fw_obj)
1428 struct pvr_gem_object *pvr_obj = fw_obj->gem;
1433 list_del(&fw_obj->node);
1436 if (drm_mm_node_allocated(&fw_obj->fw_mm_node)) {
1438 if (WARN_ON(pvr_fw_object_fw_unmap(fw_obj)))
1442 if (fw_obj->gem)
1443 pvr_gem_object_put(fw_obj->gem);
1445 kfree(fw_obj);
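Read together, the destroy matches show why that error-path reuse works: each teardown step first checks whether the corresponding construction step actually happened, so pvr_fw_object_destroy() is safe on a partially built object. A consolidated sketch under that reading, with the list locking omitted and an early return assumed after the WARN_ON (the body of the if at line 1438 is not part of the matches):

    static void fw_object_destroy_sketch(struct pvr_fw_object *fw_obj)
    {
        /* Drop the object from the device-wide FW object list. */
        list_del(&fw_obj->node);

        /* Undo the FW mapping only if it was ever established. */
        if (drm_mm_node_allocated(&fw_obj->fw_mm_node)) {
            if (WARN_ON(pvr_fw_object_fw_unmap(fw_obj)))
                return;  /* Assumed: do not free while still mapped. */
        }

        /* The GEM backing may be absent if creation failed early. */
        if (fw_obj->gem)
            pvr_gem_object_put(fw_obj->gem);

        kfree(fw_obj);
    }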
1451 * @fw_obj: Pointer to object.
1455 void pvr_fw_object_get_fw_addr_offset(struct pvr_fw_object *fw_obj, u32 offset, u32 *fw_addr_out)
1457 struct pvr_gem_object *pvr_obj = fw_obj->gem;
1460 *fw_addr_out = pvr_dev->fw_dev.defs->get_fw_addr_with_offset(fw_obj, offset);
1464 pvr_fw_obj_get_gpu_addr(struct pvr_fw_object *fw_obj)
1466 struct pvr_device *pvr_dev = to_pvr_device(gem_from_pvr_gem(fw_obj->gem)->dev);
1469 return fw_dev->fw_heap_info.gpu_addr + fw_obj->fw_addr_offset;
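The two address helpers expose the same object in different address spaces: pvr_fw_object_get_fw_addr_offset() asks the firmware-processor backend (defs->get_fw_addr_with_offset) for the FW-visible address of a field, while pvr_fw_obj_get_gpu_addr() adds fw_addr_offset to the GPU-visible base of the FW heap. A short usage sketch with a hypothetical caller and an illustrative 16-byte offset (calling pvr_fw_obj_get_gpu_addr() from outside this file is an assumption):

    static void fw_addr_example(struct pvr_fw_object *fw_obj)
    {
        u32 fw_addr;
        u64 gpu_addr;

        /* Address the firmware processor uses for a field 16 bytes in. */
        pvr_fw_object_get_fw_addr_offset(fw_obj, 16, &fw_addr);

        /* Address the GPU uses for the start of the same object. */
        gpu_addr = pvr_fw_obj_get_gpu_addr(fw_obj);

        /* fw_addr and gpu_addr would now be handed to their consumers. */
    }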
1493 struct pvr_fw_object *fw_obj = container_of(pos, struct pvr_fw_object, node);
1494 void *cpu_ptr = pvr_fw_object_vmap(fw_obj);
1498 if (!(fw_obj->gem->flags & PVR_BO_FW_NO_CLEAR_ON_RESET)) {
1499 memset(cpu_ptr, 0, pvr_gem_object_size(fw_obj->gem));
1501 if (fw_obj->init)
1502 fw_obj->init(cpu_ptr, fw_obj->init_priv);
1505 pvr_fw_object_vunmap(fw_obj);
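The last group of matches is the reset path: every FW object on fw_dev.fw_objs.list is mapped into the CPU, cleared unless it was created with PVR_BO_FW_NO_CLEAR_ON_RESET, rebuilt with its original init callback, and unmapped again. A consolidated sketch of that loop, assuming list_for_each_entry() over the node member shown above (the original iterates with container_of(); locking and vmap error handling are omitted):

    static void fw_objects_reinit_sketch(struct pvr_device *pvr_dev)
    {
        struct pvr_fw_object *fw_obj;

        list_for_each_entry(fw_obj, &pvr_dev->fw_dev.fw_objs.list, node) {
            void *cpu_ptr = pvr_fw_object_vmap(fw_obj);

            /* Objects not flagged NO_CLEAR_ON_RESET are wiped... */
            if (!(fw_obj->gem->flags & PVR_BO_FW_NO_CLEAR_ON_RESET)) {
                memset(cpu_ptr, 0, pvr_gem_object_size(fw_obj->gem));

                /* ...and rebuilt with the callback supplied at creation. */
                if (fw_obj->init)
                    fw_obj->init(cpu_ptr, fw_obj->init_priv);
            }

            pvr_fw_object_vunmap(fw_obj);
        }
    }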