/*
 * Copyright (C) 2017 Etnaviv Project
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include <drm/drm_mm.h>

#include "etnaviv_cmdbuf.h"
#include "etnaviv_gpu.h"
#include "etnaviv_mmu.h"

/*
 * The suballocator carves a single 256K write-combined DMA buffer into
 * 4K granules, which back the individual command buffers.
 */
#define SUBALLOC_SIZE		SZ_256K
#define SUBALLOC_GRANULE	SZ_4K
#define SUBALLOC_GRANULES	(SUBALLOC_SIZE / SUBALLOC_GRANULE)

struct etnaviv_cmdbuf_suballoc {
	/* suballocated dma buffer properties */
	struct etnaviv_gpu *gpu;
	void *vaddr;
	dma_addr_t paddr;

	/* GPU mapping */
	u32 iova;
	struct drm_mm_node vram_node; /* only used on MMUv2 */

	/* allocation management */
	struct mutex lock;
	DECLARE_BITMAP(granule_map, SUBALLOC_GRANULES);
	int free_space;
	wait_queue_head_t free_event;
};

struct etnaviv_cmdbuf_suballoc *
etnaviv_cmdbuf_suballoc_new(struct etnaviv_gpu *gpu)
{
	struct etnaviv_cmdbuf_suballoc *suballoc;
	int ret;

	suballoc = kzalloc(sizeof(*suballoc), GFP_KERNEL);
	if (!suballoc)
		return ERR_PTR(-ENOMEM);

	suballoc->gpu = gpu;
	mutex_init(&suballoc->lock);
	init_waitqueue_head(&suballoc->free_event);

	suballoc->vaddr = dma_alloc_wc(gpu->dev, SUBALLOC_SIZE,
				       &suballoc->paddr, GFP_KERNEL);
	if (!suballoc->vaddr) {
		ret = -ENOMEM;
		goto free_suballoc;
	}

	ret = etnaviv_iommu_get_suballoc_va(gpu, suballoc->paddr,
					    &suballoc->vram_node, SUBALLOC_SIZE,
					    &suballoc->iova);
	if (ret)
		goto free_dma;

	return suballoc;

free_dma:
	dma_free_wc(gpu->dev, SUBALLOC_SIZE, suballoc->vaddr, suballoc->paddr);
free_suballoc:
	kfree(suballoc);

	return ERR_PTR(ret);
}

void etnaviv_cmdbuf_suballoc_destroy(struct etnaviv_cmdbuf_suballoc *suballoc)
{
	etnaviv_iommu_put_suballoc_va(suballoc->gpu, &suballoc->vram_node,
				      SUBALLOC_SIZE, suballoc->iova);
	dma_free_wc(suballoc->gpu->dev, SUBALLOC_SIZE, suballoc->vaddr,
		    suballoc->paddr);
	kfree(suballoc);
}

/*
 * Carve a command buffer out of the suballocated DMA buffer. If no granules
 * are free, wait up to 10 seconds for another command buffer to be released;
 * returns NULL on allocation failure or timeout.
 */
struct etnaviv_cmdbuf *
etnaviv_cmdbuf_new(struct etnaviv_cmdbuf_suballoc *suballoc, u32 size,
		   size_t nr_bos)
{
	struct etnaviv_cmdbuf *cmdbuf;
	size_t sz = size_vstruct(nr_bos, sizeof(cmdbuf->bo_map[0]),
				 sizeof(*cmdbuf));
	int granule_offs, order, ret;

	cmdbuf = kzalloc(sz, GFP_KERNEL);
	if (!cmdbuf)
		return NULL;

	cmdbuf->suballoc = suballoc;
	cmdbuf->size = size;

	order = order_base_2(ALIGN(size, SUBALLOC_GRANULE) / SUBALLOC_GRANULE);
retry:
	mutex_lock(&suballoc->lock);
	granule_offs = bitmap_find_free_region(suballoc->granule_map,
					       SUBALLOC_GRANULES, order);
	if (granule_offs < 0) {
		suballoc->free_space = 0;
		mutex_unlock(&suballoc->lock);
		ret = wait_event_interruptible_timeout(suballoc->free_event,
						       suballoc->free_space,
						       msecs_to_jiffies(10 * 1000));
		if (!ret) {
			dev_err(suballoc->gpu->dev,
				"Timeout waiting for cmdbuf space\n");
			return NULL;
		}
		goto retry;
	}
	mutex_unlock(&suballoc->lock);
	cmdbuf->suballoc_offset = granule_offs * SUBALLOC_GRANULE;
	cmdbuf->vaddr = suballoc->vaddr + cmdbuf->suballoc_offset;

	return cmdbuf;
}

void etnaviv_cmdbuf_free(struct etnaviv_cmdbuf *cmdbuf)
{
	struct etnaviv_cmdbuf_suballoc *suballoc = cmdbuf->suballoc;
	int order = order_base_2(ALIGN(cmdbuf->size, SUBALLOC_GRANULE) /
				 SUBALLOC_GRANULE);

	mutex_lock(&suballoc->lock);
	bitmap_release_region(suballoc->granule_map,
			      cmdbuf->suballoc_offset / SUBALLOC_GRANULE,
			      order);
	suballoc->free_space = 1;
	mutex_unlock(&suballoc->lock);
	wake_up_all(&suballoc->free_event);
	kfree(cmdbuf);
}

u32 etnaviv_cmdbuf_get_va(struct etnaviv_cmdbuf *buf)
{
	return buf->suballoc->iova + buf->suballoc_offset;
}

dma_addr_t etnaviv_cmdbuf_get_pa(struct etnaviv_cmdbuf *buf)
{
	return buf->suballoc->paddr + buf->suballoc_offset;
}
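
/*
 * Illustrative usage sketch (not part of the driver): roughly how a caller
 * could drive the suballocator with the functions above. The SZ_4K size and
 * nr_bos = 0 arguments are arbitrary example values, not taken from the
 * driver's actual init or submit paths.
 *
 *	struct etnaviv_cmdbuf_suballoc *suballoc;
 *	struct etnaviv_cmdbuf *cmdbuf;
 *
 *	suballoc = etnaviv_cmdbuf_suballoc_new(gpu);
 *	if (IS_ERR(suballoc))
 *		return PTR_ERR(suballoc);
 *
 *	cmdbuf = etnaviv_cmdbuf_new(suballoc, SZ_4K, 0);
 *	if (!cmdbuf)
 *		return -ENOMEM;
 *
 *	Commands are then emitted through cmdbuf->vaddr and the GPU is pointed
 *	at etnaviv_cmdbuf_get_va(cmdbuf). Once the GPU has consumed the buffer,
 *	etnaviv_cmdbuf_free(cmdbuf) returns the granules to the pool, and
 *	etnaviv_cmdbuf_suballoc_destroy(suballoc) tears the pool down.
 */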