Lines matching refs: entry (kernel/dma/debug.c, the Linux DMA-API debugging facility)
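Every DMA-API request checked by this code is recorded in a struct dma_debug_entry and filed into a hash table. The sketch below reconstructs the struct's shape purely from the fields referenced in this listing; field order and exact types are approximate, not the verbatim upstream definition.

/* Shape of struct dma_debug_entry as implied by the references below;
 * a reconstruction, so names and types are only approximate. */
struct dma_debug_entry {
	struct list_head list;		/* hash-bucket chain */
	struct device *dev;
	int type;			/* dma_debug_phy/sg/coherent/noncoherent */
	phys_addr_t paddr;
	u64 dev_addr;
	u64 size;
	int direction;			/* enum dma_data_direction value */
	int sg_call_ents;		/* nents passed to dma_map_sg() */
	int sg_mapped_ents;		/* nents dma_map_sg() returned */
	int map_err_type;		/* MAP_ERR_NOT_CHECKED until verified */
#ifdef CONFIG_STACKTRACE
	unsigned int stack_len;
	unsigned long stack_entries[DMA_DEBUG_STACKTRACE_ENTRIES];
#endif
};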
169 static inline void dump_entry_trace(struct dma_debug_entry *entry) in dump_entry_trace() argument
172 if (entry) { in dump_entry_trace()
174 stack_trace_print(entry->stack_entries, entry->stack_len, 0); in dump_entry_trace()
220 #define err_printk(dev, entry, format, arg...) do { \ argument
227 dump_entry_trace(entry); \
239 static int hash_fn(struct dma_debug_entry *entry) in hash_fn() argument
245 return (entry->dev_addr >> HASH_FN_SHIFT) & HASH_FN_MASK; in hash_fn()
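hash_fn() buckets entries by their DMA address so the lookups in check_unmap() and check_sync() only walk one short chain. A standalone model of the index computation; the constants match the values debug.c has used (HASH_SIZE 16384, HASH_FN_SHIFT 13) but should be treated as assumptions here. Dropping the low bits groups nearby addresses into one bucket; mappings that straddle a bucket boundary are why bucket_find_contain() below also walks neighbouring buckets.

#include <stdint.h>
#include <stdio.h>

#define HASH_SIZE	16384ULL
#define HASH_FN_SHIFT	13
#define HASH_FN_MASK	(HASH_SIZE - 1)

static int hash_fn(uint64_t dev_addr)
{
	/* Same computation as the kernel version, on a bare address. */
	return (int)((dev_addr >> HASH_FN_SHIFT) & HASH_FN_MASK);
}

int main(void)
{
	uint64_t addrs[] = { 0x1000, 0x2000, 0x80000000ULL };
	int i;

	for (i = 0; i < 3; i++)
		printf("dev_addr %#llx -> bucket %d\n",
		       (unsigned long long)addrs[i], hash_fn(addrs[i]));
	return 0;
}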
251 static struct hash_bucket *get_hash_bucket(struct dma_debug_entry *entry, in get_hash_bucket() argument
255 int idx = hash_fn(entry); in get_hash_bucket()
299 struct dma_debug_entry *entry, *ret = NULL; in __hash_bucket_find() local
302 list_for_each_entry(entry, &bucket->list, list) { in __hash_bucket_find()
303 if (!match(ref, entry)) in __hash_bucket_find()
318 entry->size == ref->size ? ++match_lvl : 0; in __hash_bucket_find()
319 entry->type == ref->type ? ++match_lvl : 0; in __hash_bucket_find()
320 entry->direction == ref->direction ? ++match_lvl : 0; in __hash_bucket_find()
321 entry->sg_call_ents == ref->sg_call_ents ? ++match_lvl : 0; in __hash_bucket_find()
325 return entry; in __hash_bucket_find()
332 ret = entry; in __hash_bucket_find()
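Lines 302-332 are a best-fit search: the match() callback (exact_match or containing_match) has already filtered on dev_addr, and the scoring then counts how many of size, type, direction and sg_call_ents also agree, remembering the highest scorer as a fallback when nothing matches on all four. The loop body, paraphrased:

		/* The ternaries exist only for the ++match_lvl side effect. */
		int match_lvl = 0;

		entry->size         == ref->size         ? ++match_lvl : 0;
		entry->type         == ref->type         ? ++match_lvl : 0;
		entry->direction    == ref->direction    ? ++match_lvl : 0;
		entry->sg_call_ents == ref->sg_call_ents ? ++match_lvl : 0;

		if (match_lvl == 4)
			return entry;		/* perfect fit, stop */

		if (match_lvl > last_lvl) {	/* best partial match so far */
			last_lvl = match_lvl;
			ret = entry;
		}

A partial match still lets check_unmap() report which attribute the driver got wrong, instead of treating the mapping as completely unknown.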
356 struct dma_debug_entry *entry, index = *ref; in bucket_find_contain() local
360 entry = __hash_bucket_find(*bucket, ref, containing_match); in bucket_find_contain()
362 if (entry) in bucket_find_contain()
363 return entry; in bucket_find_contain()
380 struct dma_debug_entry *entry) in hash_bucket_add() argument
382 list_add_tail(&entry->list, &bucket->list); in hash_bucket_add()
388 static void hash_bucket_del(struct dma_debug_entry *entry) in hash_bucket_del() argument
390 list_del(&entry->list); in hash_bucket_del()
422 static phys_addr_t to_cacheline_number(struct dma_debug_entry *entry) in to_cacheline_number() argument
424 return ((entry->paddr >> PAGE_SHIFT) << CACHELINE_PER_PAGE_SHIFT) + in to_cacheline_number()
425 (offset_in_page(entry->paddr) >> L1_CACHE_SHIFT); in to_cacheline_number()
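to_cacheline_number() flattens a physical address into a global cacheline index: page number times cachelines-per-page, plus the line within the page. A runnable check of the arithmetic with typical x86-64 constants assumed (4 KiB pages, 64-byte lines, so CACHELINE_PER_PAGE_SHIFT is 6):

#include <stdint.h>
#include <stdio.h>

#define PAGE_SHIFT		12
#define L1_CACHE_SHIFT		6
#define CACHELINE_PER_PAGE_SHIFT (PAGE_SHIFT - L1_CACHE_SHIFT)

static uint64_t to_cacheline_number(uint64_t paddr)
{
	uint64_t offset_in_page = paddr & ((1ULL << PAGE_SHIFT) - 1);

	return ((paddr >> PAGE_SHIFT) << CACHELINE_PER_PAGE_SHIFT) +
	       (offset_in_page >> L1_CACHE_SHIFT);
}

int main(void)
{
	/* page 1, byte 0x40: 1 * 64 cachelines + line 1 = 65 */
	printf("paddr 0x1040 -> cacheline %llu\n",
	       (unsigned long long)to_cacheline_number(0x1040));
	return 0;
}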
475 static int active_cacheline_insert(struct dma_debug_entry *entry) in active_cacheline_insert() argument
477 phys_addr_t cln = to_cacheline_number(entry); in active_cacheline_insert()
485 if (entry->direction == DMA_TO_DEVICE) in active_cacheline_insert()
489 rc = radix_tree_insert(&dma_active_cacheline, cln, entry); in active_cacheline_insert()
497 static void active_cacheline_remove(struct dma_debug_entry *entry) in active_cacheline_remove() argument
499 phys_addr_t cln = to_cacheline_number(entry); in active_cacheline_remove()
503 if (entry->direction == DMA_TO_DEVICE) in active_cacheline_remove()
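Cacheline numbers key a global radix tree of active mappings so that a second mapping of the same line can be caught. Both paths skip DMA_TO_DEVICE (lines 485 and 503): a device that only reads cannot expose stale data to the CPU, so such overlaps are deliberately tolerated. The insert side, reconstructed close to (but not guaranteed verbatim) the upstream code; radix_lock, dma_active_cacheline and active_cacheline_inc_overlap() are taken on trust as the surrounding names:

static int active_cacheline_insert(struct dma_debug_entry *entry)
{
	phys_addr_t cln = to_cacheline_number(entry);
	unsigned long flags;
	int rc;

	/* Read-only mappings can't create stale-data hazards. */
	if (entry->direction == DMA_TO_DEVICE)
		return 0;

	spin_lock_irqsave(&radix_lock, flags);
	rc = radix_tree_insert(&dma_active_cacheline, cln, entry);
	if (rc == -EEXIST)
		active_cacheline_inc_overlap(cln);
	spin_unlock_irqrestore(&radix_lock, flags);

	return rc;
}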
526 struct dma_debug_entry *entry; in debug_dma_dump_mappings() local
530 list_for_each_entry(entry, &bucket->list, list) { in debug_dma_dump_mappings()
531 if (!dev || dev == entry->dev) { in debug_dma_dump_mappings()
532 cln = to_cacheline_number(entry); in debug_dma_dump_mappings()
533 dev_info(entry->dev, in debug_dma_dump_mappings()
535 type2name[entry->type], idx, in debug_dma_dump_mappings()
536 &entry->paddr, entry->dev_addr, in debug_dma_dump_mappings()
537 entry->size, &cln, in debug_dma_dump_mappings()
538 dir2name[entry->direction], in debug_dma_dump_mappings()
539 maperr2str[entry->map_err_type]); in debug_dma_dump_mappings()
558 struct dma_debug_entry *entry; in dump_show() local
562 list_for_each_entry(entry, &bucket->list, list) { in dump_show()
563 cln = to_cacheline_number(entry); in dump_show()
566 dev_driver_string(entry->dev), in dump_show()
567 dev_name(entry->dev), in dump_show()
568 type2name[entry->type], idx, in dump_show()
569 &entry->paddr, entry->dev_addr, in dump_show()
570 entry->size, &cln, in dump_show()
571 dir2name[entry->direction], in dump_show()
572 maperr2str[entry->map_err_type]); in dump_show()
584 static void add_dma_entry(struct dma_debug_entry *entry, unsigned long attrs) in add_dma_entry() argument
590 bucket = get_hash_bucket(entry, &flags); in add_dma_entry()
591 hash_bucket_add(bucket, entry); in add_dma_entry()
594 rc = active_cacheline_insert(entry); in add_dma_entry()
600 is_swiotlb_active(entry->dev))) { in add_dma_entry()
601 err_printk(entry->dev, entry, in add_dma_entry()
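add_dma_entry() chains the new entry into its hash bucket and then registers its cacheline; -EEXIST from the radix tree means an overlapping mapping. The report is suppressed when the caller passed DMA_ATTR_SKIP_CPU_SYNC, or when swiotlb bouncing of unaligned kmalloc buffers can explain the overlap, which is what the is_swiotlb_active() test on line 600 is for. A sketch of that tail, pieced together from the references here rather than copied:

	rc = active_cacheline_insert(entry);
	if (rc == -ENOMEM) {
		/* Can't track any more lines: dma-debug disables itself. */
		global_disable = true;
	} else if (rc == -EEXIST && !(attrs & DMA_ATTR_SKIP_CPU_SYNC) &&
		   !(IS_ENABLED(CONFIG_DMA_BOUNCE_UNALIGNED_KMALLOC) &&
		     is_swiotlb_active(entry->dev))) {
		err_printk(entry->dev, entry,
			   "cacheline tracking EEXIST, overlapping mappings aren't supported\n");
	}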
608 struct dma_debug_entry *entry; in dma_debug_create_entries() local
611 entry = (void *)get_zeroed_page(gfp); in dma_debug_create_entries()
612 if (!entry) in dma_debug_create_entries()
616 list_add_tail(&entry[i].list, &free_entries); in dma_debug_create_entries()
626 struct dma_debug_entry *entry; in __dma_entry_alloc() local
628 entry = list_entry(free_entries.next, struct dma_debug_entry, list); in __dma_entry_alloc()
629 list_del(&entry->list); in __dma_entry_alloc()
630 memset(entry, 0, sizeof(*entry)); in __dma_entry_alloc()
636 return entry; in __dma_entry_alloc()
663 struct dma_debug_entry *entry; in dma_entry_alloc() local
679 entry = __dma_entry_alloc(); in dma_entry_alloc()
687 entry->stack_len = stack_trace_save(entry->stack_entries, in dma_entry_alloc()
688 ARRAY_SIZE(entry->stack_entries), in dma_entry_alloc()
691 return entry; in dma_entry_alloc()
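Entries come from a private pool rather than the slab: dma_debug_create_entries() carves one zeroed page into entries chained onto free_entries, and __dma_entry_alloc() pops and re-zeroes the head (lines 628-630) before dma_entry_alloc() attaches the caller's stack trace. A simplified model of the refill path, without the locking and free-count accounting the real code has; DMA_DEBUG_DYNAMIC_ENTRIES is the upstream per-page entry count:

static LIST_HEAD(free_entries);

static int dma_debug_create_entries(gfp_t gfp)
{
	struct dma_debug_entry *entry;
	int i;

	entry = (void *)get_zeroed_page(gfp);
	if (!entry)
		return -ENOMEM;

	/* DMA_DEBUG_DYNAMIC_ENTRIES entries fit in one page. */
	for (i = 0; i < DMA_DEBUG_DYNAMIC_ENTRIES; i++)
		list_add_tail(&entry[i].list, &free_entries);

	return 0;
}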
694 static void dma_entry_free(struct dma_debug_entry *entry) in dma_entry_free() argument
698 active_cacheline_remove(entry); in dma_entry_free()
705 list_add(&entry->list, &free_entries); in dma_entry_free()
830 struct dma_debug_entry *entry; in device_dma_allocations() local
836 list_for_each_entry(entry, &dma_entry_hash[i].list, list) { in device_dma_allocations()
837 if (entry->dev == dev) { in device_dma_allocations()
839 *out_entry = entry; in device_dma_allocations()
851 struct dma_debug_entry *entry; in dma_debug_device_change() local
859 count = device_dma_allocations(dev, &entry); in dma_debug_device_change()
862 err_printk(dev, entry, "device driver has pending " in dma_debug_device_change()
868 count, entry->dev_addr, entry->size, in dma_debug_device_change()
869 dir2name[entry->direction], type2name[entry->type]); in dma_debug_device_change()
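dma_debug_device_change() runs from a bus notifier on driver unbind; any entry still hashed for the departing device is a leak and gets reported along with its map-time stack trace. A hypothetical driver bug of the kind it catches, a mapping made in probe() with no matching unmap before the driver detaches; all names here are illustrative, not from the source:

#include <linux/dma-mapping.h>
#include <linux/platform_device.h>
#include <linux/slab.h>

static dma_addr_t leaked_dma;

static int demo_probe(struct platform_device *pdev)
{
	void *buf = kmalloc(512, GFP_KERNEL);

	if (!buf)
		return -ENOMEM;
	leaked_dma = dma_map_single(&pdev->dev, buf, 512, DMA_TO_DEVICE);
	if (dma_mapping_error(&pdev->dev, leaked_dma)) {
		kfree(buf);
		return -EIO;
	}
	return 0;
}

static void demo_remove(struct platform_device *pdev)
{
	/* BUG: no dma_unmap_single() anywhere, so on unbind dma-debug
	 * reports "device driver has pending DMA allocations while
	 * released from device". */
}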
961 struct dma_debug_entry *entry; in check_unmap() local
966 entry = bucket_find_exact(bucket, ref); in check_unmap()
968 if (!entry) { in check_unmap()
986 if (ref->size != entry->size) { in check_unmap()
987 err_printk(ref->dev, entry, "device driver frees " in check_unmap()
991 ref->dev_addr, entry->size, ref->size); in check_unmap()
994 if (ref->type != entry->type) { in check_unmap()
995 err_printk(ref->dev, entry, "device driver frees " in check_unmap()
1000 type2name[entry->type], type2name[ref->type]); in check_unmap()
1001 } else if ((entry->type == dma_debug_coherent || in check_unmap()
1002 entry->type == dma_debug_noncoherent) && in check_unmap()
1003 ref->paddr != entry->paddr) { in check_unmap()
1004 err_printk(ref->dev, entry, "device driver frees " in check_unmap()
1010 &entry->paddr, in check_unmap()
1015 ref->sg_call_ents != entry->sg_call_ents) { in check_unmap()
1016 err_printk(ref->dev, entry, "device driver frees " in check_unmap()
1019 entry->sg_call_ents, ref->sg_call_ents); in check_unmap()
1026 if (ref->direction != entry->direction) { in check_unmap()
1027 err_printk(ref->dev, entry, "device driver frees " in check_unmap()
1032 dir2name[entry->direction], in check_unmap()
1041 if (entry->map_err_type == MAP_ERR_NOT_CHECKED) { in check_unmap()
1042 err_printk(ref->dev, entry, in check_unmap()
1047 type2name[entry->type]); in check_unmap()
1050 hash_bucket_del(entry); in check_unmap()
1057 dma_entry_free(entry); in check_unmap()
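check_unmap() demands that an unmap repeat exactly what the map established: same dev_addr, size, type, direction, for scatter-gather the same nents, plus proof that the driver checked for mapping errors. A hypothetical fragment that would trigger the size and direction reports above:

#include <linux/dma-mapping.h>
#include <linux/slab.h>

static void demo_bad_unmap(struct device *dev)
{
	void *buf = kmalloc(1024, GFP_KERNEL);
	dma_addr_t dma;

	if (!buf)
		return;
	dma = dma_map_single(dev, buf, 1024, DMA_FROM_DEVICE);
	if (dma_mapping_error(dev, dma)) {
		kfree(buf);
		return;
	}
	/* BUG: size (512 vs 1024) and direction (TO_DEVICE vs
	 * FROM_DEVICE) both differ from the map call. */
	dma_unmap_single(dev, dma, 512, DMA_TO_DEVICE);
	kfree(buf);
}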
1100 struct dma_debug_entry *entry; in check_sync() local
1106 entry = bucket_find_contain(&bucket, ref, &flags); in check_sync()
1108 if (!entry) { in check_sync()
1116 if (ref->size > entry->size) { in check_sync()
1117 err_printk(dev, entry, "device driver syncs" in check_sync()
1122 entry->dev_addr, entry->size, in check_sync()
1126 if (entry->direction == DMA_BIDIRECTIONAL) in check_sync()
1129 if (ref->direction != entry->direction) { in check_sync()
1130 err_printk(dev, entry, "device driver syncs " in check_sync()
1134 (unsigned long long)ref->dev_addr, entry->size, in check_sync()
1135 dir2name[entry->direction], in check_sync()
1139 if (to_cpu && !(entry->direction == DMA_FROM_DEVICE) && in check_sync()
1141 err_printk(dev, entry, "device driver syncs " in check_sync()
1145 (unsigned long long)ref->dev_addr, entry->size, in check_sync()
1146 dir2name[entry->direction], in check_sync()
1149 if (!to_cpu && !(entry->direction == DMA_TO_DEVICE) && in check_sync()
1151 err_printk(dev, entry, "device driver syncs " in check_sync()
1155 (unsigned long long)ref->dev_addr, entry->size, in check_sync()
1156 dir2name[entry->direction], in check_sync()
1160 ref->sg_call_ents != entry->sg_call_ents) { in check_sync()
1161 err_printk(ref->dev, entry, "device driver syncs " in check_sync()
1164 entry->sg_call_ents, ref->sg_call_ents); in check_sync()
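check_sync() applies matching rules to the partial-sync calls: the sync must fall inside an existing mapping (hence bucket_find_contain), must not exceed its size, and its direction must be compatible with how the region was mapped, with DMA_BIDIRECTIONAL mappings exempt (line 1126). A hypothetical call that would be reported:

#include <linux/dma-mapping.h>

/* The region behind 'dma' was mapped with DMA_TO_DEVICE; syncing it
 * for the CPU with DMA_FROM_DEVICE trips both direction checks. */
static void demo_bad_sync(struct device *dev, dma_addr_t dma, size_t len)
{
	dma_sync_single_for_cpu(dev, dma, len, DMA_FROM_DEVICE);
}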
1214 struct dma_debug_entry *entry; in debug_dma_map_phys() local
1222 entry = dma_entry_alloc(); in debug_dma_map_phys()
1223 if (!entry) in debug_dma_map_phys()
1226 entry->dev = dev; in debug_dma_map_phys()
1227 entry->type = dma_debug_phy; in debug_dma_map_phys()
1228 entry->paddr = phys; in debug_dma_map_phys()
1229 entry->dev_addr = dma_addr; in debug_dma_map_phys()
1230 entry->size = size; in debug_dma_map_phys()
1231 entry->direction = direction; in debug_dma_map_phys()
1232 entry->map_err_type = MAP_ERR_NOT_CHECKED; in debug_dma_map_phys()
1241 add_dma_entry(entry, attrs); in debug_dma_map_phys()
1247 struct dma_debug_entry *entry; in debug_dma_mapping_error() local
1258 list_for_each_entry(entry, &bucket->list, list) { in debug_dma_mapping_error()
1259 if (!exact_match(&ref, entry)) in debug_dma_mapping_error()
1272 if (entry->map_err_type == MAP_ERR_NOT_CHECKED) { in debug_dma_mapping_error()
1273 entry->map_err_type = MAP_ERR_CHECKED; in debug_dma_mapping_error()
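debug_dma_mapping_error() is the hook behind dma_mapping_error(): finding the entry and flipping it from MAP_ERR_NOT_CHECKED to MAP_ERR_CHECKED records that the driver tested the returned address. Drivers that skip the test earn the "failed to check map error" report in check_unmap() (line 1041 above). The pattern it expects, with hypothetical names:

static int demo_map_checked(struct device *dev, void *buf, size_t len,
			    dma_addr_t *dma)
{
	*dma = dma_map_single(dev, buf, len, DMA_TO_DEVICE);
	if (dma_mapping_error(dev, *dma))
		return -ENOMEM;	/* never hand an unchecked address onward */
	return 0;
}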
1302 struct dma_debug_entry *entry; in debug_dma_map_sg() local
1316 entry = dma_entry_alloc(); in debug_dma_map_sg()
1317 if (!entry) in debug_dma_map_sg()
1320 entry->type = dma_debug_sg; in debug_dma_map_sg()
1321 entry->dev = dev; in debug_dma_map_sg()
1322 entry->paddr = sg_phys(s); in debug_dma_map_sg()
1323 entry->size = sg_dma_len(s); in debug_dma_map_sg()
1324 entry->dev_addr = sg_dma_address(s); in debug_dma_map_sg()
1325 entry->direction = direction; in debug_dma_map_sg()
1326 entry->sg_call_ents = nents; in debug_dma_map_sg()
1327 entry->sg_mapped_ents = mapped_ents; in debug_dma_map_sg()
1331 add_dma_entry(entry, attrs); in debug_dma_map_sg()
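debug_dma_map_sg() allocates one entry per mapped element, storing both the nents the driver passed in (sg_call_ents) and the count the mapping produced (sg_mapped_ents); check_unmap() compares against the former, which is why dma_unmap_sg() must be given the original nents, never the mapped count. Hypothetical usage of the API being instrumented:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int demo_map_sg(struct device *dev, struct scatterlist *sgl, int nents)
{
	int mapped = dma_map_sg(dev, sgl, nents, DMA_BIDIRECTIONAL);

	if (mapped == 0)
		return -EIO;
	/* ... program the device with the 'mapped' entries ... */
	dma_unmap_sg(dev, sgl, nents, DMA_BIDIRECTIONAL); /* nents, not mapped */
	return 0;
}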
1338 struct dma_debug_entry *entry; in get_nr_mapped_entries() local
1344 entry = bucket_find_exact(bucket, ref); in get_nr_mapped_entries()
1347 if (entry) in get_nr_mapped_entries()
1348 mapped_ents = entry->sg_mapped_ents; in get_nr_mapped_entries()
1401 struct dma_debug_entry *entry; in debug_dma_alloc_coherent() local
1413 entry = dma_entry_alloc(); in debug_dma_alloc_coherent()
1414 if (!entry) in debug_dma_alloc_coherent()
1417 entry->type = dma_debug_coherent; in debug_dma_alloc_coherent()
1418 entry->dev = dev; in debug_dma_alloc_coherent()
1419 entry->paddr = virt_to_paddr(virt); in debug_dma_alloc_coherent()
1420 entry->size = size; in debug_dma_alloc_coherent()
1421 entry->dev_addr = dma_addr; in debug_dma_alloc_coherent()
1422 entry->direction = DMA_BIDIRECTIONAL; in debug_dma_alloc_coherent()
1424 add_dma_entry(entry, attrs); in debug_dma_alloc_coherent()
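Coherent allocations are filed as dma_debug_coherent and always DMA_BIDIRECTIONAL; their physical address goes through virt_to_paddr() because dma_alloc_coherent() can return either a linear-map or a vmalloc address. A plausible shape for that helper, reconstructed rather than quoted:

static phys_addr_t virt_to_paddr(void *virt)
{
	struct page *page;

	if (is_vmalloc_addr(virt))
		page = vmalloc_to_page(virt);
	else
		page = virt_to_page(virt);

	return page_to_phys(page) + offset_in_page(virt);
}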
1553 struct dma_debug_entry *entry; in debug_dma_alloc_pages() local
1558 entry = dma_entry_alloc(); in debug_dma_alloc_pages()
1559 if (!entry) in debug_dma_alloc_pages()
1562 entry->type = dma_debug_noncoherent; in debug_dma_alloc_pages()
1563 entry->dev = dev; in debug_dma_alloc_pages()
1564 entry->paddr = page_to_phys(page); in debug_dma_alloc_pages()
1565 entry->size = size; in debug_dma_alloc_pages()
1566 entry->dev_addr = dma_addr; in debug_dma_alloc_pages()
1567 entry->direction = direction; in debug_dma_alloc_pages()
1569 add_dma_entry(entry, attrs); in debug_dma_alloc_pages()
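dma_alloc_pages() allocations, by contrast, are tracked as dma_debug_noncoherent and keep the caller's direction rather than forcing DMA_BIDIRECTIONAL. Hypothetical usage of the API being instrumented:

#include <linux/dma-mapping.h>

static struct page *demo_alloc(struct device *dev, size_t size,
			       dma_addr_t *dma)
{
	/* Freed later with dma_free_pages(dev, size, page, *dma,
	 * DMA_FROM_DEVICE), which is checked against this entry. */
	return dma_alloc_pages(dev, size, dma, DMA_FROM_DEVICE, GFP_KERNEL);
}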