Lines Matching +full:mm +full:-0 (excerpts from drm_mm.c, the DRM memory-range allocator)

20  * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
36 * unordered stack of free regions. This could easily be improved if an RB-tree
42 * Thomas Hellström <thomas-at-tungstengraphics-dot-com>
50 static struct drm_mm_node *drm_mm_kmalloc(struct drm_mm *mm, int atomic) in drm_mm_kmalloc() argument
57 mtx_lock(&mm->unused_lock); in drm_mm_kmalloc()
58 if (list_empty(&mm->unused_nodes)) in drm_mm_kmalloc()
62 list_entry(mm->unused_nodes.next, in drm_mm_kmalloc()
64 list_del(&child->node_list); in drm_mm_kmalloc()
65 --mm->num_unused; in drm_mm_kmalloc()
67 mtx_unlock(&mm->unused_lock); in drm_mm_kmalloc()
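drm_mm_kmalloc() serves node requests from the per-manager cache first: under unused_lock it pops the head of mm->unused_nodes and decrements num_unused, falling back to a fresh allocation only when the cache is empty. The fallback path is not visible in the excerpt; below is a simplified sketch of the pop-or-allocate shape, with malloc() standing in for the kernel allocator, locking elided, and the atomic-path policy reduced to "no fresh allocation when sleeping is forbidden".

    /* Simplified sketch of drm_mm_kmalloc(); malloc() stands in for the
     * kernel allocator and unused_lock is elided for brevity. */
    static struct drm_mm_node *node_alloc_sketch(struct drm_mm *mm, int atomic)
    {
            struct drm_mm_node *node = NULL;

            if (!list_empty(&mm->unused_nodes)) {
                    /* pop a pre-allocated node from the cache */
                    node = list_entry(mm->unused_nodes.next,
                                      struct drm_mm_node, node_list);
                    list_del(&node->node_list);
                    --mm->num_unused;
            } else if (!atomic) {
                    node = malloc(sizeof(*node));   /* sleeping path only */
            }
            return node;
    }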
72  /* drm_mm_pre_get() - pre-allocate drm_mm_node structure
73 * drm_mm: memory manager struct we are pre-allocating for
75 * Returns 0 on success or -ENOMEM if allocation fails.
77 int drm_mm_pre_get(struct drm_mm *mm) in drm_mm_pre_get() argument
81 mtx_lock(&mm->unused_lock); in drm_mm_pre_get()
82 while (mm->num_unused < MM_UNUSED_TARGET) { in drm_mm_pre_get()
83 mtx_unlock(&mm->unused_lock); in drm_mm_pre_get()
85 mtx_lock(&mm->unused_lock); in drm_mm_pre_get()
88 int ret = (mm->num_unused < 2) ? -ENOMEM : 0; in drm_mm_pre_get()
89 mtx_unlock(&mm->unused_lock); in drm_mm_pre_get()
92 ++mm->num_unused; in drm_mm_pre_get()
93 list_add_tail(&node->node_list, &mm->unused_nodes); in drm_mm_pre_get()
95 mtx_unlock(&mm->unused_lock); in drm_mm_pre_get()
96 return 0; in drm_mm_pre_get()
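In other words, drm_mm_pre_get() repeatedly drops unused_lock, allocates a node, retakes the lock and queues the node on mm->unused_nodes until MM_UNUSED_TARGET nodes are cached, and reports -ENOMEM only when fewer than two nodes could be cached. A hedged usage sketch; the surrounding atomic section is illustrative:

    /* Illustrative: top up the node cache while sleeping is still
     * allowed, so a later atomic allocation can be served from it. */
    int ret = drm_mm_pre_get(mm);
    if (ret)
            return ret;     /* -ENOMEM: cache could not be refilled */

    /* ... enter atomic context; drm_mm_kmalloc() can now succeed from
     * mm->unused_nodes without a fresh allocation ... */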
102 return hole_node->start + hole_node->size; in drm_mm_hole_node_start()
108 list_entry(hole_node->node_list.next, struct drm_mm_node, in drm_mm_hole_node_end()
111 return next_node->start; in drm_mm_hole_node_end()
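Holes are tracked implicitly: a hole starts where the node carrying hole_follows ends, and ends where the next node in the ordered list begins. A small helper makes the arithmetic explicit (hypothetical name, mirroring the two accessors above):

    /* Hypothetical helper: size of the hole following 'node', derived
     * purely from the ordered node list. */
    static unsigned long hole_size_after(struct drm_mm_node *node)
    {
            struct drm_mm_node *next =
                    list_entry(node->node_list.next, struct drm_mm_node,
                               node_list);

            /* the hole spans [node->start + node->size, next->start) */
            return next->start - (node->start + node->size);
    }

For example, a node at [0x1000, 0x3000) followed by a node starting at 0x8000 implies a free hole of 0x5000 bytes.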
119 struct drm_mm *mm = hole_node->mm; in drm_mm_insert_helper() local
125 BUG_ON(!hole_node->hole_follows || node->allocated); in drm_mm_insert_helper()
127 if (mm->color_adjust) in drm_mm_insert_helper()
128 mm->color_adjust(hole_node, color, &adj_start, &adj_end); in drm_mm_insert_helper()
133 adj_start += alignment - tmp; in drm_mm_insert_helper()
137 hole_node->hole_follows = 0; in drm_mm_insert_helper()
138 list_del(&hole_node->hole_stack); in drm_mm_insert_helper()
141 node->start = adj_start; in drm_mm_insert_helper()
142 node->size = size; in drm_mm_insert_helper()
143 node->mm = mm; in drm_mm_insert_helper()
144 node->color = color; in drm_mm_insert_helper()
145 node->allocated = 1; in drm_mm_insert_helper()
147 INIT_LIST_HEAD(&node->hole_stack); in drm_mm_insert_helper()
148 list_add(&node->node_list, &hole_node->node_list); in drm_mm_insert_helper()
150 BUG_ON(node->start + node->size > adj_end); in drm_mm_insert_helper()
152 node->hole_follows = 0; in drm_mm_insert_helper()
153 if (node->start + node->size < hole_end) { in drm_mm_insert_helper()
154 list_add(&node->hole_stack, &mm->hole_stack); in drm_mm_insert_helper()
155 node->hole_follows = 1; in drm_mm_insert_helper()
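The alignment handling rounds adj_start up to the next multiple of the requested alignment before the node is carved out of the hole: with adj_start = 0x1003 and alignment = 0x100, tmp = 0x1003 % 0x100 = 3, so adj_start becomes 0x1003 + (0x100 - 3) = 0x1100. The same round-up as a standalone helper (hypothetical name):

    /* Hypothetical helper: round 'start' up to the next 'alignment'
     * boundary; alignment == 0 means no constraint. */
    static unsigned long align_up(unsigned long start, unsigned alignment)
    {
            unsigned long tmp;

            if (!alignment)
                    return start;
            tmp = start % alignment;
            if (tmp)
                    start += alignment - tmp;
            return start;
    }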
167 node = drm_mm_kmalloc(hole_node->mm, atomic); in drm_mm_get_block_generic()
179 * -ENOSPC if no suitable free area is available. The preallocated memory node
182 int drm_mm_insert_node_generic(struct drm_mm *mm, struct drm_mm_node *node, in drm_mm_insert_node_generic() argument
188 hole_node = drm_mm_search_free_generic(mm, size, alignment, in drm_mm_insert_node_generic()
189 color, 0); in drm_mm_insert_node_generic()
191 return -ENOSPC; in drm_mm_insert_node_generic()
194 return 0; in drm_mm_insert_node_generic()
198 int drm_mm_insert_node(struct drm_mm *mm, struct drm_mm_node *node, in drm_mm_insert_node() argument
201 return drm_mm_insert_node_generic(mm, node, size, alignment, 0); in drm_mm_insert_node()
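Unlike the get_block paths, the insert_node entry points take a caller-provided node, typically embedded in a driver object, so no separate allocation (and no unused-node cache) is involved. A minimal usage sketch; the include path and struct my_buffer are assumptions, not part of the source:

    #include <drm/drm_mm.h>          /* assumed header; path varies by tree */

    struct my_buffer {               /* hypothetical driver object */
            struct drm_mm_node node; /* embedded allocation record */
    };

    static int my_buffer_bind(struct drm_mm *mm, struct my_buffer *buf,
                              unsigned long size)
    {
            /* 4096-byte alignment is illustrative; returns -ENOSPC
             * when no hole is large enough. */
            return drm_mm_insert_node(mm, &buf->node, size, 4096);
    }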
211 struct drm_mm *mm = hole_node->mm; in drm_mm_insert_helper_range() local
217 BUG_ON(!hole_node->hole_follows || node->allocated); in drm_mm_insert_helper_range()
224 if (mm->color_adjust) in drm_mm_insert_helper_range()
225 mm->color_adjust(hole_node, color, &adj_start, &adj_end); in drm_mm_insert_helper_range()
230 adj_start += alignment - tmp; in drm_mm_insert_helper_range()
234 hole_node->hole_follows = 0; in drm_mm_insert_helper_range()
235 list_del(&hole_node->hole_stack); in drm_mm_insert_helper_range()
238 node->start = adj_start; in drm_mm_insert_helper_range()
239 node->size = size; in drm_mm_insert_helper_range()
240 node->mm = mm; in drm_mm_insert_helper_range()
241 node->color = color; in drm_mm_insert_helper_range()
242 node->allocated = 1; in drm_mm_insert_helper_range()
244 INIT_LIST_HEAD(&node->hole_stack); in drm_mm_insert_helper_range()
245 list_add(&node->node_list, &hole_node->node_list); in drm_mm_insert_helper_range()
247 BUG_ON(node->start + node->size > adj_end); in drm_mm_insert_helper_range()
248 BUG_ON(node->start + node->size > end); in drm_mm_insert_helper_range()
250 node->hole_follows = 0; in drm_mm_insert_helper_range()
251 if (node->start + node->size < hole_end) { in drm_mm_insert_helper_range()
252 list_add(&node->hole_stack, &mm->hole_stack); in drm_mm_insert_helper_range()
253 node->hole_follows = 1; in drm_mm_insert_helper_range()
267 node = drm_mm_kmalloc(hole_node->mm, atomic); in drm_mm_get_block_range_generic()
280 * -ENOSPC if no suitable free area is available. This is for range
283 int drm_mm_insert_node_in_range_generic(struct drm_mm *mm, struct drm_mm_node *node, in drm_mm_insert_node_in_range_generic() argument
289 hole_node = drm_mm_search_free_in_range_generic(mm, in drm_mm_insert_node_in_range_generic()
291 start, end, 0); in drm_mm_insert_node_in_range_generic()
293 return -ENOSPC; in drm_mm_insert_node_in_range_generic()
298 return 0; in drm_mm_insert_node_in_range_generic()
302 int drm_mm_insert_node_in_range(struct drm_mm *mm, struct drm_mm_node *node, in drm_mm_insert_node_in_range() argument
306 return drm_mm_insert_node_in_range_generic(mm, node, size, alignment, 0, start, end); in drm_mm_insert_node_in_range()
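The in_range variants apply the same logic but clip every candidate hole to [start, end), which is useful when only part of the managed space is usable, e.g. a mappable aperture. Continuing the my_buffer sketch above (the 256 MiB bound is illustrative):

    static int my_buffer_bind_low(struct drm_mm *mm, struct my_buffer *buf,
                                  unsigned long size)
    {
            /* restrict the allocation to the first 256 MiB */
            return drm_mm_insert_node_in_range(mm, &buf->node, size, 4096,
                                               0, 256UL << 20);
    }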
315 struct drm_mm *mm = node->mm; in drm_mm_remove_node() local
318 BUG_ON(node->scanned_block || node->scanned_prev_free in drm_mm_remove_node()
319 || node->scanned_next_free); in drm_mm_remove_node()
322 list_entry(node->node_list.prev, struct drm_mm_node, node_list); in drm_mm_remove_node()
324 if (node->hole_follows) { in drm_mm_remove_node()
327 list_del(&node->hole_stack); in drm_mm_remove_node()
332 if (!prev_node->hole_follows) { in drm_mm_remove_node()
333 prev_node->hole_follows = 1; in drm_mm_remove_node()
334 list_add(&prev_node->hole_stack, &mm->hole_stack); in drm_mm_remove_node()
336 list_move(&prev_node->hole_stack, &mm->hole_stack); in drm_mm_remove_node()
338 list_del(&node->node_list); in drm_mm_remove_node()
339 node->allocated = 0; in drm_mm_remove_node()
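Note that drm_mm_remove_node() unhooks the node and merges its space back into the surrounding free space (extending the previous node's hole or putting the previous node back on the hole stack), but never frees the node itself. For the embedded-node pattern above, teardown is therefore just:

    /* Illustrative teardown matching my_buffer_bind() above. */
    static void my_buffer_unbind(struct my_buffer *buf)
    {
            drm_mm_remove_node(&buf->node); /* space rejoins the holes */
            /* buf->node storage remains valid and can be reused */
    }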
351 struct drm_mm *mm = node->mm; in drm_mm_put_block() local
355 mtx_lock(&mm->unused_lock); in drm_mm_put_block()
356 if (mm->num_unused < MM_UNUSED_TARGET) { in drm_mm_put_block()
357 list_add(&node->node_list, &mm->unused_nodes); in drm_mm_put_block()
358 ++mm->num_unused; in drm_mm_put_block()
361 mtx_unlock(&mm->unused_lock); in drm_mm_put_block()
368 if (end - start < size) in check_free_hole()
369 return 0; in check_free_hole()
374 start += alignment - tmp; in check_free_hole()
380 struct drm_mm_node *drm_mm_search_free_generic(const struct drm_mm *mm, in drm_mm_search_free_generic() argument
390 BUG_ON(mm->scanned_blocks); in drm_mm_search_free_generic()
393 best_size = ~0UL; in drm_mm_search_free_generic()
395 list_for_each_entry(entry, &mm->hole_stack, hole_stack) { in drm_mm_search_free_generic()
399 if (mm->color_adjust) { in drm_mm_search_free_generic()
400 mm->color_adjust(entry, color, &adj_start, &adj_end); in drm_mm_search_free_generic()
405 BUG_ON(!entry->hole_follows); in drm_mm_search_free_generic()
412 if (entry->size < best_size) { in drm_mm_search_free_generic()
414 best_size = entry->size; in drm_mm_search_free_generic()
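The search walks the hole stack and, depending on the best_match flag (see the comment near drm_mm_scan_remove_block() below), either returns the first hole that passes check_free_hole() or keeps scanning for the smallest one that fits. The first-fit early return is not visible in the excerpt; a condensed reconstruction of the loop body, under that assumption:

    if (!check_free_hole(adj_start, adj_end, size, alignment))
            continue;               /* hole too small or misaligned */
    if (!best_match)
            return entry;           /* first fit: take it immediately */
    if (entry->size < best_size) {  /* best fit: remember tightest hole */
            best = entry;
            best_size = entry->size;
    }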
422 struct drm_mm_node *drm_mm_search_free_in_range_generic(const struct drm_mm *mm, in drm_mm_search_free_in_range_generic() argument
434 BUG_ON(mm->scanned_blocks); in drm_mm_search_free_in_range_generic()
437 best_size = ~0UL; in drm_mm_search_free_in_range_generic()
439 list_for_each_entry(entry, &mm->hole_stack, hole_stack) { in drm_mm_search_free_in_range_generic()
445 BUG_ON(!entry->hole_follows); in drm_mm_search_free_in_range_generic()
447 if (mm->color_adjust) { in drm_mm_search_free_in_range_generic()
448 mm->color_adjust(entry, color, &adj_start, &adj_end); in drm_mm_search_free_in_range_generic()
459 if (entry->size < best_size) { in drm_mm_search_free_in_range_generic()
461 best_size = entry->size; in drm_mm_search_free_in_range_generic()
474 list_replace(&old->node_list, &new->node_list); in drm_mm_replace_node()
475 list_replace(&old->hole_stack, &new->hole_stack); in drm_mm_replace_node()
476 new->hole_follows = old->hole_follows; in drm_mm_replace_node()
477 new->mm = old->mm; in drm_mm_replace_node()
478 new->start = old->start; in drm_mm_replace_node()
479 new->size = old->size; in drm_mm_replace_node()
480 new->color = old->color; in drm_mm_replace_node()
482 old->allocated = 0; in drm_mm_replace_node()
483 new->allocated = 1; in drm_mm_replace_node()
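drm_mm_replace_node() transplants an allocation from one node structure to another by swapping the list linkage and copying the bookkeeping fields, so the range never becomes observably free in between. This is handy when swapping a temporary node for one embedded in a longer-lived object; a hedged sketch, with the signature inferred from the field copies above:

    /* Illustrative: hand the allocated range over from a temporary
     * node to the embedded one, without a remove/insert cycle. */
    void my_buffer_adopt(struct my_buffer *buf, struct drm_mm_node *tmp)
    {
            drm_mm_replace_node(tmp, &buf->node);
            /* tmp->allocated is now 0; its storage may be recycled */
    }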
493 * Warning: As long as the scan list is non-empty, no other operations than
496 void drm_mm_init_scan(struct drm_mm *mm, in drm_mm_init_scan() argument
501 mm->scan_color = color; in drm_mm_init_scan()
502 mm->scan_alignment = alignment; in drm_mm_init_scan()
503 mm->scan_size = size; in drm_mm_init_scan()
504 mm->scanned_blocks = 0; in drm_mm_init_scan()
505 mm->scan_hit_start = 0; in drm_mm_init_scan()
506 mm->scan_hit_end = 0; in drm_mm_init_scan()
507 mm->scan_check_range = 0; in drm_mm_init_scan()
508 mm->prev_scanned_node = NULL; in drm_mm_init_scan()
516 * hole. This version is for range-restricted scans.
518 * Warning: As long as the scan list is non-empty, no other operations than
521 void drm_mm_init_scan_with_range(struct drm_mm *mm, in drm_mm_init_scan_with_range() argument
528 mm->scan_color = color; in drm_mm_init_scan_with_range()
529 mm->scan_alignment = alignment; in drm_mm_init_scan_with_range()
530 mm->scan_size = size; in drm_mm_init_scan_with_range()
531 mm->scanned_blocks = 0; in drm_mm_init_scan_with_range()
532 mm->scan_hit_start = 0; in drm_mm_init_scan_with_range()
533 mm->scan_hit_end = 0; in drm_mm_init_scan_with_range()
534 mm->scan_start = start; in drm_mm_init_scan_with_range()
535 mm->scan_end = end; in drm_mm_init_scan_with_range()
536 mm->scan_check_range = 1; in drm_mm_init_scan_with_range()
537 mm->prev_scanned_node = NULL; in drm_mm_init_scan_with_range()
545  * Returns non-zero if a hole has been found, zero otherwise.
549 struct drm_mm *mm = node->mm; in drm_mm_scan_add_block() local
554 mm->scanned_blocks++; in drm_mm_scan_add_block()
556 BUG_ON(node->scanned_block); in drm_mm_scan_add_block()
557 node->scanned_block = 1; in drm_mm_scan_add_block()
559 prev_node = list_entry(node->node_list.prev, struct drm_mm_node, in drm_mm_scan_add_block()
562 node->scanned_preceeds_hole = prev_node->hole_follows; in drm_mm_scan_add_block()
563 prev_node->hole_follows = 1; in drm_mm_scan_add_block()
564 list_del(&node->node_list); in drm_mm_scan_add_block()
565 node->node_list.prev = &prev_node->node_list; in drm_mm_scan_add_block()
566 node->node_list.next = &mm->prev_scanned_node->node_list; in drm_mm_scan_add_block()
567 mm->prev_scanned_node = node; in drm_mm_scan_add_block()
572 if (mm->scan_check_range) { in drm_mm_scan_add_block()
573 if (adj_start < mm->scan_start) in drm_mm_scan_add_block()
574 adj_start = mm->scan_start; in drm_mm_scan_add_block()
575 if (adj_end > mm->scan_end) in drm_mm_scan_add_block()
576 adj_end = mm->scan_end; in drm_mm_scan_add_block()
579 if (mm->color_adjust) in drm_mm_scan_add_block()
580 mm->color_adjust(prev_node, mm->scan_color, in drm_mm_scan_add_block()
584 mm->scan_size, mm->scan_alignment)) { in drm_mm_scan_add_block()
585 mm->scan_hit_start = hole_start; in drm_mm_scan_add_block()
586 mm->scan_hit_end = hole_end; in drm_mm_scan_add_block()
590 return 0; in drm_mm_scan_add_block()
602 * immediately following drm_mm_search_free with best_match = 0 will then return
610 struct drm_mm *mm = node->mm; in drm_mm_scan_remove_block() local
613 mm->scanned_blocks--; in drm_mm_scan_remove_block()
615 BUG_ON(!node->scanned_block); in drm_mm_scan_remove_block()
616 node->scanned_block = 0; in drm_mm_scan_remove_block()
618 prev_node = list_entry(node->node_list.prev, struct drm_mm_node, in drm_mm_scan_remove_block()
621 prev_node->hole_follows = node->scanned_preceeds_hole; in drm_mm_scan_remove_block()
622 list_add(&node->node_list, &prev_node->node_list); in drm_mm_scan_remove_block()
624 return (drm_mm_hole_node_end(node) > mm->scan_hit_start && in drm_mm_scan_remove_block()
625 node->start < mm->scan_hit_end); in drm_mm_scan_remove_block()
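Taken together, the scan API is a dry-run eviction roster: blocks are fed in (typically in LRU order) until drm_mm_scan_add_block() reports that a large-enough hole would appear; then every block must be taken out again with drm_mm_scan_remove_block(), in exactly the reverse order of addition, and the ones whose removal returns non-zero are the ones that must actually be evicted. A hedged sketch of the whole protocol, written against the Linux-style list API used throughout this file; struct my_buffer (assumed here to also carry lru_link and scan_link members), the lru list and my_buffer_evict() are hypothetical driver-side names:

    static int evict_something(struct drm_mm *mm, struct list_head *lru,
                               unsigned long size, unsigned alignment,
                               unsigned long color)
    {
            struct my_buffer *obj, *next;
            struct list_head scan_list, evict_list;
            int found = 0;

            INIT_LIST_HEAD(&scan_list);
            INIT_LIST_HEAD(&evict_list);

            drm_mm_init_scan(mm, size, alignment, color);

            /* Pass 1: add candidates in LRU order until a hole appears.
             * list_add() inserts at the head, so scan_list holds the
             * blocks in reverse order of addition. */
            list_for_each_entry(obj, lru, lru_link) {
                    list_add(&obj->scan_link, &scan_list);
                    if (drm_mm_scan_add_block(&obj->node)) {
                            found = 1;
                            break;
                    }
            }

            /* Pass 2: mandatory unwind, in reverse order of addition;
             * remember the nodes that overlap the hole. */
            while (!list_empty(&scan_list)) {
                    obj = list_entry(scan_list.next, struct my_buffer,
                                     scan_link);
                    list_del(&obj->scan_link);
                    if (drm_mm_scan_remove_block(&obj->node))
                            list_add(&obj->scan_link, &evict_list);
            }

            if (!found)
                    return -ENOSPC;

            /* Only now, with the scan fully unwound, is eviction safe. */
            list_for_each_entry_safe(obj, next, &evict_list, scan_link) {
                    list_del(&obj->scan_link);
                    my_buffer_evict(obj);   /* ends in drm_mm_remove_node() */
            }
            return 0;
    }

After the evictions, a drm_mm_search_free() for the same size and alignment is guaranteed to find the freed hole, as the comment above drm_mm_scan_remove_block() notes.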
629 int drm_mm_clean(struct drm_mm * mm) in drm_mm_clean() argument
631 struct list_head *head = &mm->head_node.node_list; in drm_mm_clean()
633  	return (head->next == head); in drm_mm_clean()
637 int drm_mm_init(struct drm_mm * mm, unsigned long start, unsigned long size) in drm_mm_init() argument
639 INIT_LIST_HEAD(&mm->hole_stack); in drm_mm_init()
640 INIT_LIST_HEAD(&mm->unused_nodes); in drm_mm_init()
641 mm->num_unused = 0; in drm_mm_init()
642 mm->scanned_blocks = 0; in drm_mm_init()
643 mtx_init(&mm->unused_lock, "drm_unused", NULL, MTX_DEF); in drm_mm_init()
646 INIT_LIST_HEAD(&mm->head_node.node_list); in drm_mm_init()
647 INIT_LIST_HEAD(&mm->head_node.hole_stack); in drm_mm_init()
648 mm->head_node.hole_follows = 1; in drm_mm_init()
649 mm->head_node.scanned_block = 0; in drm_mm_init()
650 mm->head_node.scanned_prev_free = 0; in drm_mm_init()
651 mm->head_node.scanned_next_free = 0; in drm_mm_init()
652 mm->head_node.mm = mm; in drm_mm_init()
653 mm->head_node.start = start + size; in drm_mm_init()
654 mm->head_node.size = start - mm->head_node.start; in drm_mm_init()
655 list_add_tail(&mm->head_node.hole_stack, &mm->hole_stack); in drm_mm_init()
657 mm->color_adjust = NULL; in drm_mm_init()
659 return 0; in drm_mm_init()
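drm_mm_init() seeds the space with a single embedded sentinel: head_node.start is placed at start + size, and head_node.size is start - head_node.start, i.e. -size under unsigned wraparound, so drm_mm_hole_node_start(&mm->head_node) wraps back around to start and the entire range [start, start + size) initially appears as one hole following the sentinel. That is why the head node goes straight onto the hole stack with hole_follows = 1. A lifecycle sketch; the managed range is illustrative:

    /* Illustrative lifecycle; manage [0, 64 MiB). */
    struct drm_mm mm;

    drm_mm_init(&mm, 0, 64UL << 20);

    /* ... drm_mm_insert_node() / drm_mm_remove_node() traffic ... */

    if (drm_mm_clean(&mm))          /* nothing left but the sentinel? */
            drm_mm_takedown(&mm);   /* also drains the unused-node cache */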
663 void drm_mm_takedown(struct drm_mm * mm) in drm_mm_takedown() argument
667 if (!list_empty(&mm->head_node.node_list)) { in drm_mm_takedown()
672 mtx_lock(&mm->unused_lock); in drm_mm_takedown()
673 list_for_each_entry_safe(entry, next, &mm->unused_nodes, node_list) { in drm_mm_takedown()
674 list_del(&entry->node_list); in drm_mm_takedown()
676 --mm->num_unused; in drm_mm_takedown()
678 mtx_unlock(&mm->unused_lock); in drm_mm_takedown()
680 BUG_ON(mm->num_unused != 0); in drm_mm_takedown()
684 void drm_mm_debug_table(struct drm_mm *mm, const char *prefix) in drm_mm_debug_table() argument
687 unsigned long total_used = 0, total_free = 0, total = 0; in drm_mm_debug_table()
690 hole_start = drm_mm_hole_node_start(&mm->head_node); in drm_mm_debug_table()
691 hole_end = drm_mm_hole_node_end(&mm->head_node); in drm_mm_debug_table()
692 hole_size = hole_end - hole_start; in drm_mm_debug_table()
694 printk(KERN_DEBUG "%s 0x%08lx-0x%08lx: %8lu: free\n", in drm_mm_debug_table()
699 drm_mm_for_each_node(entry, mm) { in drm_mm_debug_table()
700 printk(KERN_DEBUG "%s 0x%08lx-0x%08lx: %8lu: used\n", in drm_mm_debug_table()
701 prefix, entry->start, entry->start + entry->size, in drm_mm_debug_table()
702 entry->size); in drm_mm_debug_table()
703 total_used += entry->size; in drm_mm_debug_table()
705 if (entry->hole_follows) { in drm_mm_debug_table()
708 hole_size = hole_end - hole_start; in drm_mm_debug_table()
709 printk(KERN_DEBUG "%s 0x%08lx-0x%08lx: %8lu: free\n", in drm_mm_debug_table()