Lines matching refs: cachep
19 void kmem_cache_set_callback(struct kmem_cache *cachep, void (*callback)(void *))
21 cachep->callback = callback;
24 void kmem_cache_set_private(struct kmem_cache *cachep, void *private)
26 cachep->private = private;
29 void kmem_cache_set_non_kernel(struct kmem_cache *cachep, unsigned int val)
31 cachep->non_kernel = val;
34 unsigned long kmem_cache_get_alloc(struct kmem_cache *cachep)
36 return cachep->size * cachep->nr_allocated;
39 unsigned long kmem_cache_nr_allocated(struct kmem_cache *cachep)
41 return cachep->nr_allocated;
44 unsigned long kmem_cache_nr_tallocated(struct kmem_cache *cachep)
46 return cachep->nr_tallocated;
49 void kmem_cache_zero_nr_tallocated(struct kmem_cache *cachep)
51 cachep->nr_tallocated = 0;
54 void *kmem_cache_alloc_lru(struct kmem_cache *cachep, struct list_lru *lru,
59 if (cachep->exec_callback) {
60 if (cachep->callback)
61 cachep->callback(cachep->private);
62 cachep->exec_callback = false;
66 if (!cachep->non_kernel) {
67 if (cachep->callback)
68 cachep->exec_callback = true;
72 cachep->non_kernel--;
75 pthread_mutex_lock(&cachep->lock);
76 if (cachep->nr_objs) {
77 struct radix_tree_node *node = cachep->objs;
78 cachep->nr_objs--;
79 cachep->objs = node->parent;
80 pthread_mutex_unlock(&cachep->lock);
84 pthread_mutex_unlock(&cachep->lock);
85 if (cachep->align) {
86 if (posix_memalign(&p, cachep->align, cachep->size) < 0)
89 p = malloc(cachep->size);
92 if (cachep->ctor)
93 cachep->ctor(p);
95 memset(p, 0, cachep->size);
98 uatomic_inc(&cachep->nr_allocated);
100 uatomic_inc(&cachep->nr_tallocated);
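The matches above outline the allocation path: fire a deferred test callback, charge the "non-kernel" allocation budget, then either pop a cached object off the per-cache free list under the lock or fall back to posix_memalign()/malloc(). A condensed sketch follows, building on the struct sketch above; the gfp guard, the __GFP_DIRECT_RECLAIM/__GFP_ZERO bits, the radix_tree_node stand-in, and the uatomic_* stand-ins are assumptions filled in around the matched lines, not the file's real headers.

#include <stdlib.h>
#include <string.h>

typedef unsigned int gfp_t;
#define __GFP_DIRECT_RECLAIM	0x400u	/* placeholder bit: allocation may sleep */
#define __GFP_ZERO		0x100u	/* placeholder bit: zero the object */

struct list_lru;			/* accepted for API compatibility, ignored */

struct radix_tree_node {		/* free objects are chained via ->parent */
	struct radix_tree_node *parent;
};

/* Stand-ins for liburcu's uatomic helpers used by the harness. */
#define uatomic_inc(p)	__atomic_fetch_add((p), 1, __ATOMIC_RELAXED)
#define uatomic_dec(p)	__atomic_fetch_sub((p), 1, __ATOMIC_RELAXED)

void *kmem_cache_alloc_lru(struct kmem_cache *cachep, struct list_lru *lru,
			   gfp_t gfp)
{
	void *p;

	/* Run a callback deferred by an earlier failed allocation. */
	if (cachep->exec_callback) {
		if (cachep->callback)
			cachep->callback(cachep->private);
		cachep->exec_callback = false;
	}

	/* Non-sleeping allocations draw on a test-controlled budget and
	 * arm the callback when that budget runs out. */
	if (!(gfp & __GFP_DIRECT_RECLAIM)) {
		if (!cachep->non_kernel) {
			if (cachep->callback)
				cachep->exec_callback = true;
			return NULL;
		}
		cachep->non_kernel--;
	}

	pthread_mutex_lock(&cachep->lock);
	if (cachep->nr_objs) {
		/* Fast path: reuse an object parked on the free list. */
		struct radix_tree_node *node = cachep->objs;

		cachep->nr_objs--;
		cachep->objs = node->parent;
		pthread_mutex_unlock(&cachep->lock);
		node->parent = NULL;
		p = node;
	} else {
		pthread_mutex_unlock(&cachep->lock);
		/* Slow path: fresh memory from libc.  posix_memalign()
		 * returns 0 on success and an errno value on failure. */
		if (cachep->align) {
			if (posix_memalign(&p, cachep->align, cachep->size) != 0)
				return NULL;
		} else {
			p = malloc(cachep->size);
			if (!p)
				return NULL;
		}

		if (cachep->ctor)
			cachep->ctor(p);
		else if (gfp & __GFP_ZERO)
			memset(p, 0, cachep->size);
	}

	uatomic_inc(&cachep->nr_allocated);
	uatomic_inc(&cachep->nr_tallocated);
	return p;
}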
106 void __kmem_cache_free_locked(struct kmem_cache *cachep, void *objp)
109 if (cachep->nr_objs > 10 || cachep->align) {
110 memset(objp, POISON_FREE, cachep->size);
114 cachep->nr_objs++;
115 node->parent = cachep->objs;
116 cachep->objs = node;
120 void kmem_cache_free_locked(struct kmem_cache *cachep, void *objp)
123 uatomic_dec(&cachep->nr_allocated);
126 __kmem_cache_free_locked(cachep, objp);
129 void kmem_cache_free(struct kmem_cache *cachep, void *objp)
131 pthread_mutex_lock(&cachep->lock);
132 kmem_cache_free_locked(cachep, objp);
133 pthread_mutex_unlock(&cachep->lock);
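The free path comes in three layers: __kmem_cache_free_locked() either poisons the object and returns it to libc or parks it on the free list, kmem_cache_free_locked() adds the allocation accounting, and kmem_cache_free() wraps both in the cache lock. A sketch building on the definitions above; the libc-free branch and the POISON_FREE definition are assumptions around the matched lines, and the harness's verbose tracing is left out.

#define POISON_FREE	0x6b	/* defined here for the sketch; matches the
				 * kernel's use-after-free poison byte */

/* Inner helper: the caller must already hold cachep->lock. */
void __kmem_cache_free_locked(struct kmem_cache *cachep, void *objp)
{
	struct radix_tree_node *node = objp;

	if (cachep->nr_objs > 10 || cachep->align) {
		/* Free list is full enough (or the cache is aligned):
		 * poison the object and hand it back to libc. */
		memset(objp, POISON_FREE, cachep->size);
		free(objp);
	} else {
		/* Park the object on the free list for reuse. */
		cachep->nr_objs++;
		node->parent = cachep->objs;
		cachep->objs = node;
	}
}

void kmem_cache_free_locked(struct kmem_cache *cachep, void *objp)
{
	uatomic_dec(&cachep->nr_allocated);
	__kmem_cache_free_locked(cachep, objp);
}

void kmem_cache_free(struct kmem_cache *cachep, void *objp)
{
	pthread_mutex_lock(&cachep->lock);
	kmem_cache_free_locked(cachep, objp);
	pthread_mutex_unlock(&cachep->lock);
}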
136 void kmem_cache_free_bulk(struct kmem_cache *cachep, size_t size, void **list)
141 if (cachep->exec_callback) {
142 if (cachep->callback)
143 cachep->callback(cachep->private);
144 cachep->exec_callback = false;
147 pthread_mutex_lock(&cachep->lock);
149 kmem_cache_free_locked(cachep, list[i]);
150 pthread_mutex_unlock(&cachep->lock);
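Bulk free runs any deferred test callback first, then takes the cache lock once for the whole batch rather than once per object. A sketch under the same assumptions:

void kmem_cache_free_bulk(struct kmem_cache *cachep, size_t size, void **list)
{
	size_t i;

	/* As in the single-object allocator, run any deferred callback. */
	if (cachep->exec_callback) {
		if (cachep->callback)
			cachep->callback(cachep->private);
		cachep->exec_callback = false;
	}

	/* One lock round-trip for the entire batch. */
	pthread_mutex_lock(&cachep->lock);
	for (i = 0; i < size; i++)
		kmem_cache_free_locked(cachep, list[i]);
	pthread_mutex_unlock(&cachep->lock);
}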
153 void kmem_cache_shrink(struct kmem_cache *cachep)
157 int kmem_cache_alloc_bulk(struct kmem_cache *cachep, gfp_t gfp, size_t size,
165 pthread_mutex_lock(&cachep->lock);
166 if (cachep->nr_objs >= size) {
171 if (!cachep->non_kernel)
173 cachep->non_kernel--;
176 node = cachep->objs;
177 cachep->nr_objs--;
178 cachep->objs = node->parent;
182 pthread_mutex_unlock(&cachep->lock);
184 pthread_mutex_unlock(&cachep->lock);
187 if (!cachep->non_kernel)
189 cachep->non_kernel--;
192 if (cachep->align) {
193 if (posix_memalign(&p[i], cachep->align,
194 cachep->size) < 0)
197 p[i] = malloc(cachep->size);
201 if (cachep->ctor)
202 cachep->ctor(p[i]);
204 memset(p[i], 0, cachep->size);
210 pthread_mutex_lock(&cachep->lock);
212 __kmem_cache_free_locked(cachep, p[i]);
213 pthread_mutex_unlock(&cachep->lock);
214 if (cachep->callback)
215 cachep->exec_callback = true;
221 uatomic_inc(&cachep->nr_allocated);
222 uatomic_inc(&cachep->nr_tallocated);
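Bulk allocation has a fast path that drains the free list when it can cover the whole request and a slow path that allocates object by object; on a partial failure it rolls the batch back through __kmem_cache_free_locked(), arms the test callback, and reports zero. A sketch under the same assumptions; the 0-or-size return mirrors the kernel's kmem_cache_alloc_bulk() contract.

int kmem_cache_alloc_bulk(struct kmem_cache *cachep, gfp_t gfp, size_t size,
			  void **p)
{
	size_t i;

	pthread_mutex_lock(&cachep->lock);
	if (cachep->nr_objs >= size) {
		/* Fast path: the free list can satisfy the whole batch. */
		for (i = 0; i < size; i++) {
			struct radix_tree_node *node;

			if (!(gfp & __GFP_DIRECT_RECLAIM)) {
				if (!cachep->non_kernel)
					break;
				cachep->non_kernel--;
			}

			node = cachep->objs;
			cachep->nr_objs--;
			cachep->objs = node->parent;
			node->parent = NULL;
			p[i] = node;
		}
		pthread_mutex_unlock(&cachep->lock);
	} else {
		/* Slow path: allocate each object from libc. */
		pthread_mutex_unlock(&cachep->lock);
		for (i = 0; i < size; i++) {
			if (!(gfp & __GFP_DIRECT_RECLAIM)) {
				if (!cachep->non_kernel)
					break;
				cachep->non_kernel--;
			}

			if (cachep->align) {
				if (posix_memalign(&p[i], cachep->align,
						   cachep->size) != 0)
					break;
			} else {
				p[i] = malloc(cachep->size);
				if (!p[i])
					break;
			}

			if (cachep->ctor)
				cachep->ctor(p[i]);
			else if (gfp & __GFP_ZERO)
				memset(p[i], 0, cachep->size);
		}
	}

	if (i < size) {
		/* Partial failure: put back what was handed out, arm the
		 * test callback, and report a failed bulk allocation. */
		size_t done = i;

		pthread_mutex_lock(&cachep->lock);
		for (i = 0; i < done; i++)
			__kmem_cache_free_locked(cachep, p[i]);
		pthread_mutex_unlock(&cachep->lock);
		if (cachep->callback)
			cachep->exec_callback = true;
		return 0;
	}

	for (i = 0; i < size; i++) {
		uatomic_inc(&cachep->nr_allocated);
		uatomic_inc(&cachep->nr_tallocated);
	}
	return size;
}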