xref: /titanic_41/usr/src/lib/libast/common/vmalloc/vmlast.c (revision 1915be19b7a1f7c113b706bd54d6ca393e8d8107)
/***********************************************************************
*                                                                      *
*               This software is part of the ast package               *
*           Copyright (c) 1985-2007 AT&T Knowledge Ventures            *
*                      and is licensed under the                       *
*                  Common Public License, Version 1.0                  *
*                      by AT&T Knowledge Ventures                      *
*                                                                      *
*                A copy of the License is available at                 *
*            http://www.opensource.org/licenses/cpl1.0.txt             *
*         (with md5 checksum 059e8cd6165cb4c31e351f2b69388fd9)         *
*                                                                      *
*              Information and Software Systems Research               *
*                            AT&T Research                             *
*                           Florham Park NJ                            *
*                                                                      *
*                 Glenn Fowler <gsf@research.att.com>                  *
*                  David Korn <dgk@research.att.com>                   *
*                   Phong Vo <kpv@research.att.com>                    *
*                                                                      *
***********************************************************************/
#if defined(_UWIN) && defined(_BLD_ast)

void _STUB_vmlast(){}

#else

#include	"vmhdr.h"

/*	Allocation with freeing and reallocing of last allocated block only.
**
**	Written by Kiem-Phong Vo, kpv@research.att.com, 01/16/94.
*/
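
/*	A minimal usage sketch, assuming the public interface declared in
**	<vmalloc.h> (vmopen, vmalloc, vmresize, vmfree, vmclose, and the
**	Vmdcheap discipline). A region opened with the Vmlast method may
**	free or resize only the most recently allocated block:
**
**		#include <vmalloc.h>
**
**		Vmalloc_t* vm = vmopen(Vmdcheap, Vmlast, 0);
**		char* a = (char*)vmalloc(vm, 64);
**		char* b = (char*)vmalloc(vm, 64);  // b is now the last block
**		vmfree(vm, a);                     // fails (-1): a is not the last block
**		b = (char*)vmresize(vm, b, 128, VM_RSMOVE|VM_RSCOPY);
**		vmfree(vm, b);                     // ok: b is still the last block
**		vmclose(vm);
*/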

#if __STD_C
static Void_t* lastalloc(Vmalloc_t* vm, size_t size)
#else
static Void_t* lastalloc(vm, size)
Vmalloc_t*	vm;
size_t		size;
#endif
{
	reg Block_t	*tp, *next;
	reg Seg_t	*seg, *last;
	reg size_t	s;
	reg Vmdata_t*	vd = vm->data;
	reg int		local;
	size_t		orgsize = 0;

	if(!(local = vd->mode&VM_TRUST))
	{	GETLOCAL(vd,local);
		if(ISLOCK(vd,local))
			return NIL(Void_t*);
		SETLOCK(vd,local);
		orgsize = size;
	}

	size = size < ALIGN ? ALIGN : ROUND(size,ALIGN);
	for(;;)
	{	for(last = NIL(Seg_t*), seg = vd->seg; seg; last = seg, seg = seg->next)
		{	if(!(tp = seg->free) || (SIZE(tp)+sizeof(Head_t)) < size)
				continue;
			if(last)
			{	last->next = seg->next;
				seg->next = vd->seg;
				vd->seg = seg;
			}
			goto got_block;
		}

		/* there is no usable free space in region, try extending */
		if((tp = (*_Vmextend)(vm,size,NIL(Vmsearch_f))) )
		{	seg = SEG(tp);
			goto got_block;
		}
		else if(vd->mode&VM_AGAIN)
			vd->mode &= ~VM_AGAIN;
		else	goto done;
	}

got_block:
	if((s = SIZE(tp)) >= size)
	{	next = (Block_t*)((Vmuchar_t*)tp+size);
		SIZE(next) = s - size;
		SEG(next) = seg;
		seg->free = next;
	}
	else	seg->free = NIL(Block_t*);

	vd->free = seg->last = tp;

	if(!local && (vd->mode&VM_TRACE) && _Vmtrace)
		(*_Vmtrace)(vm, NIL(Vmuchar_t*), (Vmuchar_t*)tp, orgsize, 0);

done:
	CLRLOCK(vd,local);
	ANNOUNCE(local, vm, VM_ALLOC, (Void_t*)tp, vm->disc);
	return (Void_t*)tp;
}

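/*	Free a block. Only the last allocated block (vd->free) may be
**	freed; passing any other address raises VM_BADADDR and fails.
*/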
#if __STD_C
static int lastfree(Vmalloc_t* vm, reg Void_t* data )
#else
static int lastfree(vm, data)
Vmalloc_t*	vm;
reg Void_t*	data;
#endif
{
	reg Seg_t*	seg;
	reg Block_t*	fp;
	reg size_t	s;
	reg Vmdata_t*	vd = vm->data;
	reg int		local;

	if(!data)
		return 0;
	if(!(local = vd->mode&VM_TRUST) )
	{	GETLOCAL(vd, local);
		if(ISLOCK(vd, local))
			return -1;
		SETLOCK(vd, local);
	}
	if(data != (Void_t*)vd->free)
	{	if(!local && vm->disc->exceptf)
			(void)(*vm->disc->exceptf)(vm,VM_BADADDR,data,vm->disc);
		CLRLOCK(vd, local);
		return -1;
	}

	seg = vd->seg;
	if(!local && (vd->mode&VM_TRACE) && _Vmtrace)
	{	if(seg->free )
			s = (Vmuchar_t*)(seg->free) - (Vmuchar_t*)data;
		else	s = (Vmuchar_t*)BLOCK(seg->baddr) - (Vmuchar_t*)data;
		(*_Vmtrace)(vm, (Vmuchar_t*)data, NIL(Vmuchar_t*), s, 0);
	}

	vd->free = NIL(Block_t*);
	fp = (Block_t*)data;
	SEG(fp)  = seg;
	SIZE(fp) = ((Vmuchar_t*)BLOCK(seg->baddr) - (Vmuchar_t*)data) - sizeof(Head_t);
	seg->free = fp;
	seg->last = NIL(Block_t*);

	CLRLOCK(vd, local);
	ANNOUNCE(local, vm, VM_FREE, data, vm->disc);
	return 0;
}

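/*	Resize a block. The last allocated block may grow in place,
**	extending its segment through the discipline memory function when
**	needed; any other block of the region can only be reallocated,
**	subject to the VM_RSMOVE/VM_RSCOPY bits in type. VM_RSZERO clears
**	any space beyond the old size.
*/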
#if __STD_C
static Void_t* lastresize(Vmalloc_t* vm, reg Void_t* data, size_t size, int type )
#else
static Void_t* lastresize(vm, data, size, type )
Vmalloc_t*	vm;
reg Void_t*	data;
size_t		size;
int		type;
#endif
{
	reg Block_t*	tp;
	reg Seg_t	*seg;
	reg size_t	oldsize;
	reg ssize_t	s, ds;
	reg Vmdata_t*	vd = vm->data;
	reg int		local;
	reg Void_t*	addr;
	Void_t*		orgdata = NIL(Void_t*);
	size_t		orgsize = 0;

	if(!data)
	{	oldsize = 0;
		data = lastalloc(vm,size);
		goto done;
	}
	if(size <= 0)
	{	(void)lastfree(vm,data);
		return NIL(Void_t*);
	}

	if(!(local = vd->mode&VM_TRUST))
	{	GETLOCAL(vd, local);
		if(ISLOCK(vd, local))
			return NIL(Void_t*);
		SETLOCK(vd, local);
		orgdata = data;
		orgsize = size;
	}

	if(data == (Void_t*)vd->free)
		seg = vd->seg;
	else
	{	/* see if it was one of ours */
		for(seg = vd->seg; seg; seg = seg->next)
			if(data >= seg->addr && data < (Void_t*)seg->baddr)
				break;
		if(!seg || (VLONG(data)%ALIGN) != 0 ||
		   (seg->last && (Vmuchar_t*)data > (Vmuchar_t*)seg->last) )
		{	CLRLOCK(vd,local);
			return NIL(Void_t*);
		}
	}

	/* set 's' to be the current available space */
	if(data != seg->last)
	{	if(seg->last && (Vmuchar_t*)data < (Vmuchar_t*)seg->last)
			oldsize = (Vmuchar_t*)seg->last - (Vmuchar_t*)data;
		else	oldsize = (Vmuchar_t*)BLOCK(seg->baddr) - (Vmuchar_t*)data;
		s = -1;
	}
	else
	{	s = (Vmuchar_t*)BLOCK(seg->baddr) - (Vmuchar_t*)data;
		if(!(tp = seg->free) )
			oldsize = s;
		else
		{	oldsize = (Vmuchar_t*)tp - (Vmuchar_t*)data;
			seg->free = NIL(Block_t*);
		}
	}

	size = size < ALIGN ? ALIGN : ROUND(size,ALIGN);
	if(s < 0 || (ssize_t)size > s)
	{	if(s >= 0) /* amount to extend */
		{	ds = size-s; ds = ROUND(ds,vd->incr);
			addr = (*vm->disc->memoryf)(vm, seg->addr, seg->extent,
						    seg->extent+ds, vm->disc);
			if(addr == seg->addr)
			{	s += ds;
				seg->size += ds;
				seg->extent += ds;
				seg->baddr += ds;
				SIZE(BLOCK(seg->baddr)) = BUSY;
			}
			else	goto do_alloc;
		}
		else
		{ do_alloc:
			if(!(type&(VM_RSMOVE|VM_RSCOPY)) )
				data = NIL(Void_t*);
			else
			{	tp = vd->free;
				if(!(addr = KPVALLOC(vm,size,lastalloc)) )
				{	vd->free = tp;
					data = NIL(Void_t*);
				}
				else
				{	if(type&VM_RSCOPY)
					{	ds = oldsize < size ? oldsize : size;
						memcpy(addr, data, ds);
					}

					if(s >= 0 && seg != vd->seg)
					{	tp = (Block_t*)data;
						SEG(tp) = seg;
						SIZE(tp) = s - sizeof(Head_t);
						seg->free = tp;
					}

					/* new block and size */
					data = addr;
					seg = vd->seg;
					s = (Vmuchar_t*)BLOCK(seg->baddr) -
					    (Vmuchar_t*)data;
					seg->free = NIL(Block_t*);
				}
			}
		}
	}

	if(data)
	{	if(s >= (ssize_t)(size+sizeof(Head_t)) )
		{	tp = (Block_t*)((Vmuchar_t*)data + size);
			SEG(tp) = seg;
			SIZE(tp) = (s - size) - sizeof(Head_t);
			seg->free = tp;
		}

		vd->free = seg->last = (Block_t*)data;

		if(!local && (vd->mode&VM_TRACE) && _Vmtrace)
			(*_Vmtrace)(vm,(Vmuchar_t*)orgdata,(Vmuchar_t*)data,orgsize,0);
	}

	CLRLOCK(vd, local);
	ANNOUNCE(local, vm, VM_RESIZE, data, vm->disc);

done:	if(data && (type&VM_RSZERO) && size > oldsize)
		memset((Void_t*)((Vmuchar_t*)data + oldsize), 0, size-oldsize);

	return data;
}


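/*	Return the offset of addr into the last allocated block,
**	or -1 if addr does not lie inside it.
*/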
#if __STD_C
static long lastaddr(Vmalloc_t* vm, Void_t* addr)
#else
static long lastaddr(vm, addr)
Vmalloc_t*	vm;
Void_t*		addr;
#endif
{
	reg Vmdata_t*	vd = vm->data;

	if(!(vd->mode&VM_TRUST) && ISLOCK(vd,0))
		return -1L;
	if(!vd->free || addr < (Void_t*)vd->free || addr >= (Void_t*)vd->seg->baddr)
		return -1L;
	else	return (Vmuchar_t*)addr - (Vmuchar_t*)vd->free;
}

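/*	Return the usable size of the last allocated block,
**	or -1 if addr is not its start address.
*/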
#if __STD_C
static long lastsize(Vmalloc_t* vm, Void_t* addr)
#else
static long lastsize(vm, addr)
Vmalloc_t*	vm;
Void_t*		addr;
#endif
{
	reg Vmdata_t*	vd = vm->data;

	if(!(vd->mode&VM_TRUST) && ISLOCK(vd,0))
		return -1L;
	if(!vd->free || addr != (Void_t*)vd->free )
		return -1L;
	else if(vd->seg->free)
		return (Vmuchar_t*)vd->seg->free - (Vmuchar_t*)addr;
	else	return (Vmuchar_t*)vd->seg->baddr - (Vmuchar_t*)addr - sizeof(Head_t);
}

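/*	Truncate segments to return their trailing free space
**	to the discipline memory function.
*/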
#if __STD_C
static int lastcompact(Vmalloc_t* vm)
#else
static int lastcompact(vm)
Vmalloc_t*	vm;
#endif
{
	reg Block_t*	fp;
	reg Seg_t	*seg, *next;
	reg size_t	s;
	reg Vmdata_t*	vd = vm->data;

	if(!(vd->mode&VM_TRUST))
	{	if(ISLOCK(vd,0))
			return -1;
		SETLOCK(vd,0);
	}

	for(seg = vd->seg; seg; seg = next)
	{	next = seg->next;

		if(!(fp = seg->free))
			continue;

		seg->free = NIL(Block_t*);
		if(seg->size == (s = SIZE(fp)&~BITS))
			s = seg->extent;
		else	s += sizeof(Head_t);

		if((*_Vmtruncate)(vm,seg,s,1) == s)
			seg->free = fp;
	}

	if((vd->mode&VM_TRACE) && _Vmtrace)
		(*_Vmtrace)(vm,(Vmuchar_t*)0,(Vmuchar_t*)0,0,0);

	CLRLOCK(vd,0);
	return 0;
}

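/*	Allocate a block of the given size aligned to a multiple of align,
**	by over-allocating size+align bytes and freeing the unused tail.
*/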
#if __STD_C
static Void_t* lastalign(Vmalloc_t* vm, size_t size, size_t align)
#else
static Void_t* lastalign(vm, size, align)
Vmalloc_t*	vm;
size_t		size;
size_t		align;
#endif
{
	reg Vmuchar_t*	data;
	reg Seg_t*	seg;
	reg Block_t*	next;
	reg int		local;
	reg size_t	s, orgsize = 0, orgalign = 0;
	reg Vmdata_t*	vd = vm->data;

	if(size <= 0 || align <= 0)
		return NIL(Void_t*);

	if(!(local = vd->mode&VM_TRUST) )
	{	GETLOCAL(vd,local);
		if(ISLOCK(vd,local) )
			return NIL(Void_t*);
		SETLOCK(vd,local);
		orgsize = size;
		orgalign = align;
	}

	size = size <= TINYSIZE ? TINYSIZE : ROUND(size,ALIGN);
	align = MULTIPLE(align,ALIGN);

	s = size + align;
	if(!(data = (Vmuchar_t*)KPVALLOC(vm,s,lastalloc)) )
		goto done;

	/* find the segment containing this block */
	for(seg = vd->seg; seg; seg = seg->next)
		if(seg->last == (Block_t*)data)
			break;
	/**/ASSERT(seg);

	/* get a suitably aligned address */
	if((s = (size_t)(VLONG(data)%align)) != 0)
		data += align-s; /**/ASSERT((VLONG(data)%align) == 0);

	/* free the unused tail */
	next = (Block_t*)(data+size);
	if((s = (seg->baddr - (Vmuchar_t*)next)) >= sizeof(Block_t))
	{	SEG(next) = seg;
		SIZE(next) = s - sizeof(Head_t);
		seg->free = next;
	}

	vd->free = seg->last = (Block_t*)data;

	if(!local && !(vd->mode&VM_TRUST) && _Vmtrace && (vd->mode&VM_TRACE) )
		(*_Vmtrace)(vm,NIL(Vmuchar_t*),data,orgsize,orgalign);

done:
	CLRLOCK(vd,local);
	ANNOUNCE(local, vm, VM_ALLOC, (Void_t*)data, vm->disc);

	return (Void_t*)data;
}

/* Public method for free-1 allocation: only the last block may be freed or resized */
static Vmethod_t _Vmlast =
{
	lastalloc,
	lastresize,
	lastfree,
	lastaddr,
	lastsize,
	lastcompact,
	lastalign,
	VM_MTLAST
};

__DEFINE__(Vmethod_t*,Vmlast,&_Vmlast);

#ifdef NoF
NoF(vmlast)
#endif

#endif