xref: /illumos-gate/usr/src/cmd/sgs/rtld/common/tsort.c (revision 032624d56c174c5c55126582b32e314a6af15522)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License, Version 1.0 only
6  * (the "License").  You may not use this file except in compliance
7  * with the License.
8  *
9  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
10  * or http://www.opensolaris.org/os/licensing.
11  * See the License for the specific language governing permissions
12  * and limitations under the License.
13  *
14  * When distributing Covered Code, include this CDDL HEADER in each
15  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
16  * If applicable, add the following below this CDDL HEADER, with the
17  * fields enclosed by brackets "[]" replaced with your own identifying
18  * information: Portions Copyright [yyyy] [name of copyright owner]
19  *
20  * CDDL HEADER END
21  */
22 /*
23  *	Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
24  *	Use is subject to license terms.
25  */
26 #pragma ident	"%Z%%M%	%I%	%E% SMI"
27 
28 /*
29  * Utilities to handle shared object dependency graph.
30  *
31  * The algorithms used in this file are taken from the following book:
32  *	Algorithms in C
33  *		Robert Sedgewick
34  *		Addison-Wesley Publishing company
35  *		ISBN 0-201-51425-7
36  * 	From the following chapters:
37  *		Chapter 29 Elementary Graph Algorithms
38  *		Chapter 32 Directed Graph
39  */
40 #include	"_synonyms.h"
41 
42 #include	<sys/types.h>
43 #include	<stdarg.h>
44 #include	<stdio.h>
45 #include	<dlfcn.h>
46 #include	<signal.h>
47 #include	<locale.h>
48 #include	<string.h>
49 #include	<libintl.h>
50 #include	"_rtld.h"
51 #include	"msg.h"
52 #include	"debug.h"
53 
/*
 * Structure for maintaining sorting state across one tsort() pass over a
 * link-map dependency graph.  A single instance is stack-allocated by
 * tsort() and threaded through the visit routines below.
 */
typedef struct {
	Rt_map		**s_lmpa;	/* link-map[] (returned to caller) */
	Rt_map		*s_lmp;		/* originating link-map */
	Rt_map		**s_stack;	/* strongly connected component stack */
	Alist 		*s_scc;		/* cyclic list (tracing only) */
	Alist		*s_queue;	/* depth queue for cyclic components */
	int		s_sndx;		/* present stack index */
	int 		s_lndx;		/* present link-map index */
	int		s_num;		/* number of objects to sort (+1) */
	int		s_initfirst;	/* no. of INITFIRST entries */
} Sort;
68 
69 #define	AL_CNT_SCC	10
70 
71 /*
72  * qsort(3c) comparison function.
73  */
74 static int
75 compare(const void * lmpp1, const void * lmpp2)
76 {
77 	Rt_map	*lmp1 = *((Rt_map **)lmpp1);
78 	Rt_map	*lmp2 = *((Rt_map **)lmpp2);
79 
80 	if (IDX(lmp1) > IDX(lmp2))
81 		return (-1);
82 	if (IDX(lmp1) < IDX(lmp2))
83 		return (1);
84 	return (0);
85 }
86 
87 /*
88  * This routine is called when a cyclic dependency is detected between strongly
89  * connected components.  The nodes within the cycle are reverse breadth-first
90  * sorted.
91  */
92 static int
93 sort_scc(Sort * sort, int fndx, int flag)
94 {
95 	static const char	*tfmt = 0, *ffmt;
96 	static int		cnt = 1;
97 	int			ndx;
98 	Rt_map			*lmp;
99 	Word			lmflags = LIST(sort->s_lmp)->lm_flags;
100 	Word			init, unref;
101 
102 	/*
103 	 * If this is the first cyclic dependency traverse the new objects that
104 	 * have been added to the link-map list and for each object establish
105 	 * a unique depth index.  We build this dynamically as we have no idea
106 	 * of the number of objects that will be inspected (logic matches that
107 	 * used by dlsym() to traverse lazy dependencies).
108 	 */
109 	if (sort->s_queue == 0) {
110 		Aliste	off;
111 		Rt_map	*lmp, **lmpp;
112 
113 		lmp = sort->s_lmp;
114 		ndx = 1;
115 
116 		if (alist_append(&(sort->s_queue), &lmp, sizeof (Rt_map *),
117 		    sort->s_num) == 0)
118 			return (0);
119 
120 		IDX(lmp) = ndx++;
121 
122 		for (ALIST_TRAVERSE(sort->s_queue, off, lmpp)) {
123 			Bnd_desc	**bdpp;
124 			Aliste		off;
125 
126 			for (ALIST_TRAVERSE(DEPENDS(*lmpp), off, bdpp)) {
127 				Rt_map	*lmp = (*bdpp)->b_depend;
128 
129 				if (IDX(lmp))
130 					continue;
131 
132 				/*
133 				 * If we're .init processing and this depend-
134 				 * encies .init has been called, skip it.
135 				 */
136 				if ((flag & RT_SORT_REV) &&
137 				    (FLAGS(lmp) & FLG_RT_INITCALL))
138 					continue;
139 
140 				if (alist_append(&(sort->s_queue), &lmp,
141 				    sizeof (Rt_map *), sort->s_num) == 0)
142 					return (0);
143 
144 				IDX(lmp) = ndx++;
145 			}
146 		}
147 	}
148 
149 	/*
150 	 * Sort the cyclics.
151 	 */
152 	qsort(&(sort->s_lmpa[fndx]), sort->s_lndx - fndx, sizeof (Rt_map *),
153 	    compare);
154 
155 	/*
156 	 * Under ldd -i, or debugging, print this cycle.  Under ldd -i/-U assign
157 	 * each object a group identifier so that cyclic dependent callers can
158 	 * be better traced (see trace_sort()), or analyzed for non-use.
159 	 */
160 	if (((init = (lmflags & LML_FLG_TRC_INIT)) == 0) &&
161 	    ((unref = (lmflags & LML_FLG_TRC_UNREF)) == 0) && (dbg_mask == 0))
162 		return (1);
163 
164 	if (init) {
165 		if (tfmt == 0) {
166 			tfmt = MSG_INTL(MSG_LDD_INIT_FMT_01);
167 			ffmt = MSG_ORIG(MSG_LDD_INIT_FMT_FILE);
168 		}
169 		(void) printf(tfmt, cnt);
170 	}
171 	DBG_CALL(Dbg_util_scc_title(flag & RT_SORT_REV));
172 
173 	/*
174 	 * Identify this cyclic group, and under ldd -i print the cycle in the
175 	 * order its components will be run.
176 	 */
177 	if (flag & RT_SORT_REV) {
178 		for (ndx = fndx; ndx < sort->s_lndx; ndx++) {
179 			lmp = sort->s_lmpa[ndx];
180 			CYCGROUP(lmp) = cnt;
181 
182 			if (init)
183 				(void) printf(ffmt, NAME(lmp));
184 			DBG_CALL(Dbg_util_scc_entry(ndx, NAME(lmp)));
185 		}
186 		cnt++;
187 
188 	} else if (dbg_mask) {
189 		for (ndx = sort->s_lndx - 1; ndx >= fndx; ndx--) {
190 			lmp = sort->s_lmpa[ndx];
191 			DBG_CALL(Dbg_util_scc_entry(ndx, NAME(lmp)));
192 		}
193 	}
194 
195 	/*
196 	 * If we're looking for unused dependencies determine if any of these
197 	 * cyclic components are referenced from outside of the cycle.
198 	 */
199 	if (unref || dbg_mask) {
200 		Bnd_desc **	bdpp;
201 
202 		for (ndx = fndx; ndx < sort->s_lndx; ndx++) {
203 			Aliste	off;
204 
205 			lmp = sort->s_lmpa[ndx];
206 
207 			/*
208 			 * If this object has a handle then it can be in use by
209 			 * anyone.
210 			 */
211 			if (HANDLES(lmp))
212 				return (1);
213 
214 			/*
215 			 * Traverse this objects callers looking for outside
216 			 * references.
217 			 */
218 			for (ALIST_TRAVERSE(CALLERS(lmp), off, bdpp)) {
219 				Bnd_desc	*bdp = *bdpp;
220 				Rt_map		*clmp = bdp->b_caller;
221 
222 				if ((bdp->b_flags & BND_REFER) == 0)
223 					continue;
224 
225 				if (CYCGROUP(lmp) != CYCGROUP(clmp))
226 					return (1);
227 			}
228 		}
229 
230 		/*
231 		 * If we're here then none of the cyclic dependents have been
232 		 * referenced from outside of the cycle, mark them as unused.
233 		 */
234 		for (ndx = fndx; ndx < sort->s_lndx; ndx++) {
235 			lmp = sort->s_lmpa[ndx];
236 			FLAGS1(lmp) &= ~FL1_RT_USED;
237 		}
238 	}
239 	return (1);
240 }
241 
/*
 * Take elements off of the stack and move them to the link-map array. Typically
 * this routine just pops one strongly connected component (individual link-map)
 * at a time.  When a cyclic dependency has been detected the stack will contain
 * more than just the present object to process, and will trigger the later call
 * to sort_scc() to sort these elements.
 *
 * Returns 1 on success, 0 on allocation failure.
 */
static int
visit(Lm_list *lml, Rt_map * lmp, Sort * sort, int flag)
{
	Alist		*alpp = 0;	/* SCC members, collected only if tracing */
	int		num = sort->s_lndx;	/* array index before popping */
	Word		tracing = lml->lm_flags & LML_FLG_TRC_ENABLE;
	Rt_map		*tlmp;

	/*
	 * Pop the stack down to, and including, the present object (lmp),
	 * appending each popped object to the sorted link-map array.
	 */
	do {
		tlmp = sort->s_stack[--(sort->s_sndx)];

		/*
		 * SORTVAL is set beyond any valid stack index, marking the
		 * object as fully processed (see _dep_visit()).
		 */
		SORTVAL(tlmp) = sort->s_num;
		DBG_CALL(Dbg_util_collect(NAME(tlmp), sort->s_lndx, flag));
		sort->s_lmpa[(sort->s_lndx)++] = tlmp;

		if (flag & RT_SORT_REV) {
			/*
			 * Indicate the object has had its .init collected.
			 * Note, that regardless of the object having a .init
			 * the object is added to the tsort list, as it's from
			 * this list that any post-init flags are established.
			 */
			FLAGS(tlmp) |= FLG_RT_INITCLCT;
			lml->lm_init--;
		} else {
			/*
			 * Indicate the object has had its .fini collected.
			 * Note, that regardless of the object having a .fini,
			 * the object is added to the tsort list, as it's from
			 * this list that any audit_objclose() activity is
			 * triggered.
			 */
			FLAGS(tlmp) |= FLG_RT_FINICLCT;
		}

		/*
		 * If tracing, save the strongly connected component.
		 */
		if (tracing && (alist_append(&alpp, &tlmp, sizeof (Rt_map *),
		    AL_CNT_SCC) == 0))
			return (0);
	} while (tlmp != lmp);

	/*
	 * Determine if there are cyclic dependencies to process.  If so, sort
	 * the components, and retain them for tracing output.  More than one
	 * element was popped exactly when a cycle exists.
	 */
	if (sort->s_lndx > (num + 1)) {
		if (sort_scc(sort, num, flag) == 0)
			return (0);

		if (tracing && (alist_append(&(sort->s_scc), &alpp,
		    sizeof (Alist *), AL_CNT_SCC) == 0))
			return (0);
	} else if (alpp)
		free(alpp);

	return (1);
}
307 
308 static int
309 dep_visit(Rt_map *, uint_t, Rt_map *, Lm_list *, Sort *, int);
310 
/*
 * Inspect a single dependency edge, from caller clmp to dependency dlmp, on
 * behalf of dep_visit()'s depth first traversal.  Returns the smallest
 * SORTVAL encountered along the edge (a value smaller than the caller's own
 * indicates a cycle back to an earlier stack entry), or -1 on error.
 */
static int
_dep_visit(int min, Rt_map *clmp, Rt_map *dlmp, uint_t bflags, Lm_list *lml,
    Sort *sort, int flag)
{
	int	_min;

	/*
	 * Only collect objects that belong to the callers link-map.  Catches
	 * cross dependencies (filtering) to ld.so.1.
	 */
	if (LIST(dlmp) != lml)
		return (min);

	/*
	 * Determine if this object hasn't been inspected.  SORTVAL is
	 * presumably -1 for objects not yet visited (initialized elsewhere -
	 * TODO confirm); once on the stack it holds the stack index, and once
	 * collected it holds sort->s_num (see visit()).
	 */
	if ((_min = SORTVAL(dlmp)) == -1) {
		if (flag & RT_SORT_REV) {
			/*
			 * For .init processing, only collect objects that have
			 * been relocated and haven't already been collected.
			 */
			if ((FLAGS(dlmp) & (FLG_RT_RELOCED |
			    FLG_RT_INITCLCT)) != FLG_RT_RELOCED)
				return (min);

			/*
			 * If this object contains no .init, there's no need to
			 * establish a dependency.
			 */
			if ((INIT(dlmp) == 0) && (INITARRAY(dlmp) == 0))
				return (min);
		} else {
			/*
			 * For .fini processing only collect objects that have
			 * had their .init collected, and haven't already been
			 * .fini collected.
			 */
			if ((FLAGS(dlmp) & (FLG_RT_INITCLCT |
			    FLG_RT_FINICLCT)) != FLG_RT_INITCLCT)
				return (min);

			/*
			 * If we're deleting a subset of objects, only collect
			 * those marked for deletion.
			 */
			if ((flag & RT_SORT_DELETE) &&
			    ((FLAGS(dlmp) & FLG_RT_DELETE) == 0))
				return (min);

			/*
			 * If this object contains no .fini, there's no need to
			 * establish a dependency.
			 */
			if ((FINI(dlmp) == 0) && (FINIARRAY(dlmp) == 0))
				return (min);
		}

		/*
		 * Inspect this new dependency.
		 */
		if ((_min = dep_visit(clmp, bflags, dlmp, lml,
		    sort, flag)) == -1)
			return (-1);
	}

	/*
	 * Keep track of the smallest SORTVAL that has been encountered.  If
	 * this value is smaller than the present object, then the dependency
	 * edge has cycled back to objects that have been processed earlier
	 * along this dependency edge.
	 */
	if (_min < min) {
		DBG_CALL(Dbg_util_edge_out(NAME(clmp), SORTVAL(clmp),
		    NAME(sort->s_stack[_min])));
		return (_min);
	} else
		return (min);
}
390 
/*
 * Visit the dependencies of each object.  Performs a depth first strongly
 * connected component search (see the file header for the algorithm
 * reference): lmp is pushed on the sorting stack, its explicit, implicit and
 * filtee dependencies are traversed via _dep_visit(), and any component
 * rooted at lmp is collected via visit().  Returns the minimum SORTVAL
 * reached, or -1 on error.
 */
static int
dep_visit(Rt_map *clmp, uint_t cbflags, Rt_map *lmp, Lm_list *lml, Sort *sort,
    int flag)
{
	int 		min;
	Aliste		off;
	Bnd_desc	**bdpp;
	Dyninfo		*dip;

	/* Record this object's stack position, and push it. */
	min = SORTVAL(lmp) = sort->s_sndx;
	sort->s_stack[(sort->s_sndx)++] = lmp;

	if (FLAGS(lmp) & FLG_RT_INITFRST)
		sort->s_initfirst++;

	DBG_CALL(Dbg_util_edge_in(clmp, cbflags, lmp, min, flag));

	/*
	 * Traverse both explicit and implicit dependencies.
	 */
	for (ALIST_TRAVERSE(DEPENDS(lmp), off, bdpp)) {
		if ((min = _dep_visit(min, lmp, (*bdpp)->b_depend,
		    (*bdpp)->b_flags, lml, sort, flag)) == -1)
			return (-1);
	}

	/*
	 * Traverse any filtee dependencies.
	 */
	if (((dip = DYNINFO(lmp)) != 0) && (FLAGS1(lmp) & MSK_RT_FILTER)) {
		uint_t	cnt, max = DYNINFOCNT(lmp);

		for (cnt = 0; cnt < max; cnt++, dip++) {
			Pnode	*pnp = (Pnode *)dip->di_info;

			if ((pnp == 0) ||
			    ((dip->di_flags & MSK_DI_FILTER) == 0))
				continue;

			for (; pnp; pnp = pnp->p_next) {
				/*
				 * NOTE(review): this initializer is dead -
				 * ghp is unconditionally reassigned from
				 * pnp->p_info below before any use.
				 */
				Grp_hdl		*ghp = (Grp_hdl *)dip->di_info;
				Grp_desc	*gdp;

				if ((pnp->p_len == 0) ||
				    ((ghp = (Grp_hdl *)pnp->p_info) == 0))
					continue;

				for (ALIST_TRAVERSE(ghp->gh_depends, off,
				    gdp)) {

					/* Skip the self edge. */
					if (gdp->gd_depend == lmp)
						continue;
					if ((min = _dep_visit(min, lmp,
					    gdp->gd_depend, BND_FILTER, lml,
					    sort, flag)) == -1)
						return (-1);
				}
			}
		}
	}

	/*
	 * Having returned to where the minimum SORTVAL is equivalent to the
	 * object that has just been processed, collect any dependencies that
	 * are available on the sorting stack.
	 */
	if (min == SORTVAL(lmp)) {
		if (visit(lml, lmp, sort, flag) == 0)
			return (-1);
	}
	return (min);
}
466 
467 
468 #ifndef	LD_BREADTH_DISABLED
/*
 * Reverse LD_BREADTH search (used to fire .init's the old fashioned way).
 */
472 static void
473 rb_visit(Rt_map * lmp, Sort * sort)
474 {
475 	Rt_map *	nlmp;
476 
477 	if ((nlmp = (Rt_map *)NEXT(lmp)) != 0)
478 		rb_visit(nlmp, sort);
479 
480 	/*
481 	 * Only collect objects that have been relocated and haven't already
482 	 * been collected.
483 	 */
484 	if ((FLAGS(lmp) & (FLG_RT_RELOCED | FLG_RT_INITCLCT)) ==
485 	    FLG_RT_RELOCED) {
486 		sort->s_lmpa[(sort->s_lndx)++] = lmp;
487 		FLAGS(lmp) |= FLG_RT_INITCLCT;
488 		LIST(lmp)->lm_init--;
489 	}
490 }
491 
/*
 * Forward LD_BREADTH search (used to fire .fini's the old fashioned way).
 */
495 static void
496 fb_visit(Rt_map * lmp, Sort * sort, int flag)
497 {
498 	while (lmp) {
499 		/*
500 		 * If we're called from dlclose() then we only collect those
501 		 * objects marked for deletion.
502 		 */
503 		if (!(flag & RT_SORT_DELETE) || (FLAGS(lmp) & FLG_RT_DELETE)) {
504 			/*
505 			 * Only collect objects that have had their .init
506 			 * collected, and haven't already been .fini collected.
507 			 */
508 			if ((FLAGS(lmp) &
509 			    (FLG_RT_INITCLCT | FLG_RT_FINICLCT)) ==
510 			    (FLG_RT_INITCLCT)) {
511 				sort->s_lmpa[(sort->s_lndx)++] = lmp;
512 				FLAGS(lmp) |= FLG_RT_FINICLCT;
513 			}
514 		}
515 		lmp = (Rt_map *)NEXT(lmp);
516 	}
517 }
518 #endif
519 
520 /*
521  * Find corresponding strongly connected component structure.
522  */
523 static Alist *
524 trace_find_scc(Sort * sort, Rt_map * lmp)
525 {
526 	Alist		**alpp;
527 	Aliste		off1;
528 
529 	for (ALIST_TRAVERSE(sort->s_scc, off1, alpp)) {
530 		Rt_map	**lmpp;
531 		Aliste	off2;
532 
533 		for (ALIST_TRAVERSE(*alpp, off2, lmpp)) {
534 			if (lmp == *lmpp)
535 				return (*alpp);
536 		}
537 	}
538 	return (NULL);
539 }
540 
/*
 * Print out the .init dependency information (ldd).  For each sorted object
 * whose .init remains to be called, report either a simple entry, or - when
 * the object participates in a cycle - its cyclic group and the callers
 * within that cycle.
 */
static void
trace_sort(Sort * sort)
{
	int 		ndx = 0;
	Alist		*alp;
	Rt_map		*lmp1;

	(void) printf(MSG_ORIG(MSG_STR_NL));

	/* s_lmpa is NULL-terminated (calloc'd one slot larger in tsort()). */
	while ((lmp1 = sort->s_lmpa[ndx++]) != NULL) {
		/* Format strings are fetched once and cached across calls. */
		static const char	*ffmt, *cfmt = 0, *sfmt = 0;
		Bnd_desc **		bdpp;
		Aliste			off1;

		/* Skip objects with no .init, or whose .init already ran. */
		if ((INIT(lmp1) == 0) || (FLAGS(lmp1) & FLG_RT_INITCALL))
			continue;

		if (sfmt == 0)
			sfmt = MSG_INTL(MSG_LDD_INIT_FMT_02);

#ifndef	LD_BREADTH_DISABLED
		if (rtld_flags & RT_FL_BREADTH) {
			(void) printf(sfmt, NAME(lmp1));
			continue;
		}
#endif
		/*
		 * If the only component on the strongly connected list is
		 * this link-map, then there are no dependencies.
		 */
		if ((alp = trace_find_scc(sort, lmp1)) == NULL) {
			(void) printf(sfmt, NAME(lmp1));
			continue;
		}

		/*
		 * Establish message formats for cyclic dependencies.
		 */
		if (cfmt == 0) {
			cfmt = MSG_INTL(MSG_LDD_INIT_FMT_03);
			ffmt = MSG_ORIG(MSG_LDD_INIT_FMT_FILE);
		}

		(void) printf(cfmt, NAME(lmp1), CYCGROUP(lmp1));

		/* Print each caller of lmp1 that is itself inside the cycle. */
		for (ALIST_TRAVERSE(CALLERS(lmp1), off1, bdpp)) {
			Rt_map	**lmpp3, *lmp2 = (*bdpp)->b_caller;
			Aliste	off2;

			for (ALIST_TRAVERSE(alp, off2, lmpp3)) {
				if (lmp2 != *lmpp3)
					continue;

				(void) printf(ffmt, NAME(*lmpp3));
			}
		}
	}
}
602 
603 /*
604  * A reverse ordered list (for .init's) contains INITFIRST elements.  Move each
605  * of these elements to the front of the list.
606  */
607 static void
608 r_initfirst(Sort * sort, int end)
609 {
610 	Rt_map *	tlmp;
611 	int		bgn, ifst, lifst = 0;
612 
613 	for (bgn = 0; bgn < sort->s_initfirst; bgn++) {
614 		for (ifst = lifst; ifst <= end; ifst++) {
615 			tlmp = sort->s_lmpa[ifst];
616 
617 			if (!(FLAGS(tlmp) & FLG_RT_INITFRST))
618 				continue;
619 
620 			/*
621 			 * If the INITFIRST element is already at the front of
622 			 * the list leave it there.
623 			 */
624 			if (ifst == bgn) {
625 				lifst = ifst + 1;
626 				break;
627 			}
628 
629 			/*
630 			 * Move the elements from the front of the list up to
631 			 * the INITFIRST element, back one position.
632 			 */
633 			(void) memmove(&sort->s_lmpa[bgn + 1],
634 			    &sort->s_lmpa[bgn],
635 			    ((ifst - bgn) * sizeof (Rt_map *)));
636 
637 			/*
638 			 * Insert INITFIRST element at the front of the list.
639 			 */
640 			sort->s_lmpa[bgn] = tlmp;
641 			lifst = ifst + 1;
642 			break;
643 		}
644 	}
645 }
646 
647 /*
648  * A forward ordered list (for .fini's) contains INITFIRST elements.  Move each
649  * of these elements to the front of the list.
650  */
651 static void
652 f_initfirst(Sort * sort, int end)
653 {
654 	Rt_map *	tlmp;
655 	int		bgn, ifst, lifst = 0;
656 
657 	for (bgn = 0; bgn < sort->s_initfirst; bgn++) {
658 		for (ifst = lifst; ifst <= end; ifst++) {
659 			tlmp = sort->s_lmpa[ifst];
660 
661 			if (!(FLAGS(tlmp) & FLG_RT_INITFRST))
662 				continue;
663 
664 			/*
665 			 * If the INITFIRST element is already at the end of
666 			 * the list leave it there.
667 			 */
668 			if (ifst == end)
669 				break;
670 
671 			/*
672 			 * Move the elements from after the INITFIRST element
673 			 * up to the back of the list, up one position.
674 			 */
675 			(void) memmove(&sort->s_lmpa[ifst],
676 			    &sort->s_lmpa[ifst + 1],
677 			    ((end - ifst) * sizeof (Rt_map *)));
678 
679 			/*
680 			 * Insert INITFIRST element at the back of the list.
681 			 */
682 			sort->s_lmpa[end--] = tlmp;
683 			lifst = ifst;
684 			break;
685 		}
686 	}
687 }
688 
/*
 * Sort the dependency graph rooted at lmp, returning an array of link-maps
 * in the order their .init (RT_SORT_REV) or .fini (RT_SORT_FWD) sections
 * should be fired.  num is the number of candidate objects; flag carries the
 * RT_SORT_* direction/mode bits.  Returns the NULL-terminated array (caller
 * frees), 0 when there is nothing to do, or S_ERROR on allocation failure.
 */
Rt_map **
tsort(Rt_map *lmp, int num, int flag)
{
	Rt_map *	_lmp;
	Lm_list *	lml = LIST(lmp);
	Word		init = lml->lm_flags & LML_FLG_TRC_INIT;
	Sort		sort = { 0 };

	if (num == 0)
		return (0);

	/*
	 * Prior to tsorting any .init sections, insure that the `environ'
	 * symbol is initialized for this link-map list.
	 */
	if ((flag & RT_SORT_REV) && ((lml->lm_flags &
	    (LML_FLG_TRC_ENABLE | LML_FLG_ENVIRON)) == 0))
		set_environ(lml);

	/*
	 * Allocate memory for link-map list array.  Calloc the array to insure
	 * all elements are zero, we might find that no objects need processing.
	 * The extra element guarantees NULL termination (see trace_sort()).
	 */
	sort.s_lmp = lmp;
	sort.s_num = num + 1;
	if ((sort.s_lmpa = calloc(sort.s_num, sizeof (Rt_map *))) == NULL)
		return ((Rt_map **)S_ERROR);

#ifndef	LD_BREADTH_DISABLED
	/*
	 * A breadth first search is easy, simply add each object to the
	 * link-map array.
	 */
	if (rtld_flags & RT_FL_BREADTH) {
		if (flag & RT_SORT_REV)
			rb_visit(lmp, &sort);
		else
			fb_visit(lmp, &sort, flag);

		/*
		 * If tracing .init sections (only meaningful for RT_SORT_REV)
		 * print out the sorted dependencies.
		 */
		if (init)
			trace_sort(&sort);

		return (sort.s_lmpa);
	}
#endif
	/*
	 * We need to topologically sort the dependencies.
	 * NOTE(review): sort.s_lmpa is not freed on this, or the later,
	 * S_ERROR returns - presumably acceptable as S_ERROR is treated as
	 * fatal by callers; confirm.
	 */
	if ((sort.s_stack = malloc(sort.s_num * sizeof (Rt_map *))) == NULL)
		return ((Rt_map **)S_ERROR);

	/*
	 * Determine where to start searching for tsort() candidates.  Any call
	 * to tsort() for .init processing is passed the link-map from which to
	 * start searching.  However, if new objects have dependencies on
	 * existing objects, or existing objects have been promoted (RTLD_LAZY
	 * to RTLD_NOW), then start searching at the head of the link-map list.
	 * These previously loaded objects will have been tagged for inclusion
	 * in this tsort() pass.  They still remain on an existing tsort() list,
	 * which must have been preempted for control to have arrived here.
	 * However, they will be ignored when encountered on any previous
	 * tsort() list if their .init has already been called.
	 */
	if (lml->lm_flags & LML_FLG_OBJREEVAL)
		_lmp = LIST(lmp)->lm_head;
	else
		_lmp = lmp;

	DBG_CALL(Dbg_file_bindings(_lmp, flag, lml->lm_flags));
	lml->lm_flags &=
	    ~(LML_FLG_OBJREEVAL | LML_FLG_OBJADDED | LML_FLG_OBJDELETED);

	for (; _lmp; _lmp = (Rt_map *)NEXT(_lmp)) {
		if (flag & RT_SORT_REV) {
			/*
			 * For .init processing, only collect objects that have
			 * been relocated and haven't already been collected.
			 */
			if ((FLAGS(_lmp) & (FLG_RT_RELOCED |
			    FLG_RT_INITCLCT)) != FLG_RT_RELOCED)
				continue;

			if (dep_visit(0, 0, _lmp, lml, &sort, flag) == -1)
				return ((Rt_map **)S_ERROR);

		} else if (!(flag & RT_SORT_DELETE) ||
		    (FLAGS(_lmp) & FLG_RT_DELETE)) {
			/*
			 * Only collect objects that have had their .init
			 * collected, and haven't already been .fini collected.
			 */
			if (!((FLAGS(_lmp) &
			    (FLG_RT_INITCLCT | FLG_RT_FINICLCT)) ==
			    (FLG_RT_INITCLCT)))
				continue;

			if (dep_visit(0, 0, _lmp, lml, &sort, flag) == -1)
				return ((Rt_map **)S_ERROR);
		}
	}

	/*
	 * The dependencies have been collected such that they are appropriate
	 * for an .init order, for .fini order reverse them.
	 */
	if (flag & RT_SORT_FWD) {
		int	bgn = 0, end = sort.s_lndx - 1;

		while (bgn < end) {
			Rt_map *	tlmp = sort.s_lmpa[end];

			sort.s_lmpa[end] = sort.s_lmpa[bgn];
			sort.s_lmpa[bgn] = tlmp;

			bgn++, end--;
		}
	}

	/*
	 * If INITFIRST objects have been collected then move them to the front
	 * or end of the list as appropriate.
	 */
	if (sort.s_initfirst) {
		if (flag & RT_SORT_REV)
			r_initfirst(&sort, sort.s_lndx - 1);
		else
			f_initfirst(&sort, sort.s_lndx - 1);
	}

	/*
	 * If tracing .init sections (only meaningful for RT_SORT_REV), print
	 * out the sorted dependencies.
	 */
	if (init)
		trace_sort(&sort);

	/*
	 * Clean any temporary structures prior to return.
	 */
	if (sort.s_stack)
		free(sort.s_stack);

	if (sort.s_queue) {
		Aliste	off;
		Rt_map	**lmpp;

		/*
		 * Traverse the link-maps collected on the sort queue and
		 * delete the depth index.  These link-maps may be traversed
		 * again to sort other components either for inits, and almost
		 * certainly for .finis.
		 */
		for (ALIST_TRAVERSE(sort.s_queue, off, lmpp))
			IDX(*lmpp) = 0;

		free(sort.s_queue);
	}

	if (sort.s_scc) {
		Aliste	off;
		Alist	**alpp;

		for (ALIST_TRAVERSE(sort.s_scc, off, alpp))
			free(*alpp);
		free(sort.s_scc);
	}

	/*
	 * The caller is responsible for freeing the sorted link-map list once
	 * the associated .init/.fini's have been fired.
	 */
	DBG_CALL(Dbg_util_nl());
	return (sort.s_lmpa);
}
870