xref: /illumos-gate/usr/src/cmd/sgs/rtld/common/tsort.c (revision f998c95e3b7029fe5f7542e115f7474ddb8024d7)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2008 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  */
26 #pragma ident	"%Z%%M%	%I%	%E% SMI"
27 
28 /*
29  * Utilities to handle shared object dependency graph.
30  *
31  * The algorithms used in this file are taken from the following book:
32  *	Algorithms in C
33  *		Robert Sedgewick
34  *		Addison-Wesley Publishing company
35  *		ISBN 0-201-51425-7
36  * 	From the following chapters:
37  *		Chapter 29 Elementary Graph Algorithms
38  *		Chapter 32 Directed Graph
39  */
40 #include	"_synonyms.h"
41 
42 #include	<sys/types.h>
43 #include	<stdarg.h>
44 #include	<stdio.h>
45 #include	<dlfcn.h>
46 #include	<signal.h>
47 #include	<locale.h>
48 #include	<string.h>
49 #include	<libintl.h>
50 #include	<debug.h>
51 #include	"_rtld.h"
52 #include	"msg.h"
53 
/*
 * Structure for maintaining sorting state.  One instance is live for the
 * duration of a single tsort() call; s_lmpa is the only member that outlives
 * the call (it is returned to, and freed by, the caller).
 */
typedef struct {
	Rt_map		**s_lmpa;	/* link-map[] (returned to caller) */
	Rt_map		*s_lmp;		/* originating link-map */
	Rt_map		**s_stack;	/* strongly connected component stack */
	APlist 		*s_scc;		/* cyclic list */
	APlist		*s_queue;	/* depth queue for cyclic components */
	int		s_sndx;		/* present stack index */
	int 		s_lndx;		/* present link-map index */
	int		s_num;		/* number of objects to sort */
	int		s_initfirst;	/* no. of INITFIRST entries */
} Sort;

/* Allocation unit for the per-cycle tracing lists (s_scc members). */
#define	AL_CNT_SCC	10
70 
71 /*
72  * qsort(3c) comparison function.
73  */
74 static int
75 compare(const void * lmpp1, const void * lmpp2)
76 {
77 	Rt_map	*lmp1 = *((Rt_map **)lmpp1);
78 	Rt_map	*lmp2 = *((Rt_map **)lmpp2);
79 
80 	if (IDX(lmp1) > IDX(lmp2))
81 		return (-1);
82 	if (IDX(lmp1) < IDX(lmp2))
83 		return (1);
84 	return (0);
85 }
86 
/*
 * This routine is called when a cyclic dependency is detected between strongly
 * connected components.  The nodes within the cycle are reverse breadth-first
 * sorted.
 *
 * fndx is the index within sort->s_lmpa at which this cycle's members begin
 * (the members extend to sort->s_lndx - 1); flag carries the RT_SORT_*
 * disposition of the overall tsort() pass.  Returns 1 on success, or 0 on
 * allocation failure.
 */
static int
sort_scc(Sort * sort, int fndx, int flag)
{
	/* ldd message formats are looked up once and cached across calls. */
	static const char	*tfmt = 0, *ffmt;
	static int		cnt = 1;	/* cyclic group identifier */
	int			ndx;
	Rt_map			*lmp;
	Lm_list			*lml = LIST(sort->s_lmp);
	Word			lmflags = lml->lm_flags;
	Word			init, unref;

	/*
	 * If this is the first cyclic dependency traverse the new objects that
	 * have been added to the link-map list and for each object establish
	 * a unique depth index.  We build this dynamically as we have no idea
	 * of the number of objects that will be inspected (logic matches that
	 * used by dlsym() to traverse lazy dependencies).
	 */
	if (sort->s_queue == NULL) {
		Aliste	idx1;
		Rt_map	*lmp, *lmp2;

		lmp = sort->s_lmp;
		ndx = 1;

		if (aplist_append(&sort->s_queue, lmp, sort->s_num) == NULL)
			return (0);

		IDX(lmp) = ndx++;

		/*
		 * Breadth-first walk: each object appended to s_queue is in
		 * turn traversed, appending any of its dependencies that
		 * haven't yet been assigned a depth index.
		 */
		for (APLIST_TRAVERSE(sort->s_queue, idx1, lmp2)) {
			Bnd_desc	*bdp;
			Aliste		idx2;

			for (APLIST_TRAVERSE(DEPENDS(lmp2), idx2, bdp)) {
				Rt_map	*lmp = bdp->b_depend;

				if (IDX(lmp))
					continue;

				/*
				 * If we're .init processing and this depend-
				 * encies .init has been called, skip it.
				 */
				if ((flag & RT_SORT_REV) &&
				    (FLAGS(lmp) & FLG_RT_INITCALL))
					continue;

				if (aplist_append(&sort->s_queue, lmp,
				    sort->s_num) == NULL)
					return (0);

				IDX(lmp) = ndx++;
			}
		}
	}

	/*
	 * Sort the cyclics.  compare() orders by decreasing depth index,
	 * yielding a reverse breadth-first ordering of the cycle members.
	 */
	qsort(&(sort->s_lmpa[fndx]), sort->s_lndx - fndx, sizeof (Rt_map *),
	    compare);

	/*
	 * Under ldd -i, or debugging, print this cycle.  Under ldd -i/-U assign
	 * each object a group identifier so that cyclic dependent callers can
	 * be better traced (see trace_sort()), or analyzed for non-use.
	 */
	if (((init = (lmflags & LML_FLG_TRC_INIT)) == 0) &&
	    ((unref = (lmflags & LML_FLG_TRC_UNREF)) == 0) &&
	    (DBG_ENABLED == 0))
		return (1);

	if (init) {
		if (tfmt == 0) {
			tfmt = MSG_INTL(MSG_LDD_INIT_FMT_01);
			ffmt = MSG_ORIG(MSG_LDD_INIT_FMT_FILE);
		}
		(void) printf(tfmt, cnt);
	}
	DBG_CALL(Dbg_util_scc_title(lml, (flag & RT_SORT_REV)));

	/*
	 * Identify this cyclic group, and under ldd -i print the cycle in the
	 * order its components will be run.
	 */
	if (flag & RT_SORT_REV) {
		for (ndx = fndx; ndx < sort->s_lndx; ndx++) {
			lmp = sort->s_lmpa[ndx];
			CYCGROUP(lmp) = cnt;

			if (init)
				(void) printf(ffmt, NAME(lmp));
			DBG_CALL(Dbg_util_scc_entry(lmp, ndx));
		}
		cnt++;

	} else if (DBG_ENABLED) {
		/* .fini processing runs the cycle in the opposite order. */
		for (ndx = sort->s_lndx - 1; ndx >= fndx; ndx--) {
			lmp = sort->s_lmpa[ndx];
			DBG_CALL(Dbg_util_scc_entry(lmp, ndx));
		}
	}

	/*
	 * If we're looking for unused dependencies determine if any of these
	 * cyclic components are referenced from outside of the cycle.
	 */
	if (unref || DBG_ENABLED) {
		for (ndx = fndx; ndx < sort->s_lndx; ndx++) {
			Bnd_desc	*bdp;
			Aliste		idx;

			lmp = sort->s_lmpa[ndx];

			/*
			 * If this object has a handle then it can be in use by
			 * anyone.
			 */
			if (HANDLES(lmp))
				return (1);

			/*
			 * Traverse this objects callers looking for outside
			 * references.
			 */
			for (APLIST_TRAVERSE(CALLERS(lmp), idx, bdp)) {
				Rt_map		*clmp = bdp->b_caller;

				if ((bdp->b_flags & BND_REFER) == 0)
					continue;

				if (CYCGROUP(lmp) != CYCGROUP(clmp))
					return (1);
			}
		}

		/*
		 * If we're here then none of the cyclic dependents have been
		 * referenced from outside of the cycle, mark them as unused.
		 */
		for (ndx = fndx; ndx < sort->s_lndx; ndx++) {
			lmp = sort->s_lmpa[ndx];
			FLAGS1(lmp) &= ~FL1_RT_USED;
		}
	}
	return (1);
}
240 
/*
 * Take elements off of the stack and move them to the link-map array. Typically
 * this routine just pops one strongly connected component (individual link-map)
 * at a time.  When a cyclic dependency has been detected the stack will contain
 * more than just the present object to process, and will trigger the later call
 * to sort_scc() to sort these elements.
 *
 * Returns 1 on success, or 0 on allocation failure.
 */
static int
visit(Lm_list *lml, Rt_map * lmp, Sort *sort, int flag)
{
	APlist		*alp = NULL;	/* component members (tracing only) */
	int		num = sort->s_lndx;	/* array index of first member */
	Word		tracing = lml->lm_flags & LML_FLG_TRC_ENABLE;
	Rt_map		*tlmp;

	/*
	 * Pop stack entries down to, and including, the originating object.
	 */
	do {
		tlmp = sort->s_stack[--(sort->s_sndx)];
		/*
		 * s_num exceeds any valid stack index, so once collected an
		 * object can no longer lower a caller's minimum in
		 * _dep_visit().
		 */
		SORTVAL(tlmp) = sort->s_num;
		DBG_CALL(Dbg_util_collect(tlmp, sort->s_lndx, flag));
		sort->s_lmpa[(sort->s_lndx)++] = tlmp;

		if (flag & RT_SORT_REV) {
			/*
			 * Indicate the object has had its .init collected.
			 * Note, that regardless of the object having a .init
			 * the object is added to the tsort list, as it's from
			 * this list that any post-init flags are established.
			 */
			FLAGS(tlmp) |= FLG_RT_INITCLCT;
			lml->lm_init--;
		} else {
			/*
			 * Indicate the object has had its .fini collected.
			 * Note, that regardless of the object having a .fini,
			 * the object is added to the tsort list, as it's from
			 * this list that any audit_objclose() activity is
			 * triggered.
			 */
			FLAGS(tlmp) |= FLG_RT_FINICLCT;
		}

		/*
		 * If tracing, save the strongly connected component.
		 */
		if (tracing && (aplist_append(&alp, tlmp,
		    AL_CNT_SCC) == 0))
			return (0);
	} while (tlmp != lmp);

	/*
	 * Determine if there are cyclic dependencies to process.  If so, sort
	 * the components, and retain them for tracing output.
	 */
	if (sort->s_lndx > (num + 1)) {
		if (sort_scc(sort, num, flag) == 0)
			return (0);

		if (tracing && (aplist_append(&sort->s_scc, alp,
		    AL_CNT_SCC) == 0))
			return (0);
	} else if (alp)
		free(alp);

	return (1);
}
306 
307 static int
308 dep_visit(Lm_list *, Rt_map *, uint_t, Rt_map *, Sort *, int);
309 
/*
 * Inspect a single dependency edge, clmp -> dlmp.  min is the smallest
 * SORTVAL the caller has encountered so far along its edges; the (possibly
 * lowered) minimum is returned, or -1 on failure.  bflags describes the
 * binding (BND_*) and is passed through for diagnostics.
 */
static int
_dep_visit(Lm_list *lml, int min, Rt_map *clmp, Rt_map *dlmp, uint_t bflags,
    Sort *sort, int flag)
{
	int	_min;

	/*
	 * Only collect objects that belong to the callers link-map.  Catches
	 * cross dependencies (filtering) to ld.so.1.
	 */
	if (LIST(dlmp) != lml)
		return (min);

	/*
	 * Determine if this object hasn't been inspected (a SORTVAL of -1
	 * flags an object not yet seen by this sort).
	 */
	if ((_min = SORTVAL(dlmp)) == -1) {
		if (flag & RT_SORT_REV) {
			/*
			 * For .init processing, only collect objects that have
			 * been relocated and haven't already been collected.
			 */
			if ((FLAGS(dlmp) & (FLG_RT_RELOCED |
			    FLG_RT_INITCLCT)) != FLG_RT_RELOCED)
				return (min);

			/*
			 * If this object contains no .init, there's no need to
			 * establish a dependency.
			 */
			if ((INIT(dlmp) == 0) && (INITARRAY(dlmp) == 0))
				return (min);
		} else {
			/*
			 * For .fini processing only collect objects that have
			 * had their .init collected, and haven't already been
			 * .fini collected.
			 */
			if ((FLAGS(dlmp) & (FLG_RT_INITCLCT |
			    FLG_RT_FINICLCT)) != FLG_RT_INITCLCT)
				return (min);

			/*
			 * If we're deleting a subset of objects, only collect
			 * those marked for deletion.
			 */
			if ((flag & RT_SORT_DELETE) &&
			    ((FLAGS(dlmp) & FLG_RT_DELETE) == 0))
				return (min);

			/*
			 * If this object contains no .fini, there's no need to
			 * establish a dependency.
			 */
			if ((FINI(dlmp) == 0) && (FINIARRAY(dlmp) == 0))
				return (min);
		}

		/*
		 * Inspect this new dependency (recursive depth-first search).
		 */
		if ((_min = dep_visit(lml, clmp, bflags, dlmp,
		    sort, flag)) == -1)
			return (-1);
	}

	/*
	 * Keep track of the smallest SORTVAL that has been encountered.  If
	 * this value is smaller than the present object, then the dependency
	 * edge has cycled back to objects that have been processed earlier
	 * along this dependency edge.
	 */
	if (_min < min) {
		DBG_CALL(Dbg_util_edge_out(clmp, sort->s_stack[_min]));
		return (_min);
	} else
		return (min);
}
388 
389 /*
390  * Visit the dependencies of each object.
391  */
392 static int
393 dep_visit(Lm_list *lml, Rt_map *clmp, uint_t cbflags, Rt_map *lmp, Sort *sort,
394     int flag)
395 {
396 	int 		min;
397 	Aliste		idx;
398 	Bnd_desc	*bdp;
399 	Dyninfo		*dip;
400 
401 	min = SORTVAL(lmp) = sort->s_sndx;
402 	sort->s_stack[(sort->s_sndx)++] = lmp;
403 
404 	if (FLAGS(lmp) & FLG_RT_INITFRST)
405 		sort->s_initfirst++;
406 
407 	DBG_CALL(Dbg_util_edge_in(lml, clmp, cbflags, lmp, min, flag));
408 
409 	/*
410 	 * Traverse both explicit and implicit dependencies.
411 	 */
412 	for (APLIST_TRAVERSE(DEPENDS(lmp), idx, bdp)) {
413 		if ((min = _dep_visit(lml, min, lmp, bdp->b_depend,
414 		    bdp->b_flags, sort, flag)) == -1)
415 			return (-1);
416 	}
417 
418 	/*
419 	 * Traverse any filtee dependencies.
420 	 */
421 	if (((dip = DYNINFO(lmp)) != 0) && (FLAGS1(lmp) & MSK_RT_FILTER)) {
422 		uint_t	cnt, max = DYNINFOCNT(lmp);
423 
424 		for (cnt = 0; cnt < max; cnt++, dip++) {
425 			Pnode	*pnp = (Pnode *)dip->di_info;
426 
427 			if ((pnp == 0) ||
428 			    ((dip->di_flags & MSK_DI_FILTER) == 0))
429 				continue;
430 
431 			for (; pnp; pnp = pnp->p_next) {
432 				Grp_hdl		*ghp = (Grp_hdl *)dip->di_info;
433 				Grp_desc	*gdp;
434 
435 				if ((pnp->p_len == 0) ||
436 				    ((ghp = (Grp_hdl *)pnp->p_info) == 0))
437 					continue;
438 
439 				for (ALIST_TRAVERSE(ghp->gh_depends, idx,
440 				    gdp)) {
441 
442 					if (gdp->gd_depend == lmp)
443 						continue;
444 					if ((min = _dep_visit(lml, min, lmp,
445 					    gdp->gd_depend, BND_FILTER,
446 					    sort, flag)) == -1)
447 						return (-1);
448 				}
449 			}
450 		}
451 	}
452 
453 	/*
454 	 * Having returned to where the minimum SORTVAL is equivalent to the
455 	 * object that has just been processed, collect any dependencies that
456 	 * are available on the sorting stack.
457 	 */
458 	if (min == SORTVAL(lmp)) {
459 		if (visit(lml, lmp, sort, flag) == 0)
460 			return (-1);
461 	}
462 	return (min);
463 }
464 
465 
466 #ifndef	LD_BREADTH_DISABLED
467 /*
468  * Reverse LD_BREATH search (used to fire .init's the old fashioned way).
469  */
470 static void
471 rb_visit(Rt_map * lmp, Sort * sort)
472 {
473 	Rt_map *	nlmp;
474 
475 	if ((nlmp = (Rt_map *)NEXT(lmp)) != 0)
476 		rb_visit(nlmp, sort);
477 
478 	/*
479 	 * Only collect objects that have been relocated and haven't already
480 	 * been collected.
481 	 */
482 	if ((FLAGS(lmp) & (FLG_RT_RELOCED | FLG_RT_INITCLCT)) ==
483 	    FLG_RT_RELOCED) {
484 		sort->s_lmpa[(sort->s_lndx)++] = lmp;
485 		FLAGS(lmp) |= FLG_RT_INITCLCT;
486 		LIST(lmp)->lm_init--;
487 	}
488 }
489 
490 /*
491  * Forward LD_BREATH search (used to fire .fini's the old fashioned way).
492  */
493 static void
494 fb_visit(Rt_map * lmp, Sort * sort, int flag)
495 {
496 	while (lmp) {
497 		/*
498 		 * If we're called from dlclose() then we only collect those
499 		 * objects marked for deletion.
500 		 */
501 		if (!(flag & RT_SORT_DELETE) || (FLAGS(lmp) & FLG_RT_DELETE)) {
502 			/*
503 			 * Only collect objects that have had their .init
504 			 * collected, and haven't already been .fini collected.
505 			 */
506 			if ((FLAGS(lmp) &
507 			    (FLG_RT_INITCLCT | FLG_RT_FINICLCT)) ==
508 			    (FLG_RT_INITCLCT)) {
509 				sort->s_lmpa[(sort->s_lndx)++] = lmp;
510 				FLAGS(lmp) |= FLG_RT_FINICLCT;
511 			}
512 		}
513 		lmp = (Rt_map *)NEXT(lmp);
514 	}
515 }
516 #endif
517 
518 /*
519  * Find corresponding strongly connected component structure.
520  */
521 static APlist *
522 trace_find_scc(Sort *sort, Rt_map *lmp)
523 {
524 	APlist		*alp;
525 	Aliste		idx1;
526 
527 	for (APLIST_TRAVERSE(sort->s_scc, idx1, alp)) {
528 		Rt_map	*lmp2;
529 		Aliste	idx2;
530 
531 		for (APLIST_TRAVERSE(alp, idx2, lmp2)) {
532 			if (lmp == lmp2)
533 				return (alp);
534 		}
535 	}
536 	return (NULL);
537 }
538 
/*
 * Print out the .init dependency information (ldd -i).  Walk the sorted
 * link-map array and, for each object with an uncalled .init, report either
 * the object alone or, when it is a member of a cycle, its cyclic callers.
 */
static void
trace_sort(Sort * sort)
{
	int 		ndx = 0;
	APlist		*alp;
	Rt_map		*lmp1;

	(void) printf(MSG_ORIG(MSG_STR_NL));

	/* s_lmpa was calloc'ed with one spare slot, so it is NULL terminated. */
	while ((lmp1 = sort->s_lmpa[ndx++]) != NULL) {
		/* Message formats are looked up once and cached across calls. */
		static const char	*ffmt, *cfmt = 0, *sfmt = 0;
		Bnd_desc		*bdp;
		Aliste			idx1;

		/* Skip objects with no .init, or whose .init has already run. */
		if ((INIT(lmp1) == 0) || (FLAGS(lmp1) & FLG_RT_INITCALL))
			continue;

		if (sfmt == 0)
			sfmt = MSG_INTL(MSG_LDD_INIT_FMT_02);

#ifndef	LD_BREADTH_DISABLED
		/* Breadth-first sorting records no cycle information. */
		if (rtld_flags & RT_FL_BREADTH) {
			(void) printf(sfmt, NAME(lmp1));
			continue;
		}
#endif
		/*
		 * If the only component on the strongly connected list is
		 * this link-map, then there are no dependencies.
		 */
		if ((alp = trace_find_scc(sort, lmp1)) == NULL) {
			(void) printf(sfmt, NAME(lmp1));
			continue;
		}

		/*
		 * Establish message formats for cyclic dependencies.
		 */
		if (cfmt == 0) {
			cfmt = MSG_INTL(MSG_LDD_INIT_FMT_03);
			ffmt = MSG_ORIG(MSG_LDD_INIT_FMT_FILE);
		}

		(void) printf(cfmt, NAME(lmp1), CYCGROUP(lmp1));

		/* Print each caller of lmp1 that lies within the same cycle. */
		for (APLIST_TRAVERSE(CALLERS(lmp1), idx1, bdp)) {
			Rt_map	*lmp3, *lmp2 = bdp->b_caller;
			Aliste	idx2;

			for (APLIST_TRAVERSE(alp, idx2, lmp3)) {
				if (lmp2 != lmp3)
					continue;

				(void) printf(ffmt, NAME(lmp3));
			}
		}
	}
}
600 
/*
 * A reverse ordered list (for .init's) contains INITFIRST elements.  Move each
 * of these elements to the front of the list.
 *
 * end is the index of the last valid element in sort->s_lmpa.
 */
static void
r_initfirst(Sort * sort, int end)
{
	Rt_map *	tlmp;
	int		bgn, ifst, lifst = 0;

	/*
	 * bgn is the front-of-list position to fill next; lifst remembers
	 * where the previous scan stopped so each INITFIRST element is found
	 * exactly once.
	 */
	for (bgn = 0; bgn < sort->s_initfirst; bgn++) {
		for (ifst = lifst; ifst <= end; ifst++) {
			tlmp = sort->s_lmpa[ifst];

			if (!(FLAGS(tlmp) & FLG_RT_INITFRST))
				continue;

			/*
			 * If the INITFIRST element is already at the front of
			 * the list leave it there.
			 */
			if (ifst == bgn) {
				lifst = ifst + 1;
				break;
			}

			/*
			 * Move the elements from the front of the list up to
			 * the INITFIRST element, back one position.
			 */
			(void) memmove(&sort->s_lmpa[bgn + 1],
			    &sort->s_lmpa[bgn],
			    ((ifst - bgn) * sizeof (Rt_map *)));

			/*
			 * Insert INITFIRST element at the front of the list.
			 */
			sort->s_lmpa[bgn] = tlmp;
			lifst = ifst + 1;
			break;
		}
	}
}
644 
/*
 * A forward ordered list (for .fini's) contains INITFIRST elements.  Move each
 * of these elements to the front of the list.
 *
 * end is the index of the last valid element in sort->s_lmpa; since .fini
 * order is the reverse of .init order, INITFIRST elements migrate to the
 * back here (they run last).
 */
static void
f_initfirst(Sort * sort, int end)
{
	Rt_map *	tlmp;
	int		bgn, ifst, lifst = 0;

	/* lifst remembers where the previous scan stopped (see r_initfirst). */
	for (bgn = 0; bgn < sort->s_initfirst; bgn++) {
		for (ifst = lifst; ifst <= end; ifst++) {
			tlmp = sort->s_lmpa[ifst];

			if (!(FLAGS(tlmp) & FLG_RT_INITFRST))
				continue;

			/*
			 * If the INITFIRST element is already at the end of
			 * the list leave it there.
			 */
			if (ifst == end)
				break;

			/*
			 * Move the elements from after the INITFIRST element
			 * up to the back of the list, up one position.
			 */
			(void) memmove(&sort->s_lmpa[ifst],
			    &sort->s_lmpa[ifst + 1],
			    ((end - ifst) * sizeof (Rt_map *)));

			/*
			 * Insert INITFIRST element at the back of the list.
			 */
			sort->s_lmpa[end--] = tlmp;
			lifst = ifst;
			break;
		}
	}
}
686 
687 /*
688  * Determine whether .init or .fini processing is required.
689  */
690 static int
691 initorfini(Lm_list *lml, Rt_map *lmp, int flag, Sort *sort)
692 {
693 	if (flag & RT_SORT_REV) {
694 		/*
695 		 * For .init processing, only collect objects that have been
696 		 * relocated and haven't already been collected.
697 		 */
698 		if ((FLAGS(lmp) & (FLG_RT_RELOCED | FLG_RT_INITCLCT)) !=
699 		    FLG_RT_RELOCED)
700 			return (0);
701 
702 		if (dep_visit(lml, 0, 0, lmp, sort, flag) == -1)
703 			return (1);
704 
705 	} else if (!(flag & RT_SORT_DELETE) || (FLAGS(lmp) & FLG_RT_DELETE)) {
706 		/*
707 		 * Only collect objects that have had their .init collected,
708 		 * and haven't already been .fini collected.
709 		 */
710 		if (!((FLAGS(lmp) & (FLG_RT_INITCLCT | FLG_RT_FINICLCT)) ==
711 		    (FLG_RT_INITCLCT)))
712 			return (0);
713 
714 		if (dep_visit(lml, 0, 0, lmp, sort, flag) == -1)
715 			return (1);
716 	}
717 	return (0);
718 }
719 
/*
 * Sort the dependency tree rooted at lmp for .init (RT_SORT_REV) or .fini
 * (RT_SORT_FWD) processing.  num is the number of candidate objects.
 *
 * Returns 0 when there is nothing to do, S_ERROR on allocation failure, and
 * otherwise an ordered array of link-map pointers (calloc'ed with one spare
 * slot, so NULL terminated).  The caller is responsible for freeing the
 * returned array once the associated .init/.fini's have been fired.
 */
Rt_map **
tsort(Rt_map *lmp, int num, int flag)
{
	Rt_map *	_lmp;
	Lm_list *	lml = LIST(lmp);
	Word		init = lml->lm_flags & LML_FLG_TRC_INIT;
	Sort		sort = { 0 };

	if (num == 0)
		return (0);

	/*
	 * Prior to tsorting any .init sections, insure that the `environ'
	 * symbol is initialized for this link-map list.
	 */
	if ((flag & RT_SORT_REV) && ((lml->lm_flags &
	    (LML_FLG_TRC_ENABLE | LML_FLG_ENVIRON)) == 0))
		set_environ(lml);

	/*
	 * Allocate memory for link-map list array.  Calloc the array to insure
	 * all elements are zero, we might find that no objects need processing.
	 */
	sort.s_lmp = lmp;
	sort.s_num = num + 1;
	if ((sort.s_lmpa = calloc(sort.s_num, sizeof (Rt_map *))) == NULL)
		return ((Rt_map **)S_ERROR);

#ifndef	LD_BREADTH_DISABLED
	/*
	 * A breadth first search is easy, simply add each object to the
	 * link-map array.
	 */
	if (rtld_flags & RT_FL_BREADTH) {
		if (flag & RT_SORT_REV)
			rb_visit(lmp, &sort);
		else
			fb_visit(lmp, &sort, flag);

		/*
		 * If tracing .init sections (only meaningful for RT_SORT_REV)
		 * print out the sorted dependencies.
		 */
		if (init)
			trace_sort(&sort);

		return (sort.s_lmpa);
	}
#endif
	/*
	 * We need to topologically sort the dependencies.
	 */
	if ((sort.s_stack = malloc(sort.s_num * sizeof (Rt_map *))) == NULL)
		return ((Rt_map **)S_ERROR);

	/*
	 * Determine where to start searching for tsort() candidates.  Any call
	 * to tsort() for .init processing is passed the link-map from which to
	 * start searching.  However, if new objects have dependencies on
	 * existing objects, or existing objects have been promoted (RTLD_LAZY
	 * to RTLD_NOW), then start searching at the head of the link-map list.
	 * These previously loaded objects will have been tagged for inclusion
	 * in this tsort() pass.  They still remain on an existing tsort() list,
	 * which must have been prempted for control to have arrived here.
	 * However, they will be ignored when encountered on any previous
	 * tsort() list if their .init has already been called.
	 */
	if (lml->lm_flags & LML_FLG_OBJREEVAL)
		_lmp = lml->lm_head;
	else
		_lmp = lmp;

	DBG_CALL(Dbg_file_bindings(_lmp, flag));
	lml->lm_flags &=
	    ~(LML_FLG_OBJREEVAL | LML_FLG_OBJADDED | LML_FLG_OBJDELETED);

	/*
	 * If interposers exist, inspect these objects first.
	 *
	 * Interposers can provide implicit dependencies - for example, an
	 * application that has a dependency on libumem will caused any other
	 * dependencies of the application that use the malloc family, to
	 * have an implicit dependency on libumem.  However, under the default
	 * condition of lazy binding, these dependency relationships on libumem
	 * are unknown to the tsorting process (ie. a call to one of the malloc
	 * family has not occurred to establish the dependency).  This lack of
	 * dependency information makes the interposer look "standalone",
	 * whereas the interposers .init/.fini should be analyzed with respect
	 * to the dependency relationship libumem will eventually create.
	 *
	 * By inspecting interposing objects first, we are able to trigger
	 * their .init sections to be accounted for before any others.
	 * Selecting these .init sections first is important for the malloc
	 * libraries, as these libraries need to prepare for pthread_atfork().
	 * However, handling interposer libraries in this generic fashion
	 * should help provide for other dependency relationships that may
	 * exist.
	 */
	if ((lml->lm_flags & (LML_FLG_INTRPOSE | LML_FLG_INTRPOSETSORT)) ==
	    LML_FLG_INTRPOSE) {
		Rt_map	*ilmp = _lmp;

		/*
		 * Unless the executable is tagged as an interposer, skip to
		 * the next object.
		 */
		if ((FLAGS(ilmp) & MSK_RT_INTPOSE) == 0)
			ilmp = (Rt_map *)NEXT(ilmp);

		/*
		 * Interposers are grouped at the head of the list; stop at
		 * the first non-interposer.
		 */
		for (; ilmp; ilmp = (Rt_map *)NEXT(ilmp)) {
			if ((FLAGS(ilmp) & MSK_RT_INTPOSE) == 0)
				break;

			if (initorfini(lml, ilmp, (flag | RT_SORT_INTPOSE),
			    &sort) != 0)
				return ((Rt_map **)S_ERROR);
		}

		/*
		 * Once all interposers are processed, there is no need to
		 * look for interposers again.  An interposer can only
		 * be introduced before any relocation takes place, thus
		 * interposer .init's will be grabbed during the first tsort
		 * starting at the head of the link-map list.
		 *
		 * Interposers can't be unloaded.  Thus interposer .fini's can
		 * only be called during atexit() processing.  The interposer
		 * tsort flag is removed from each link-map list during
		 * atexit_fini() so that the interposers .fini sections are
		 * processed appropriately.
		 */
		lml->lm_flags |= LML_FLG_INTRPOSETSORT;
	}

	/*
	 * Inspect any standard objects.
	 */
	for (; _lmp; _lmp = (Rt_map *)NEXT(_lmp)) {
		if (FLAGS(_lmp) & MSK_RT_INTPOSE)
			continue;

		if (initorfini(lml, _lmp, flag, &sort) != 0)
			return ((Rt_map **)S_ERROR);
	}

	/*
	 * The dependencies have been collected such that they are appropriate
	 * for an .init order, for .fini order reverse them.
	 */
	if (flag & RT_SORT_FWD) {
		int	bgn = 0, end = sort.s_lndx - 1;

		/* In-place reversal of the collected array. */
		while (bgn < end) {
			Rt_map *	tlmp = sort.s_lmpa[end];

			sort.s_lmpa[end] = sort.s_lmpa[bgn];
			sort.s_lmpa[bgn] = tlmp;

			bgn++, end--;
		}
	}

	/*
	 * If INITFIRST objects have been collected then move them to the front
	 * or end of the list as appropriate.
	 */
	if (sort.s_initfirst) {
		if (flag & RT_SORT_REV)
			r_initfirst(&sort, sort.s_lndx - 1);
		else
			f_initfirst(&sort, sort.s_lndx - 1);
	}

	/*
	 * If tracing .init sections (only meaningful for RT_SORT_REV), print
	 * out the sorted dependencies.
	 */
	if (init)
		trace_sort(&sort);

	/*
	 * Clean any temporary structures prior to return.
	 */
	if (sort.s_stack)
		free(sort.s_stack);

	if (sort.s_queue) {
		Aliste idx;
		Rt_map	*lmp2;

		/*
		 * Traverse the link-maps collected on the sort queue and
		 * delete the depth index.  These link-maps may be traversed
		 * again to sort other components either for inits, and almost
		 * certainly for .finis.
		 */
		for (APLIST_TRAVERSE(sort.s_queue, idx, lmp2))
			IDX(lmp2) = 0;

		free(sort.s_queue);
	}

	if (sort.s_scc) {
		Aliste	idx;
		APlist	*alp;

		/* Free each per-cycle trace list, then the list of lists. */
		for (APLIST_TRAVERSE(sort.s_scc, idx, alp))
			free(alp);
		free(sort.s_scc);
	}

	/*
	 * The caller is responsible for freeing the sorted link-map list once
	 * the associated .init/.fini's have been fired.
	 */
	DBG_CALL(Dbg_util_nl(lml, DBG_NL_STD));
	return (sort.s_lmpa);
}
941