xref: /titanic_51/usr/src/cmd/fm/modules/common/eversholt/itree.c (revision 5c46c6a053defc1bf25f2e08df2653bc7717ed0b)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2006 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  *
26  * itree.c -- instance tree creation and manipulation
27  *
28  * this module provides the instance tree
29  */
30 
31 #pragma ident	"%Z%%M%	%I%	%E% SMI"
32 
33 #include <stdio.h>
34 #include <ctype.h>
35 #include <string.h>
36 #include <strings.h>
37 #include "alloc.h"
38 #include "out.h"
39 #include "stable.h"
40 #include "literals.h"
41 #include "lut.h"
42 #include "tree.h"
43 #include "ptree.h"
44 #include "itree.h"
45 #include "ipath.h"
46 #include "iexpr.h"
47 #include "eval.h"
48 #include "config.h"
49 
50 /*
51  * struct info contains the state we keep when expanding a prop statement
52  * as part of constructing the instance tree.  state kept in struct info
53  * is the non-recursive stuff -- the stuff that doesn't need to be on
54  * the stack.  the rest of the state that is passed between all the
55  * mutually recursive functions, is required to be on the stack since
56  * we need to backtrack and recurse as we do the instance tree construction.
57  */
struct info {
	struct lut *lut;	/* instance tree: events added by find_or_add_event() */
	struct node *anp;	/* arrow np */
	struct lut *ex;		/* dictionary of explicit iterators */
	struct config *croot;	/* config root — presumably the tree matched against; confirm in callers */
} Ninfo;
64 
65 /*
66  * struct wildcardinfo is used to track wildcarded portions of paths.
67  *
68  * for example, if the epname of an event is "c/d" and the path "a/b/c/d"
69  * exists, the wildcard path ewname is filled in with the path "a/b".  when
70  * matching is done, epname is temporarily replaced with the concatenation
71  * of ewname and epname.  cpstart is set to the (struct config *)
72  * corresponding to component "c".
73  *
74  * a linked list of these structs is used to track the expansion of each
75  * event node as it is processed in vmatch() --> vmatch_event() calls.
76  */
struct wildcardinfo {
	struct node *nptop;		/* event node fed to vmatch */
	struct node *oldepname;		/* epname without the wildcard part */
	enum status {
		WC_UNDEFINED,		/* struct is not yet initialized */
		WC_UNDERCONSTRUCTION,	/* wildcard path not yet done */
		WC_COMPLETE		/* wildcard path done and is in use */
	} s;				/* current state of this entry */
	struct wildcardpath {
		struct node *ewname;	/* wildcard path */
		struct config *cpstart;	/* starting cp node for oldepname */
		int refcount;		/* number of event nodes using this */
	} *p;				/* wildcard path for this event */
	struct wildcardpath *matchwc;	/* ptr to wc path to be matched */
	struct wildcardinfo *next;	/* next entry in the linked list */
};
93 
94 static void vmatch(struct info *infop, struct node *np,
95     struct node *lnp, struct node *anp, struct wildcardinfo **wcproot);
96 static void hmatch(struct info *infop, struct node *np, struct node *nextnp);
97 static void itree_pbubble(int flags, struct bubble *bp);
98 static void itree_pruner(void *left, void *right, void *arg);
99 static void itree_destructor(void *left, void *right, void *arg);
100 static int itree_set_arrow_traits(struct arrow *ap, struct node *fromev,
101     struct node *toev, struct lut *ex);
102 static void itree_free_arrowlists(struct bubble *bubp, int arrows_too);
103 static void itree_prune_arrowlists(struct bubble *bubp);
104 static void arrow_add_within(struct arrow *ap, struct node *xpr);
105 static struct arrow *itree_add_arrow(struct bubble *frombubblep,
106     struct bubble *tobubblep, struct node *apnode, struct node *fromevent,
107     struct node *toevent, struct lut *ex);
108 static struct constraintlist *itree_add_constraint(struct arrow *arrowp,
109     struct node *c);
110 static struct bubble *itree_add_bubble(struct event *eventp,
111     enum bubbletype btype, int nork, int gen);
112 static void itree_free_bubble(struct bubble *freeme);
113 static void itree_free_constraints(struct arrow *ap);
114 
115 /*
116  * the following struct contains the state we build up during
117  * vertical and horizontal expansion so that generate()
118  * has everything it needs to construct the appropriate arrows.
119  * after setting up the state by calling:
120  *	generate_arrownp()
121  *	generate_nork()
122  *	generate_new()
123  *	generate_from()
124  *	generate_to()
125  * the actual arrow generation is done by calling:
126  *	generate()
127  */
static struct {
	int generation;		/* generation number of arrow set */
	struct node *arrownp;	/* top-level parse tree for arrow */
	int n;			/* n value associated with arrow */
	int k;			/* k value associated with arrow */
	struct node *fromnp;	/* left-hand-side event in parse tree */
	struct node *tonp;	/* right-hand-side event in parse tree */
	struct event *frome;	/* left-hand-side event in instance tree */
	struct event *toe;	/* right-hand-side event in instance tree */
	struct bubble *frombp;	/* bubble arrow comes from */
	struct bubble *tobp;	/* bubble arrow goes to */
} G;				/* arrow-generation state (see comment above) */
140 
/*
 * generate_arrownp -- record the top-level parse tree node of the
 * arrow being expanded; consumed later by generate().
 */
static void
generate_arrownp(struct node *arrownp)
{
	G.arrownp = arrownp;
}
146 
/*
 * generate_nork -- record the N and K values for the arrow being
 * built; N sizes the from-bubble, K the to-bubble (see generate_from()
 * and generate_to()).
 */
static void
generate_nork(int n, int k)
{
	G.n = n;
	G.k = k;
}
153 
/*
 * generate_new -- start a new arrow set by bumping the generation
 * number, which is stamped onto the to-bubbles created afterwards.
 */
static void
generate_new(void)
{
	G.generation++;
}
159 
/*
 * generate_from -- record the left-hand-side event of the arrow (both
 * its parse tree node and its instance tree event) and create/locate
 * the B_FROM bubble (sized by G.n) on that event.
 */
static void
generate_from(struct node *fromeventnp, struct event *fromevent)
{
	G.fromnp = fromeventnp;
	G.frome = fromevent;

	out(O_ALTFP|O_VERB3|O_NONL, "from bubble on ");
	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.fromnp);
	out(O_ALTFP|O_VERB3, NULL);

	G.frombp = itree_add_bubble(G.frome, B_FROM, G.n, 0);
}
172 
/*
 * generate_to -- record the right-hand-side event of the arrow (parse
 * tree node and instance tree event) and create/locate the B_TO bubble
 * (sized by G.k, tagged with the current generation) on that event.
 */
static void
generate_to(struct node *toeventnp, struct event *toevent)
{
	G.tonp = toeventnp;
	G.toe = toevent;

	out(O_ALTFP|O_VERB3|O_NONL, "to bubble (gen %d) on ", G.generation);
	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.tonp);
	out(O_ALTFP|O_VERB3, NULL);

	G.tobp = itree_add_bubble(G.toe, B_TO, G.k, G.generation);
}
185 
/*
 * generate -- create the arrow described by the state accumulated via
 * the generate_*() calls above.  "ex" is the dictionary of explicit
 * iterators, passed through to itree_add_arrow().  the arrow may be
 * suppressed if its constraints fail (itree_add_arrow() returns NULL).
 */
static void
generate(struct lut *ex)
{
	/* all of the generate_*() setters must have run first */
	ASSERT(G.arrownp != NULL);
	ASSERT(G.fromnp != NULL);
	ASSERT(G.frome != NULL);
	ASSERT(G.frombp != NULL);
	ASSERT(G.tonp != NULL);
	ASSERT(G.toe != NULL);
	ASSERT(G.tobp != NULL);

	out(O_ALTFP|O_VERB3|O_NONL, "        Arrow \"");
	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.fromnp);
	out(O_ALTFP|O_VERB3|O_NONL, "\" -> \"");
	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.tonp);

	if (itree_add_arrow(G.frombp, G.tobp, G.arrownp,
	    G.fromnp, G.tonp, ex) == NULL) {
		out(O_ALTFP|O_VERB3, "\" (prevented by constraints)");
	} else {
		out(O_ALTFP|O_VERB3, "\"");
	}
}
209 
/*
 * what tname_dup() should do with each path component's child node
 * (the instance number or iterator under a T_NAME)
 */
enum childnode_action {
	CN_NONE,	/* do not copy the child node */
	CN_INSTANTIZE,	/* give the copy a T_NUM child with the instance number */
	CN_DUP		/* duplicate the existing child (T_NUM or T_NAME) */
};
215 
/*
 * tname_dup -- duplicate a T_NAME path (linked list of components)
 *
 * "act" controls the child (instance number) of each copied component:
 *	CN_NONE		leave the child unset in the copy
 *	CN_INSTANTIZE	give the copy a T_NUM child holding the instance
 *			number, taken from an existing T_NUM child or
 *			looked up via the component's config node (cp)
 *	CN_DUP		copy the existing child as-is
 *
 * the whole copy is attributed to the file/line of the first component.
 * returns NULL if namep is NULL.
 */
static struct node *
tname_dup(struct node *namep, enum childnode_action act)
{
	struct node *retp = NULL;
	const char *file;
	int line;

	if (namep == NULL)
		return (NULL);

	file = namep->file;
	line = namep->line;

	for (; namep != NULL; namep = namep->u.name.next) {
		struct node *newnp = newnode(T_NAME, file, line);

		newnp->u.name.t = namep->u.name.t;
		newnp->u.name.s = namep->u.name.s;
		newnp->u.name.last = newnp;
		newnp->u.name.it = namep->u.name.it;
		newnp->u.name.cp = namep->u.name.cp;

		if (act == CN_DUP) {
			struct node *npc;

			npc = namep->u.name.child;
			if (npc != NULL) {
				switch (npc->t) {
				case T_NUM:
					newnp->u.name.child =
						newnode(T_NUM, file, line);
					newnp->u.name.child->u.ull =
						npc->u.ull;
					break;
				case T_NAME:
					newnp->u.name.child =
						tree_name(npc->u.name.s,
							npc->u.name.it,
							file, line);
					break;
				default:
					out(O_DIE, "tname_dup: "
					    "invalid child type %s",
					    ptree_nodetype2str(npc->t));
				}
			}
		} else if (act == CN_INSTANTIZE) {
			newnp->u.name.child = newnode(T_NUM, file, line);

			if (namep->u.name.child == NULL ||
			    namep->u.name.child->t != T_NUM) {
				/* no explicit number; take it from the config */
				int inum;

				ASSERT(newnp->u.name.cp != NULL);
				config_getcompname(newnp->u.name.cp,
						    NULL, &inum);
				newnp->u.name.child->u.ull =
					(unsigned long long)inum;
			} else {
				newnp->u.name.child->u.ull =
					namep->u.name.child->u.ull;
			}
		}

		/* append the copy to the end of the result list */
		if (retp == NULL) {
			retp = newnp;
		} else {
			retp->u.name.last->u.name.next = newnp;
			retp->u.name.last = newnp;
		}
	}

	return (retp);
}
290 
/* state passed to nv_instantiate() as it walks a declaration's props lut */
struct prop_wlk_data {
	struct lut *props;	/* result: per-instance props built so far */
	struct node *epname;	/* instanced path to instantize against */
};
295 
296 static struct lut *props2instance(struct node *, struct node *);
297 
298 /*
299  * let oldepname be a subset of epname.  return the subsection of epname
300  * that ends with oldepname.  make each component in the path explicitly
301  * instanced (i.e., with a T_NUM child).
302  */
static struct node *
tname_dup_to_epname(struct node *oldepname, struct node *epname)
{
	struct node *npref, *npend, *np1, *np2;
	struct node *ret = NULL;
	int foundmatch = 0;

	if (epname == NULL)
		return (NULL);

	/*
	 * search for the longest path in epname which contains
	 * oldnode->u.event.epname.  set npend to point to just past the
	 * end of this path.
	 */
	npend = NULL;
	for (npref = epname; npref; npref = npref->u.name.next) {
		if (npref->u.name.s == oldepname->u.name.s) {
			/* candidate start; compare component-for-component */
			for (np1 = npref, np2 = oldepname;
			    np1 != NULL && np2 != NULL;
			    np1 = np1->u.name.next, np2 = np2->u.name.next) {
				if (np1->u.name.s != np2->u.name.s)
					break;
			}
			if (np2 == NULL) {
				/* all of oldepname matched at this offset */
				foundmatch = 1;
				npend = np1;
				if (np1 == NULL) {
					/* oldepname matched npref up to end */
					break;
				}
			}
		}
	}

	if (foundmatch == 0) {
		/*
		 * if oldepname could not be found in epname, return a
		 * duplicate of the former.  do not try to instantize
		 * oldepname since it might not be a path.
		 */
		return (tname_dup(oldepname, CN_DUP));
	}

	/*
	 * dup (epname -- npend).  all children should be T_NUMs.
	 */
	for (npref = epname;
	    ! (npref == NULL || npref == npend);
	    npref = npref->u.name.next) {
		struct node *newnp = newnode(T_NAME, oldepname->file,
					    oldepname->line);

		newnp->u.name.t = npref->u.name.t;
		newnp->u.name.s = npref->u.name.s;
		newnp->u.name.last = newnp;
		newnp->u.name.it = npref->u.name.it;
		newnp->u.name.cp = npref->u.name.cp;

		/* force an explicit T_NUM instance number on the copy */
		newnp->u.name.child = newnode(T_NUM, oldepname->file,
					    oldepname->line);

		if (npref->u.name.child == NULL ||
		    npref->u.name.child->t != T_NUM) {
			/* no explicit number; get it from the config node */
			int childnum;

			ASSERT(npref->u.name.cp != NULL);
			config_getcompname(npref->u.name.cp, NULL, &childnum);
			newnp->u.name.child->u.ull = childnum;
		} else {
			newnp->u.name.child->u.ull =
				npref->u.name.child->u.ull;
		}

		/* append the copy to the end of the result list */
		if (ret == NULL) {
			ret = newnp;
		} else {
			ret->u.name.last->u.name.next = newnp;
			ret->u.name.last = newnp;
		}
	}

	return (ret);
}
387 
388 /*
389  * restriction: oldnode->u.event.epname has to be equivalent to or a subset
390  * of epname
391  */
static struct node *
tevent_dup_to_epname(struct node *oldnode, struct node *epname)
{
	struct node *ret;

	/* duplicate the event: copy the ename, instantize the epname */
	ret = newnode(T_EVENT, oldnode->file, oldnode->line);
	ret->u.event.ename = tname_dup(oldnode->u.event.ename, CN_NONE);
	ret->u.event.epname = tname_dup_to_epname(oldnode->u.event.epname,
						    epname);
	return (ret);
}
403 
/*
 * nv_instantiate -- lut_walk callback that copies one property
 * (name/value pair) from a declaration's lut into the per-instance
 * props lut being built in pd->props, instantizing any embedded
 * paths/events against pd->epname.  T_FUNC values are shared, not
 * copied (see instances_destructor(), which skips them when freeing).
 */
static void
nv_instantiate(void *name, void *val, void *arg)
{
	struct prop_wlk_data *pd = (struct prop_wlk_data *)arg;
	struct node *orhs = (struct node *)val;
	struct node *nrhs;

	/* handle engines by instantizing the entire engine */
	if (name == L_engine) {
		ASSERT(orhs->t == T_EVENT);
		ASSERT(orhs->u.event.ename->u.name.t == N_SERD);

		/* there are only SERD engines for now */

		nrhs = newnode(T_SERD, orhs->file, orhs->line);
		nrhs->u.stmt.np = tevent_dup_to_epname(orhs, pd->epname);
		nrhs->u.stmt.lutp = props2instance(orhs, pd->epname);
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		return;
	}

	switch (orhs->t) {
	case T_NUM:
		nrhs = newnode(T_NUM, orhs->file, orhs->line);
		nrhs->u.ull = orhs->u.ull;
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		break;
	case T_TIMEVAL:
		nrhs = newnode(T_TIMEVAL, orhs->file, orhs->line);
		nrhs->u.ull = orhs->u.ull;
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		break;
	case T_NAME:
		nrhs = tname_dup_to_epname(orhs, pd->epname);
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		break;
	case T_EVENT:
		nrhs = tevent_dup_to_epname(orhs, pd->epname);
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		break;
	case T_GLOBID:
		nrhs = newnode(T_GLOBID, orhs->file, orhs->line);
		nrhs->u.globid.s = orhs->u.globid.s;
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		break;
	case T_FUNC:
		/* for T_FUNC, we don't duplicate it, just point to node */
		pd->props = lut_add(pd->props, name, orhs, NULL);
		break;
	default:
		out(O_DIE, "unexpected nvpair value type %s",
		    ptree_nodetype2str(((struct node *)val)->t));
	}
}
458 
/*
 * props2instance -- build the per-instance property lut for eventnp by
 * walking the properties on its declaration and instantizing each one
 * against epname via nv_instantiate().  the returned lut is freed via
 * instances_destructor (see that function's comments on ownership).
 */
static struct lut *
props2instance(struct node *eventnp, struct node *epname)
{
	struct prop_wlk_data pd;

	pd.props = NULL;
	pd.epname = epname;

	ASSERT(eventnp->u.event.declp != NULL);
	lut_walk(eventnp->u.event.declp->u.stmt.lutp, nv_instantiate, &pd);
	return (pd.props);
}
471 
472 /*ARGSUSED*/
/*
 * instances_destructor -- lut_walk callback used to free a
 * per-instance props lut built by props2instance().  T_SERD entries
 * own a nested lut which is freed recursively; T_FUNC entries point
 * at the original (shared) node and are not freed.
 */
static void
instances_destructor(void *left, void *right, void *arg)
{
	struct node *dn = (struct node *)right;

	if (dn->t == T_SERD) {
		/* we allocated the lut during itree_create(), so free it */
		lut_free(dn->u.stmt.lutp, instances_destructor, NULL);
		dn->u.stmt.lutp = NULL;
	}
	if (dn->t != T_FUNC)	/* T_FUNC pointed to original node */
		tree_free(dn);
}
486 
487 /*ARGSUSED*/
/*
 * payloadprops_destructor -- lut_walk callback that frees one stored
 * payload-property value.
 */
static void
payloadprops_destructor(void *left, void *right, void *arg)
{
	FREE(right);
}
493 
494 /*
495  * event_cmp -- used via lut_lookup/lut_add on instance tree lut
496  */
497 static int
498 event_cmp(struct event *ep1, struct event *ep2)
499 {
500 	int diff;
501 
502 	if ((diff = ep2->enode->u.event.ename->u.name.s -
503 	    ep1->enode->u.event.ename->u.name.s) != 0)
504 		return (diff);
505 	if ((diff = (char *)ep2->ipp - (char *)ep1->ipp) != 0)
506 		return (diff);
507 	return (0);
508 
509 }
510 
/*
 * itree_lookup -- find the instance tree event with the given interned
 * ename and instanced path; returns NULL if not present.
 *
 * builds a throwaway event/node skeleton on the stack filled in just
 * enough for event_cmp() to compare against.
 */
struct event *
itree_lookup(struct lut *itp, const char *ename, const struct ipath *ipp)
{
	struct event searchevent;	/* just used for searching */
	struct node searcheventnode;
	struct node searchenamenode;

	searchevent.enode = &searcheventnode;
	searcheventnode.t = T_EVENT;
	searcheventnode.u.event.ename = &searchenamenode;
	searchenamenode.t = T_NAME;
	searchenamenode.u.name.s = ename;
	searchevent.ipp = ipp;
	return (lut_lookup(itp, (void *)&searchevent, (lut_cmp)event_cmp));
}
526 
/*
 * find_or_add_event -- return the instance tree event for the fully
 * instanced T_EVENT parse node np, allocating a new zeroed struct
 * event (with its per-instance props) and inserting it into
 * infop->lut if it isn't already there.
 */
static struct event *
find_or_add_event(struct info *infop, struct node *np)
{
	struct event *ret;
	struct event searchevent;	/* just used for searching */

	ASSERTeq(np->t, T_EVENT, ptree_nodetype2str);

	searchevent.enode = np;
	searchevent.ipp = ipath(np->u.event.epname);
	if ((ret = lut_lookup(infop->lut, (void *)&searchevent,
	    (lut_cmp)event_cmp)) != NULL)
		return (ret);

	/* wasn't already in tree, allocate it */
	ret = MALLOC(sizeof (*ret));
	bzero(ret, sizeof (*ret));

	ret->t = np->u.event.ename->u.name.t;
	ret->enode = np;
	ret->ipp = searchevent.ipp;
	ret->props = props2instance(np, np->u.event.epname);

	infop->lut = lut_add(infop->lut, (void *)ret, (void *)ret,
	    (lut_cmp)event_cmp);

	return (ret);
}
555 
556 /*
557  * hmatch_event -- perform any appropriate horizontal expansion on an event
558  *
559  * this routine is used to perform horizontal expansion on both the
560  * left-hand-side events in a prop, and the right-hand-side events.
561  * when called to handle a left-side event, nextnp point to the right
562  * side of the prop that should be passed to hmatch() for each match
563  * found by horizontal expansion.   when no horizontal expansion exists,
564  * we will still "match" one event for every event found in the list on
565  * the left-hand-side of the prop because vmatch() already found that
566  * there's at least one match during vertical expansion.
567  */
static void
hmatch_event(struct info *infop, struct node *eventnp, struct node *epname,
    struct config *ncp, struct node *nextnp, int rematch)
{
	if (epname == NULL) {
		/*
		 * end of pathname recursion, either we just located
		 * a left-hand-side event and we're ready to move on
		 * to the expanding the right-hand-side events, or
		 * we're further down the recursion and we just located
		 * a right-hand-side event.  the passed-in parameter
		 * "nextnp" tells us whether we're working on the left
		 * side and need to move on to nextnp, or if nextnp is
		 * NULL, we're working on the right side.
		 */
		if (nextnp) {
			/*
			 * finished a left side expansion, move on to right.
			 * tell generate() what event we just matched so
			 * it can be used at the source of any arrows
			 * we generate as we match events on the right side.
			 */
			generate_from(eventnp,
			    find_or_add_event(infop, eventnp));
			hmatch(infop, nextnp, NULL);
		} else {
			/*
			 * finished a right side expansion.  tell generate
			 * the information about the destination and let
			 * it construct the arrows as appropriate.
			 */
			generate_to(eventnp,
			    find_or_add_event(infop, eventnp));
			generate(infop->ex);
		}

		return;
	}

	ASSERTeq(epname->t, T_NAME, ptree_nodetype2str);

	/*
	 * we only get here when eventnp already has a completely
	 * instanced epname in it already.  so we first recurse
	 * down to the end of the name and as the recursion pops
	 * up, we look for opportunities to advance horizontal
	 * expansions on to the next match.  when we do advance
	 * horizontal expansions, we potentially render all cp
	 * pointers on all components to the right as invalid,
	 * so we pass in an "ncp" config handle so matching against
	 * the config can happen.
	 */
	if (rematch) {
		/* save cp so it can be restored after the loop below */
		struct config *ocp = epname->u.name.cp;
		char *ncp_s;
		int ncp_num, num;

		for (; ncp; ncp = config_next(ncp)) {
			config_getcompname(ncp, &ncp_s, &ncp_num);

			if (ncp_s == epname->u.name.s) {
				/* found a matching component name */
				config_getcompname(epname->u.name.cp,
				    NULL, &num);

				/* non-horizontal names must keep their number */
				if (epname->u.name.it != IT_HORIZONTAL &&
				    ncp_num != num)
					continue;

				epname->u.name.cp = ncp;
				hmatch_event(infop, eventnp,
				    epname->u.name.next, config_child(ncp),
				    nextnp, 1);
			}
		}

		epname->u.name.cp = ocp;

		return;		/* no more config to match against */

	} else {
		hmatch_event(infop, eventnp, epname->u.name.next, ncp,
		    nextnp, 0);
	}

	if (epname->u.name.it == IT_HORIZONTAL) {
		struct config *cp;
		struct config *ocp = epname->u.name.cp;
		char *cp_s;
		int cp_num;
		int ocp_num;
		struct iterinfo *iterinfop = NULL;
		const char *iters;

		config_getcompname(ocp, NULL, &ocp_num);

		/* advance past the current cp to each further sibling match */
		for (cp = config_next(ocp); cp; cp = config_next(cp)) {
			config_getcompname(cp, &cp_s, &cp_num);

			if (cp_s == epname->u.name.s) {
				ASSERT(epname->u.name.child != NULL);

				iters = epname->u.name.child->u.name.s;
				if ((iterinfop = lut_lookup(infop->ex,
				    (void *)iters, NULL)) == NULL) {
					out(O_DIE,
					    "hmatch_event: internal error: "
					    "iterator \"%s\" undefined", iters);
				} else {
					/* advance dict entry to next match */
					iterinfop->num = cp_num;
				}
				epname->u.name.cp = cp;
				hmatch_event(infop, eventnp,
				    epname->u.name.next, config_child(cp),
				    nextnp, 1);
			}
		}

		if (iterinfop != NULL) {
			/* restore dict entry */
			iterinfop->num = ocp_num;
		}
		epname->u.name.cp = ocp;
	}
}
694 
695 /*
696  * hmatch -- check for horizontal expansion matches
697  *
698  * np points to the things we're matching (like a T_LIST or a T_EVENT)
699  * and if we're working on a left-side of a prop, nextnp points to
700  * the other side of the prop that we'll tackle next when this recursion
701  * bottoms out.  when all the events in the entire prop arrow have been
702  * horizontally expanded, generate() will be called to generate the
703  * actualy arrow.
704  */
705 static void
706 hmatch(struct info *infop, struct node *np, struct node *nextnp)
707 {
708 	if (np == NULL)
709 		return;		/* all done */
710 
711 	/*
712 	 * for each item in the list of events (which could just
713 	 * be a single event, or it could get larger in the loop
714 	 * below due to horizontal expansion), call hmatch on
715 	 * the right side and create arrows to each element.
716 	 */
717 
718 	switch (np->t) {
719 	case T_LIST:
720 		/* loop through the list */
721 		if (np->u.expr.left)
722 			hmatch(infop, np->u.expr.left, nextnp);
723 		if (np->u.expr.right)
724 			hmatch(infop, np->u.expr.right, nextnp);
725 		break;
726 
727 	case T_EVENT:
728 		hmatch_event(infop, np, np->u.event.epname,
729 		    NULL, nextnp, 0);
730 		break;
731 
732 	default:
733 		outfl(O_DIE, np->file, np->line,
734 		    "hmatch: unexpected type: %s",
735 		    ptree_nodetype2str(np->t));
736 	}
737 }
738 
739 static int
740 itree_np2nork(struct node *norknp)
741 {
742 	if (norknp == NULL)
743 		return (1);
744 	else if (norknp->t == T_NAME && norknp->u.name.s == L_A)
745 		return (-1);	/* our internal version of "all" */
746 	else if (norknp->t == T_NUM)
747 		return ((int)norknp->u.ull);
748 	else
749 		out(O_DIE, norknp->file, norknp->line,
750 		    "itree_np2nork: internal error type %s",
751 		    ptree_nodetype2str(norknp->t));
752 	/*NOTREACHED*/
753 	return (1);
754 }
755 
756 static struct iterinfo *
757 newiterinfo(int num, struct node *np)
758 {
759 	struct iterinfo *ret = MALLOC(sizeof (*ret));
760 
761 	ret->num = num;
762 	ret->np = np;
763 
764 	return (ret);
765 }
766 
767 /*ARGSUSED*/
/*
 * iterinfo_destructor -- lut_walk callback that frees one iterator
 * dictionary entry (the struct is scrubbed before being freed).
 */
static void
iterinfo_destructor(void *left, void *right, void *arg)
{
	struct iterinfo *iterinfop = (struct iterinfo *)right;

	bzero(iterinfop, sizeof (*iterinfop));
	FREE(iterinfop);
}
776 
777 /*
778  * return 1 if wildcard path for wcp matches another wildcard path;
779  * return 0 if otherwise.
780  */
781 static int
782 wc_paths_match(struct wildcardinfo *wcp)
783 {
784 	struct node *np1, *np2;
785 
786 	ASSERT(wcp->matchwc != NULL);
787 
788 	for (np1 = wcp->p->ewname, np2 = wcp->matchwc->ewname;
789 	    np1 != NULL && np2 != NULL;
790 	    np1 = np1->u.name.next, np2 = np2->u.name.next) {
791 		/*
792 		 * names must match
793 		 */
794 		if (np1->u.name.s != np2->u.name.s)
795 			return (0);
796 
797 		/*
798 		 * children must exist and have the same numerical value
799 		 */
800 		if (np1->u.name.child == NULL || np2->u.name.child == NULL)
801 			return (0);
802 
803 		if (np1->u.name.child->t != T_NUM ||
804 		    np2->u.name.child->t != T_NUM)
805 			return (0);
806 
807 		if (np1->u.name.child->u.ull != np2->u.name.child->u.ull)
808 			return (0);
809 	}
810 
811 	/*
812 	 * return true only if we have matches for all entries of n1 and
813 	 * n2.  note that NULL wildcard paths (i.e., both wcp->p->ewname
814 	 * and wcp->matchwc->ewname are NULL) will be considered as
815 	 * matching paths.
816 	 */
817 	if (np1 == NULL && np2 == NULL)
818 		return (1);
819 
820 	return (0);
821 }
822 
823 /*
824  * update epname to include the wildcarded portion
825  */
static void
create_wildcardedpath(struct wildcardinfo **wcproot)
{
	struct wildcardinfo *wcp;
	struct node *nptop;

	wcp = *wcproot;

	/* mark the wildcard path done while the substitution is in effect */
	if (wcp->s == WC_UNDERCONSTRUCTION) {
		ASSERT(wcp->p->refcount == 1);
		wcp->s = WC_COMPLETE;
	}

	/* path has no wildcard */
	if (wcp->p->ewname == NULL)
		return;

	/*
	 * get to this point if a wildcard portion of the path exists.
	 *
	 * first set oldepname to the start of the existing epname for use
	 * in future comparisons, then update epname to include the
	 * wildcard portion.
	 */
	nptop = wcp->nptop;

	ASSERT(wcp->oldepname == nptop->u.event.epname);

	/* epname becomes a fresh copy of ewname ++ oldepname */
	nptop->u.event.epname =	tname_dup(wcp->p->ewname, CN_DUP);
	nptop->u.event.epname = tree_name_append(nptop->u.event.epname,
					tname_dup(wcp->oldepname, CN_DUP));
}
858 
859 /*
860  * restore epname to its former (nonwildcarded) state
861  */
static void
undo_wildcardedpath(struct wildcardinfo **wcproot)
{
	struct wildcardinfo *wcp;

	wcp = *wcproot;

	/* reverse the state change made by create_wildcardedpath() */
	if (wcp->s == WC_COMPLETE) {
		ASSERT(wcp->p->refcount == 1);
		wcp->s = WC_UNDERCONSTRUCTION;
	}

	/* path has no wildcard */
	if (wcp->p->ewname == NULL)
		return;

	ASSERT(wcp->oldepname != NULL);

	/* free the duplicated wildcarded path and restore the original */
	tree_free(wcp->nptop->u.event.epname);
	wcp->nptop->u.event.epname = wcp->oldepname;
}
883 
/* how vmatch_event() should apply wildcarding to config components */
enum wildcard_action {
	WA_NONE,	/* do not do any wildcards */
	WA_SINGLE,	/* do wildcard only for current cp node */
	WA_ALL		/* do wildcards for all cp nodes */
};
889 
890 static void
891 vmatch_event(struct info *infop, struct config *cp, struct node *np,
892 	    struct node *lnp, struct node *anp,
893 	    struct wildcardinfo **wcproot, enum wildcard_action dowildcard)
894 {
895 	struct wildcardinfo *wcp;
896 	char *cp_s;
897 	int cp_num;
898 
899 	wcp = *wcproot;
900 
901 	if ((np == NULL && wcp->oldepname != NULL) ||
902 	    (cp == NULL && wcp->oldepname == NULL)) {
903 		/*
904 		 * get to this point if the pathname matched the config
905 		 * (but not necessarily a match at the end).  first check
906 		 * for any matching wildcard paths.
907 		 */
908 		if (wcp->matchwc != NULL && wc_paths_match(wcp) == 0)
909 			return;
910 
911 		create_wildcardedpath(wcproot);
912 		vmatch(infop, np, lnp, anp, wcproot);
913 		undo_wildcardedpath(wcproot);
914 
915 		return;
916 	}
917 
918 	if (cp == NULL)
919 		return;	/* no more config to match against */
920 
921 	for (; cp; cp = config_next(cp)) {
922 		config_getcompname(cp, &cp_s, &cp_num);
923 
924 		if (cp_s == np->u.name.s &&
925 		    ! (wcp->s == WC_UNDERCONSTRUCTION &&
926 		    dowildcard == WA_SINGLE)) {
927 			/* found a matching component name */
928 			if (np->u.name.child &&
929 			    np->u.name.child->t == T_NUM) {
930 				/*
931 				 * an explicit instance number was given
932 				 * in the source.  so only consider this
933 				 * a configuration match if the number
934 				 * also matches.
935 				 */
936 				if (cp_num != np->u.name.child->u.ull)
937 					continue;
938 
939 				np->u.name.cp = cp;
940 			} else {
941 				struct iterinfo *iterinfop;
942 				const char *iters;
943 
944 				/*
945 				 * vertical iterator.  look it up in
946 				 * the appropriate lut and if we get
947 				 * back a value it is either one that we
948 				 * set earlier, in which case we record
949 				 * the new value for this iteration and
950 				 * keep matching, or it is one that was
951 				 * set by an earlier reference to the
952 				 * iterator, in which case we only consider
953 				 * this a configuration match if the number
954 				 * matches cp_num.
955 				 */
956 
957 				ASSERT(np->u.name.child != NULL);
958 				ASSERT(np->u.name.child->t == T_NAME);
959 				iters = np->u.name.child->u.name.s;
960 
961 				if ((iterinfop = lut_lookup(infop->ex,
962 				    (void *)iters, NULL)) == NULL) {
963 					/* we're the first use, record our np */
964 					infop->ex = lut_add(infop->ex,
965 					    (void *)iters,
966 					    newiterinfo(cp_num, np), NULL);
967 				} else if (np == iterinfop->np) {
968 					/*
969 					 * we're the first use, back again
970 					 * for another iteration.  so update
971 					 * the num bound to this iterator in
972 					 * the lut.
973 					 */
974 					iterinfop->num = cp_num;
975 				} else if (cp_num != iterinfop->num) {
976 					/*
977 					 * an earlier reference to this
978 					 * iterator bound it to a different
979 					 * instance number, so there's no
980 					 * match here after all.
981 					 *
982 					 * however, it's possible that this
983 					 * component should really be part of
984 					 * the wildcard.  we explore this by
985 					 * forcing this component into the
986 					 * wildcarded section.
987 					 *
988 					 * for an more details of what's
989 					 * going to happen now, see
990 					 * comments block below entitled
991 					 * "forcing components into
992 					 * wildcard path".
993 					 */
994 					if (dowildcard == WA_ALL &&
995 					    wcp->s == WC_UNDERCONSTRUCTION) {
996 						vmatch_event(infop, cp, np,
997 							    lnp, anp, wcproot,
998 							    WA_SINGLE);
999 					}
1000 					continue;
1001 				}
1002 				np->u.name.cp = cp;
1003 			}
1004 
1005 			/*
1006 			 * if wildcarding was done in a call earlier in the
1007 			 * stack, record the current cp as the first
1008 			 * matching and nonwildcarded cp.
1009 			 */
1010 			if (dowildcard == WA_ALL &&
1011 			    wcp->s == WC_UNDERCONSTRUCTION)
1012 				wcp->p->cpstart = cp;
1013 
1014 			/*
1015 			 * if this was an IT_HORIZONTAL name,
1016 			 * hmatch() will use the cp to expand
1017 			 * all matches horizontally into a list.
1018 			 * we know the list will contain at least
1019 			 * one element (the one we just matched),
1020 			 * so we just store cp and let hmatch_event()
1021 			 * do the rest.
1022 			 *
1023 			 * recurse on to next component.  note that
1024 			 * wildcarding is now turned off.
1025 			 */
1026 			vmatch_event(infop, config_child(cp), np->u.name.next,
1027 				    lnp, anp, wcproot, WA_NONE);
1028 
1029 			/*
1030 			 * forcing components into wildcard path:
1031 			 *
1032 			 * if this component is the first match, force it
1033 			 * to be part of the wildcarded path and see if we
1034 			 * can get additional matches.  repeat call to
1035 			 * vmatch_event() with the same np, making sure
1036 			 * wildcarding is forced for this component alone
1037 			 * and not its peers by specifying vmatch_event(
1038 			 * ..., WA_SINGLE).  in other words, in the call to
1039 			 * vmatch_event() below, there should be no loop
1040 			 * over cp's peers since that is being done in the
1041 			 * current loop [i.e., the loop we're in now].
1042 			 *
1043 			 * here's an example.  suppose we have the
1044 			 * definition
1045 			 *	event foo@x/y
1046 			 * and configuration
1047 			 *	a0/x0/y0/a1/x1/y1
1048 			 *
1049 			 * the code up to this point will treat "a0" as the
1050 			 * wildcarded part of the path and "x0/y0" as the
1051 			 * nonwildcarded part, resulting in the instanced
1052 			 * event
1053 			 *	foo@a0/x0/y0
1054 			 *
1055 			 * in order to discover the next match (.../x1/y1)
1056 			 * in the configuration we have to force "x0" into
1057 			 * the wildcarded part of the path.  the following
1058 			 * call to vmatch_event(..., WA_SINGLE) does this.
1059 			 * by doing so, we discover the wildcarded part
1060 			 * "a0/x0/y0/a1" and the nonwildcarded part "x1/y1"
1061 			 *
1062 			 * the following call to vmatch_event() is also
1063 			 * needed to properly handle the configuration
1064 			 *	b0/x0/b1/x1/y1
1065 			 *
1066 			 * the recursions into vmatch_event() will start
1067 			 * off uncovering "b0" as the wildcarded part and
1068 			 * "x0" as the start of the nonwildcarded path.
1069 			 * however, the next recursion will not result in a
1070 			 * match since there is no "y" following "x0".  the
1071 			 * subsequent match of (wildcard = "b0/x0/b1" and
1072 			 * nonwildcard = "x1/y1") will be discovered only
1073 			 * if "x0" is forced to be a part of the wildcarded
1074 			 * path.
1075 			 */
1076 			if (dowildcard == WA_ALL &&
1077 			    wcp->s == WC_UNDERCONSTRUCTION) {
1078 				vmatch_event(infop, cp, np, lnp, anp,
1079 					    wcproot, WA_SINGLE);
1080 			}
1081 
1082 			if (np->u.name.it == IT_HORIZONTAL) {
1083 				/*
1084 				 * hmatch() finished iterating through
1085 				 * the configuration as described above, so
1086 				 * don't continue iterating here.
1087 				 */
1088 				return;
1089 			}
1090 
1091 		} else if ((dowildcard == WA_SINGLE || dowildcard == WA_ALL) &&
1092 			    wcp->s == WC_UNDERCONSTRUCTION) {
1093 			/*
1094 			 * no matching cp, and we are constructing our own
1095 			 * wildcard path.  (in other words, we are not
1096 			 * referencing a wildcard path created for an
1097 			 * earlier event.)
1098 			 *
1099 			 * add wildcard entry, then recurse on to config
1100 			 * child
1101 			 */
1102 			struct node *cpnode, *prevlast;
1103 
1104 			cpnode = tree_name(cp_s, IT_NONE, NULL, 0);
1105 			cpnode->u.name.child = newnode(T_NUM, NULL, 0);
1106 			cpnode->u.name.child->u.ull = cp_num;
1107 			cpnode->u.name.cp = cp;
1108 
1109 			if (wcp->p->ewname == NULL) {
1110 				prevlast = NULL;
1111 				wcp->p->ewname = cpnode;
1112 			} else {
1113 				prevlast = wcp->p->ewname->u.name.last;
1114 				wcp->p->ewname =
1115 					tree_name_append(wcp->p->ewname,
1116 							    cpnode);
1117 			}
1118 
1119 			vmatch_event(infop, config_child(cp), np, lnp, anp,
1120 				    wcproot, WA_ALL);
1121 
1122 			/*
1123 			 * back out last addition to ewname and continue
1124 			 * with loop
1125 			 */
1126 			tree_free(cpnode);
1127 			if (prevlast == NULL) {
1128 				wcp->p->ewname = NULL;
1129 			} else {
1130 				prevlast->u.name.next = NULL;
1131 				wcp->p->ewname->u.name.last = prevlast;
1132 			}
1133 
1134 			/*
1135 			 * return if wildcarding is done only for this cp
1136 			 */
1137 			if (dowildcard == WA_SINGLE)
1138 				return;
1139 		}
1140 	}
1141 }
1142 
1143 /*
1144  * for the event node np, which will be subjected to pathname
1145  * expansion/matching, create a (struct wildcardinfo) to hold wildcard
1146  * information.  this struct will be inserted into the first location in
1147  * the list that starts with *wcproot.
1148  *
1149  * cp is the starting node of the configuration; cpstart, which is output,
1150  * is the starting node of the nonwildcarded portion of the path.
1151  */
1152 static void
1153 add_wildcardentry(struct wildcardinfo **wcproot, struct config *cp,
1154 		struct node *np)
1155 {
1156 	struct wildcardinfo *wcpnew, *wcp;
1157 	struct node *np1, *np2;
1158 
1159 	/*
1160 	 * create entry for np
1161 	 */
1162 	wcpnew = MALLOC(sizeof (struct wildcardinfo));
1163 	bzero(wcpnew, sizeof (struct wildcardinfo));
1164 	wcpnew->nptop = np;
1165 	wcpnew->oldepname = np->u.event.epname;
1166 	wcpnew->s = WC_UNDERCONSTRUCTION;
1167 
1168 	wcpnew->p = MALLOC(sizeof (struct wildcardpath));
1169 	bzero(wcpnew->p, sizeof (struct wildcardpath));
1170 	wcpnew->p->cpstart = cp;
1171 	wcpnew->p->refcount = 1;
1172 
1173 	/*
1174 	 * search all completed entries for an epname whose first entry
1175 	 * matches.  note that NULL epnames are considered valid and can be
1176 	 * matched.
1177 	 */
1178 	np2 = wcpnew->oldepname;
1179 	for (wcp = *wcproot; wcp; wcp = wcp->next) {
1180 		ASSERT(wcp->s == WC_COMPLETE);
1181 
1182 		np1 = wcp->oldepname;
1183 		if ((np1 && np2 && np1->u.name.s == np2->u.name.s) ||
1184 		    (np1 == NULL && np2 == NULL)) {
1185 			/*
1186 			 * if we find a match in a completed entry, set
1187 			 * matchwc to indicate that we would like to match
1188 			 * it.  it is necessary to do this since wildcards
1189 			 * for each event are constructed independently.
1190 			 */
1191 			wcpnew->matchwc = wcp->p;
1192 
1193 			wcp->p->refcount++;
1194 			break;
1195 		}
1196 	}
1197 
1198 	wcpnew->next = *wcproot;
1199 	*wcproot = wcpnew;
1200 }
1201 
1202 static void
1203 delete_wildcardentry(struct wildcardinfo **wcproot)
1204 {
1205 	struct wildcardinfo *wcp;
1206 
1207 	wcp = *wcproot;
1208 	*wcproot = wcp->next;
1209 
1210 	switch (wcp->s) {
1211 	case WC_UNDERCONSTRUCTION:
1212 	case WC_COMPLETE:
1213 		if (wcp->matchwc != NULL)
1214 			wcp->matchwc->refcount--;
1215 
1216 		ASSERT(wcp->p->refcount == 1);
1217 		tree_free(wcp->p->ewname);
1218 		FREE(wcp->p);
1219 		break;
1220 
1221 	default:
1222 		out(O_DIE, "deletewc: invalid status");
1223 		break;
1224 	}
1225 
1226 	FREE(wcp);
1227 }
1228 
1229 /*
1230  * vmatch -- find the next vertical expansion match in the config database
1231  *
1232  * this routine is called with three node pointers:
1233  *	 np -- the parse we're matching
1234  *	lnp -- the rest of the list we're currently working on
1235  *	anp -- the rest of the arrow we're currently working on
1236  *
1237  * the expansion matching happens via three types of recursion:
1238  *
1239  *	- when given an arrow, handle the left-side and then recursively
1240  *	  handle the right side (which might be another cascaded arrow).
1241  *
1242  *	- when handling one side of an arrow, recurse through the T_LIST
1243  *	  to get to each event (or just move on to the event if there
1244  *	  is a single event instead of a list)  since the arrow parse
1245  *	  trees recurse left, we actually start with the right-most
1246  *	  event list in the prop statement and work our way towards
1247  *	  the left-most event list.
1248  *
1249  *	- when handling an event, recurse down each component of the
1250  *	  pathname, matching in the config database and recording the
1251  *	  matches in the explicit iterator dictionary as we go.
1252  *
1253  * when the bottom of this matching recursion is met, meaning we have
1254  * set the "cp" pointers on all the names in the entire statement,
 * we call hmatch() which does its own recursion to handle horizontal
 * expansion and then calls generate() to generate nodes, bubbles, and
1257  * arrows in the instance tree.  generate() looks at the cp pointers to
1258  * see what instance numbers were matched in the configuration database.
1259  *
1260  * when horizontal expansion appears, vmatch() finds only the first match
1261  * and hmatch() then takes the horizontal expansion through all the other
1262  * matches when generating the arrows in the instance tree.
1263  *
1264  * the "infop" passed down through the recursion contains a dictionary
1265  * of the explicit iterators (all the implicit iterators have been converted
1266  * to explicit iterators when the parse tree was created by tree.c), which
1267  * allows things like this to work correctly:
1268  *
1269  *	prop error.a@x[n]/y/z -> error.b@x/y[n]/z -> error.c@x/y/z[n];
1270  *
1271  * during the top level call, the explicit iterator "n" will match an
1272  * instance number in the config database, and the result will be recorded
1273  * in the explicit iterator dictionary and passed down via "infop".  so
1274  * when the recursive call tries to match y[n] in the config database, it
1275  * will only match the same instance number as x[n] did since the dictionary
1276  * is consulted to see if "n" took on a value already.
1277  *
1278  * at any point during the recursion, match*() can return to indicate
1279  * a match was not found in the config database and that the caller should
1280  * move on to the next potential match, if any.
1281  *
1282  * constraints are completely ignored by match(), so the statement:
1283  *
1284  *	prop error.a@x[n] -> error.b@x[n] {n != 0};
1285  *
1286  * might very well match x[0] if it appears in the config database.  it
1287  * is the generate() routine that takes that match and then decides what
1288  * arrow, if any, should be generated in the instance tree.  generate()
1289  * looks at the explicit iterator dictionary to get values like "n" in
1290  * the above example so that it can evaluate constraints.
1291  *
1292  */
static void
vmatch(struct info *infop, struct node *np, struct node *lnp,
    struct node *anp, struct wildcardinfo **wcproot)
{
	if (np == NULL) {
		/* current subtree exhausted: continue with the rest of
		 * the event list, then with the next cascaded arrow */
		if (lnp)
			vmatch(infop, lnp, NULL, anp, wcproot);
		else if (anp)
			vmatch(infop, anp, NULL, NULL, wcproot);
		else {
			struct node *src;
			struct node *dst;

			/* end of vertical match recursion */
			outfl(O_ALTFP|O_VERB3|O_NONL,
			    infop->anp->file, infop->anp->line, "vmatch: ");
			ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, infop->anp);
			out(O_ALTFP|O_VERB3, NULL);

			/*
			 * every name now has its cp set; walk the
			 * (possibly cascaded) arrow right-to-left,
			 * generating arrows between each adjacent pair
			 * of event lists via hmatch().
			 */
			generate_nork(
			    itree_np2nork(infop->anp->u.arrow.nnp),
			    itree_np2nork(infop->anp->u.arrow.knp));
			dst = infop->anp->u.arrow.rhs;
			src = infop->anp->u.arrow.lhs;
			for (;;) {
				generate_new();	/* new set of arrows */
				if (src->t == T_ARROW) {
					/* cascaded: handle this pair,
					 * then step one arrow leftward */
					hmatch(infop, src->u.arrow.rhs, dst);
					generate_nork(
					    itree_np2nork(src->u.arrow.nnp),
					    itree_np2nork(src->u.arrow.knp));
					dst = src->u.arrow.rhs;
					src = src->u.arrow.lhs;
				} else {
					/* leftmost event list reached */
					hmatch(infop, src, dst);
					break;
				}
			}
		}
		return;
	}

	switch (np->t) {
	case T_EVENT: {
		/* match this event's pathname against the config tree,
		 * bracketed by wildcard bookkeeping for the event */
		add_wildcardentry(wcproot, config_child(infop->croot), np);
		vmatch_event(infop, config_child(infop->croot),
			    np->u.event.epname, lnp, anp, wcproot, WA_ALL);
		delete_wildcardentry(wcproot);
		break;
	}
	case T_LIST:
		/* recurse right-to-left through the event list */
		ASSERT(lnp == NULL);
		vmatch(infop, np->u.expr.right, np->u.expr.left, anp, wcproot);
		break;

	case T_ARROW:
		/* handle the rhs now; the lhs rides along as anp */
		ASSERT(lnp == NULL && anp == NULL);
		vmatch(infop, np->u.arrow.rhs, NULL, np->u.arrow.lhs, wcproot);
		break;

	default:
		outfl(O_DIE, np->file, np->line,
		    "vmatch: unexpected type: %s",
		    ptree_nodetype2str(np->t));
	}
}
1359 
1360 static void
1361 cp_reset(struct node *np)
1362 {
1363 	if (np == NULL)
1364 		return;
1365 	switch (np->t) {
1366 	case T_NAME:
1367 		np->u.name.cp = NULL;
1368 		cp_reset(np->u.name.next);
1369 		break;
1370 
1371 	case T_LIST:
1372 		cp_reset(np->u.expr.left);
1373 		cp_reset(np->u.expr.right);
1374 		break;
1375 
1376 	case T_ARROW:
1377 		cp_reset(np->u.arrow.lhs);
1378 		cp_reset(np->u.arrow.rhs);
1379 		break;
1380 
1381 	case T_EVENT:
1382 		cp_reset(np->u.event.epname);
1383 		break;
1384 	}
1385 }
1386 
1387 /*
1388  * itree_create -- apply the current config to the current parse tree
1389  *
1390  * returns a lut mapping fully-instance-qualified names to struct events.
1391  *
1392  */
struct lut *
itree_create(struct config *croot)
{
	struct lut *retval;
	struct node *propnp;

	Ninfo.lut = NULL;
	Ninfo.croot = croot;
	/* expand each prop statement against the configuration in turn */
	for (propnp = Props; propnp; propnp = propnp->u.stmt.next) {
		struct node *anp = propnp->u.stmt.np;
		struct wildcardinfo *wcproot = NULL;

		ASSERTeq(anp->t, T_ARROW, ptree_nodetype2str);

		Ninfo.anp = anp;
		Ninfo.ex = NULL;	/* explicit iterator dictionary */

		generate_arrownp(anp);
		vmatch(&Ninfo, anp, NULL, NULL, &wcproot);

		if (Ninfo.ex) {
			lut_free(Ninfo.ex, iterinfo_destructor, NULL);
			Ninfo.ex = NULL;
		}
		/* vmatch() must pop every wildcard entry it pushed */
		ASSERT(wcproot == NULL);
		/* clear cp bindings so the next prop starts clean */
		cp_reset(anp);
	}

	retval = Ninfo.lut;
	Ninfo.lut = NULL;
	return (retval);
}
1425 
/*
 * itree_free -- free an instance tree lut and every event in it
 */
void
itree_free(struct lut *lutp)
{
	lut_free(lutp, itree_destructor, NULL);
}
1431 
/*
 * itree_prune -- walk the instance tree, freeing the attachments of
 * every event not marked keep_in_tree (see itree_pruner())
 */
void
itree_prune(struct lut *lutp)
{
	lut_walk(lutp, itree_pruner, NULL);
}
1437 
/*
 * itree_nameinstancecmp -- compare two instanced pathnames (chains of
 * T_NAME nodes).
 *
 * walks both chains while the component names compare equal (by
 * pointer -- presumably interned via stable.h; verify if reusing).
 * for components that are not the last in either chain, instance
 * numbers are also compared: taken from the bound config node when
 * cp is set, otherwise from the T_NUM child.  a chain that is a
 * strict prefix of the other sorts first; fully equal chains are
 * ordered by the name type recorded in the first components.
 */
int
itree_nameinstancecmp(struct node *np1, struct node *np2)
{
	int np1type = (int)np1->u.name.t;
	int np2type = (int)np2->u.name.t;
	int num1;
	int num2;

	while (np1 && np2 && np1->u.name.s == np2->u.name.s) {
		/* only interior components have instances compared */
		if (np1->u.name.next != NULL && np2->u.name.next != NULL) {
			if (np1->u.name.cp != NULL) {
				/* bound to config: use its instance number */
				config_getcompname(np1->u.name.cp, NULL, &num1);
			} else {
				ASSERT(np1->u.name.child != NULL);
				ASSERT(np1->u.name.child->t == T_NUM);
				num1 = (int)np1->u.name.child->u.ull;
			}

			if (np2->u.name.cp != NULL) {
				config_getcompname(np2->u.name.cp, NULL, &num2);
			} else {
				ASSERT(np2->u.name.child != NULL);
				ASSERT(np2->u.name.child->t == T_NUM);
				num2 = (int)np2->u.name.child->u.ull;
			}

			if (num1 != num2)
				return (num1 - num2);
		}

		np1 = np1->u.name.next;
		np2 = np2->u.name.next;
	}
	if (np1 == NULL)
		if (np2 == NULL)
			return (np1type - np2type);
		else
			return (-1);
	else if (np2 == NULL)
		return (1);
	else
		return (strcmp(np1->u.name.s, np2->u.name.s));
}
1481 
/*
 * itree_pevent_brief -- print an event's name and instanced path
 * via ipath_print()
 */
void
itree_pevent_brief(int flags, struct event *ep)
{
	ASSERT(ep != NULL);
	ASSERT(ep->enode != NULL);
	ASSERT(ep->ipp != NULL);

	ipath_print(flags, ep->enode->u.event.ename->u.name.s, ep->ipp);
}
1491 
/*
 * itree_pevent -- lut_walk callback: print one instance-tree event,
 * its properties, and its bubbles grouped by type (TO, then INHIBIT,
 * then FROM)
 */
/*ARGSUSED*/
static void
itree_pevent(struct event *lhs, struct event *ep, void *arg)
{
	struct plut_wlk_data propd;
	struct bubble *bp;
	int flags = (int)arg;

	itree_pevent_brief(flags, ep);
	if (ep->t == N_EREPORT)
		out(flags, " (count %d)", ep->count);
	else
		out(flags, NULL);

	if (ep->props) {
		propd.flags = flags;
		propd.first = 1;
		out(flags, "Properties:");
		lut_walk(ep->props, ptree_plut, (void *)&propd);
	}

	for (bp = itree_next_bubble(ep, NULL); bp;
	    bp = itree_next_bubble(ep, bp)) {
		/* Print only TO bubbles in this loop */
		if (bp->t != B_TO)
			continue;
		itree_pbubble(flags, bp);
	}

	for (bp = itree_next_bubble(ep, NULL); bp;
	    bp = itree_next_bubble(ep, bp)) {
		/* Print only INHIBIT bubbles in this loop */
		if (bp->t != B_INHIBIT)
			continue;
		itree_pbubble(flags, bp);
	}

	for (bp = itree_next_bubble(ep, NULL); bp;
	    bp = itree_next_bubble(ep, bp)) {
		/* Print only FROM bubbles in this loop */
		if (bp->t != B_FROM)
			continue;
		itree_pbubble(flags, bp);
	}
}
1537 
/*
 * itree_pbubble -- print one bubble: its N or K value and the events
 * at the far end of each attached arrow.  for FROM bubbles, each
 * arrow's delay range and any constraints are printed as well.
 */
static void
itree_pbubble(int flags, struct bubble *bp)
{
	struct constraintlist *cp;
	struct arrowlist *ap;

	ASSERT(bp != NULL);

	out(flags|O_NONL, "   ");
	if (bp->mark)
		out(flags|O_NONL, "*");
	else
		out(flags|O_NONL, " ");
	if (bp->t == B_FROM)
		out(flags|O_NONL, "N=%d to:", bp->nork);
	else if (bp->t == B_TO)
		out(flags|O_NONL, "K=%d from:", bp->nork);
	else
		out(flags|O_NONL, "K=%d masked from:", bp->nork);

	/* TO/INHIBIT bubbles: just list the tail-side events */
	if (bp->t == B_TO || bp->t == B_INHIBIT) {
		for (ap = itree_next_arrow(bp, NULL); ap;
		    ap = itree_next_arrow(bp, ap)) {
			ASSERT(ap->arrowp->head == bp);
			ASSERT(ap->arrowp->tail != NULL);
			ASSERT(ap->arrowp->tail->myevent != NULL);
			out(flags|O_NONL, " ");
			itree_pevent_brief(flags, ap->arrowp->tail->myevent);
		}
		out(flags, NULL);
		return;
	}

	/* FROM bubble: list head-side events plus delays and constraints */
	for (ap = itree_next_arrow(bp, NULL); ap;
	    ap = itree_next_arrow(bp, ap)) {
		ASSERT(ap->arrowp->tail == bp);
		ASSERT(ap->arrowp->head != NULL);
		ASSERT(ap->arrowp->head->myevent != NULL);

		out(flags|O_NONL, " ");
		itree_pevent_brief(flags, ap->arrowp->head->myevent);

		/* delay range as "min,max" */
		out(flags|O_NONL, " ");
		ptree_timeval(flags, &ap->arrowp->mindelay);
		out(flags|O_NONL, ",");
		ptree_timeval(flags, &ap->arrowp->maxdelay);

		/* Display anything from the propogation node? */
		out(O_VERB3|O_NONL, " <%s:%d>",
		    ap->arrowp->pnode->file, ap->arrowp->pnode->line);

		if (itree_next_constraint(ap->arrowp, NULL))
			out(flags|O_NONL, " {");

		for (cp = itree_next_constraint(ap->arrowp, NULL); cp;
		    cp = itree_next_constraint(ap->arrowp, cp)) {
			ptree(flags, cp->cnode, 1, 0);
			if (itree_next_constraint(ap->arrowp, cp))
				out(flags|O_NONL, ", ");
		}

		if (itree_next_constraint(ap->arrowp, NULL))
			out(flags|O_NONL, "}");
	}
	out(flags, NULL);
}
1604 
/*
 * itree_ptree -- print the whole instance tree by walking its lut
 * with itree_pevent()
 */
void
itree_ptree(int flags, struct lut *itp)
{
	lut_walk(itp, (lut_cb)itree_pevent, (void *)flags);
}
1610 
/*
 * itree_destructor -- lut_free callback: tear down one event and
 * everything hanging off it (properties, payload properties, bubbles,
 * arrows originating here, and the event's nvlist)
 */
/*ARGSUSED*/
static void
itree_destructor(void *left, void *right, void *arg)
{
	struct event *ep = (struct event *)right;
	struct bubble *nextbub, *bub;

	/* Free the properties */
	if (ep->props)
		lut_free(ep->props, instances_destructor, NULL);

	/* Free the payload properties */
	if (ep->payloadprops)
		lut_free(ep->payloadprops, payloadprops_destructor, NULL);

	/* Free my bubbles */
	for (bub = ep->bubbles; bub != NULL; ) {
		nextbub = bub->next;
		/*
		 * Free arrows if they are FROM me.  Free arrowlists on
		 * other types of bubbles (but not the attached arrows,
		 * which will be freed when we free the originating
		 * bubble.
		 */
		if (bub->t == B_FROM)
			itree_free_arrowlists(bub, 1);
		else
			itree_free_arrowlists(bub, 0);
		itree_free_bubble(bub);
		bub = nextbub;
	}

	if (ep->nvp != NULL)
		nvlist_free(ep->nvp);
	/* poison the freed struct to catch stale references early */
	bzero(ep, sizeof (*ep));
	FREE(ep);
}
1648 
/*
 * itree_pruner -- lut_walk callback: free an event's attachments
 * (properties, bubbles, arrows, nvlist) unless the event is marked
 * keep_in_tree.  the event struct itself stays in the lut, with its
 * freed pointers nulled out.
 */
/*ARGSUSED*/
static void
itree_pruner(void *left, void *right, void *arg)
{
	struct event *ep = (struct event *)right;
	struct bubble *nextbub, *bub;

	if (ep->keep_in_tree)
		return;

	/* Free the properties */
	lut_free(ep->props, instances_destructor, NULL);

	/* Free the payload properties */
	lut_free(ep->payloadprops, payloadprops_destructor, NULL);

	/* Free my bubbles */
	for (bub = ep->bubbles; bub != NULL; ) {
		nextbub = bub->next;
		/* unlink arrows from both ends before freeing */
		itree_prune_arrowlists(bub);
		itree_free_bubble(bub);
		bub = nextbub;
	}

	if (ep->nvp != NULL)
		nvlist_free(ep->nvp);
	/* null out the freed pointers; the event remains in the lut */
	ep->props = NULL;
	ep->payloadprops = NULL;
	ep->bubbles = NULL;
	ep->nvp = NULL;
}
1680 
/*
 * itree_free_bubble -- zero (to poison stale references) and free a
 * single bubble struct; attached arrowlists must already be freed
 */
static void
itree_free_bubble(struct bubble *freeme)
{
	bzero(freeme, sizeof (*freeme));
	FREE(freeme);
}
1687 
/*
 * itree_add_bubble -- find or create a bubble of type btype on eventp.
 * an existing TO bubble is reused when its generation number matches,
 * or coalesced when both old and new have K == 1; an existing FROM
 * bubble is reused when both are N_IS_ALL or both are N == 0.
 * otherwise a new bubble is appended to the event's list.
 */
static struct bubble *
itree_add_bubble(struct event *eventp, enum bubbletype btype, int nork, int gen)
{
	struct bubble *prev = NULL;
	struct bubble *curr;
	struct bubble *newb;

	/* Use existing bubbles as appropriate when possible */
	for (curr = eventp->bubbles;
	    curr != NULL;
	    prev = curr, curr = curr->next) {
		if (btype == B_TO && curr->t == B_TO) {
			/* see if an existing "to" bubble works for us */
			if (gen == curr->gen)
				return (curr);	/* matched gen number */
			else if (nork == 1 && curr->nork == 1) {
				curr->gen = gen;
				return (curr);	/* coalesce K==1 bubbles */
			}
		} else if (btype == B_FROM && curr->t == B_FROM) {
			/* see if an existing "from" bubble works for us */
			if ((nork == N_IS_ALL && curr->nork == N_IS_ALL) ||
			    (nork == 0 && curr->nork == 0))
				return (curr);
		}
	}

	/* no reusable bubble found; allocate and append a new one */
	newb = MALLOC(sizeof (struct bubble));
	newb->next = NULL;
	newb->t = btype;
	newb->myevent = eventp;
	newb->nork = nork;
	newb->mark = 0;
	newb->gen = gen;
	newb->arrows = NULL;

	if (prev == NULL)
		eventp->bubbles = newb;
	else
		prev->next = newb;

	return (newb);
}
1731 
1732 struct bubble *
1733 itree_next_bubble(struct event *eventp, struct bubble *last)
1734 {
1735 	struct bubble *next;
1736 
1737 	for (;;) {
1738 		if (last != NULL)
1739 			next = last->next;
1740 		else
1741 			next = eventp->bubbles;
1742 
1743 		if (next == NULL || next->arrows != NULL)
1744 			return (next);
1745 
1746 		/* bubble was empty, skip it */
1747 		last = next;
1748 	}
1749 }
1750 
1751 static void
1752 add_arrow(struct bubble *bp, struct arrow *ap)
1753 {
1754 	struct arrowlist *prev = NULL;
1755 	struct arrowlist *curr;
1756 	struct arrowlist *newal;
1757 
1758 	newal = MALLOC(sizeof (struct arrowlist));
1759 	bzero(newal, sizeof (struct arrowlist));
1760 	newal->arrowp = ap;
1761 
1762 	curr = itree_next_arrow(bp, NULL);
1763 	while (curr != NULL) {
1764 		prev = curr;
1765 		curr = itree_next_arrow(bp, curr);
1766 	}
1767 
1768 	if (prev == NULL)
1769 		bp->arrows = newal;
1770 	else
1771 		prev->next = newal;
1772 }
1773 
/*
 * itree_add_arrow -- allocate an arrow from frombubblep to tobubblep,
 * set its delays and constraints via itree_set_arrow_traits(), and
 * link it onto both bubbles.  returns NULL (and frees the arrow) if
 * the constraints disallow it.
 */
static struct arrow *
itree_add_arrow(struct bubble *frombubblep, struct bubble *tobubblep,
    struct node *apnode, struct node *fromevent, struct node *toevent,
    struct lut *ex)
{
	struct arrow *newa;

	ASSERTeq(frombubblep->t, B_FROM, itree_bubbletype2str);
	ASSERTinfo(tobubblep->t == B_TO || tobubblep->t == B_INHIBIT,
	    itree_bubbletype2str(tobubblep->t));
	newa = MALLOC(sizeof (struct arrow));
	bzero(newa, sizeof (struct arrow));
	newa->tail = frombubblep;
	newa->head = tobubblep;
	newa->pnode = apnode;
	newa->constraints = NULL;

	/*
	 *  Set default delays, then try to re-set them from
	 *  any within() constraints.
	 */
	newa->mindelay = newa->maxdelay = 0ULL;
	if (itree_set_arrow_traits(newa, fromevent, toevent, ex) == 0) {
		/* a constraint evaluated false: discard the arrow */
		FREE(newa);
		return (NULL);
	}

	/* link onto both endpoint bubbles */
	add_arrow(frombubblep, newa);
	add_arrow(tobubblep, newa);
	return (newa);
}
1805 
/*
 * itree_set_arrow_traits -- fill in an arrow's within() delays and
 * evaluate the prop's constraints for both endpoint events.
 *
 * returns false (0) if traits show that the arrow should not be added
 * after all; returns true (1) otherwise, with any deferred constraints
 * attached to the arrow.
 */
static int
itree_set_arrow_traits(struct arrow *ap, struct node *fromev,
    struct node *toev, struct lut *ex)
{
	struct node *epnames[] = { NULL, NULL, NULL };
	struct node *newc = NULL;

	ASSERTeq(fromev->t, T_EVENT, ptree_nodetype2str);
	ASSERTeq(toev->t, T_EVENT, ptree_nodetype2str);

	/*
	 * search for the within values first on the declaration of
	 * the destination event, and then on the prop.  this allows
	 * one to specify a "default" within by putting it on the
	 * declaration,  but then allow overriding on the prop statement.
	 */
	arrow_add_within(ap, toev->u.event.declp->u.stmt.np->u.event.eexprlist);
	arrow_add_within(ap, toev->u.event.eexprlist);

	/*
	 * handle any global constraints inherited from the
	 * "fromev" event's declaration
	 */
	ASSERT(fromev->u.event.declp != NULL);
	ASSERT(fromev->u.event.declp->u.stmt.np != NULL);

#ifdef	notdef
	/* XXX not quite ready to evaluate constraints from decls yet */
	if (fromev->u.event.declp->u.stmt.np->u.event.eexprlist)
		(void) itree_add_constraint(ap,
		    fromev->u.event.declp->u.stmt.np->u.event.eexprlist);
#endif	/* notdef */

	/* handle constraints on the from event in the prop statement */
	epnames[0] = fromev->u.event.epname;
	epnames[1] = toev->u.event.epname;
	if (eval_potential(fromev->u.event.eexprlist, ex, epnames, &newc) == 0)
		return (0);		/* constraint disallows arrow */

	/*
	 * handle any global constraints inherited from the
	 * "toev" event's declaration
	 */
	ASSERT(toev->u.event.declp != NULL);
	ASSERT(toev->u.event.declp->u.stmt.np != NULL);

#ifdef	notdef
	/* XXX not quite ready to evaluate constraints from decls yet */
	if (toev->u.event.declp->u.stmt.np->u.event.eexprlist)
		(void) itree_add_constraint(ap,
		    toev->u.event.declp->u.stmt.np->u.event.eexprlist);
#endif	/* notdef */

	/* handle constraints on the to event in the prop statement */
	epnames[0] = toev->u.event.epname;
	epnames[1] = fromev->u.event.epname;
	if (eval_potential(toev->u.event.eexprlist, ex, epnames, &newc) == 0) {
		/* free any deferred constraint from the first eval */
		if (newc != NULL)
			tree_free(newc);
		return (0);		/* constraint disallows arrow */
	}

	/* if we came up with any deferred constraints, add them to arrow */
	if (newc != NULL)
		(void) itree_add_constraint(ap, iexpr(newc));

	return (1);	/* constraints allow arrow */
}
1875 
1876 /*
1877  * Set within() constraint.  If the constraint were set multiple times,
1878  * the last one would "win".
1879  */
1880 static void
1881 arrow_add_within(struct arrow *ap, struct node *xpr)
1882 {
1883 	struct node *arglist;
1884 
1885 	/* end of expressions list */
1886 	if (xpr == NULL)
1887 		return;
1888 
1889 	switch (xpr->t) {
1890 	case T_LIST:
1891 		arrow_add_within(ap, xpr->u.expr.left);
1892 		arrow_add_within(ap, xpr->u.expr.right);
1893 		return;
1894 	case T_FUNC:
1895 		if (xpr->u.func.s != L_within)
1896 			return;
1897 		arglist = xpr->u.func.arglist;
1898 		switch (arglist->t) {
1899 		case T_TIMEVAL:
1900 			ap->mindelay = 0;
1901 			ap->maxdelay = arglist->u.ull;
1902 			break;
1903 		case T_NAME:
1904 			ASSERT(arglist->u.name.s == L_infinity);
1905 			ap->mindelay = 0;
1906 			ap->maxdelay = TIMEVAL_EVENTUALLY;
1907 			break;
1908 		case T_LIST:
1909 			ASSERT(arglist->u.expr.left->t == T_TIMEVAL);
1910 			ap->mindelay = arglist->u.expr.left->u.ull;
1911 			switch (arglist->u.expr.right->t) {
1912 			case T_TIMEVAL:
1913 				ap->maxdelay = arglist->u.ull;
1914 				break;
1915 			case T_NAME:
1916 				ASSERT(arglist->u.expr.right->u.name.s ==
1917 				    L_infinity);
1918 				ap->maxdelay = TIMEVAL_EVENTUALLY;
1919 				break;
1920 			default:
1921 				out(O_DIE, "within: unexpected 2nd arg type");
1922 			}
1923 			break;
1924 		default:
1925 			out(O_DIE, "within: unexpected 1st arg type");
1926 		}
1927 		break;
1928 	default:
1929 		return;
1930 	}
1931 }
1932 
1933 static void
1934 itree_free_arrowlists(struct bubble *bubp, int arrows_too)
1935 {
1936 	struct arrowlist *al, *nal;
1937 
1938 	al = bubp->arrows;
1939 	while (al != NULL) {
1940 		nal = al->next;
1941 		if (arrows_too) {
1942 			itree_free_constraints(al->arrowp);
1943 			bzero(al->arrowp, sizeof (struct arrow));
1944 			FREE(al->arrowp);
1945 		}
1946 		bzero(al, sizeof (*al));
1947 		FREE(al);
1948 		al = nal;
1949 	}
1950 }
1951 
/*
 * itree_delete_arrow -- unlink and free the arrowlist element of bubp
 * that refers to arrow.  the arrow itself is not freed.  the arrow is
 * expected to be present: the trailing ASSERT fires if the list is
 * exhausted without finding it.
 */
static void
itree_delete_arrow(struct bubble *bubp, struct arrow *arrow)
{
	struct arrowlist *al, *oal;

	al = bubp->arrows;
	/* special-case a match at the head of the list */
	if (al->arrowp == arrow) {
		bubp->arrows = al->next;
		bzero(al, sizeof (*al));
		FREE(al);
		return;
	}
	while (al != NULL) {
		oal = al;
		al = al->next;
		/* arrow must be somewhere in the list */
		ASSERT(al != NULL);
		if (al->arrowp == arrow) {
			oal->next = al->next;
			bzero(al, sizeof (*al));
			FREE(al);
			return;
		}
	}
}
1976 
/*
 * itree_prune_arrowlists -- free bubp's arrows and arrowlist elements,
 * first unlinking each arrow from the bubble at its other end so no
 * dangling references remain.
 */
static void
itree_prune_arrowlists(struct bubble *bubp)
{
	struct arrowlist *al, *nal;

	al = bubp->arrows;
	while (al != NULL) {
		nal = al->next;
		/* detach the arrow from the opposite-end bubble */
		if (bubp->t == B_FROM)
			itree_delete_arrow(al->arrowp->head, al->arrowp);
		else
			itree_delete_arrow(al->arrowp->tail, al->arrowp);
		itree_free_constraints(al->arrowp);
		bzero(al->arrowp, sizeof (struct arrow));
		FREE(al->arrowp);
		bzero(al, sizeof (*al));
		FREE(al);
		al = nal;
	}
}
1997 
1998 struct arrowlist *
1999 itree_next_arrow(struct bubble *bubble, struct arrowlist *last)
2000 {
2001 	struct arrowlist *next;
2002 
2003 	if (last != NULL)
2004 		next = last->next;
2005 	else
2006 		next = bubble->arrows;
2007 	return (next);
2008 }
2009 
2010 static struct constraintlist *
2011 itree_add_constraint(struct arrow *arrowp, struct node *c)
2012 {
2013 	struct constraintlist *prev = NULL;
2014 	struct constraintlist *curr;
2015 	struct constraintlist *newc;
2016 
2017 	for (curr = arrowp->constraints;
2018 	    curr != NULL;
2019 	    prev = curr, curr = curr->next);
2020 
2021 	newc = MALLOC(sizeof (struct constraintlist));
2022 	newc->next = NULL;
2023 	newc->cnode = c;
2024 
2025 	if (prev == NULL)
2026 		arrowp->constraints = newc;
2027 	else
2028 		prev->next = newc;
2029 
2030 	return (newc);
2031 }
2032 
2033 struct constraintlist *
2034 itree_next_constraint(struct arrow *arrowp, struct constraintlist *last)
2035 {
2036 	struct constraintlist *next;
2037 
2038 	if (last != NULL)
2039 		next = last->next;
2040 	else
2041 		next = arrowp->constraints;
2042 	return (next);
2043 }
2044 
2045 static void
2046 itree_free_constraints(struct arrow *ap)
2047 {
2048 	struct constraintlist *cl, *ncl;
2049 
2050 	cl = ap->constraints;
2051 	while (cl != NULL) {
2052 		ncl = cl->next;
2053 		ASSERT(cl->cnode != NULL);
2054 		if (!iexpr_cached(cl->cnode))
2055 			tree_free(cl->cnode);
2056 		else
2057 			iexpr_free(cl->cnode);
2058 		bzero(cl, sizeof (*cl));
2059 		FREE(cl);
2060 		cl = ncl;
2061 	}
2062 }
2063 
2064 const char *
2065 itree_bubbletype2str(enum bubbletype t)
2066 {
2067 	static char buf[100];
2068 
2069 	switch (t) {
2070 	case B_FROM:	return L_from;
2071 	case B_TO:	return L_to;
2072 	case B_INHIBIT:	return L_inhibit;
2073 	default:
2074 		(void) sprintf(buf, "[unexpected bubbletype: %d]", t);
2075 		return (buf);
2076 	}
2077 }
2078 
2079 /*
2080  * itree_fini -- clean up any half-built itrees
2081  */
2082 void
2083 itree_fini(void)
2084 {
2085 	if (Ninfo.lut != NULL) {
2086 		itree_free(Ninfo.lut);
2087 		Ninfo.lut = NULL;
2088 	}
2089 	if (Ninfo.ex) {
2090 		lut_free(Ninfo.ex, iterinfo_destructor, NULL);
2091 		Ninfo.ex = NULL;
2092 	}
2093 }
2094