xref: /illumos-gate/usr/src/cmd/fm/modules/common/eversholt/itree.c (revision f808c858fa61e7769218966759510a8b1190dfcf)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License (the "License").
6  * You may not use this file except in compliance with the License.
7  *
8  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
9  * or http://www.opensolaris.org/os/licensing.
10  * See the License for the specific language governing permissions
11  * and limitations under the License.
12  *
13  * When distributing Covered Code, include this CDDL HEADER in each
14  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
15  * If applicable, add the following below this CDDL HEADER, with the
16  * fields enclosed by brackets "[]" replaced with your own identifying
17  * information: Portions Copyright [yyyy] [name of copyright owner]
18  *
19  * CDDL HEADER END
20  */
21 
22 /*
23  * Copyright 2006 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  *
26  * itree.c -- instance tree creation and manipulation
27  *
28  * this module provides the instance tree
29  */
30 
31 #pragma ident	"%Z%%M%	%I%	%E% SMI"
32 
33 #include <stdio.h>
34 #include <ctype.h>
35 #include <string.h>
36 #include <strings.h>
37 #include "alloc.h"
38 #include "out.h"
39 #include "stable.h"
40 #include "literals.h"
41 #include "lut.h"
42 #include "tree.h"
43 #include "ptree.h"
44 #include "itree.h"
45 #include "ipath.h"
46 #include "iexpr.h"
47 #include "eval.h"
48 #include "config.h"
49 
50 /*
51  * struct info contains the state we keep when expanding a prop statement
52  * as part of constructing the instance tree.  state kept in struct info
53  * is the non-recursive stuff -- the stuff that doesn't need to be on
54  * the stack.  the rest of the state, which is passed between all the
55  * mutually recursive functions, is required to be on the stack since
56  * we need to backtrack and recurse as we do the instance tree construction.
57  */
58 struct info {
59 	struct lut *lut;
60 	struct node *anp;	/* arrow np */
61 	struct lut *ex;		/* dictionary of explicit iterators */
62 	struct config *croot;
63 } Ninfo;
64 
65 /*
66  * struct wildcardinfo is used to track wildcarded portions of paths.
67  *
68  * for example, if the epname of an event is "c/d" and the path "a/b/c/d"
69  * exists, the wildcard path ewname is filled in with the path "a/b".  when
70  * matching is done, epname is temporarily replaced with the concatenation
71  * of ewname and epname.  cpstart is set to the (struct config *)
72  * corresponding to component "c".
73  *
74  * a linked list of these structs is used to track the expansion of each
75  * event node as it is processed in vmatch() --> vmatch_event() calls.
76  */
77 struct wildcardinfo {
78 	struct node *nptop;		/* event node fed to vmatch */
79 	struct node *oldepname;		/* epname without the wildcard part */
80 	enum status {
81 		WC_UNDEFINED,		/* struct is not yet initialized */
82 		WC_UNDERCONSTRUCTION,	/* wildcard path not yet done */
83 		WC_COMPLETE		/* wildcard path done and is in use */
84 	} s;
85 	struct wildcardpath {
86 		struct node *ewname;	/* wildcard path */
87 		struct config *cpstart;	/* starting cp node for oldepname */
88 		int refcount;		/* number of event nodes using this */
89 	} *p;
90 	struct wildcardpath *matchwc;	/* ptr to wc path to be matched */
91 	struct wildcardinfo *next;
92 };
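
/*
 * illustrative sketch (hypothetical instance numbers, not used by the
 * code): for the "c/d" example above matched against the config path
 * "a0/b0/c0/d0", an entry under construction would hold roughly
 *
 *	wcp->oldepname		"c/d"	(epname from the source)
 *	wcp->p->ewname		"a0/b0"	(instanced wildcard prefix)
 *	wcp->p->cpstart		config node for "c0"
 *	wcp->s			WC_UNDERCONSTRUCTION
 */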
93 
94 static void vmatch(struct info *infop, struct node *np,
95     struct node *lnp, struct node *anp, struct wildcardinfo **wcproot);
96 static void hmatch(struct info *infop, struct node *np, struct node *nextnp);
97 static void itree_pbubble(int flags, struct bubble *bp);
98 static void itree_pruner(void *left, void *right, void *arg);
99 static void itree_destructor(void *left, void *right, void *arg);
100 static int itree_set_arrow_traits(struct arrow *ap, struct node *fromev,
101     struct node *toev, struct lut *ex);
102 static void itree_free_arrowlists(struct bubble *bubp, int arrows_too);
103 static void itree_prune_arrowlists(struct bubble *bubp);
104 static void arrow_add_within(struct arrow *ap, struct node *xpr);
105 static struct arrow *itree_add_arrow(struct bubble *frombubblep,
106     struct bubble *tobubblep, struct node *apnode, struct node *fromevent,
107     struct node *toevent, struct lut *ex);
108 static struct constraintlist *itree_add_constraint(struct arrow *arrowp,
109     struct node *c);
110 static struct bubble *itree_add_bubble(struct event *eventp,
111     enum bubbletype btype, int nork, int gen);
112 static void itree_free_bubble(struct bubble *freeme);
113 static void itree_free_constraints(struct arrow *ap);
114 
115 /*
116  * the following struct contains the state we build up during
117  * vertical and horizontal expansion so that generate()
118  * has everything it needs to construct the appropriate arrows.
119  * after setting up the state by calling:
120  *	generate_arrownp()
121  *	generate_nork()
122  *	generate_new()
123  *	generate_from()
124  *	generate_to()
125  * the actual arrow generation is done by calling:
126  *	generate()
127  */
128 static struct {
129 	int generation;		/* generation number of arrow set */
130 	struct node *arrownp;	/* top-level parse tree for arrow */
131 	int n;			/* n value associated with arrow */
132 	int k;			/* k value associated with arrow */
133 	struct node *fromnp;	/* left-hand-side event in parse tree */
134 	struct node *tonp;	/* right-hand-side event in parse tree */
135 	struct event *frome;	/* left-hand-side event in instance tree */
136 	struct event *toe;	/* right-hand-side event in instance tree */
137 	struct bubble *frombp;	/* bubble arrow comes from */
138 	struct bubble *tobp;	/* bubble arrow goes to */
139 } G;
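
/*
 * a minimal sketch of how the matching code below drives this state
 * (argument names are illustrative): itree_create() calls
 * generate_arrownp(), vmatch() calls generate_nork() and generate_new()
 * once per arrow set, and hmatch_event() supplies the endpoints:
 *
 *	generate_arrownp(anp);
 *	generate_nork(n, k);
 *	generate_new();
 *	generate_from(fromnp, fromevent);
 *	generate_to(tonp, toevent);
 *	generate(ex);
 */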
140 
141 static void
142 generate_arrownp(struct node *arrownp)
143 {
144 	G.arrownp = arrownp;
145 }
146 
147 static void
148 generate_nork(int n, int k)
149 {
150 	G.n = n;
151 	G.k = k;
152 }
153 
154 static void
155 generate_new(void)
156 {
157 	G.generation++;
158 }
159 
160 static void
161 generate_from(struct node *fromeventnp, struct event *fromevent)
162 {
163 	G.fromnp = fromeventnp;
164 	G.frome = fromevent;
165 
166 	out(O_ALTFP|O_VERB3|O_NONL, "from bubble on ");
167 	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.fromnp);
168 	out(O_ALTFP|O_VERB3, NULL);
169 
170 	G.frombp = itree_add_bubble(G.frome, B_FROM, G.n, 0);
171 }
172 
173 static void
174 generate_to(struct node *toeventnp, struct event *toevent)
175 {
176 	G.tonp = toeventnp;
177 	G.toe = toevent;
178 
179 	out(O_ALTFP|O_VERB3|O_NONL, "to bubble (gen %d) on ", G.generation);
180 	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.tonp);
181 	out(O_ALTFP|O_VERB3, NULL);
182 
183 	G.tobp = itree_add_bubble(G.toe, B_TO, G.k, G.generation);
184 }
185 
186 static void
187 generate(struct lut *ex)
188 {
189 	ASSERT(G.arrownp != NULL);
190 	ASSERT(G.fromnp != NULL);
191 	ASSERT(G.frome != NULL);
192 	ASSERT(G.frombp != NULL);
193 	ASSERT(G.tonp != NULL);
194 	ASSERT(G.toe != NULL);
195 	ASSERT(G.tobp != NULL);
196 
197 	out(O_ALTFP|O_VERB3|O_NONL, "        Arrow \"");
198 	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.fromnp);
199 	out(O_ALTFP|O_VERB3|O_NONL, "\" -> \"");
200 	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.tonp);
201 
202 	if (itree_add_arrow(G.frombp, G.tobp, G.arrownp,
203 	    G.fromnp, G.tonp, ex) == NULL) {
204 		out(O_ALTFP|O_VERB3, "\" (prevented by constraints)");
205 	} else {
206 		out(O_ALTFP|O_VERB3, "\"");
207 	}
208 }
209 
210 enum childnode_action {
211 	CN_NONE,
212 	CN_INSTANTIZE,
213 	CN_DUP
214 };
215 
216 static struct node *
217 tname_dup(struct node *namep, enum childnode_action act)
218 {
219 	struct node *retp = NULL;
220 	const char *file;
221 	int line;
222 
223 	if (namep == NULL)
224 		return (NULL);
225 
226 	file = namep->file;
227 	line = namep->line;
228 
229 	for (; namep != NULL; namep = namep->u.name.next) {
230 		struct node *newnp = newnode(T_NAME, file, line);
231 
232 		newnp->u.name.t = namep->u.name.t;
233 		newnp->u.name.s = namep->u.name.s;
234 		newnp->u.name.last = newnp;
235 		newnp->u.name.it = namep->u.name.it;
236 		newnp->u.name.cp = namep->u.name.cp;
237 
238 		if (act == CN_DUP) {
239 			struct node *npc;
240 
241 			npc = namep->u.name.child;
242 			if (npc != NULL) {
243 				switch (npc->t) {
244 				case T_NUM:
245 					newnp->u.name.child =
246 						newnode(T_NUM, file, line);
247 					newnp->u.name.child->u.ull =
248 						npc->u.ull;
249 					break;
250 				case T_NAME:
251 					newnp->u.name.child =
252 						tree_name(npc->u.name.s,
253 							npc->u.name.it,
254 							file, line);
255 					break;
256 				default:
257 					out(O_DIE, "tname_dup: "
258 					    "invalid child type %s",
259 					    ptree_nodetype2str(npc->t));
260 				}
261 			}
262 		} else if (act == CN_INSTANTIZE) {
263 			newnp->u.name.child = newnode(T_NUM, file, line);
264 
265 			if (namep->u.name.child == NULL ||
266 			    namep->u.name.child->t != T_NUM) {
267 				int inum;
268 
269 				ASSERT(newnp->u.name.cp != NULL);
270 				config_getcompname(newnp->u.name.cp,
271 						    NULL, &inum);
272 				newnp->u.name.child->u.ull =
273 					(unsigned long long)inum;
274 			} else {
275 				newnp->u.name.child->u.ull =
276 					namep->u.name.child->u.ull;
277 			}
278 		}
279 
280 		if (retp == NULL) {
281 			retp = newnp;
282 		} else {
283 			retp->u.name.last->u.name.next = newnp;
284 			retp->u.name.last = newnp;
285 		}
286 	}
287 
288 	return (retp);
289 }
290 
291 struct prop_wlk_data {
292 	struct lut *props;
293 	struct node *epname;
294 };
295 
296 static struct lut *props2instance(struct node *, struct node *);
297 
298 /*
299  * let oldepname be a subset of epname.  return the subsection of epname
300  * that ends with oldepname.  make each component in the path explicitly
301  * instanced (i.e., with a T_NUM child).
302  */
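
/*
 * hypothetical example: with oldepname "c/d" and an instanced epname
 * "a0/b0/c0/d0/e0", the result is "a0/b0/c0/d0" -- the prefix of epname
 * that ends with the oldepname match -- with every component carrying
 * an explicit T_NUM child.
 */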
303 static struct node *
304 tname_dup_to_epname(struct node *oldepname, struct node *epname)
305 {
306 	struct node *npref, *npend, *np1, *np2;
307 	struct node *ret = NULL;
308 	int foundmatch = 0;
309 
310 	if (epname == NULL)
311 		return (NULL);
312 
313 	/*
314 	 * search for the longest path in epname which contains
315 	 * oldnode->u.event.epname.  set npend to point to just past the
316 	 * end of this path.
317 	 */
318 	npend = NULL;
319 	for (npref = epname; npref; npref = npref->u.name.next) {
320 		if (npref->u.name.s == oldepname->u.name.s) {
321 			for (np1 = npref, np2 = oldepname;
322 			    np1 != NULL && np2 != NULL;
323 			    np1 = np1->u.name.next, np2 = np2->u.name.next) {
324 				if (np1->u.name.s != np2->u.name.s)
325 					break;
326 			}
327 			if (np2 == NULL) {
328 				foundmatch = 1;
329 				npend = np1;
330 				if (np1 == NULL) {
331 					/* oldepname matched npref up to end */
332 					break;
333 				}
334 			}
335 		}
336 	}
337 
338 	if (foundmatch == 0) {
339 		/*
340 		 * if oldepname could not be found in epname, return a
341 		 * duplicate of the former.  do not try to instantize
342 		 * oldepname since it might not be a path.
343 		 */
344 		return (tname_dup(oldepname, CN_DUP));
345 	}
346 
347 	/*
348 	 * dup (epname -- npend).  all children should be T_NUMs.
349 	 */
350 	for (npref = epname;
351 	    ! (npref == NULL || npref == npend);
352 	    npref = npref->u.name.next) {
353 		struct node *newnp = newnode(T_NAME, oldepname->file,
354 					    oldepname->line);
355 
356 		newnp->u.name.t = npref->u.name.t;
357 		newnp->u.name.s = npref->u.name.s;
358 		newnp->u.name.last = newnp;
359 		newnp->u.name.it = npref->u.name.it;
360 		newnp->u.name.cp = npref->u.name.cp;
361 
362 		newnp->u.name.child = newnode(T_NUM, oldepname->file,
363 					    oldepname->line);
364 
365 		if (npref->u.name.child == NULL ||
366 		    npref->u.name.child->t != T_NUM) {
367 			int childnum;
368 
369 			ASSERT(npref->u.name.cp != NULL);
370 			config_getcompname(npref->u.name.cp, NULL, &childnum);
371 			newnp->u.name.child->u.ull = childnum;
372 		} else {
373 			newnp->u.name.child->u.ull =
374 				npref->u.name.child->u.ull;
375 		}
376 
377 		if (ret == NULL) {
378 			ret = newnp;
379 		} else {
380 			ret->u.name.last->u.name.next = newnp;
381 			ret->u.name.last = newnp;
382 		}
383 	}
384 
385 	return (ret);
386 }
387 
388 /*
389  * restriction: oldnode->u.event.epname has to be equivalent to or a subset
390  * of epname
391  */
392 static struct node *
393 tevent_dup_to_epname(struct node *oldnode, struct node *epname)
394 {
395 	struct node *ret;
396 
397 	ret = newnode(T_EVENT, oldnode->file, oldnode->line);
398 	ret->u.event.ename = tname_dup(oldnode->u.event.ename, CN_NONE);
399 	ret->u.event.epname = tname_dup_to_epname(oldnode->u.event.epname,
400 						    epname);
401 	return (ret);
402 }
403 
404 static void
405 nv_instantiate(void *name, void *val, void *arg)
406 {
407 	struct prop_wlk_data *pd = (struct prop_wlk_data *)arg;
408 	struct node *orhs = (struct node *)val;
409 	struct node *nrhs;
410 
411 	/* handle engines by instantizing the entire engine */
412 	if (name == L_engine) {
413 		ASSERT(orhs->t == T_EVENT);
414 		ASSERT(orhs->u.event.ename->u.name.t == N_SERD);
415 
416 		/* there are only SERD engines for now */
417 
418 		nrhs = newnode(T_SERD, orhs->file, orhs->line);
419 		nrhs->u.stmt.np = tevent_dup_to_epname(orhs, pd->epname);
420 		nrhs->u.stmt.lutp = props2instance(orhs, pd->epname);
421 		pd->props = lut_add(pd->props, name, nrhs, NULL);
422 		return;
423 	}
424 
425 	switch (orhs->t) {
426 	case T_NUM:
427 		nrhs = newnode(T_NUM, orhs->file, orhs->line);
428 		nrhs->u.ull = orhs->u.ull;
429 		pd->props = lut_add(pd->props, name, nrhs, NULL);
430 		break;
431 	case T_TIMEVAL:
432 		nrhs = newnode(T_TIMEVAL, orhs->file, orhs->line);
433 		nrhs->u.ull = orhs->u.ull;
434 		pd->props = lut_add(pd->props, name, nrhs, NULL);
435 		break;
436 	case T_NAME:
437 		nrhs = tname_dup_to_epname(orhs, pd->epname);
438 		pd->props = lut_add(pd->props, name, nrhs, NULL);
439 		break;
440 	case T_EVENT:
441 		nrhs = tevent_dup_to_epname(orhs, pd->epname);
442 		pd->props = lut_add(pd->props, name, nrhs, NULL);
443 		break;
444 	case T_GLOBID:
445 		nrhs = newnode(T_GLOBID, orhs->file, orhs->line);
446 		nrhs->u.globid.s = orhs->u.globid.s;
447 		pd->props = lut_add(pd->props, name, nrhs, NULL);
448 		break;
449 	case T_FUNC:
450 		/* for T_FUNC, we don't duplicate it, just point to node */
451 		pd->props = lut_add(pd->props, name, orhs, NULL);
452 		break;
453 	default:
454 		out(O_DIE, "unexpected nvpair value type %s",
455 		    ptree_nodetype2str(((struct node *)val)->t));
456 	}
457 }
458 
459 static struct lut *
460 props2instance(struct node *eventnp, struct node *epname)
461 {
462 	struct prop_wlk_data pd;
463 
464 	pd.props = NULL;
465 	pd.epname = epname;
466 
467 	ASSERT(eventnp->u.event.declp != NULL);
468 	lut_walk(eventnp->u.event.declp->u.stmt.lutp, nv_instantiate, &pd);
469 	return (pd.props);
470 }
471 
472 /*ARGSUSED*/
473 static void
474 instances_destructor(void *left, void *right, void *arg)
475 {
476 	struct node *dn = (struct node *)right;
477 
478 	if (dn->t == T_SERD) {
479 		/* we allocated the lut during itree_create(), so free it */
480 		lut_free(dn->u.stmt.lutp, instances_destructor, NULL);
481 		dn->u.stmt.lutp = NULL;
482 	}
483 	if (dn->t != T_FUNC)	/* T_FUNC pointed to original node */
484 		tree_free(dn);
485 }
486 
487 /*ARGSUSED*/
488 static void
489 payloadprops_destructor(void *left, void *right, void *arg)
490 {
491 	FREE(right);
492 }
493 
494 /*
495  * event_cmp -- used via lut_lookup/lut_add on instance tree lut
496  */
497 static int
498 event_cmp(struct event *ep1, struct event *ep2)
499 {
500 	int diff;
501 
502 	if ((diff = ep2->enode->u.event.ename->u.name.s -
503 	    ep1->enode->u.event.ename->u.name.s) != 0)
504 		return (diff);
505 	if ((diff = (char *)ep2->ipp - (char *)ep1->ipp) != 0)
506 		return (diff);
507 	return (0);
508 
509 }
510 
511 struct event *
512 itree_lookup(struct lut *itp, const char *ename, const struct ipath *ipp)
513 {
514 	struct event searchevent;	/* just used for searching */
515 	struct node searcheventnode;
516 	struct node searchenamenode;
517 
518 	searchevent.enode = &searcheventnode;
519 	searcheventnode.t = T_EVENT;
520 	searcheventnode.u.event.ename = &searchenamenode;
521 	searchenamenode.t = T_NAME;
522 	searchenamenode.u.name.s = ename;
523 	searchevent.ipp = ipp;
524 	return (lut_lookup(itp, (void *)&searchevent, (lut_cmp)event_cmp));
525 }
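
/*
 * a hypothetical lookup against the lut returned by itree_create():
 * the class string must come from stable() so that the pointer
 * comparison in event_cmp() works, and ipath() converts an instanced
 * epname parse tree node (here called epnamenp) into the struct ipath:
 *
 *	struct event *ep;
 *
 *	ep = itree_lookup(itp, stable("ereport.io.pci.fabric"),
 *	    ipath(epnamenp));
 */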
526 
527 static struct event *
528 find_or_add_event(struct info *infop, struct node *np)
529 {
530 	struct event *ret;
531 	struct event searchevent;	/* just used for searching */
532 
533 	ASSERTeq(np->t, T_EVENT, ptree_nodetype2str);
534 
535 	searchevent.enode = np;
536 	searchevent.ipp = ipath(np->u.event.epname);
537 	if ((ret = lut_lookup(infop->lut, (void *)&searchevent,
538 	    (lut_cmp)event_cmp)) != NULL)
539 		return (ret);
540 
541 	/* wasn't already in tree, allocate it */
542 	ret = MALLOC(sizeof (*ret));
543 	bzero(ret, sizeof (*ret));
544 
545 	ret->t = np->u.event.ename->u.name.t;
546 	ret->enode = np;
547 	ret->ipp = searchevent.ipp;
548 	ret->props = props2instance(np, np->u.event.epname);
549 
550 	infop->lut = lut_add(infop->lut, (void *)ret, (void *)ret,
551 	    (lut_cmp)event_cmp);
552 
553 	return (ret);
554 }
555 
556 /*
557  * hmatch_event -- perform any appropriate horizontal expansion on an event
558  *
559  * this routine is used to perform horizontal expansion on both the
560  * left-hand-side events in a prop, and the right-hand-side events.
561  * when called to handle a left-side event, nextnp points to the right
562  * side of the prop that should be passed to hmatch() for each match
563  * found by horizontal expansion.   when no horizontal expansion exists,
564  * we will still "match" one event for every event found in the list on
565  * the left-hand-side of the prop because vmatch() already found that
566  * there's at least one match during vertical expansion.
567  */
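
/*
 * a hypothetical example of the expansion performed below: given
 *
 *	prop error.a@x -> error.b@x/y<n>;
 *
 * and a configuration in which x0 has children y0 and y1, vmatch()
 * finds the first match (say error.b@x0/y0) and the IT_HORIZONTAL
 * handling here walks the config so that arrows are generated from
 * error.a@x0 to both error.b@x0/y0 and error.b@x0/y1.
 */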
568 static void
569 hmatch_event(struct info *infop, struct node *eventnp, struct node *epname,
570     struct config *ncp, struct node *nextnp, int rematch)
571 {
572 	if (epname == NULL) {
573 		/*
574 		 * end of pathname recursion, either we just located
575 		 * a left-hand-side event and we're ready to move on
576 		 * to the expanding the right-hand-side events, or
577 		 * we're further down the recursion and we just located
578 		 * a right-hand-side event.  the passed-in parameter
579 		 * "nextnp" tells us whether we're working on the left
580 		 * side and need to move on to nextnp, or if nextnp is
581 		 * NULL, we're working on the right side.
582 		 */
583 		if (nextnp) {
584 			/*
585 			 * finished a left side expansion, move on to right.
586 			 * tell generate() what event we just matched so
587 			 * it can be used at the source of any arrows
588 			 * we generate as we match events on the right side.
589 			 */
590 			generate_from(eventnp,
591 			    find_or_add_event(infop, eventnp));
592 			hmatch(infop, nextnp, NULL);
593 		} else {
594 			/*
595 			 * finished a right side expansion.  tell generate
596 			 * the information about the destination and let
597 			 * it construct the arrows as appropriate.
598 			 */
599 			generate_to(eventnp,
600 			    find_or_add_event(infop, eventnp));
601 			generate(infop->ex);
602 		}
603 
604 		return;
605 	}
606 
607 	ASSERTeq(epname->t, T_NAME, ptree_nodetype2str);
608 
609 	 * we only get here when eventnp already has a completely
610 	 * instanced epname in it.  so we first recurse
611 	 * instanced epname in it already.  so we first recurse
612 	 * down to the end of the name and as the recursion pops
613 	 * up, we look for opportunities to advance horizontal
614 	 * expansions on to the next match.  when we do advance
615 	 * horizontal expansions, we potentially render all cp
616 	 * pointers on all components to the right as invalid,
617 	 * so we pass in an "ncp" config handle so matching against
618 	 * the config can happen.
619 	 */
620 	if (rematch) {
621 		struct config *ocp = epname->u.name.cp;
622 		char *ncp_s;
623 		int ncp_num, num;
624 		struct iterinfo *iterinfop = NULL;
625 		const char *iters;
626 
627 		for (; ncp; ncp = config_next(ncp)) {
628 			config_getcompname(ncp, &ncp_s, &ncp_num);
629 
630 			if (ncp_s == epname->u.name.s) {
631 				/* found a matching component name */
632 				config_getcompname(epname->u.name.cp,
633 				    NULL, &num);
634 
635 				if (epname->u.name.it != IT_HORIZONTAL &&
636 				    ncp_num != num)
637 					continue;
638 
639 				iters = epname->u.name.child->u.name.s;
640 				if ((iterinfop = lut_lookup(infop->ex,
641 				    (void *)iters, NULL)) == NULL) {
642 					out(O_DIE,
643 					    "hmatch_event: internal error: "
644 					    "iterator \"%s\" undefined", iters);
645 				} else {
646 					/* advance dict entry to next match */
647 					iterinfop->num = ncp_num;
648 				}
649 				epname->u.name.cp = ncp;
650 				hmatch_event(infop, eventnp,
651 				    epname->u.name.next, config_child(ncp),
652 				    nextnp, 1);
653 			}
654 		}
655 		if (iterinfop != NULL) {
656 			/* restore dict entry */
657 			iterinfop->num = num;
658 		}
659 
660 		epname->u.name.cp = ocp;
661 
662 		return;		/* no more config to match against */
663 
664 	} else {
665 		hmatch_event(infop, eventnp, epname->u.name.next, ncp,
666 		    nextnp, 0);
667 	}
668 
669 	if (epname->u.name.it == IT_HORIZONTAL) {
670 		struct config *cp;
671 		struct config *ocp = epname->u.name.cp;
672 		char *cp_s;
673 		int cp_num;
674 		int ocp_num;
675 		struct iterinfo *iterinfop = NULL;
676 		const char *iters;
677 
678 		config_getcompname(ocp, NULL, &ocp_num);
679 
680 		for (cp = config_next(ocp); cp; cp = config_next(cp)) {
681 			config_getcompname(cp, &cp_s, &cp_num);
682 
683 			if (cp_s == epname->u.name.s) {
684 				ASSERT(epname->u.name.child != NULL);
685 
686 				iters = epname->u.name.child->u.name.s;
687 				if ((iterinfop = lut_lookup(infop->ex,
688 				    (void *)iters, NULL)) == NULL) {
689 					out(O_DIE,
690 					    "hmatch_event: internal error: "
691 					    "iterator \"%s\" undefined", iters);
692 				} else {
693 					/* advance dict entry to next match */
694 					iterinfop->num = cp_num;
695 				}
696 				epname->u.name.cp = cp;
697 				hmatch_event(infop, eventnp,
698 				    epname->u.name.next, config_child(cp),
699 				    nextnp, 1);
700 			}
701 		}
702 
703 		if (iterinfop != NULL) {
704 			/* restore dict entry */
705 			iterinfop->num = ocp_num;
706 		}
707 		epname->u.name.cp = ocp;
708 	}
709 }
710 
711 /*
712  * hmatch -- check for horizontal expansion matches
713  *
714  * np points to the things we're matching (like a T_LIST or a T_EVENT)
715  * and if we're working on a left-side of a prop, nextnp points to
716  * the other side of the prop that we'll tackle next when this recursion
717  * bottoms out.  when all the events in the entire prop arrow have been
718  * horizontally expanded, generate() will be called to generate the
719  * actual arrow.
720  */
721 static void
722 hmatch(struct info *infop, struct node *np, struct node *nextnp)
723 {
724 	if (np == NULL)
725 		return;		/* all done */
726 
727 	/*
728 	 * for each item in the list of events (which could just
729 	 * be a single event, or it could get larger in the loop
730 	 * below due to horizontal expansion), call hmatch on
731 	 * the right side and create arrows to each element.
732 	 */
733 
734 	switch (np->t) {
735 	case T_LIST:
736 		/* loop through the list */
737 		if (np->u.expr.left)
738 			hmatch(infop, np->u.expr.left, nextnp);
739 		if (np->u.expr.right)
740 			hmatch(infop, np->u.expr.right, nextnp);
741 		break;
742 
743 	case T_EVENT:
744 		hmatch_event(infop, np, np->u.event.epname,
745 		    NULL, nextnp, 0);
746 		break;
747 
748 	default:
749 		outfl(O_DIE, np->file, np->line,
750 		    "hmatch: unexpected type: %s",
751 		    ptree_nodetype2str(np->t));
752 	}
753 }
754 
755 static int
756 itree_np2nork(struct node *norknp)
757 {
758 	if (norknp == NULL)
759 		return (1);
760 	else if (norknp->t == T_NAME && norknp->u.name.s == L_A)
761 		return (-1);	/* our internal version of "all" */
762 	else if (norknp->t == T_NUM)
763 		return ((int)norknp->u.ull);
764 	else
765 		out(O_DIE, norknp->file, norknp->line,
766 		    "itree_np2nork: internal error type %s",
767 		    ptree_nodetype2str(norknp->t));
768 	/*NOTREACHED*/
769 	return (1);
770 }
771 
772 static struct iterinfo *
773 newiterinfo(int num, struct node *np)
774 {
775 	struct iterinfo *ret = MALLOC(sizeof (*ret));
776 
777 	ret->num = num;
778 	ret->np = np;
779 
780 	return (ret);
781 }
782 
783 /*ARGSUSED*/
784 static void
785 iterinfo_destructor(void *left, void *right, void *arg)
786 {
787 	struct iterinfo *iterinfop = (struct iterinfo *)right;
788 
789 	bzero(iterinfop, sizeof (*iterinfop));
790 	FREE(iterinfop);
791 }
792 
793 /*
794  * return 1 if the wildcard path for wcp matches another wildcard path;
795  * return 0 otherwise.
796  */
797 static int
798 wc_paths_match(struct wildcardinfo *wcp)
799 {
800 	struct node *np1, *np2;
801 
802 	ASSERT(wcp->matchwc != NULL);
803 
804 	for (np1 = wcp->p->ewname, np2 = wcp->matchwc->ewname;
805 	    np1 != NULL && np2 != NULL;
806 	    np1 = np1->u.name.next, np2 = np2->u.name.next) {
807 		/*
808 		 * names must match
809 		 */
810 		if (np1->u.name.s != np2->u.name.s)
811 			return (0);
812 
813 		/*
814 		 * children must exist and have the same numerical value
815 		 */
816 		if (np1->u.name.child == NULL || np2->u.name.child == NULL)
817 			return (0);
818 
819 		if (np1->u.name.child->t != T_NUM ||
820 		    np2->u.name.child->t != T_NUM)
821 			return (0);
822 
823 		if (np1->u.name.child->u.ull != np2->u.name.child->u.ull)
824 			return (0);
825 	}
826 
827 	/*
828 	 * return true only if we have matches for all entries of np1 and
829 	 * np2.  note that NULL wildcard paths (i.e., both wcp->p->ewname
830 	 * and wcp->matchwc->ewname are NULL) will be considered as
831 	 * matching paths.
832 	 */
833 	if (np1 == NULL && np2 == NULL)
834 		return (1);
835 
836 	return (0);
837 }
838 
839 /*
840  * update epname to include the wildcarded portion
841  */
842 static void
843 create_wildcardedpath(struct wildcardinfo **wcproot)
844 {
845 	struct wildcardinfo *wcp;
846 	struct node *nptop;
847 
848 	wcp = *wcproot;
849 
850 	if (wcp->s == WC_UNDERCONSTRUCTION) {
851 		ASSERT(wcp->p->refcount == 1);
852 		wcp->s = WC_COMPLETE;
853 	}
854 
855 	/* path has no wildcard */
856 	if (wcp->p->ewname == NULL)
857 		return;
858 
859 	/*
860 	 * get to this point if a wildcard portion of the path exists.
861 	 *
862 	 * first set oldepname to the start of the existing epname for use
863 	 * in future comparisons, then update epname to include the
864 	 * wildcard portion.
865 	 */
866 	nptop = wcp->nptop;
867 
868 	ASSERT(wcp->oldepname == nptop->u.event.epname);
869 
870 	nptop->u.event.epname =	tname_dup(wcp->p->ewname, CN_DUP);
871 	nptop->u.event.epname = tree_name_append(nptop->u.event.epname,
872 					tname_dup(wcp->oldepname, CN_DUP));
873 }
874 
875 /*
876  * restore epname to its former (nonwildcarded) state
877  */
878 static void
879 undo_wildcardedpath(struct wildcardinfo **wcproot)
880 {
881 	struct wildcardinfo *wcp;
882 
883 	wcp = *wcproot;
884 
885 	if (wcp->s == WC_COMPLETE) {
886 		ASSERT(wcp->p->refcount == 1);
887 		wcp->s = WC_UNDERCONSTRUCTION;
888 	}
889 
890 	/* path has no wildcard */
891 	if (wcp->p->ewname == NULL)
892 		return;
893 
894 	ASSERT(wcp->oldepname != NULL);
895 
896 	tree_free(wcp->nptop->u.event.epname);
897 	wcp->nptop->u.event.epname = wcp->oldepname;
898 }
899 
900 enum wildcard_action {
901 	WA_NONE,	/* do not do any wildcards */
902 	WA_SINGLE,	/* do wildcard only for current cp node */
903 	WA_ALL		/* do wildcards for all cp nodes */
904 };
905 
906 static void
907 vmatch_event(struct info *infop, struct config *cp, struct node *np,
908 	    struct node *lnp, struct node *anp,
909 	    struct wildcardinfo **wcproot, enum wildcard_action dowildcard)
910 {
911 	struct wildcardinfo *wcp;
912 	char *cp_s;
913 	int cp_num;
914 
915 	wcp = *wcproot;
916 
917 	if ((np == NULL && wcp->oldepname != NULL) ||
918 	    (cp == NULL && wcp->oldepname == NULL)) {
919 		/*
920 		 * get to this point if the pathname matched the config
921 		 * (but not necessarily a match at the end).  first check
922 		 * for any matching wildcard paths.
923 		 */
924 		if (wcp->matchwc != NULL && wc_paths_match(wcp) == 0)
925 			return;
926 
927 		create_wildcardedpath(wcproot);
928 		vmatch(infop, np, lnp, anp, wcproot);
929 		undo_wildcardedpath(wcproot);
930 
931 		return;
932 	}
933 
934 	if (cp == NULL)
935 		return;	/* no more config to match against */
936 
937 	for (; cp; cp = config_next(cp)) {
938 		config_getcompname(cp, &cp_s, &cp_num);
939 
940 		if (cp_s == np->u.name.s &&
941 		    ! (wcp->s == WC_UNDERCONSTRUCTION &&
942 		    dowildcard == WA_SINGLE)) {
943 			/* found a matching component name */
944 			if (np->u.name.child &&
945 			    np->u.name.child->t == T_NUM) {
946 				/*
947 				 * an explicit instance number was given
948 				 * in the source.  so only consider this
949 				 * a configuration match if the number
950 				 * also matches.
951 				 */
952 				if (cp_num != np->u.name.child->u.ull)
953 					continue;
954 
955 				np->u.name.cp = cp;
956 			} else {
957 				struct iterinfo *iterinfop;
958 				const char *iters;
959 
960 				/*
961 				 * vertical iterator.  look it up in
962 				 * the appropriate lut and if we get
963 				 * back a value it is either one that we
964 				 * set earlier, in which case we record
965 				 * the new value for this iteration and
966 				 * keep matching, or it is one that was
967 				 * set by an earlier reference to the
968 				 * iterator, in which case we only consider
969 				 * this a configuration match if the number
970 				 * matches cp_num.
971 				 */
972 
973 				ASSERT(np->u.name.child != NULL);
974 				ASSERT(np->u.name.child->t == T_NAME);
975 				iters = np->u.name.child->u.name.s;
976 
977 				if ((iterinfop = lut_lookup(infop->ex,
978 				    (void *)iters, NULL)) == NULL) {
979 					/* we're the first use, record our np */
980 					infop->ex = lut_add(infop->ex,
981 					    (void *)iters,
982 					    newiterinfo(cp_num, np), NULL);
983 				} else if (np == iterinfop->np) {
984 					/*
985 					 * we're the first use, back again
986 					 * for another iteration.  so update
987 					 * the num bound to this iterator in
988 					 * the lut.
989 					 */
990 					iterinfop->num = cp_num;
991 				} else if (cp_num != iterinfop->num) {
992 					/*
993 					 * an earlier reference to this
994 					 * iterator bound it to a different
995 					 * instance number, so there's no
996 					 * match here after all.
997 					 *
998 					 * however, it's possible that this
999 					 * component should really be part of
1000 					 * the wildcard.  we explore this by
1001 					 * forcing this component into the
1002 					 * wildcarded section.
1003 					 *
1004 					 * for more details of what's
1005 					 * going to happen now, see the
1006 					 * comment block below entitled
1007 					 * "forcing components into
1008 					 * wildcard path".
1009 					 */
1010 					if (dowildcard == WA_ALL &&
1011 					    wcp->s == WC_UNDERCONSTRUCTION) {
1012 						vmatch_event(infop, cp, np,
1013 							    lnp, anp, wcproot,
1014 							    WA_SINGLE);
1015 					}
1016 					continue;
1017 				}
1018 				np->u.name.cp = cp;
1019 			}
1020 
1021 			/*
1022 			 * if wildcarding was done in a call earlier in the
1023 			 * stack, record the current cp as the first
1024 			 * matching and nonwildcarded cp.
1025 			 */
1026 			if (dowildcard == WA_ALL &&
1027 			    wcp->s == WC_UNDERCONSTRUCTION)
1028 				wcp->p->cpstart = cp;
1029 
1030 			/*
1031 			 * if this was an IT_HORIZONTAL name,
1032 			 * hmatch() will use the cp to expand
1033 			 * all matches horizontally into a list.
1034 			 * we know the list will contain at least
1035 			 * one element (the one we just matched),
1036 			 * so we just store cp and let hmatch_event()
1037 			 * do the rest.
1038 			 *
1039 			 * recurse on to next component.  note that
1040 			 * wildcarding is now turned off.
1041 			 */
1042 			vmatch_event(infop, config_child(cp), np->u.name.next,
1043 				    lnp, anp, wcproot, WA_NONE);
1044 
1045 			/*
1046 			 * forcing components into wildcard path:
1047 			 *
1048 			 * if this component is the first match, force it
1049 			 * to be part of the wildcarded path and see if we
1050 			 * can get additional matches.  repeat call to
1051 			 * vmatch_event() with the same np, making sure
1052 			 * wildcarding is forced for this component alone
1053 			 * and not its peers by specifying vmatch_event(
1054 			 * ..., WA_SINGLE).  in other words, in the call to
1055 			 * vmatch_event() below, there should be no loop
1056 			 * over cp's peers since that is being done in the
1057 			 * current loop [i.e., the loop we're in now].
1058 			 *
1059 			 * here's an example.  suppose we have the
1060 			 * definition
1061 			 *	event foo@x/y
1062 			 * and configuration
1063 			 *	a0/x0/y0/a1/x1/y1
1064 			 *
1065 			 * the code up to this point will treat "a0" as the
1066 			 * wildcarded part of the path and "x0/y0" as the
1067 			 * nonwildcarded part, resulting in the instanced
1068 			 * event
1069 			 *	foo@a0/x0/y0
1070 			 *
1071 			 * in order to discover the next match (.../x1/y1)
1072 			 * in the configuration we have to force "x0" into
1073 			 * the wildcarded part of the path.  the following
1074 			 * call to vmatch_event(..., WA_SINGLE) does this.
1075 			 * by doing so, we discover the wildcarded part
1076 			 * "a0/x0/y0/a1" and the nonwildcarded part "x1/y1"
1077 			 *
1078 			 * the following call to vmatch_event() is also
1079 			 * needed to properly handle the configuration
1080 			 *	b0/x0/b1/x1/y1
1081 			 *
1082 			 * the recursions into vmatch_event() will start
1083 			 * off uncovering "b0" as the wildcarded part and
1084 			 * "x0" as the start of the nonwildcarded path.
1085 			 * however, the next recursion will not result in a
1086 			 * match since there is no "y" following "x0".  the
1087 			 * subsequent match of (wildcard = "b0/x0/b1" and
1088 			 * nonwildcard = "x1/y1") will be discovered only
1089 			 * if "x0" is forced to be a part of the wildcarded
1090 			 * path.
1091 			 */
1092 			if (dowildcard == WA_ALL &&
1093 			    wcp->s == WC_UNDERCONSTRUCTION) {
1094 				vmatch_event(infop, cp, np, lnp, anp,
1095 					    wcproot, WA_SINGLE);
1096 			}
1097 
1098 			if (np->u.name.it == IT_HORIZONTAL) {
1099 				/*
1100 				 * hmatch() finished iterating through
1101 				 * the configuration as described above, so
1102 				 * don't continue iterating here.
1103 				 */
1104 				return;
1105 			}
1106 
1107 		} else if ((dowildcard == WA_SINGLE || dowildcard == WA_ALL) &&
1108 			    wcp->s == WC_UNDERCONSTRUCTION) {
1109 			/*
1110 			 * no matching cp, and we are constructing our own
1111 			 * wildcard path.  (in other words, we are not
1112 			 * referencing a wildcard path created for an
1113 			 * earlier event.)
1114 			 *
1115 			 * add wildcard entry, then recurse on to config
1116 			 * child
1117 			 */
1118 			struct node *cpnode, *prevlast;
1119 
1120 			cpnode = tree_name(cp_s, IT_NONE, NULL, 0);
1121 			cpnode->u.name.child = newnode(T_NUM, NULL, 0);
1122 			cpnode->u.name.child->u.ull = cp_num;
1123 			cpnode->u.name.cp = cp;
1124 
1125 			if (wcp->p->ewname == NULL) {
1126 				prevlast = NULL;
1127 				wcp->p->ewname = cpnode;
1128 			} else {
1129 				prevlast = wcp->p->ewname->u.name.last;
1130 				wcp->p->ewname =
1131 					tree_name_append(wcp->p->ewname,
1132 							    cpnode);
1133 			}
1134 
1135 			vmatch_event(infop, config_child(cp), np, lnp, anp,
1136 				    wcproot, WA_ALL);
1137 
1138 			/*
1139 			 * back out last addition to ewname and continue
1140 			 * with loop
1141 			 */
1142 			tree_free(cpnode);
1143 			if (prevlast == NULL) {
1144 				wcp->p->ewname = NULL;
1145 			} else {
1146 				prevlast->u.name.next = NULL;
1147 				wcp->p->ewname->u.name.last = prevlast;
1148 			}
1149 
1150 			/*
1151 			 * return if wildcarding is done only for this cp
1152 			 */
1153 			if (dowildcard == WA_SINGLE)
1154 				return;
1155 		}
1156 	}
1157 }
1158 
1159 /*
1160  * for the event node np, which will be subjected to pathname
1161  * expansion/matching, create a (struct wildcardinfo) to hold wildcard
1162  * information.  this struct will be inserted into the first location in
1163  * the list that starts with *wcproot.
1164  *
1165  * cp is the starting node of the configuration; cpstart, which is output,
1166  * is the starting node of the nonwildcarded portion of the path.
1167  */
1168 static void
1169 add_wildcardentry(struct wildcardinfo **wcproot, struct config *cp,
1170 		struct node *np)
1171 {
1172 	struct wildcardinfo *wcpnew, *wcp;
1173 	struct node *np1, *np2;
1174 
1175 	/*
1176 	 * create entry for np
1177 	 */
1178 	wcpnew = MALLOC(sizeof (struct wildcardinfo));
1179 	bzero(wcpnew, sizeof (struct wildcardinfo));
1180 	wcpnew->nptop = np;
1181 	wcpnew->oldepname = np->u.event.epname;
1182 	wcpnew->s = WC_UNDERCONSTRUCTION;
1183 
1184 	wcpnew->p = MALLOC(sizeof (struct wildcardpath));
1185 	bzero(wcpnew->p, sizeof (struct wildcardpath));
1186 	wcpnew->p->cpstart = cp;
1187 	wcpnew->p->refcount = 1;
1188 
1189 	/*
1190 	 * search all completed entries for an epname whose first entry
1191 	 * matches.  note that NULL epnames are considered valid and can be
1192 	 * matched.
1193 	 */
1194 	np2 = wcpnew->oldepname;
1195 	for (wcp = *wcproot; wcp; wcp = wcp->next) {
1196 		ASSERT(wcp->s == WC_COMPLETE);
1197 
1198 		np1 = wcp->oldepname;
1199 		if ((np1 && np2 && np1->u.name.s == np2->u.name.s) ||
1200 		    (np1 == NULL && np2 == NULL)) {
1201 			/*
1202 			 * if we find a match in a completed entry, set
1203 			 * matchwc to indicate that we would like to match
1204 			 * it.  it is necessary to do this since wildcards
1205 			 * for each event are constructed independently.
1206 			 */
1207 			wcpnew->matchwc = wcp->p;
1208 
1209 			wcp->p->refcount++;
1210 			break;
1211 		}
1212 	}
1213 
1214 	wcpnew->next = *wcproot;
1215 	*wcproot = wcpnew;
1216 }
1217 
1218 static void
1219 delete_wildcardentry(struct wildcardinfo **wcproot)
1220 {
1221 	struct wildcardinfo *wcp;
1222 
1223 	wcp = *wcproot;
1224 	*wcproot = wcp->next;
1225 
1226 	switch (wcp->s) {
1227 	case WC_UNDERCONSTRUCTION:
1228 	case WC_COMPLETE:
1229 		if (wcp->matchwc != NULL)
1230 			wcp->matchwc->refcount--;
1231 
1232 		ASSERT(wcp->p->refcount == 1);
1233 		tree_free(wcp->p->ewname);
1234 		FREE(wcp->p);
1235 		break;
1236 
1237 	default:
1238 		out(O_DIE, "deletewc: invalid status");
1239 		break;
1240 	}
1241 
1242 	FREE(wcp);
1243 }
1244 
1245 /*
1246  * vmatch -- find the next vertical expansion match in the config database
1247  *
1248  * this routine is called with three node pointers:
1249  *	 np -- the parse tree node we're matching
1250  *	lnp -- the rest of the list we're currently working on
1251  *	anp -- the rest of the arrow we're currently working on
1252  *
1253  * the expansion matching happens via three types of recursion:
1254  *
1255  *	- when given an arrow, handle the left-side and then recursively
1256  *	  handle the right side (which might be another cascaded arrow).
1257  *
1258  *	- when handling one side of an arrow, recurse through the T_LIST
1259  *	  to get to each event (or just move on to the event if there
1260  *	  is a single event instead of a list).  since the arrow parse
1261  *	  trees recurse left, we actually start with the right-most
1262  *	  event list in the prop statement and work our way towards
1263  *	  the left-most event list.
1264  *
1265  *	- when handling an event, recurse down each component of the
1266  *	  pathname, matching in the config database and recording the
1267  *	  matches in the explicit iterator dictionary as we go.
1268  *
1269  * when the bottom of this matching recursion is met, meaning we have
1270  * set the "cp" pointers on all the names in the entire statement,
1271  * we call hmatch() which does its own recursion to handle horizontal
1272  * expansion and then calls generate() to generate nodes, bubbles, and
1273  * arrows in the instance tree.  generate() looks at the cp pointers to
1274  * see what instance numbers were matched in the configuration database.
1275  *
1276  * when horizontal expansion appears, vmatch() finds only the first match
1277  * and hmatch() then takes the horizontal expansion through all the other
1278  * matches when generating the arrows in the instance tree.
1279  *
1280  * the "infop" passed down through the recursion contains a dictionary
1281  * of the explicit iterators (all the implicit iterators have been converted
1282  * to explicit iterators when the parse tree was created by tree.c), which
1283  * allows things like this to work correctly:
1284  *
1285  *	prop error.a@x[n]/y/z -> error.b@x/y[n]/z -> error.c@x/y/z[n];
1286  *
1287  * during the top level call, the explicit iterator "n" will match an
1288  * instance number in the config database, and the result will be recorded
1289  * in the explicit iterator dictionary and passed down via "infop".  so
1290  * when the recursive call tries to match y[n] in the config database, it
1291  * will only match the same instance number as x[n] did since the dictionary
1292  * is consulted to see if "n" took on a value already.
1293  *
1294  * at any point during the recursion, match*() can return to indicate
1295  * a match was not found in the config database and that the caller should
1296  * move on to the next potential match, if any.
1297  *
1298  * constraints are completely ignored by match(), so the statement:
1299  *
1300  *	prop error.a@x[n] -> error.b@x[n] {n != 0};
1301  *
1302  * might very well match x[0] if it appears in the config database.  it
1303  * is the generate() routine that takes that match and then decides what
1304  * arrow, if any, should be generated in the instance tree.  generate()
1305  * looks at the explicit iterator dictionary to get values like "n" in
1306  * the above example so that it can evaluate constraints.
1307  *
1308  */
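
/*
 * as a small hypothetical walk-through of the recursion below: for
 *
 *	prop error.a@x -> error.b@x;
 *
 * vmatch() is first entered with the T_ARROW node; it recurses on the
 * right-hand event (error.b@x) with anp pointing at the arrow's lhs,
 * then on the left-hand event, and only when every epname component has
 * been bound to a config node (its cp pointer set) does it reach the
 * "end of vertical match recursion" case that calls hmatch() and,
 * through it, generate().
 */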
1309 static void
1310 vmatch(struct info *infop, struct node *np, struct node *lnp,
1311     struct node *anp, struct wildcardinfo **wcproot)
1312 {
1313 	if (np == NULL) {
1314 		if (lnp)
1315 			vmatch(infop, lnp, NULL, anp, wcproot);
1316 		else if (anp)
1317 			vmatch(infop, anp, NULL, NULL, wcproot);
1318 		else {
1319 			struct node *src;
1320 			struct node *dst;
1321 
1322 			/* end of vertical match recursion */
1323 			outfl(O_ALTFP|O_VERB3|O_NONL,
1324 			    infop->anp->file, infop->anp->line, "vmatch: ");
1325 			ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, infop->anp);
1326 			out(O_ALTFP|O_VERB3, NULL);
1327 
1328 			generate_nork(
1329 			    itree_np2nork(infop->anp->u.arrow.nnp),
1330 			    itree_np2nork(infop->anp->u.arrow.knp));
1331 			dst = infop->anp->u.arrow.rhs;
1332 			src = infop->anp->u.arrow.lhs;
1333 			for (;;) {
1334 				generate_new();	/* new set of arrows */
1335 				if (src->t == T_ARROW) {
1336 					hmatch(infop, src->u.arrow.rhs, dst);
1337 					generate_nork(
1338 					    itree_np2nork(src->u.arrow.nnp),
1339 					    itree_np2nork(src->u.arrow.knp));
1340 					dst = src->u.arrow.rhs;
1341 					src = src->u.arrow.lhs;
1342 				} else {
1343 					hmatch(infop, src, dst);
1344 					break;
1345 				}
1346 			}
1347 		}
1348 		return;
1349 	}
1350 
1351 	switch (np->t) {
1352 	case T_EVENT: {
1353 		add_wildcardentry(wcproot, config_child(infop->croot), np);
1354 		vmatch_event(infop, config_child(infop->croot),
1355 			    np->u.event.epname, lnp, anp, wcproot, WA_ALL);
1356 		delete_wildcardentry(wcproot);
1357 		break;
1358 	}
1359 	case T_LIST:
1360 		ASSERT(lnp == NULL);
1361 		vmatch(infop, np->u.expr.right, np->u.expr.left, anp, wcproot);
1362 		break;
1363 
1364 	case T_ARROW:
1365 		ASSERT(lnp == NULL && anp == NULL);
1366 		vmatch(infop, np->u.arrow.rhs, NULL, np->u.arrow.lhs, wcproot);
1367 		break;
1368 
1369 	default:
1370 		outfl(O_DIE, np->file, np->line,
1371 		    "vmatch: unexpected type: %s",
1372 		    ptree_nodetype2str(np->t));
1373 	}
1374 }
1375 
1376 static void
1377 cp_reset(struct node *np)
1378 {
1379 	if (np == NULL)
1380 		return;
1381 	switch (np->t) {
1382 	case T_NAME:
1383 		np->u.name.cp = NULL;
1384 		cp_reset(np->u.name.next);
1385 		break;
1386 
1387 	case T_LIST:
1388 		cp_reset(np->u.expr.left);
1389 		cp_reset(np->u.expr.right);
1390 		break;
1391 
1392 	case T_ARROW:
1393 		cp_reset(np->u.arrow.lhs);
1394 		cp_reset(np->u.arrow.rhs);
1395 		break;
1396 
1397 	case T_EVENT:
1398 		cp_reset(np->u.event.epname);
1399 		break;
1400 	}
1401 }
1402 
1403 /*
1404  * itree_create -- apply the current config to the current parse tree
1405  *
1406  * returns a lut mapping fully-instance-qualified names to struct events.
1407  *
1408  */
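
/*
 * a minimal usage sketch (assuming the caller has already built a
 * config tree, e.g. via the config module):
 *
 *	struct lut *itp = itree_create(croot);
 *	itree_ptree(O_ALTFP|O_VERB3, itp);	-- dump the instance tree
 *	itree_free(itp);
 */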
1409 struct lut *
1410 itree_create(struct config *croot)
1411 {
1412 	struct lut *retval;
1413 	struct node *propnp;
1414 
1415 	Ninfo.lut = NULL;
1416 	Ninfo.croot = croot;
1417 	for (propnp = Props; propnp; propnp = propnp->u.stmt.next) {
1418 		struct node *anp = propnp->u.stmt.np;
1419 		struct wildcardinfo *wcproot = NULL;
1420 
1421 		ASSERTeq(anp->t, T_ARROW, ptree_nodetype2str);
1422 
1423 		Ninfo.anp = anp;
1424 		Ninfo.ex = NULL;
1425 
1426 		generate_arrownp(anp);
1427 		vmatch(&Ninfo, anp, NULL, NULL, &wcproot);
1428 
1429 		if (Ninfo.ex) {
1430 			lut_free(Ninfo.ex, iterinfo_destructor, NULL);
1431 			Ninfo.ex = NULL;
1432 		}
1433 		ASSERT(wcproot == NULL);
1434 		cp_reset(anp);
1435 	}
1436 
1437 	retval = Ninfo.lut;
1438 	Ninfo.lut = NULL;
1439 	return (retval);
1440 }
1441 
1442 void
1443 itree_free(struct lut *lutp)
1444 {
1445 	lut_free(lutp, itree_destructor, NULL);
1446 }
1447 
1448 void
1449 itree_prune(struct lut *lutp)
1450 {
1451 	lut_walk(lutp, itree_pruner, NULL);
1452 }
1453 
1454 int
1455 itree_nameinstancecmp(struct node *np1, struct node *np2)
1456 {
1457 	int np1type = (int)np1->u.name.t;
1458 	int np2type = (int)np2->u.name.t;
1459 	int num1;
1460 	int num2;
1461 
1462 	while (np1 && np2 && np1->u.name.s == np2->u.name.s) {
1463 		if (np1->u.name.next != NULL && np2->u.name.next != NULL) {
1464 			if (np1->u.name.cp != NULL) {
1465 				config_getcompname(np1->u.name.cp, NULL, &num1);
1466 			} else {
1467 				ASSERT(np1->u.name.child != NULL);
1468 				ASSERT(np1->u.name.child->t == T_NUM);
1469 				num1 = (int)np1->u.name.child->u.ull;
1470 			}
1471 
1472 			if (np2->u.name.cp != NULL) {
1473 				config_getcompname(np2->u.name.cp, NULL, &num2);
1474 			} else {
1475 				ASSERT(np2->u.name.child != NULL);
1476 				ASSERT(np2->u.name.child->t == T_NUM);
1477 				num2 = (int)np2->u.name.child->u.ull;
1478 			}
1479 
1480 			if (num1 != num2)
1481 				return (num1 - num2);
1482 		}
1483 
1484 		np1 = np1->u.name.next;
1485 		np2 = np2->u.name.next;
1486 	}
1487 	if (np1 == NULL)
1488 		if (np2 == NULL)
1489 			return (np1type - np2type);
1490 		else
1491 			return (-1);
1492 	else if (np2 == NULL)
1493 		return (1);
1494 	else
1495 		return (strcmp(np1->u.name.s, np2->u.name.s));
1496 }
1497 
1498 void
1499 itree_pevent_brief(int flags, struct event *ep)
1500 {
1501 	ASSERT(ep != NULL);
1502 	ASSERT(ep->enode != NULL);
1503 	ASSERT(ep->ipp != NULL);
1504 
1505 	ipath_print(flags, ep->enode->u.event.ename->u.name.s, ep->ipp);
1506 }
1507 
1508 /*ARGSUSED*/
1509 static void
1510 itree_pevent(struct event *lhs, struct event *ep, void *arg)
1511 {
1512 	struct plut_wlk_data propd;
1513 	struct bubble *bp;
1514 	int flags = (int)arg;
1515 
1516 	itree_pevent_brief(flags, ep);
1517 	if (ep->t == N_EREPORT)
1518 		out(flags, " (count %d)", ep->count);
1519 	else
1520 		out(flags, NULL);
1521 
1522 	if (ep->props) {
1523 		propd.flags = flags;
1524 		propd.first = 1;
1525 		out(flags, "Properties:");
1526 		lut_walk(ep->props, ptree_plut, (void *)&propd);
1527 	}
1528 
1529 	for (bp = itree_next_bubble(ep, NULL); bp;
1530 	    bp = itree_next_bubble(ep, bp)) {
1531 		/* Print only TO bubbles in this loop */
1532 		if (bp->t != B_TO)
1533 			continue;
1534 		itree_pbubble(flags, bp);
1535 	}
1536 
1537 	for (bp = itree_next_bubble(ep, NULL); bp;
1538 	    bp = itree_next_bubble(ep, bp)) {
1539 		/* Print only INHIBIT bubbles in this loop */
1540 		if (bp->t != B_INHIBIT)
1541 			continue;
1542 		itree_pbubble(flags, bp);
1543 	}
1544 
1545 	for (bp = itree_next_bubble(ep, NULL); bp;
1546 	    bp = itree_next_bubble(ep, bp)) {
1547 		/* Print only FROM bubbles in this loop */
1548 		if (bp->t != B_FROM)
1549 			continue;
1550 		itree_pbubble(flags, bp);
1551 	}
1552 }
1553 
1554 static void
1555 itree_pbubble(int flags, struct bubble *bp)
1556 {
1557 	struct constraintlist *cp;
1558 	struct arrowlist *ap;
1559 
1560 	ASSERT(bp != NULL);
1561 
1562 	out(flags|O_NONL, "   ");
1563 	if (bp->mark)
1564 		out(flags|O_NONL, "*");
1565 	else
1566 		out(flags|O_NONL, " ");
1567 	if (bp->t == B_FROM)
1568 		out(flags|O_NONL, "N=%d to:", bp->nork);
1569 	else if (bp->t == B_TO)
1570 		out(flags|O_NONL, "K=%d from:", bp->nork);
1571 	else
1572 		out(flags|O_NONL, "K=%d masked from:", bp->nork);
1573 
1574 	if (bp->t == B_TO || bp->t == B_INHIBIT) {
1575 		for (ap = itree_next_arrow(bp, NULL); ap;
1576 		    ap = itree_next_arrow(bp, ap)) {
1577 			ASSERT(ap->arrowp->head == bp);
1578 			ASSERT(ap->arrowp->tail != NULL);
1579 			ASSERT(ap->arrowp->tail->myevent != NULL);
1580 			out(flags|O_NONL, " ");
1581 			itree_pevent_brief(flags, ap->arrowp->tail->myevent);
1582 		}
1583 		out(flags, NULL);
1584 		return;
1585 	}
1586 
1587 	for (ap = itree_next_arrow(bp, NULL); ap;
1588 	    ap = itree_next_arrow(bp, ap)) {
1589 		ASSERT(ap->arrowp->tail == bp);
1590 		ASSERT(ap->arrowp->head != NULL);
1591 		ASSERT(ap->arrowp->head->myevent != NULL);
1592 
1593 		out(flags|O_NONL, " ");
1594 		itree_pevent_brief(flags, ap->arrowp->head->myevent);
1595 
1596 		out(flags|O_NONL, " ");
1597 		ptree_timeval(flags, &ap->arrowp->mindelay);
1598 		out(flags|O_NONL, ",");
1599 		ptree_timeval(flags, &ap->arrowp->maxdelay);
1600 
1601 		/* Display anything from the propagation node? */
1602 		out(O_VERB3|O_NONL, " <%s:%d>",
1603 		    ap->arrowp->pnode->file, ap->arrowp->pnode->line);
1604 
1605 		if (itree_next_constraint(ap->arrowp, NULL))
1606 			out(flags|O_NONL, " {");
1607 
1608 		for (cp = itree_next_constraint(ap->arrowp, NULL); cp;
1609 		    cp = itree_next_constraint(ap->arrowp, cp)) {
1610 			ptree(flags, cp->cnode, 1, 0);
1611 			if (itree_next_constraint(ap->arrowp, cp))
1612 				out(flags|O_NONL, ", ");
1613 		}
1614 
1615 		if (itree_next_constraint(ap->arrowp, NULL))
1616 			out(flags|O_NONL, "}");
1617 	}
1618 	out(flags, NULL);
1619 }
1620 
1621 void
1622 itree_ptree(int flags, struct lut *itp)
1623 {
1624 	lut_walk(itp, (lut_cb)itree_pevent, (void *)flags);
1625 }
1626 
1627 /*ARGSUSED*/
1628 static void
1629 itree_destructor(void *left, void *right, void *arg)
1630 {
1631 	struct event *ep = (struct event *)right;
1632 	struct bubble *nextbub, *bub;
1633 
1634 	/* Free the properties */
1635 	if (ep->props)
1636 		lut_free(ep->props, instances_destructor, NULL);
1637 
1638 	/* Free the payload properties */
1639 	if (ep->payloadprops)
1640 		lut_free(ep->payloadprops, payloadprops_destructor, NULL);
1641 
1642 	/* Free my bubbles */
1643 	for (bub = ep->bubbles; bub != NULL; ) {
1644 		nextbub = bub->next;
1645 		/*
1646 		 * Free arrows if they are FROM me.  Free arrowlists on
1647 		 * other types of bubbles (but not the attached arrows,
1648 		 * which will be freed when we free the originating
1649 		 * bubble).
1650 		 */
1651 		if (bub->t == B_FROM)
1652 			itree_free_arrowlists(bub, 1);
1653 		else
1654 			itree_free_arrowlists(bub, 0);
1655 		itree_free_bubble(bub);
1656 		bub = nextbub;
1657 	}
1658 
1659 	if (ep->nvp != NULL)
1660 		nvlist_free(ep->nvp);
1661 	bzero(ep, sizeof (*ep));
1662 	FREE(ep);
1663 }
1664 
1665 /*ARGSUSED*/
1666 static void
1667 itree_pruner(void *left, void *right, void *arg)
1668 {
1669 	struct event *ep = (struct event *)right;
1670 	struct bubble *nextbub, *bub;
1671 
1672 	if (ep->keep_in_tree)
1673 		return;
1674 
1675 	/* Free the properties */
1676 	lut_free(ep->props, instances_destructor, NULL);
1677 
1678 	/* Free the payload properties */
1679 	lut_free(ep->payloadprops, payloadprops_destructor, NULL);
1680 
1681 	/* Free my bubbles */
1682 	for (bub = ep->bubbles; bub != NULL; ) {
1683 		nextbub = bub->next;
1684 		itree_prune_arrowlists(bub);
1685 		itree_free_bubble(bub);
1686 		bub = nextbub;
1687 	}
1688 
1689 	if (ep->nvp != NULL)
1690 		nvlist_free(ep->nvp);
1691 	ep->props = NULL;
1692 	ep->payloadprops = NULL;
1693 	ep->bubbles = NULL;
1694 	ep->nvp = NULL;
1695 }
1696 
1697 static void
1698 itree_free_bubble(struct bubble *freeme)
1699 {
1700 	bzero(freeme, sizeof (*freeme));
1701 	FREE(freeme);
1702 }
1703 
1704 static struct bubble *
1705 itree_add_bubble(struct event *eventp, enum bubbletype btype, int nork, int gen)
1706 {
1707 	struct bubble *prev = NULL;
1708 	struct bubble *curr;
1709 	struct bubble *newb;
1710 
1711 	/* Use existing bubbles as appropriate when possible */
1712 	for (curr = eventp->bubbles;
1713 	    curr != NULL;
1714 	    prev = curr, curr = curr->next) {
1715 		if (btype == B_TO && curr->t == B_TO) {
1716 			/* see if an existing "to" bubble works for us */
1717 			if (gen == curr->gen)
1718 				return (curr);	/* matched gen number */
1719 			else if (nork == 1 && curr->nork == 1) {
1720 				curr->gen = gen;
1721 				return (curr);	/* coalesce K==1 bubbles */
1722 			}
1723 		} else if (btype == B_FROM && curr->t == B_FROM) {
1724 			/* see if an existing "from" bubble works for us */
1725 			if ((nork == N_IS_ALL && curr->nork == N_IS_ALL) ||
1726 			    (nork == 0 && curr->nork == 0))
1727 				return (curr);
1728 		}
1729 	}
1730 
1731 	newb = MALLOC(sizeof (struct bubble));
1732 	newb->next = NULL;
1733 	newb->t = btype;
1734 	newb->myevent = eventp;
1735 	newb->nork = nork;
1736 	newb->mark = 0;
1737 	newb->gen = gen;
1738 	newb->arrows = NULL;
1739 
1740 	if (prev == NULL)
1741 		eventp->bubbles = newb;
1742 	else
1743 		prev->next = newb;
1744 
1745 	return (newb);
1746 }
1747 
1748 struct bubble *
1749 itree_next_bubble(struct event *eventp, struct bubble *last)
1750 {
1751 	struct bubble *next;
1752 
1753 	for (;;) {
1754 		if (last != NULL)
1755 			next = last->next;
1756 		else
1757 			next = eventp->bubbles;
1758 
1759 		if (next == NULL || next->arrows != NULL)
1760 			return (next);
1761 
1762 		/* bubble was empty, skip it */
1763 		last = next;
1764 	}
1765 }
1766 
1767 static void
1768 add_arrow(struct bubble *bp, struct arrow *ap)
1769 {
1770 	struct arrowlist *prev = NULL;
1771 	struct arrowlist *curr;
1772 	struct arrowlist *newal;
1773 
1774 	newal = MALLOC(sizeof (struct arrowlist));
1775 	bzero(newal, sizeof (struct arrowlist));
1776 	newal->arrowp = ap;
1777 
1778 	curr = itree_next_arrow(bp, NULL);
1779 	while (curr != NULL) {
1780 		prev = curr;
1781 		curr = itree_next_arrow(bp, curr);
1782 	}
1783 
1784 	if (prev == NULL)
1785 		bp->arrows = newal;
1786 	else
1787 		prev->next = newal;
1788 }
1789 
1790 static struct arrow *
1791 itree_add_arrow(struct bubble *frombubblep, struct bubble *tobubblep,
1792     struct node *apnode, struct node *fromevent, struct node *toevent,
1793     struct lut *ex)
1794 {
1795 	struct arrow *newa;
1796 
1797 	ASSERTeq(frombubblep->t, B_FROM, itree_bubbletype2str);
1798 	ASSERTinfo(tobubblep->t == B_TO || tobubblep->t == B_INHIBIT,
1799 	    itree_bubbletype2str(tobubblep->t));
1800 	newa = MALLOC(sizeof (struct arrow));
1801 	bzero(newa, sizeof (struct arrow));
1802 	newa->tail = frombubblep;
1803 	newa->head = tobubblep;
1804 	newa->pnode = apnode;
1805 	newa->constraints = NULL;
1806 
1807 	/*
1808 	 *  Set default delays, then try to re-set them from
1809 	 *  any within() constraints.
1810 	 */
1811 	newa->mindelay = newa->maxdelay = 0ULL;
1812 	if (itree_set_arrow_traits(newa, fromevent, toevent, ex) == 0) {
1813 		FREE(newa);
1814 		return (NULL);
1815 	}
1816 
1817 	add_arrow(frombubblep, newa);
1818 	add_arrow(tobubblep, newa);
1819 	return (newa);
1820 }
1821 
1822 /* returns false if traits show that arrow should not be added after all */
1823 static int
1824 itree_set_arrow_traits(struct arrow *ap, struct node *fromev,
1825     struct node *toev, struct lut *ex)
1826 {
1827 	struct node *epnames[] = { NULL, NULL, NULL };
1828 	struct node *newc = NULL;
1829 
1830 	ASSERTeq(fromev->t, T_EVENT, ptree_nodetype2str);
1831 	ASSERTeq(toev->t, T_EVENT, ptree_nodetype2str);
1832 
1833 	/*
1834 	 * search for the within values first on the declaration of
1835 	 * the destination event, and then on the prop.  this allows
1836 	 * one to specify a "default" within by putting it on the
1837 	 * declaration, while still allowing the prop statement to override it.
1838 	 */
1839 	arrow_add_within(ap, toev->u.event.declp->u.stmt.np->u.event.eexprlist);
1840 	arrow_add_within(ap, toev->u.event.eexprlist);
1841 
1842 	/*
1843 	 * handle any global constraints inherited from the
1844 	 * "fromev" event's declaration
1845 	 */
1846 	ASSERT(fromev->u.event.declp != NULL);
1847 	ASSERT(fromev->u.event.declp->u.stmt.np != NULL);
1848 
1849 #ifdef	notdef
1850 	/* XXX not quite ready to evaluate constraints from decls yet */
1851 	if (fromev->u.event.declp->u.stmt.np->u.event.eexprlist)
1852 		(void) itree_add_constraint(ap,
1853 		    fromev->u.event.declp->u.stmt.np->u.event.eexprlist);
1854 #endif	/* notdef */
1855 
1856 	/* handle constraints on the from event in the prop statement */
1857 	epnames[0] = fromev->u.event.epname;
1858 	epnames[1] = toev->u.event.epname;
1859 	if (eval_potential(fromev->u.event.eexprlist, ex, epnames, &newc,
1860 	    Ninfo.croot) == 0)
1861 		return (0);		/* constraint disallows arrow */
1862 
1863 	/*
1864 	 * handle any global constraints inherited from the
1865 	 * "toev" event's declaration
1866 	 */
1867 	ASSERT(toev->u.event.declp != NULL);
1868 	ASSERT(toev->u.event.declp->u.stmt.np != NULL);
1869 
1870 #ifdef	notdef
1871 	/* XXX not quite ready to evaluate constraints from decls yet */
1872 	if (toev->u.event.declp->u.stmt.np->u.event.eexprlist)
1873 		(void) itree_add_constraint(ap,
1874 		    toev->u.event.declp->u.stmt.np->u.event.eexprlist);
1875 #endif	/* notdef */
1876 
1877 	/* handle constraints on the to event in the prop statement */
1878 	epnames[0] = toev->u.event.epname;
1879 	epnames[1] = fromev->u.event.epname;
1880 	if (eval_potential(toev->u.event.eexprlist, ex, epnames, &newc,
1881 	    Ninfo.croot) == 0) {
1882 		if (newc != NULL)
1883 			tree_free(newc);
1884 		return (0);		/* constraint disallows arrow */
1885 	}
1886 
1887 	/* if we came up with any deferred constraints, add them to arrow */
1888 	if (newc != NULL)
1889 		(void) itree_add_constraint(ap, iexpr(newc));
1890 
1891 	return (1);	/* constraints allow arrow */
1892 }
1893 
1894 /*
1895  * Set the within() constraint.  If within() appears multiple times,
1896  * the last one wins.
1897  */
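/*
 * The argument list of within() is one of: a single time value, the
 * name "infinity", or a two-element list whose left side is a time
 * value and whose right side is a time value or "infinity".  The cases
 * below mirror those forms.
 */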
1898 static void
1899 arrow_add_within(struct arrow *ap, struct node *xpr)
1900 {
1901 	struct node *arglist;
1902 
1903 	/* end of expressions list */
1904 	if (xpr == NULL)
1905 		return;
1906 
1907 	switch (xpr->t) {
1908 	case T_LIST:
1909 		arrow_add_within(ap, xpr->u.expr.left);
1910 		arrow_add_within(ap, xpr->u.expr.right);
1911 		return;
1912 	case T_FUNC:
1913 		if (xpr->u.func.s != L_within)
1914 			return;
1915 		arglist = xpr->u.func.arglist;
1916 		switch (arglist->t) {
1917 		case T_TIMEVAL:
1918 			ap->mindelay = 0;
1919 			ap->maxdelay = arglist->u.ull;
1920 			break;
1921 		case T_NAME:
1922 			ASSERT(arglist->u.name.s == L_infinity);
1923 			ap->mindelay = 0;
1924 			ap->maxdelay = TIMEVAL_EVENTUALLY;
1925 			break;
1926 		case T_LIST:
1927 			ASSERT(arglist->u.expr.left->t == T_TIMEVAL);
1928 			ap->mindelay = arglist->u.expr.left->u.ull;
1929 			switch (arglist->u.expr.right->t) {
1930 			case T_TIMEVAL:
1931 				ap->maxdelay = arglist->u.expr.right->u.ull;
1932 				break;
1933 			case T_NAME:
1934 				ASSERT(arglist->u.expr.right->u.name.s ==
1935 				    L_infinity);
1936 				ap->maxdelay = TIMEVAL_EVENTUALLY;
1937 				break;
1938 			default:
1939 				out(O_DIE, "within: unexpected 2nd arg type");
1940 			}
1941 			break;
1942 		default:
1943 			out(O_DIE, "within: unexpected 1st arg type");
1944 		}
1945 		break;
1946 	default:
1947 		return;
1948 	}
1949 }
1950 
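/*
 * itree_free_arrowlists -- free the arrowlist entries hanging off a bubble
 *
 * if arrows_too is nonzero, the arrows themselves and their constraints
 * are freed as well.
 */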
1951 static void
1952 itree_free_arrowlists(struct bubble *bubp, int arrows_too)
1953 {
1954 	struct arrowlist *al, *nal;
1955 
1956 	al = bubp->arrows;
1957 	while (al != NULL) {
1958 		nal = al->next;
1959 		if (arrows_too) {
1960 			itree_free_constraints(al->arrowp);
1961 			bzero(al->arrowp, sizeof (struct arrow));
1962 			FREE(al->arrowp);
1963 		}
1964 		bzero(al, sizeof (*al));
1965 		FREE(al);
1966 		al = nal;
1967 	}
1968 }
1969 
1970 static void
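/*
 * itree_delete_arrow -- unlink an arrow from a bubble's arrow list
 *
 * the arrow is assumed to be on the list; only the arrowlist entry is
 * freed, not the arrow itself.
 */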
1971 itree_delete_arrow(struct bubble *bubp, struct arrow *arrow)
1972 {
1973 	struct arrowlist *al, *oal;
1974 
1975 	al = bubp->arrows;
1976 	if (al->arrowp == arrow) {
1977 		bubp->arrows = al->next;
1978 		bzero(al, sizeof (*al));
1979 		FREE(al);
1980 		return;
1981 	}
1982 	while (al != NULL) {
1983 		oal = al;
1984 		al = al->next;
1985 		ASSERT(al != NULL);
1986 		if (al->arrowp == arrow) {
1987 			oal->next = al->next;
1988 			bzero(al, sizeof (*al));
1989 			FREE(al);
1990 			return;
1991 		}
1992 	}
1993 }
1994 
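/*
 * itree_prune_arrowlists -- remove and free every arrow attached to a bubble
 *
 * each arrow is first deleted from the bubble at its other end, then the
 * arrow, its constraints, and the local arrowlist entry are freed.
 */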
1995 static void
1996 itree_prune_arrowlists(struct bubble *bubp)
1997 {
1998 	struct arrowlist *al, *nal;
1999 
2000 	al = bubp->arrows;
2001 	while (al != NULL) {
2002 		nal = al->next;
2003 		if (bubp->t == B_FROM)
2004 			itree_delete_arrow(al->arrowp->head, al->arrowp);
2005 		else
2006 			itree_delete_arrow(al->arrowp->tail, al->arrowp);
2007 		itree_free_constraints(al->arrowp);
2008 		bzero(al->arrowp, sizeof (struct arrow));
2009 		FREE(al->arrowp);
2010 		bzero(al, sizeof (*al));
2011 		FREE(al);
2012 		al = nal;
2013 	}
2014 }
2015 
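/*
 * itree_next_arrow -- iterate over a bubble's arrow list
 *
 * pass NULL for "last" to get the first entry.
 */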
2016 struct arrowlist *
2017 itree_next_arrow(struct bubble *bubble, struct arrowlist *last)
2018 {
2019 	struct arrowlist *next;
2020 
2021 	if (last != NULL)
2022 		next = last->next;
2023 	else
2024 		next = bubble->arrows;
2025 	return (next);
2026 }
2027 
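/*
 * itree_add_constraint -- append a constraint expression to an arrow's
 * constraint list
 */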
2028 static struct constraintlist *
2029 itree_add_constraint(struct arrow *arrowp, struct node *c)
2030 {
2031 	struct constraintlist *prev = NULL;
2032 	struct constraintlist *curr;
2033 	struct constraintlist *newc;
2034 
2035 	for (curr = arrowp->constraints; curr != NULL;
2036 	    prev = curr, curr = curr->next)
2037 		;
2038 
2039 	newc = MALLOC(sizeof (struct constraintlist));
2040 	newc->next = NULL;
2041 	newc->cnode = c;
2042 
2043 	if (prev == NULL)
2044 		arrowp->constraints = newc;
2045 	else
2046 		prev->next = newc;
2047 
2048 	return (newc);
2049 }
2050 
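/*
 * itree_next_constraint -- iterate over an arrow's constraint list
 *
 * pass NULL for "last" to get the first entry.
 */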
2051 struct constraintlist *
2052 itree_next_constraint(struct arrow *arrowp, struct constraintlist *last)
2053 {
2054 	struct constraintlist *next;
2055 
2056 	if (last != NULL)
2057 		next = last->next;
2058 	else
2059 		next = arrowp->constraints;
2060 	return (next);
2061 }
2062 
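/*
 * itree_free_constraints -- free an arrow's constraint list
 *
 * cached iexprs are released with iexpr_free(); other expression trees
 * are freed with tree_free().
 */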
2063 static void
2064 itree_free_constraints(struct arrow *ap)
2065 {
2066 	struct constraintlist *cl, *ncl;
2067 
2068 	cl = ap->constraints;
2069 	while (cl != NULL) {
2070 		ncl = cl->next;
2071 		ASSERT(cl->cnode != NULL);
2072 		if (!iexpr_cached(cl->cnode))
2073 			tree_free(cl->cnode);
2074 		else
2075 			iexpr_free(cl->cnode);
2076 		bzero(cl, sizeof (*cl));
2077 		FREE(cl);
2078 		cl = ncl;
2079 	}
2080 }
2081 
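/*
 * itree_bubbletype2str -- return a printable name for a bubble type
 *
 * unexpected values are formatted into a static buffer, so the result
 * is not reentrant in that case.
 */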
2082 const char *
2083 itree_bubbletype2str(enum bubbletype t)
2084 {
2085 	static char buf[100];
2086 
2087 	switch (t) {
2088 	case B_FROM:	return L_from;
2089 	case B_TO:	return L_to;
2090 	case B_INHIBIT:	return L_inhibit;
2091 	default:
2092 		(void) sprintf(buf, "[unexpected bubbletype: %d]", t);
2093 		return (buf);
2094 	}
2095 }
2096 
2097 /*
2098  * itree_fini -- clean up any half-built itrees
2099  */
2100 void
2101 itree_fini(void)
2102 {
2103 	if (Ninfo.lut != NULL) {
2104 		itree_free(Ninfo.lut);
2105 		Ninfo.lut = NULL;
2106 	}
2107 	if (Ninfo.ex != NULL) {
2108 		lut_free(Ninfo.ex, iterinfo_destructor, NULL);
2109 		Ninfo.ex = NULL;
2110 	}
2111 }
2112