xref: /illumos-gate/usr/src/cmd/fm/modules/common/eversholt/itree.c (revision 814a60b13c0ad90e5d2edfd29a7a84bbf416cc1a)
1 /*
2  * CDDL HEADER START
3  *
4  * The contents of this file are subject to the terms of the
5  * Common Development and Distribution License, Version 1.0 only
6  * (the "License").  You may not use this file except in compliance
7  * with the License.
8  *
9  * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
10  * or http://www.opensolaris.org/os/licensing.
11  * See the License for the specific language governing permissions
12  * and limitations under the License.
13  *
14  * When distributing Covered Code, include this CDDL HEADER in each
15  * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
16  * If applicable, add the following below this CDDL HEADER, with the
17  * fields enclosed by brackets "[]" replaced with your own identifying
18  * information: Portions Copyright [yyyy] [name of copyright owner]
19  *
20  * CDDL HEADER END
21  */
22 /*
23  * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
24  * Use is subject to license terms.
25  *
26  * itree.c -- instance tree creation and manipulation
27  *
28  * this module provides the instance tree
29  */
30 
31 #pragma ident	"%Z%%M%	%I%	%E% SMI"
32 
33 #include <stdio.h>
34 #include <ctype.h>
35 #include <string.h>
36 #include <strings.h>
37 #include "alloc.h"
38 #include "out.h"
39 #include "stable.h"
40 #include "literals.h"
41 #include "lut.h"
42 #include "tree.h"
43 #include "ptree.h"
44 #include "itree.h"
45 #include "ipath.h"
46 #include "eval.h"
47 #include "config.h"
48 
49 /*
50  * struct info contains the state we keep when expanding a prop statement
51  * as part of constructing the instance tree.  state kept in struct info
52  * is the non-recursive stuff -- the stuff that doesn't need to be on
53  * the stack.  the rest of the state, which is passed between all the
54  * mutually recursive functions, is required to be on the stack since
55  * we need to backtrack and recurse as we do the instance tree construction.
56  */
57 struct info {
58 	struct lut *lut;
59 	struct node *anp;	/* arrow np */
60 	struct lut *ex;		/* dictionary of explicit iterators */
61 	struct config *croot;
62 } Ninfo;
63 
64 /*
65  * struct wildcardinfo is used to track wildcarded portions of paths.
66  *
67  * for example, if the epname of an event is "c/d" and the path "a/b/c/d"
68  * exists, the wildcard path ewname is filled in with the path "a/b".  when
69  * matching is done, epname is temporarily replaced with the concatenation
70  * of ewname and epname.  cpstart is set to the (struct config *)
71  * corresponding to component "c".
72  *
73  * a linked list of these structs is used to track the expansion of each
74  * event node as it is processed in vmatch() --> vmatch_event() calls.
75  */
76 struct wildcardinfo {
77 	struct node *nptop;		/* event node fed to vmatch */
78 	struct node *oldepname;		/* epname without the wildcard part */
79 	enum status {
80 		WC_UNDEFINED,		/* struct is not yet initialized */
81 		WC_UNDERCONSTRUCTION,	/* wildcard path not yet done */
82 		WC_COMPLETE		/* wildcard path done and is in use */
83 	} s;
84 	struct wildcardpath {
85 		struct node *ewname;	/* wildcard path */
86 		struct config *cpstart;	/* starting cp node for oldepname */
87 		int refcount;		/* number of event nodes using this */
88 	} *p;
89 	struct wildcardpath *matchwc;	/* ptr to wc path to be matched */
90 	struct wildcardinfo *next;
91 };
92 
93 static void vmatch(struct info *infop, struct node *np,
94     struct node *lnp, struct node *anp, struct wildcardinfo **wcproot);
95 static void hmatch(struct info *infop, struct node *np, struct node *nextnp);
96 static void itree_pbubble(int flags, struct bubble *bp);
97 static void itree_destructor(void *left, void *right, void *arg);
98 static int itree_set_arrow_traits(struct arrow *ap, struct node *fromev,
99     struct node *toev, struct lut *ex);
100 static void itree_free_arrowlists(struct bubble *bubp, int arrows_too);
101 static void arrow_add_within(struct arrow *ap, struct node *xpr);
102 static struct arrow *itree_add_arrow(struct bubble *frombubblep,
103     struct bubble *tobubblep, struct node *apnode, struct node *fromevent,
104     struct node *toevent, struct lut *ex);
105 static struct constraintlist *itree_add_constraint(struct arrow *arrowp,
106     struct node *c);
107 static struct bubble *itree_add_bubble(struct event *eventp,
108     enum bubbletype btype, int nork, int gen);
109 static void itree_free_bubble(struct bubble *freeme);
110 static void itree_free_constraints(struct arrow *ap);
111 
112 /*
113  * the following struct contains the state we build up during
114  * vertical and horizontal expansion so that generate()
115  * has everything it needs to construct the appropriate arrows.
116  * after setting up the state by calling:
117  *	generate_arrownp()
118  *	generate_nork()
119  *	generate_new()
120  *	generate_from()
121  *	generate_to()
122  * the actual arrow generation is done by calling:
123  *	generate()
124  */
125 static struct {
126 	int generation;		/* generation number of arrow set */
127 	struct node *arrownp;	/* top-level parse tree for arrow */
128 	int n;			/* n value associated with arrow */
129 	int k;			/* k value associated with arrow */
130 	struct node *fromnp;	/* left-hand-side event in parse tree */
131 	struct node *tonp;	/* right-hand-side event in parse tree */
132 	struct event *frome;	/* left-hand-side event in instance tree */
133 	struct event *toe;	/* right-hand-side event in instance tree */
134 	struct bubble *frombp;	/* bubble arrow comes from */
135 	struct bubble *tobp;	/* bubble arrow goes to */
136 } G;
137 
138 static void
139 generate_arrownp(struct node *arrownp)
140 {
141 	G.arrownp = arrownp;
142 }
143 
144 static void
145 generate_nork(int n, int k)
146 {
147 	G.n = n;
148 	G.k = k;
149 }
150 
151 static void
152 generate_new(void)
153 {
154 	G.generation++;
155 }
156 
157 static void
158 generate_from(struct node *fromeventnp, struct event *fromevent)
159 {
160 	G.fromnp = fromeventnp;
161 	G.frome = fromevent;
162 
163 	out(O_ALTFP|O_VERB3|O_NONL, "from bubble on ");
164 	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.fromnp);
165 	out(O_ALTFP|O_VERB3, NULL);
166 
167 	G.frombp = itree_add_bubble(G.frome, B_FROM, G.n, 0);
168 }
169 
170 static void
171 generate_to(struct node *toeventnp, struct event *toevent)
172 {
173 	G.tonp = toeventnp;
174 	G.toe = toevent;
175 
176 	out(O_ALTFP|O_VERB3|O_NONL, "to bubble (gen %d) on ", G.generation);
177 	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.tonp);
178 	out(O_ALTFP|O_VERB3, NULL);
179 
180 	G.tobp = itree_add_bubble(G.toe, B_TO, G.k, G.generation);
181 }
182 
183 static void
184 generate(struct lut *ex)
185 {
186 	ASSERT(G.arrownp != NULL);
187 	ASSERT(G.fromnp != NULL);
188 	ASSERT(G.frome != NULL);
189 	ASSERT(G.frombp != NULL);
190 	ASSERT(G.tonp != NULL);
191 	ASSERT(G.toe != NULL);
192 	ASSERT(G.tobp != NULL);
193 
194 	out(O_ALTFP|O_VERB3|O_NONL, "        Arrow \"");
195 	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.fromnp);
196 	out(O_ALTFP|O_VERB3|O_NONL, "\" -> \"");
197 	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.tonp);
198 
199 	if (itree_add_arrow(G.frombp, G.tobp, G.arrownp,
200 	    G.fromnp, G.tonp, ex) == NULL) {
201 		out(O_ALTFP|O_VERB3, "\" (prevented by constraints)");
202 	} else {
203 		out(O_ALTFP|O_VERB3, "\"");
204 	}
205 }
206 
207 enum childnode_action {
208 	CN_NONE,
209 	CN_INSTANTIZE,
210 	CN_DUP
211 };
212 
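/*
 * tname_dup -- duplicate a T_NAME (pathname) list.
 *
 * the act argument controls how each component's child is handled:
 * CN_NONE does not fill in a child, CN_DUP copies the existing child
 * (a T_NUM or T_NAME), and CN_INSTANTIZE creates a T_NUM child holding
 * the instance number, taking it from the matched config node when the
 * original component has no explicit number.
 */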
213 static struct node *
214 tname_dup(struct node *namep, enum childnode_action act)
215 {
216 	struct node *retp = NULL;
217 	const char *file;
218 	int line;
219 
220 	if (namep == NULL)
221 		return (NULL);
222 
223 	file = namep->file;
224 	line = namep->line;
225 
226 	for (; namep != NULL; namep = namep->u.name.next) {
227 		struct node *newnp = newnode(T_NAME, file, line);
228 
229 		newnp->u.name.t = namep->u.name.t;
230 		newnp->u.name.s = namep->u.name.s;
231 		newnp->u.name.last = newnp;
232 		newnp->u.name.it = namep->u.name.it;
233 		newnp->u.name.cp = namep->u.name.cp;
234 
235 		if (act == CN_DUP) {
236 			struct node *npc;
237 
238 			npc = namep->u.name.child;
239 			if (npc != NULL) {
240 				switch (npc->t) {
241 				case T_NUM:
242 					newnp->u.name.child =
243 						newnode(T_NUM, file, line);
244 					newnp->u.name.child->u.ull =
245 						npc->u.ull;
246 					break;
247 				case T_NAME:
248 					newnp->u.name.child =
249 						tree_name(npc->u.name.s,
250 							npc->u.name.it,
251 							file, line);
252 					break;
253 				default:
254 					out(O_DIE, "tname_dup: "
255 					    "invalid child type %s",
256 					    ptree_nodetype2str(npc->t));
257 				}
258 			}
259 		} else if (act == CN_INSTANTIZE) {
260 			newnp->u.name.child = newnode(T_NUM, file, line);
261 
262 			if (namep->u.name.child == NULL ||
263 			    namep->u.name.child->t != T_NUM) {
264 				int inum;
265 
266 				ASSERT(newnp->u.name.cp != NULL);
267 				config_getcompname(newnp->u.name.cp,
268 						    NULL, &inum);
269 				newnp->u.name.child->u.ull =
270 					(unsigned long long)inum;
271 			} else {
272 				newnp->u.name.child->u.ull =
273 					namep->u.name.child->u.ull;
274 			}
275 		}
276 
277 		if (retp == NULL) {
278 			retp = newnp;
279 		} else {
280 			retp->u.name.last->u.name.next = newnp;
281 			retp->u.name.last = newnp;
282 		}
283 	}
284 
285 	return (retp);
286 }
287 
288 struct prop_wlk_data {
289 	struct lut *props;
290 	struct node *epname;
291 };
292 
293 static struct lut *props2instance(struct node *, struct node *);
294 
295 /*
296  * let oldepname be a subset of epname.  return the subsection of epname
297  * that ends with oldepname.  make each component in the path explicitly
298  * instanced (i.e., with a T_NUM child).
299  */
300 static struct node *
301 tname_dup_to_epname(struct node *oldepname, struct node *epname)
302 {
303 	struct node *npref, *npend, *np1, *np2;
304 	struct node *ret = NULL;
305 	int foundmatch = 0;
306 
307 	if (epname == NULL)
308 		return (NULL);
309 
310 	/*
311 	 * search for the longest path in epname which contains
312 	 * oldepname.  set npend to point to just past the
313 	 * end of this path.
314 	 */
315 	npend = NULL;
316 	for (npref = epname; npref; npref = npref->u.name.next) {
317 		if (npref->u.name.s == oldepname->u.name.s) {
318 			for (np1 = npref, np2 = oldepname;
319 			    np1 != NULL && np2 != NULL;
320 			    np1 = np1->u.name.next, np2 = np2->u.name.next) {
321 				if (np1->u.name.s != np2->u.name.s)
322 					break;
323 			}
324 			if (np2 == NULL) {
325 				foundmatch = 1;
326 				npend = np1;
327 				if (np1 == NULL) {
328 					/* oldepname matched npref up to end */
329 					break;
330 				}
331 			}
332 		}
333 	}
334 
335 	if (foundmatch == 0) {
336 		/*
337 		 * if oldepname could not be found in epname, return a
338 		 * duplicate of the former.  do not try to instantize
339 		 * oldepname since it might not be a path.
340 		 */
341 		return (tname_dup(oldepname, CN_DUP));
342 	}
343 
344 	/*
345 	 * dup (epname -- npend).  all children should be T_NUMs.
346 	 */
347 	for (npref = epname;
348 	    ! (npref == NULL || npref == npend);
349 	    npref = npref->u.name.next) {
350 		struct node *newnp = newnode(T_NAME, oldepname->file,
351 					    oldepname->line);
352 
353 		newnp->u.name.t = npref->u.name.t;
354 		newnp->u.name.s = npref->u.name.s;
355 		newnp->u.name.last = newnp;
356 		newnp->u.name.it = npref->u.name.it;
357 		newnp->u.name.cp = npref->u.name.cp;
358 
359 		newnp->u.name.child = newnode(T_NUM, oldepname->file,
360 					    oldepname->line);
361 
362 		if (npref->u.name.child == NULL ||
363 		    npref->u.name.child->t != T_NUM) {
364 			int childnum;
365 
366 			ASSERT(npref->u.name.cp != NULL);
367 			config_getcompname(npref->u.name.cp, NULL, &childnum);
368 			newnp->u.name.child->u.ull = childnum;
369 		} else {
370 			newnp->u.name.child->u.ull =
371 				npref->u.name.child->u.ull;
372 		}
373 
374 		if (ret == NULL) {
375 			ret = newnp;
376 		} else {
377 			ret->u.name.last->u.name.next = newnp;
378 			ret->u.name.last = newnp;
379 		}
380 	}
381 
382 	return (ret);
383 }
384 
385 /*
386  * restriction: oldnode->u.event.epname has to be equivalent to or a subset
387  * of epname
388  */
389 static struct node *
390 tevent_dup_to_epname(struct node *oldnode, struct node *epname)
391 {
392 	struct node *ret;
393 
394 	ret = newnode(T_EVENT, oldnode->file, oldnode->line);
395 	ret->u.event.ename = tname_dup(oldnode->u.event.ename, CN_NONE);
396 	ret->u.event.epname = tname_dup_to_epname(oldnode->u.event.epname,
397 						    epname);
398 	return (ret);
399 }
400 
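/*
 * nv_instantiate -- lut_walk callback that copies one property from an
 * event declaration into the per-instance property lut, rewriting any
 * names and events it contains relative to pd->epname.  engine
 * properties (serd engines) are instantized recursively, including
 * their own property luts.
 */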
401 static void
402 nv_instantiate(void *name, void *val, void *arg)
403 {
404 	struct prop_wlk_data *pd = (struct prop_wlk_data *)arg;
405 	struct node *orhs = (struct node *)val;
406 	struct node *nrhs;
407 
408 	/* handle engines by instantizing the entire engine */
409 	if (name == L_engine) {
410 		ASSERT(orhs->t == T_EVENT);
411 		ASSERT(orhs->u.event.ename->u.name.t == N_SERD);
412 
413 		/* there are only SERD engines for now */
414 
415 		nrhs = newnode(T_SERD, orhs->file, orhs->line);
416 		nrhs->u.stmt.np = tevent_dup_to_epname(orhs, pd->epname);
417 		nrhs->u.stmt.lutp = props2instance(orhs, pd->epname);
418 		pd->props = lut_add(pd->props, name, nrhs, NULL);
419 		return;
420 	}
421 
422 	switch (orhs->t) {
423 	case T_NUM:
424 		nrhs = newnode(T_NUM, orhs->file, orhs->line);
425 		nrhs->u.ull = orhs->u.ull;
426 		pd->props = lut_add(pd->props, name, nrhs, NULL);
427 		break;
428 	case T_TIMEVAL:
429 		nrhs = newnode(T_TIMEVAL, orhs->file, orhs->line);
430 		nrhs->u.ull = orhs->u.ull;
431 		pd->props = lut_add(pd->props, name, nrhs, NULL);
432 		break;
433 	case T_NAME:
434 		nrhs = tname_dup_to_epname(orhs, pd->epname);
435 		pd->props = lut_add(pd->props, name, nrhs, NULL);
436 		break;
437 	case T_EVENT:
438 		nrhs = tevent_dup_to_epname(orhs, pd->epname);
439 		pd->props = lut_add(pd->props, name, nrhs, NULL);
440 		break;
441 	default:
442 		out(O_DEBUG, "unexpected nvpair value type %s",
443 		    ptree_nodetype2str(((struct node *)val)->t));
444 	}
445 }
446 
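/*
 * props2instance -- build the instance-specific property lut for an
 * event by walking the property lut on its declaration and instantizing
 * each property against epname.
 */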
447 static struct lut *
448 props2instance(struct node *eventnp, struct node *epname)
449 {
450 	struct prop_wlk_data pd;
451 
452 	pd.props = NULL;
453 	pd.epname = epname;
454 
455 	ASSERT(eventnp->u.event.declp != NULL);
456 	lut_walk(eventnp->u.event.declp->u.stmt.lutp, nv_instantiate, &pd);
457 	return (pd.props);
458 }
459 
460 /*ARGSUSED*/
461 static void
462 instances_destructor(void *left, void *right, void *arg)
463 {
464 	struct node *dn = (struct node *)right;
465 
466 	if (dn->t == T_SERD) {
467 		/* we allocated the lut during itree_create(), so free it */
468 		lut_free(dn->u.stmt.lutp, instances_destructor, NULL);
469 		dn->u.stmt.lutp = NULL;
470 	}
471 	tree_free(dn);
472 }
473 
474 /*
475  * event_cmp -- used via lut_lookup/lut_add on instance tree lut
476  */
477 static int
478 event_cmp(struct event *ep1, struct event *ep2)
479 {
480 	int diff;
481 
482 	if ((diff = ep2->enode->u.event.ename->u.name.s -
483 	    ep1->enode->u.event.ename->u.name.s) != 0)
484 		return (diff);
485 	if ((diff = (char *)ep2->ipp - (char *)ep1->ipp) != 0)
486 		return (diff);
487 	return (0);
488 
489 }
490 
491 struct event *
492 itree_lookup(struct lut *itp, const char *ename, const struct ipath *ipp)
493 {
494 	struct event searchevent;	/* just used for searching */
495 	struct node searcheventnode;
496 	struct node searchenamenode;
497 
498 	searchevent.enode = &searcheventnode;
499 	searcheventnode.t = T_EVENT;
500 	searcheventnode.u.event.ename = &searchenamenode;
501 	searchenamenode.t = T_NAME;
502 	searchenamenode.u.name.s = ename;
503 	searchevent.ipp = ipp;
504 	return (lut_lookup(itp, (void *)&searchevent, (lut_cmp)event_cmp));
505 }
506 
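/*
 * find_or_add_event -- look up an event in the instance tree lut by
 * ename and instanced pathname; if it isn't there yet, allocate it,
 * attach its instantized properties, and add it to the lut.
 */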
507 static struct event *
508 find_or_add_event(struct info *infop, struct node *np)
509 {
510 	struct event *ret;
511 	struct event searchevent;	/* just used for searching */
512 
513 	ASSERTeq(np->t, T_EVENT, ptree_nodetype2str);
514 
515 	searchevent.enode = np;
516 	searchevent.ipp = ipath(np->u.event.epname);
517 	if ((ret = lut_lookup(infop->lut, (void *)&searchevent,
518 	    (lut_cmp)event_cmp)) != NULL)
519 		return (ret);
520 
521 	/* wasn't already in tree, allocate it */
522 	ret = MALLOC(sizeof (*ret));
523 	bzero(ret, sizeof (*ret));
524 
525 	ret->t = np->u.event.ename->u.name.t;
526 	ret->enode = np;
527 	ret->ipp = searchevent.ipp;
528 	ret->props = props2instance(np, np->u.event.epname);
529 
530 	infop->lut = lut_add(infop->lut, (void *)ret, (void *)ret,
531 	    (lut_cmp)event_cmp);
532 
533 	return (ret);
534 }
535 
536 /*
537  * hmatch_event -- perform any appropriate horizontal expansion on an event
538  *
539  * this routine is used to perform horizontal expansion on both the
540  * left-hand-side events in a prop, and the right-hand-side events.
541  * when called to handle a left-side event, nextnp points to the right
542  * side of the prop that should be passed to hmatch() for each match
543  * found by horizontal expansion.   when no horizontal expansion exists,
544  * we will still "match" one event for every event found in the list on
545  * the left-hand-side of the prop because vmatch() already found that
546  * there's at least one match during vertical expansion.
547  */
548 static void
549 hmatch_event(struct info *infop, struct node *eventnp, struct node *epname,
550     struct config *ncp, struct node *nextnp, int rematch)
551 {
552 	if (epname == NULL) {
553 		/*
554 		 * end of pathname recursion, either we just located
555 		 * a left-hand-side event and we're ready to move on
556 		 * to expanding the right-hand-side events, or
557 		 * we're further down the recursion and we just located
558 		 * a right-hand-side event.  the passed-in parameter
559 		 * "nextnp" tells us whether we're working on the left
560 		 * side and need to move on to nextnp, or if nextnp is
561 		 * NULL, we're working on the right side.
562 		 */
563 		if (nextnp) {
564 			/*
565 			 * finished a left side expansion, move on to right.
566 			 * tell generate() what event we just matched so
567 			 * it can be used at the source of any arrows
568 			 * we generate as we match events on the right side.
569 			 */
570 			generate_from(eventnp,
571 			    find_or_add_event(infop, eventnp));
572 			hmatch(infop, nextnp, NULL);
573 		} else {
574 			/*
575 			 * finished a right side expansion.  tell generate
576 			 * the information about the destination and let
577 			 * it construct the arrows as appropriate.
578 			 */
579 			generate_to(eventnp,
580 			    find_or_add_event(infop, eventnp));
581 			generate(infop->ex);
582 		}
583 
584 		return;
585 	}
586 
587 	ASSERTeq(epname->t, T_NAME, ptree_nodetype2str);
588 
589 	/*
590 	 * we only get here when eventnp already has a completely
591 	 * instanced epname in it.  so we first recurse
592 	 * down to the end of the name and as the recursion pops
593 	 * up, we look for opportunities to advance horizontal
594 	 * expansions on to the next match.  when we do advance
595 	 * horizontal expansions, we potentially render all cp
596 	 * pointers on all components to the right as invalid,
597 	 * so we pass in an "ncp" config handle so matching against
598 	 * the config can happen.
599 	 */
600 	if (rematch) {
601 		struct config *ocp = epname->u.name.cp;
602 		char *ncp_s;
603 		int ncp_num, num;
604 
605 		for (; ncp; ncp = config_next(ncp)) {
606 			config_getcompname(ncp, &ncp_s, &ncp_num);
607 
608 			if (ncp_s == epname->u.name.s) {
609 				/* found a matching component name */
610 				config_getcompname(epname->u.name.cp,
611 				    NULL, &num);
612 
613 				if (epname->u.name.it != IT_HORIZONTAL &&
614 				    ncp_num != num)
615 					continue;
616 
617 				epname->u.name.cp = ncp;
618 				hmatch_event(infop, eventnp,
619 				    epname->u.name.next, config_child(ncp),
620 				    nextnp, 1);
621 			}
622 		}
623 
624 		epname->u.name.cp = ocp;
625 
626 		return;		/* no more config to match against */
627 
628 	} else {
629 		hmatch_event(infop, eventnp, epname->u.name.next, ncp,
630 		    nextnp, 0);
631 	}
632 
633 	if (epname->u.name.it == IT_HORIZONTAL) {
634 		struct config *cp;
635 		struct config *ocp = epname->u.name.cp;
636 		char *cp_s;
637 		int cp_num;
638 		int ocp_num;
639 		struct iterinfo *iterinfop = NULL;
640 		const char *iters;
641 
642 		config_getcompname(ocp, NULL, &ocp_num);
643 
644 		for (cp = config_next(ocp); cp; cp = config_next(cp)) {
645 			config_getcompname(cp, &cp_s, &cp_num);
646 
647 			if (cp_s == epname->u.name.s) {
648 				ASSERT(epname->u.name.child != NULL);
649 
650 				iters = epname->u.name.child->u.name.s;
651 				if ((iterinfop = lut_lookup(infop->ex,
652 				    (void *)iters, NULL)) == NULL) {
653 					out(O_DIE,
654 					    "hmatch_event: internal error: "
655 					    "iterator \"%s\" undefined", iters);
656 				} else {
657 					/* advance dict entry to next match */
658 					iterinfop->num = cp_num;
659 				}
660 				epname->u.name.cp = cp;
661 				hmatch_event(infop, eventnp,
662 				    epname->u.name.next, config_child(cp),
663 				    nextnp, 1);
664 			}
665 		}
666 
667 		if (iterinfop != NULL) {
668 			/* restore dict entry */
669 			iterinfop->num = ocp_num;
670 		}
671 		epname->u.name.cp = ocp;
672 	}
673 }
674 
675 /*
676  * hmatch -- check for horizontal expansion matches
677  *
678  * np points to the things we're matching (like a T_LIST or a T_EVENT)
679  * and if we're working on a left-side of a prop, nextnp points to
680  * the other side of the prop that we'll tackle next when this recursion
681  * bottoms out.  when all the events in the entire prop arrow have been
682  * horizontally expanded, generate() will be called to generate the
683  * actual arrow.
684  */
685 static void
686 hmatch(struct info *infop, struct node *np, struct node *nextnp)
687 {
688 	if (np == NULL)
689 		return;		/* all done */
690 
691 	/*
692 	 * for each item in the list of events (which could just
693 	 * be a single event, or it could get larger in the loop
694 	 * below due to horizontal expansion), call hmatch on
695 	 * the right side and create arrows to each element.
696 	 */
697 
698 	switch (np->t) {
699 	case T_LIST:
700 		/* loop through the list */
701 		if (np->u.expr.left)
702 			hmatch(infop, np->u.expr.left, nextnp);
703 		if (np->u.expr.right)
704 			hmatch(infop, np->u.expr.right, nextnp);
705 		break;
706 
707 	case T_EVENT:
708 		hmatch_event(infop, np, np->u.event.epname,
709 		    NULL, nextnp, 0);
710 		break;
711 
712 	default:
713 		outfl(O_DIE, np->file, np->line,
714 		    "hmatch: unexpected type: %s",
715 		    ptree_nodetype2str(np->t));
716 	}
717 }
718 
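/*
 * itree_np2nork -- convert the parse tree for an N or K value on an
 * arrow into an integer: NULL means 1, the name "A" means all (our
 * internal value -1), and a number means itself.
 */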
719 static int
720 itree_np2nork(struct node *norknp)
721 {
722 	if (norknp == NULL)
723 		return (1);
724 	else if (norknp->t == T_NAME && norknp->u.name.s == L_A)
725 		return (-1);	/* our internal version of "all" */
726 	else if (norknp->t == T_NUM)
727 		return ((int)norknp->u.ull);
728 	else
729 		outfl(O_DIE, norknp->file, norknp->line,
730 		    "itree_np2nork: internal error type %s",
731 		    ptree_nodetype2str(norknp->t));
732 	/*NOTREACHED*/
733 }
734 
735 static struct iterinfo *
736 newiterinfo(int num, struct node *np)
737 {
738 	struct iterinfo *ret = MALLOC(sizeof (*ret));
739 
740 	ret->num = num;
741 	ret->np = np;
742 
743 	return (ret);
744 }
745 
746 /*ARGSUSED*/
747 static void
748 iterinfo_destructor(void *left, void *right, void *arg)
749 {
750 	struct iterinfo *iterinfop = (struct iterinfo *)right;
751 
752 	bzero(iterinfop, sizeof (*iterinfop));
753 	FREE(iterinfop);
754 }
755 
756 /*
757  * return 1 if the wildcard path for wcp matches the wildcard path in
758  * wcp->matchwc; return 0 otherwise.
759  */
760 static int
761 wc_paths_match(struct wildcardinfo *wcp)
762 {
763 	struct node *np1, *np2;
764 
765 	ASSERT(wcp->matchwc != NULL);
766 
767 	for (np1 = wcp->p->ewname, np2 = wcp->matchwc->ewname;
768 	    np1 != NULL && np2 != NULL;
769 	    np1 = np1->u.name.next, np2 = np2->u.name.next) {
770 		/*
771 		 * names must match
772 		 */
773 		if (np1->u.name.s != np2->u.name.s)
774 			return (0);
775 
776 		/*
777 		 * children must exist and have the same numerical value
778 		 */
779 		if (np1->u.name.child == NULL || np2->u.name.child == NULL)
780 			return (0);
781 
782 		if (np1->u.name.child->t != T_NUM ||
783 		    np2->u.name.child->t != T_NUM)
784 			return (0);
785 
786 		if (np1->u.name.child->u.ull != np2->u.name.child->u.ull)
787 			return (0);
788 	}
789 
790 	/*
791 	 * return true only if we have matches for all entries of np1 and
792 	 * np2.  note that NULL wildcard paths (i.e., both wcp->p->ewname
793 	 * and wcp->matchwc->ewname are NULL) will be considered as
794 	 * matching paths.
795 	 */
796 	if (np1 == NULL && np2 == NULL)
797 		return (1);
798 
799 	return (0);
800 }
801 
802 /*
803  * update epname to include the wildcarded portion
804  */
805 static void
806 create_wildcardedpath(struct wildcardinfo **wcproot)
807 {
808 	struct wildcardinfo *wcp;
809 	struct node *nptop;
810 
811 	wcp = *wcproot;
812 
813 	if (wcp->s == WC_UNDERCONSTRUCTION) {
814 		ASSERT(wcp->p->refcount == 1);
815 		wcp->s = WC_COMPLETE;
816 	}
817 
818 	/* path has no wildcard */
819 	if (wcp->p->ewname == NULL)
820 		return;
821 
822 	/*
823 	 * get to this point if a wildcard portion of the path exists.
824 	 *
825 	 * first set oldepname to the start of the existing epname for use
826 	 * in future comparisons, then update epname to include the
827 	 * wildcard portion.
828 	 */
829 	nptop = wcp->nptop;
830 
831 	ASSERT(wcp->oldepname == nptop->u.event.epname);
832 
833 	nptop->u.event.epname =	tname_dup(wcp->p->ewname, CN_DUP);
834 	nptop->u.event.epname = tree_name_append(nptop->u.event.epname,
835 					tname_dup(wcp->oldepname, CN_DUP));
836 }
837 
838 /*
839  * restore epname to its former (nonwildcarded) state
840  */
841 static void
842 undo_wildcardedpath(struct wildcardinfo **wcproot)
843 {
844 	struct wildcardinfo *wcp;
845 
846 	wcp = *wcproot;
847 
848 	if (wcp->s == WC_COMPLETE) {
849 		ASSERT(wcp->p->refcount == 1);
850 		wcp->s = WC_UNDERCONSTRUCTION;
851 	}
852 
853 	/* path has no wildcard */
854 	if (wcp->p->ewname == NULL)
855 		return;
856 
857 	ASSERT(wcp->oldepname != NULL);
858 
859 	tree_free(wcp->nptop->u.event.epname);
860 	wcp->nptop->u.event.epname = wcp->oldepname;
861 }
862 
863 enum wildcard_action {
864 	WA_NONE,	/* do not do any wildcards */
865 	WA_SINGLE,	/* do wildcard only for current cp node */
866 	WA_ALL		/* do wildcards for all cp nodes */
867 };
868 
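/*
 * vmatch_event -- match the next component of an event's pathname
 * against the config database.
 *
 * np is the current T_NAME component of the epname being matched and cp
 * is the config node to match it against; lnp and anp are simply passed
 * through to vmatch().  dowildcard controls whether config components
 * that don't match np may be absorbed into the wildcard path being
 * constructed in *wcproot (see enum wildcard_action above).  when the
 * entire pathname has been matched, the wildcarded path is spliced into
 * the event and vmatch() is called to continue with the rest of the
 * prop statement.
 */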
869 static void
870 vmatch_event(struct info *infop, struct config *cp, struct node *np,
871 	    struct node *lnp, struct node *anp,
872 	    struct wildcardinfo **wcproot, enum wildcard_action dowildcard)
873 {
874 	struct wildcardinfo *wcp;
875 	char *cp_s;
876 	int cp_num;
877 
878 	wcp = *wcproot;
879 
880 	if ((np == NULL && wcp->oldepname != NULL) ||
881 	    (cp == NULL && wcp->oldepname == NULL)) {
882 		/*
883 		 * get to this point if the pathname matched the config
884 		 * (but not necessarily a match at the end).  first check
885 		 * for any matching wildcard paths.
886 		 */
887 		if (wcp->matchwc != NULL && wc_paths_match(wcp) == 0)
888 			return;
889 
890 		create_wildcardedpath(wcproot);
891 		vmatch(infop, np, lnp, anp, wcproot);
892 		undo_wildcardedpath(wcproot);
893 
894 		return;
895 	}
896 
897 	if (cp == NULL)
898 		return;	/* no more config to match against */
899 
900 	for (; cp; cp = config_next(cp)) {
901 		config_getcompname(cp, &cp_s, &cp_num);
902 
903 		if (cp_s == np->u.name.s &&
904 		    ! (wcp->s == WC_UNDERCONSTRUCTION &&
905 		    dowildcard == WA_SINGLE)) {
906 			/* found a matching component name */
907 			if (np->u.name.child &&
908 			    np->u.name.child->t == T_NUM) {
909 				/*
910 				 * an explicit instance number was given
911 				 * in the source.  so only consider this
912 				 * a configuration match if the number
913 				 * also matches.
914 				 */
915 				if (cp_num != np->u.name.child->u.ull)
916 					continue;
917 
918 				np->u.name.cp = cp;
919 			} else {
920 				struct iterinfo *iterinfop;
921 				const char *iters;
922 
923 				/*
924 				 * vertical iterator.  look it up in
925 				 * the appropriate lut and if we get
926 				 * back a value it is either one that we
927 				 * set earlier, in which case we record
928 				 * the new value for this iteration and
929 				 * keep matching, or it is one that was
930 				 * set by an earlier reference to the
931 				 * iterator, in which case we only consider
932 				 * this a configuration match if the number
933 				 * matches cp_num.
934 				 */
935 
936 				ASSERT(np->u.name.child != NULL);
937 				ASSERT(np->u.name.child->t == T_NAME);
938 				iters = np->u.name.child->u.name.s;
939 
940 				if ((iterinfop = lut_lookup(infop->ex,
941 				    (void *)iters, NULL)) == NULL) {
942 					/* we're the first use, record our np */
943 					infop->ex = lut_add(infop->ex,
944 					    (void *)iters,
945 					    newiterinfo(cp_num, np), NULL);
946 				} else if (np == iterinfop->np) {
947 					/*
948 					 * we're the first use, back again
949 					 * for another iteration.  so update
950 					 * the num bound to this iterator in
951 					 * the lut.
952 					 */
953 					iterinfop->num = cp_num;
954 				} else if (cp_num != iterinfop->num) {
955 					/*
956 					 * an earlier reference to this
957 					 * iterator bound it to a different
958 					 * instance number, so there's no
959 					 * match here after all.
960 					 *
961 					 * however, it's possible that this
962 					 * component should really be part of
963 					 * the wildcard.  we explore this by
964 					 * forcing this component into the
965 					 * wildcarded section.
966 					 *
967 					 * for more details of what's
968 					 * going to happen now, see the
969 					 * comment block below entitled
970 					 * "forcing components into
971 					 * wildcard path".
972 					 */
973 					if (dowildcard == WA_ALL &&
974 					    wcp->s == WC_UNDERCONSTRUCTION) {
975 						vmatch_event(infop, cp, np,
976 							    lnp, anp, wcproot,
977 							    WA_SINGLE);
978 					}
979 					continue;
980 				}
981 				np->u.name.cp = cp;
982 			}
983 
984 			/*
985 			 * if wildcarding was done in a call earlier in the
986 			 * stack, record the current cp as the first
987 			 * matching and nonwildcarded cp.
988 			 */
989 			if (dowildcard == WA_ALL &&
990 			    wcp->s == WC_UNDERCONSTRUCTION)
991 				wcp->p->cpstart = cp;
992 
993 			/*
994 			 * if this was an IT_HORIZONTAL name,
995 			 * hmatch() will use the cp to expand
996 			 * all matches horizontally into a list.
997 			 * we know the list will contain at least
998 			 * one element (the one we just matched),
999 			 * so we just store cp and let hmatch_event()
1000 			 * do the rest.
1001 			 *
1002 			 * recurse on to next component.  note that
1003 			 * wildcarding is now turned off.
1004 			 */
1005 			vmatch_event(infop, config_child(cp), np->u.name.next,
1006 				    lnp, anp, wcproot, WA_NONE);
1007 
1008 			/*
1009 			 * forcing components into wildcard path:
1010 			 *
1011 			 * if this component is the first match, force it
1012 			 * to be part of the wildcarded path and see if we
1013 			 * can get additional matches.  repeat call to
1014 			 * vmatch_event() with the same np, making sure
1015 			 * wildcarding is forced for this component alone
1016 			 * and not its peers by specifying vmatch_event(
1017 			 * ..., WA_SINGLE).  in other words, in the call to
1018 			 * vmatch_event() below, there should be no loop
1019 			 * over cp's peers since that is being done in the
1020 			 * current loop [i.e., the loop we're in now].
1021 			 *
1022 			 * here's an example.  suppose we have the
1023 			 * definition
1024 			 *	event foo@x/y
1025 			 * and configuration
1026 			 *	a0/x0/y0/a1/x1/y1
1027 			 *
1028 			 * the code up to this point will treat "a0" as the
1029 			 * wildcarded part of the path and "x0/y0" as the
1030 			 * nonwildcarded part, resulting in the instanced
1031 			 * event
1032 			 *	foo@a0/x0/y0
1033 			 *
1034 			 * in order to discover the next match (.../x1/y1)
1035 			 * in the configuration we have to force "x0" into
1036 			 * the wildcarded part of the path.  the following
1037 			 * call to vmatch_event(..., WA_SINGLE) does this.
1038 			 * by doing so, we discover the wildcarded part
1039 			 * "a0/x0/y0/a1" and the nonwildcarded part "x1/y1"
1040 			 *
1041 			 * the following call to vmatch_event() is also
1042 			 * needed to properly handle the configuration
1043 			 *	b0/x0/b1/x1/y1
1044 			 *
1045 			 * the recursions into vmatch_event() will start
1046 			 * off uncovering "b0" as the wildcarded part and
1047 			 * "x0" as the start of the nonwildcarded path.
1048 			 * however, the next recursion will not result in a
1049 			 * match since there is no "y" following "x0".  the
1050 			 * subsequent match of (wildcard = "b0/x0/b1" and
1051 			 * nonwildcard = "x1/y1") will be discovered only
1052 			 * if "x0" is forced to be a part of the wildcarded
1053 			 * path.
1054 			 */
1055 			if (dowildcard == WA_ALL &&
1056 			    wcp->s == WC_UNDERCONSTRUCTION) {
1057 				vmatch_event(infop, cp, np, lnp, anp,
1058 					    wcproot, WA_SINGLE);
1059 			}
1060 
1061 			if (np->u.name.it == IT_HORIZONTAL) {
1062 				/*
1063 				 * hmatch() finished iterating through
1064 				 * the configuration as described above, so
1065 				 * don't continue iterating here.
1066 				 */
1067 				return;
1068 			}
1069 
1070 		} else if ((dowildcard == WA_SINGLE || dowildcard == WA_ALL) &&
1071 			    wcp->s == WC_UNDERCONSTRUCTION) {
1072 			/*
1073 			 * no matching cp, and we are constructing our own
1074 			 * wildcard path.  (in other words, we are not
1075 			 * referencing a wildcard path created for an
1076 			 * earlier event.)
1077 			 *
1078 			 * add wildcard entry, then recurse on to config
1079 			 * child
1080 			 */
1081 			struct node *cpnode, *prevlast;
1082 
1083 			cpnode = tree_name(cp_s, IT_NONE, NULL, 0);
1084 			cpnode->u.name.child = newnode(T_NUM, NULL, 0);
1085 			cpnode->u.name.child->u.ull = cp_num;
1086 			cpnode->u.name.cp = cp;
1087 
1088 			if (wcp->p->ewname == NULL) {
1089 				prevlast = NULL;
1090 				wcp->p->ewname = cpnode;
1091 			} else {
1092 				prevlast = wcp->p->ewname->u.name.last;
1093 				wcp->p->ewname =
1094 					tree_name_append(wcp->p->ewname,
1095 							    cpnode);
1096 			}
1097 
1098 			vmatch_event(infop, config_child(cp), np, lnp, anp,
1099 				    wcproot, WA_ALL);
1100 
1101 			/*
1102 			 * back out last addition to ewname and continue
1103 			 * with loop
1104 			 */
1105 			tree_free(cpnode);
1106 			if (prevlast == NULL) {
1107 				wcp->p->ewname = NULL;
1108 			} else {
1109 				prevlast->u.name.next = NULL;
1110 				wcp->p->ewname->u.name.last = prevlast;
1111 			}
1112 
1113 			/*
1114 			 * return if wildcarding is done only for this cp
1115 			 */
1116 			if (dowildcard == WA_SINGLE)
1117 				return;
1118 		}
1119 	}
1120 }
1121 
1122 /*
1123  * for the event node np, which will be subjected to pathname
1124  * expansion/matching, create a (struct wildcardinfo) to hold wildcard
1125  * information.  this struct will be inserted into the first location in
1126  * the list that starts with *wcproot.
1127  *
1128  * cp is the starting node of the configuration; cpstart, which is output,
1129  * is the starting node of the nonwildcarded portion of the path.
1130  */
1131 static void
1132 add_wildcardentry(struct wildcardinfo **wcproot, struct config *cp,
1133 		struct node *np)
1134 {
1135 	struct wildcardinfo *wcpnew, *wcp;
1136 	struct node *np1, *np2;
1137 
1138 	/*
1139 	 * create entry for np
1140 	 */
1141 	wcpnew = MALLOC(sizeof (struct wildcardinfo));
1142 	bzero(wcpnew, sizeof (struct wildcardinfo));
1143 	wcpnew->nptop = np;
1144 	wcpnew->oldepname = np->u.event.epname;
1145 	wcpnew->s = WC_UNDERCONSTRUCTION;
1146 
1147 	wcpnew->p = MALLOC(sizeof (struct wildcardpath));
1148 	bzero(wcpnew->p, sizeof (struct wildcardpath));
1149 	wcpnew->p->cpstart = cp;
1150 	wcpnew->p->refcount = 1;
1151 
1152 	/*
1153 	 * search all completed entries for an epname whose first entry
1154 	 * matches.  note that NULL epnames are considered valid and can be
1155 	 * matched.
1156 	 */
1157 	np2 = wcpnew->oldepname;
1158 	for (wcp = *wcproot; wcp; wcp = wcp->next) {
1159 		ASSERT(wcp->s == WC_COMPLETE);
1160 
1161 		np1 = wcp->oldepname;
1162 		if ((np1 && np2 && np1->u.name.s == np2->u.name.s) ||
1163 		    (np1 == NULL && np2 == NULL)) {
1164 			/*
1165 			 * if we find a match in a completed entry, set
1166 			 * matchwc to indicate that we would like to match
1167 			 * it.  it is necessary to do this since wildcards
1168 			 * for each event are constructed independently.
1169 			 */
1170 			wcpnew->matchwc = wcp->p;
1171 
1172 			wcp->p->refcount++;
1173 			break;
1174 		}
1175 	}
1176 
1177 	wcpnew->next = *wcproot;
1178 	*wcproot = wcpnew;
1179 }
1180 
1181 static void
1182 delete_wildcardentry(struct wildcardinfo **wcproot)
1183 {
1184 	struct wildcardinfo *wcp;
1185 
1186 	wcp = *wcproot;
1187 	*wcproot = wcp->next;
1188 
1189 	switch (wcp->s) {
1190 	case WC_UNDERCONSTRUCTION:
1191 	case WC_COMPLETE:
1192 		if (wcp->matchwc != NULL)
1193 			wcp->matchwc->refcount--;
1194 
1195 		ASSERT(wcp->p->refcount == 1);
1196 		tree_free(wcp->p->ewname);
1197 		FREE(wcp->p);
1198 		break;
1199 
1200 	default:
1201 		out(O_DIE, "deletewc: invalid status");
1202 		break;
1203 	}
1204 
1205 	FREE(wcp);
1206 }
1207 
1208 /*
1209  * vmatch -- find the next vertical expansion match in the config database
1210  *
1211  * this routine is called with three node pointers:
1212  *	 np -- the parse tree we're matching
1213  *	lnp -- the rest of the list we're currently working on
1214  *	anp -- the rest of the arrow we're currently working on
1215  *
1216  * the expansion matching happens via three types of recursion:
1217  *
1218  *	- when given an arrow, handle the left-side and then recursively
1219  *	  handle the right side (which might be another cascaded arrow).
1220  *
1221  *	- when handling one side of an arrow, recurse through the T_LIST
1222  *	  to get to each event (or just move on to the event if there
1223  *	  is a single event instead of a list).  since the arrow parse
1224  *	  trees recurse left, we actually start with the right-most
1225  *	  event list in the prop statement and work our way towards
1226  *	  the left-most event list.
1227  *
1228  *	- when handling an event, recurse down each component of the
1229  *	  pathname, matching in the config database and recording the
1230  *	  matches in the explicit iterator dictionary as we go.
1231  *
1232  * when the bottom of this matching recursion is met, meaning we have
1233  * set the "cp" pointers on all the names in the entire statement,
1234  * we call hmatch(), which does its own recursion to handle horizontal
1235  * expansion, and then call generate() to generate nodes, bubbles, and
1236  * arrows in the instance tree.  generate() looks at the cp pointers to
1237  * see what instance numbers were matched in the configuration database.
1238  *
1239  * when horizontal expansion appears, vmatch() finds only the first match
1240  * and hmatch() then takes the horizontal expansion through all the other
1241  * matches when generating the arrows in the instance tree.
1242  *
1243  * the "infop" passed down through the recursion contains a dictionary
1244  * of the explicit iterators (all the implicit iterators have been converted
1245  * to explicit iterators when the parse tree was created by tree.c), which
1246  * allows things like this to work correctly:
1247  *
1248  *	prop error.a@x[n]/y/z -> error.b@x/y[n]/z -> error.c@x/y/z[n];
1249  *
1250  * during the top level call, the explicit iterator "n" will match an
1251  * instance number in the config database, and the result will be recorded
1252  * in the explicit iterator dictionary and passed down via "infop".  so
1253  * when the recursive call tries to match y[n] in the config database, it
1254  * will only match the same instance number as x[n] did since the dictionary
1255  * is consulted to see if "n" took on a value already.
1256  *
1257  * at any point during the recursion, match*() can return to indicate
1258  * a match was not found in the config database and that the caller should
1259  * move on to the next potential match, if any.
1260  *
1261  * constraints are completely ignored during matching, so the statement:
1262  *
1263  *	prop error.a@x[n] -> error.b@x[n] {n != 0};
1264  *
1265  * might very well match x[0] if it appears in the config database.  it
1266  * is the generate() routine that takes that match and then decides what
1267  * arrow, if any, should be generated in the instance tree.  generate()
1268  * looks at the explicit iterator dictionary to get values like "n" in
1269  * the above example so that it can evaluate constraints.
1270  *
1271  */
1272 static void
1273 vmatch(struct info *infop, struct node *np, struct node *lnp,
1274     struct node *anp, struct wildcardinfo **wcproot)
1275 {
1276 	if (np == NULL) {
1277 		if (lnp)
1278 			vmatch(infop, lnp, NULL, anp, wcproot);
1279 		else if (anp)
1280 			vmatch(infop, anp, NULL, NULL, wcproot);
1281 		else {
1282 			struct node *src;
1283 			struct node *dst;
1284 
1285 			/* end of vertical match recursion */
1286 			outfl(O_ALTFP|O_VERB3|O_NONL,
1287 			    infop->anp->file, infop->anp->line, "vmatch: ");
1288 			ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, infop->anp);
1289 			out(O_ALTFP|O_VERB3, NULL);
1290 
1291 			generate_nork(
1292 			    itree_np2nork(infop->anp->u.arrow.nnp),
1293 			    itree_np2nork(infop->anp->u.arrow.knp));
1294 			dst = infop->anp->u.arrow.rhs;
1295 			src = infop->anp->u.arrow.lhs;
1296 			for (;;) {
1297 				generate_new();	/* new set of arrows */
1298 				if (src->t == T_ARROW) {
1299 					hmatch(infop, src->u.arrow.rhs, dst);
1300 					generate_nork(
1301 					    itree_np2nork(src->u.arrow.nnp),
1302 					    itree_np2nork(src->u.arrow.knp));
1303 					dst = src->u.arrow.rhs;
1304 					src = src->u.arrow.lhs;
1305 				} else {
1306 					hmatch(infop, src, dst);
1307 					break;
1308 				}
1309 			}
1310 		}
1311 		return;
1312 	}
1313 
1314 	switch (np->t) {
1315 	case T_EVENT: {
1316 		add_wildcardentry(wcproot, config_child(infop->croot), np);
1317 		vmatch_event(infop, config_child(infop->croot),
1318 			    np->u.event.epname, lnp, anp, wcproot, WA_ALL);
1319 		delete_wildcardentry(wcproot);
1320 		break;
1321 	}
1322 	case T_LIST:
1323 		ASSERT(lnp == NULL);
1324 		vmatch(infop, np->u.expr.right, np->u.expr.left, anp, wcproot);
1325 		break;
1326 
1327 	case T_ARROW:
1328 		ASSERT(lnp == NULL && anp == NULL);
1329 		vmatch(infop, np->u.arrow.rhs, NULL, np->u.arrow.lhs, wcproot);
1330 		break;
1331 
1332 	default:
1333 		outfl(O_DIE, np->file, np->line,
1334 		    "vmatch: unexpected type: %s",
1335 		    ptree_nodetype2str(np->t));
1336 	}
1337 }
1338 
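/*
 * cp_reset -- clear the config pointers (u.name.cp) that vmatch() and
 * hmatch() left behind in the parse tree so the next prop statement
 * starts with an unannotated tree.
 */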
1339 static void
1340 cp_reset(struct node *np)
1341 {
1342 	if (np == NULL)
1343 		return;
1344 	switch (np->t) {
1345 	case T_NAME:
1346 		np->u.name.cp = NULL;
1347 		cp_reset(np->u.name.next);
1348 		break;
1349 
1350 	case T_LIST:
1351 		cp_reset(np->u.expr.left);
1352 		cp_reset(np->u.expr.right);
1353 		break;
1354 
1355 	case T_ARROW:
1356 		cp_reset(np->u.arrow.lhs);
1357 		cp_reset(np->u.arrow.rhs);
1358 		break;
1359 
1360 	case T_EVENT:
1361 		cp_reset(np->u.event.epname);
1362 		break;
1363 	}
1364 }
1365 
1366 /*
1367  * itree_create -- apply the current config to the current parse tree
1368  *
1369  * returns a lut mapping fully-instance-qualified names to struct events.
1370  *
1371  */
1372 struct lut *
1373 itree_create(struct config *croot)
1374 {
1375 	struct lut *retval;
1376 	struct node *propnp;
1377 
1378 	Ninfo.lut = NULL;
1379 	Ninfo.croot = croot;
1380 	for (propnp = Props; propnp; propnp = propnp->u.stmt.next) {
1381 		struct node *anp = propnp->u.stmt.np;
1382 		struct wildcardinfo *wcproot = NULL;
1383 
1384 		ASSERTeq(anp->t, T_ARROW, ptree_nodetype2str);
1385 
1386 		Ninfo.anp = anp;
1387 		Ninfo.ex = NULL;
1388 
1389 		generate_arrownp(anp);
1390 		vmatch(&Ninfo, anp, NULL, NULL, &wcproot);
1391 
1392 		if (Ninfo.ex) {
1393 			lut_free(Ninfo.ex, iterinfo_destructor, NULL);
1394 			Ninfo.ex = NULL;
1395 		}
1396 		ASSERT(wcproot == NULL);
1397 		cp_reset(anp);
1398 	}
1399 
1400 	retval = Ninfo.lut;
1401 	Ninfo.lut = NULL;
1402 	return (retval);
1403 }
1404 
1405 void
1406 itree_free(struct lut *lutp)
1407 {
1408 	lut_free(lutp, itree_destructor, NULL);
1409 }
1410 
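/*
 * itree_nameinstancecmp -- compare two instanced names component by
 * component.  while the component strings match, instance numbers
 * (taken from the attached config node if present, otherwise from the
 * T_NUM child) are compared whenever both names continue past the
 * current component.  if one name is a prefix of the other, the
 * shorter one sorts first; two fully matching names are ordered by
 * name type; otherwise the result is a strcmp() of the first differing
 * component strings.
 */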
1411 int
1412 itree_nameinstancecmp(struct node *np1, struct node *np2)
1413 {
1414 	int np1type = (int)np1->u.name.t;
1415 	int np2type = (int)np2->u.name.t;
1416 	int num1;
1417 	int num2;
1418 
1419 	while (np1 && np2 && np1->u.name.s == np2->u.name.s) {
1420 		if (np1->u.name.next != NULL && np2->u.name.next != NULL) {
1421 			if (np1->u.name.cp != NULL) {
1422 				config_getcompname(np1->u.name.cp, NULL, &num1);
1423 			} else {
1424 				ASSERT(np1->u.name.child != NULL);
1425 				ASSERT(np1->u.name.child->t == T_NUM);
1426 				num1 = (int)np1->u.name.child->u.ull;
1427 			}
1428 
1429 			if (np2->u.name.cp != NULL) {
1430 				config_getcompname(np2->u.name.cp, NULL, &num2);
1431 			} else {
1432 				ASSERT(np2->u.name.child != NULL);
1433 				ASSERT(np2->u.name.child->t == T_NUM);
1434 				num2 = (int)np2->u.name.child->u.ull;
1435 			}
1436 
1437 			if (num1 != num2)
1438 				return (num1 - num2);
1439 		}
1440 
1441 		np1 = np1->u.name.next;
1442 		np2 = np2->u.name.next;
1443 	}
1444 	if (np1 == NULL)
1445 		if (np2 == NULL)
1446 			return (np1type - np2type);
1447 		else
1448 			return (-1);
1449 	else if (np2 == NULL)
1450 		return (1);
1451 	else
1452 		return (strcmp(np1->u.name.s, np2->u.name.s));
1453 }
1454 
1455 void
1456 itree_pevent_brief(int flags, struct event *ep)
1457 {
1458 	ASSERT(ep != NULL);
1459 	ASSERT(ep->enode != NULL);
1460 	ASSERT(ep->ipp != NULL);
1461 
1462 	ipath_print(flags, ep->enode->u.event.ename->u.name.s, ep->ipp);
1463 }
1464 
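/*
 * itree_pevent -- lut_walk callback to print one instanced event: its
 * brief name, the ereport count if applicable, its properties, and then
 * its TO, INHIBIT, and FROM bubbles, in that order.
 */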
1465 /*ARGSUSED*/
1466 static void
1467 itree_pevent(struct event *lhs, struct event *ep, void *arg)
1468 {
1469 	struct plut_wlk_data propd;
1470 	struct bubble *bp;
1471 	int flags = (int)arg;
1472 
1473 	itree_pevent_brief(flags, ep);
1474 	if (ep->t == N_EREPORT)
1475 		out(flags, " (count %d)", ep->count);
1476 	else
1477 		out(flags, NULL);
1478 
1479 	if (ep->props) {
1480 		propd.flags = flags;
1481 		propd.first = 1;
1482 		out(flags, "Properties:");
1483 		lut_walk(ep->props, ptree_plut, (void *)&propd);
1484 	}
1485 
1486 	for (bp = itree_next_bubble(ep, NULL); bp;
1487 	    bp = itree_next_bubble(ep, bp)) {
1488 		/* Print only TO bubbles in this loop */
1489 		if (bp->t != B_TO)
1490 			continue;
1491 		itree_pbubble(flags, bp);
1492 	}
1493 
1494 	for (bp = itree_next_bubble(ep, NULL); bp;
1495 	    bp = itree_next_bubble(ep, bp)) {
1496 		/* Print only INHIBIT bubbles in this loop */
1497 		if (bp->t != B_INHIBIT)
1498 			continue;
1499 		itree_pbubble(flags, bp);
1500 	}
1501 
1502 	for (bp = itree_next_bubble(ep, NULL); bp;
1503 	    bp = itree_next_bubble(ep, bp)) {
1504 		/* Print only FROM bubbles in this loop */
1505 		if (bp->t != B_FROM)
1506 			continue;
1507 		itree_pbubble(flags, bp);
1508 	}
1509 }
1510 
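/*
 * itree_pbubble -- print a single bubble: its mark, its N or K value,
 * and for each attached arrow the event at the other end, plus (for
 * FROM bubbles) the arrow's delays and constraints.
 */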
1511 static void
1512 itree_pbubble(int flags, struct bubble *bp)
1513 {
1514 	struct constraintlist *cp;
1515 	struct arrowlist *ap;
1516 
1517 	ASSERT(bp != NULL);
1518 
1519 	out(flags|O_NONL, "   ");
1520 	if (bp->mark)
1521 		out(flags|O_NONL, "*");
1522 	else
1523 		out(flags|O_NONL, " ");
1524 	if (bp->t == B_FROM)
1525 		out(flags|O_NONL, "N=%d to:", bp->nork);
1526 	else if (bp->t == B_TO)
1527 		out(flags|O_NONL, "K=%d from:", bp->nork);
1528 	else
1529 		out(flags|O_NONL, "K=%d masked from:", bp->nork);
1530 
1531 	if (bp->t == B_TO || bp->t == B_INHIBIT) {
1532 		for (ap = itree_next_arrow(bp, NULL); ap;
1533 		    ap = itree_next_arrow(bp, ap)) {
1534 			ASSERT(ap->arrowp->head == bp);
1535 			ASSERT(ap->arrowp->tail != NULL);
1536 			ASSERT(ap->arrowp->tail->myevent != NULL);
1537 			out(flags|O_NONL, " ");
1538 			itree_pevent_brief(flags, ap->arrowp->tail->myevent);
1539 		}
1540 		out(flags, NULL);
1541 		return;
1542 	}
1543 
1544 	for (ap = itree_next_arrow(bp, NULL); ap;
1545 	    ap = itree_next_arrow(bp, ap)) {
1546 		ASSERT(ap->arrowp->tail == bp);
1547 		ASSERT(ap->arrowp->head != NULL);
1548 		ASSERT(ap->arrowp->head->myevent != NULL);
1549 
1550 		out(flags|O_NONL, " ");
1551 		itree_pevent_brief(flags, ap->arrowp->head->myevent);
1552 
1553 		out(flags|O_NONL, " ");
1554 		ptree_timeval(flags, &ap->arrowp->mindelay);
1555 		out(flags|O_NONL, ",");
1556 		ptree_timeval(flags, &ap->arrowp->maxdelay);
1557 
1558 		/* Display anything from the propagation node? */
1559 		out(O_VERB3|O_NONL, " <%s:%d>",
1560 		    ap->arrowp->pnode->file, ap->arrowp->pnode->line);
1561 
1562 		if (itree_next_constraint(ap->arrowp, NULL))
1563 			out(flags|O_NONL, " {");
1564 
1565 		for (cp = itree_next_constraint(ap->arrowp, NULL); cp;
1566 		    cp = itree_next_constraint(ap->arrowp, cp)) {
1567 			ptree(flags, cp->cnode, 1, 0);
1568 			if (itree_next_constraint(ap->arrowp, cp))
1569 				out(flags|O_NONL, ", ");
1570 		}
1571 
1572 		if (itree_next_constraint(ap->arrowp, NULL))
1573 			out(flags|O_NONL, "}");
1574 	}
1575 	out(flags, NULL);
1576 }
1577 
1578 void
1579 itree_ptree(int flags, struct lut *itp)
1580 {
1581 	lut_walk(itp, (lut_cb)itree_pevent, (void *)flags);
1582 }
1583 
1584 /*ARGSUSED*/
1585 static void
1586 itree_destructor(void *left, void *right, void *arg)
1587 {
1588 	struct event *ep = (struct event *)right;
1589 	struct bubble *nextbub, *bub;
1590 
1591 	/* Free the properties */
1592 	lut_free(ep->props, instances_destructor, NULL);
1593 
1594 	/* Free my bubbles */
1595 	for (bub = ep->bubbles; bub != NULL; ) {
1596 		nextbub = bub->next;
1597 		/*
1598 		 * Free arrows if they are FROM me.  Free arrowlists on
1599 		 * other types of bubbles (but not the attached arrows,
1600 		 * which will be freed when we free the originating
1601 		 * bubble).
1602 		 */
1603 		if (bub->t == B_FROM)
1604 			itree_free_arrowlists(bub, 1);
1605 		else
1606 			itree_free_arrowlists(bub, 0);
1607 		itree_free_bubble(bub);
1608 		bub = nextbub;
1609 	}
1610 
1611 	if (ep->nvp != NULL)
1612 		nvlist_free(ep->nvp);
1613 	bzero(ep, sizeof (*ep));
1614 	FREE(ep);
1615 }
1616 
1617 static void
1618 itree_free_bubble(struct bubble *freeme)
1619 {
1620 	bzero(freeme, sizeof (*freeme));
1621 	FREE(freeme);
1622 }
1623 
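/*
 * itree_add_bubble -- find or create a bubble of the requested type on
 * eventp.  an existing TO bubble is reused if its generation number
 * matches, or if both the old and new K values are 1; an existing FROM
 * bubble is reused if both N values are "all" or both are 0.
 */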
1624 static struct bubble *
1625 itree_add_bubble(struct event *eventp, enum bubbletype btype, int nork, int gen)
1626 {
1627 	struct bubble *prev = NULL;
1628 	struct bubble *curr;
1629 	struct bubble *newb;
1630 
1631 	/* Use existing bubbles as appropriate when possible */
1632 	for (curr = eventp->bubbles;
1633 	    curr != NULL;
1634 	    prev = curr, curr = curr->next) {
1635 		if (btype == B_TO && curr->t == B_TO) {
1636 			/* see if an existing "to" bubble works for us */
1637 			if (gen == curr->gen)
1638 				return (curr);	/* matched gen number */
1639 			else if (nork == 1 && curr->nork == 1) {
1640 				curr->gen = gen;
1641 				return (curr);	/* coalesce K==1 bubbles */
1642 			}
1643 		} else if (btype == B_FROM && curr->t == B_FROM) {
1644 			/* see if an existing "from" bubble works for us */
1645 			if ((nork == N_IS_ALL && curr->nork == N_IS_ALL) ||
1646 			    (nork == 0 && curr->nork == 0))
1647 				return (curr);
1648 		}
1649 	}
1650 
1651 	newb = MALLOC(sizeof (struct bubble));
1652 	newb->next = NULL;
1653 	newb->t = btype;
1654 	newb->myevent = eventp;
1655 	newb->nork = nork;
1656 	newb->mark = 0;
1657 	newb->gen = gen;
1658 	newb->arrows = NULL;
1659 
1660 	if (prev == NULL)
1661 		eventp->bubbles = newb;
1662 	else
1663 		prev->next = newb;
1664 
1665 	return (newb);
1666 }
1667 
1668 struct bubble *
1669 itree_next_bubble(struct event *eventp, struct bubble *last)
1670 {
1671 	struct bubble *next;
1672 
1673 	for (;;) {
1674 		if (last != NULL)
1675 			next = last->next;
1676 		else
1677 			next = eventp->bubbles;
1678 
1679 		if (next == NULL || next->arrows != NULL)
1680 			return (next);
1681 
1682 		/* bubble was empty, skip it */
1683 		last = next;
1684 	}
1685 }
1686 
1687 static void
1688 add_arrow(struct bubble *bp, struct arrow *ap)
1689 {
1690 	struct arrowlist *prev = NULL;
1691 	struct arrowlist *curr;
1692 	struct arrowlist *newal;
1693 
1694 	newal = MALLOC(sizeof (struct arrowlist));
1695 	bzero(newal, sizeof (struct arrowlist));
1696 	newal->arrowp = ap;
1697 
1698 	curr = itree_next_arrow(bp, NULL);
1699 	while (curr != NULL) {
1700 		prev = curr;
1701 		curr = itree_next_arrow(bp, curr);
1702 	}
1703 
1704 	if (prev == NULL)
1705 		bp->arrows = newal;
1706 	else
1707 		prev->next = newal;
1708 }
1709 
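/*
 * itree_add_arrow -- create an arrow from a FROM bubble to a TO or
 * INHIBIT bubble.  the arrow's traits (within() delays and any deferred
 * constraints) are evaluated first; if the constraints rule the arrow
 * out entirely, nothing is added and NULL is returned.
 */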
1710 static struct arrow *
1711 itree_add_arrow(struct bubble *frombubblep, struct bubble *tobubblep,
1712     struct node *apnode, struct node *fromevent, struct node *toevent,
1713     struct lut *ex)
1714 {
1715 	struct arrow *newa;
1716 
1717 	ASSERTeq(frombubblep->t, B_FROM, itree_bubbletype2str);
1718 	ASSERTinfo(tobubblep->t == B_TO || tobubblep->t == B_INHIBIT,
1719 	    itree_bubbletype2str(tobubblep->t));
1720 	newa = MALLOC(sizeof (struct arrow));
1721 	newa->tail = frombubblep;
1722 	newa->head = tobubblep;
1723 	newa->pnode = apnode;
1724 	newa->constraints = NULL;
1725 
1726 	/*
1727 	 *  Set default delays, then try to re-set them from
1728 	 *  any within() constraints.
1729 	 */
1730 	newa->mindelay = newa->maxdelay = 0ULL;
1731 	if (itree_set_arrow_traits(newa, fromevent, toevent, ex) == 0) {
1732 		FREE(newa);
1733 		return (NULL);
1734 	}
1735 
1736 	add_arrow(frombubblep, newa);
1737 	add_arrow(tobubblep, newa);
1738 	return (newa);
1739 }
1740 
1741 /* returns false if traits show that arrow should not be added after all */
1742 static int
1743 itree_set_arrow_traits(struct arrow *ap, struct node *fromev,
1744     struct node *toev, struct lut *ex)
1745 {
1746 	struct node *epnames[] = { NULL, NULL, NULL };
1747 	struct node *newc = NULL;
1748 
1749 	ASSERTeq(fromev->t, T_EVENT, ptree_nodetype2str);
1750 	ASSERTeq(toev->t, T_EVENT, ptree_nodetype2str);
1751 
1752 	/*
1753 	 * search for the within values first on the declaration of
1754 	 * the destination event, and then on the prop.  this allows
1755 	 * one to specify a "default" within by putting it on the
1756 	 * declaration, but then override it on the prop statement.
1757 	 */
1758 	arrow_add_within(ap, toev->u.event.declp->u.stmt.np->u.event.eexprlist);
1759 	arrow_add_within(ap, toev->u.event.eexprlist);
1760 
1761 	/*
1762 	 * handle any global constraints inherited from the
1763 	 * "fromev" event's declaration
1764 	 */
1765 	ASSERT(fromev->u.event.declp != NULL);
1766 	ASSERT(fromev->u.event.declp->u.stmt.np != NULL);
1767 
1768 #ifdef	notdef
1769 	/* XXX not quite ready to evaluate constraints from decls yet */
1770 	if (fromev->u.event.declp->u.stmt.np->u.event.eexprlist)
1771 		(void) itree_add_constraint(ap,
1772 		    fromev->u.event.declp->u.stmt.np->u.event.eexprlist);
1773 #endif	/* notdef */
1774 
1775 	/* handle constraints on the from event in the prop statement */
1776 	epnames[0] = fromev->u.event.epname;
1777 	epnames[1] = toev->u.event.epname;
1778 	if (eval_potential(fromev->u.event.eexprlist, ex, epnames, &newc) == 0)
1779 		return (0);		/* constraint disallows arrow */
1780 
1781 	/*
1782 	 * handle any global constraints inherited from the
1783 	 * "toev" event's declaration
1784 	 */
1785 	ASSERT(toev->u.event.declp != NULL);
1786 	ASSERT(toev->u.event.declp->u.stmt.np != NULL);
1787 
1788 #ifdef	notdef
1789 	/* XXX not quite ready to evaluate constraints from decls yet */
1790 	if (toev->u.event.declp->u.stmt.np->u.event.eexprlist)
1791 		(void) itree_add_constraint(ap,
1792 		    toev->u.event.declp->u.stmt.np->u.event.eexprlist);
1793 #endif	/* notdef */
1794 
1795 	/* handle constraints on the to event in the prop statement */
1796 	epnames[0] = toev->u.event.epname;
1797 	epnames[1] = fromev->u.event.epname;
1798 	if (eval_potential(toev->u.event.eexprlist, ex, epnames, &newc) == 0)
1799 		return (0);		/* constraint disallows arrow */
1800 
1801 	/* if we came up with any deferred constraints, add them to arrow */
1802 	if (newc != NULL)
1803 		(void) itree_add_constraint(ap, newc);
1804 
1805 	return (1);	/* constraints allow arrow */
1806 }
1807 
1808 /*
1809  * Set the within() constraint.  If the constraint is set multiple times,
1810  * the last one "wins".
1811  */
1812 static void
1813 arrow_add_within(struct arrow *ap, struct node *xpr)
1814 {
1815 	struct node *arglist;
1816 
1817 	/* end of expressions list */
1818 	if (xpr == NULL)
1819 		return;
1820 
1821 	switch (xpr->t) {
1822 	case T_LIST:
1823 		arrow_add_within(ap, xpr->u.expr.left);
1824 		arrow_add_within(ap, xpr->u.expr.right);
1825 		return;
1826 	case T_FUNC:
1827 		if (xpr->u.func.s != L_within)
1828 			return;
1829 		arglist = xpr->u.func.arglist;
1830 		switch (arglist->t) {
1831 		case T_TIMEVAL:
1832 			ap->mindelay = 0;
1833 			ap->maxdelay = arglist->u.ull;
1834 			break;
1835 		case T_NAME:
1836 			ASSERT(arglist->u.name.s == L_infinity);
1837 			ap->mindelay = 0;
1838 			ap->maxdelay = TIMEVAL_EVENTUALLY;
1839 			break;
1840 		case T_LIST:
1841 			ASSERT(arglist->u.expr.left->t == T_TIMEVAL);
1842 			ap->mindelay = arglist->u.expr.left->u.ull;
1843 			switch (arglist->u.expr.right->t) {
1844 			case T_TIMEVAL:
1845 				ap->maxdelay = arglist->u.expr.right->u.ull;
1846 				break;
1847 			case T_NAME:
1848 				ASSERT(arglist->u.expr.right->u.name.s ==
1849 				    L_infinity);
1850 				ap->maxdelay = TIMEVAL_EVENTUALLY;
1851 				break;
1852 			default:
1853 				out(O_DIE, "within: unexpected 2nd arg type");
1854 			}
1855 			break;
1856 		default:
1857 			out(O_DIE, "within: unexpected 1st arg type");
1858 		}
1859 		break;
1860 	default:
1861 		return;
1862 	}
1863 }
1864 
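/*
 * itree_free_arrowlists -- free the arrowlist entries attached to a
 * bubble.  if arrows_too is set, the arrows themselves (and their
 * constraints) are freed as well; otherwise only the list entries go,
 * since the arrows belong to the originating FROM bubble.
 */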
1865 static void
1866 itree_free_arrowlists(struct bubble *bubp, int arrows_too)
1867 {
1868 	struct arrowlist *al, *nal;
1869 
1870 	al = bubp->arrows;
1871 	while (al != NULL) {
1872 		nal = al->next;
1873 		if (arrows_too) {
1874 			itree_free_constraints(al->arrowp);
1875 			bzero(al->arrowp, sizeof (struct arrow));
1876 			FREE(al->arrowp);
1877 		}
1878 		bzero(al, sizeof (*al));
1879 		FREE(al);
1880 		al = nal;
1881 	}
1882 }
1883 
1884 struct arrowlist *
1885 itree_next_arrow(struct bubble *bubble, struct arrowlist *last)
1886 {
1887 	struct arrowlist *next;
1888 
1889 	if (last != NULL)
1890 		next = last->next;
1891 	else
1892 		next = bubble->arrows;
1893 	return (next);
1894 }
1895 
1896 static struct constraintlist *
1897 itree_add_constraint(struct arrow *arrowp, struct node *c)
1898 {
1899 	struct constraintlist *prev = NULL;
1900 	struct constraintlist *curr;
1901 	struct constraintlist *newc;
1902 
1903 	for (curr = arrowp->constraints;
1904 	    curr != NULL;
1905 	    prev = curr, curr = curr->next);
1906 
1907 	newc = MALLOC(sizeof (struct constraintlist));
1908 	newc->next = NULL;
1909 	newc->cnode = c;
1910 
1911 	if (prev == NULL)
1912 		arrowp->constraints = newc;
1913 	else
1914 		prev->next = newc;
1915 
1916 	return (newc);
1917 }
1918 
1919 struct constraintlist *
1920 itree_next_constraint(struct arrow *arrowp, struct constraintlist *last)
1921 {
1922 	struct constraintlist *next;
1923 
1924 	if (last != NULL)
1925 		next = last->next;
1926 	else
1927 		next = arrowp->constraints;
1928 	return (next);
1929 }
1930 
1931 static void
1932 itree_free_constraints(struct arrow *ap)
1933 {
1934 	struct constraintlist *cl, *ncl;
1935 
1936 	cl = ap->constraints;
1937 	while (cl != NULL) {
1938 		ncl = cl->next;
1939 		ASSERT(cl->cnode != NULL);
1940 		tree_free(cl->cnode);
1941 		bzero(cl, sizeof (*cl));
1942 		FREE(cl);
1943 		cl = ncl;
1944 	}
1945 }
1946 
1947 const char *
1948 itree_bubbletype2str(enum bubbletype t)
1949 {
1950 	static char buf[100];
1951 
1952 	switch (t) {
1953 	case B_FROM:	return L_from;
1954 	case B_TO:	return L_to;
1955 	case B_INHIBIT:	return L_inhibit;
1956 	default:
1957 		(void) sprintf(buf, "[unexpected bubbletype: %d]", t);
1958 		return (buf);
1959 	}
1960 }
1961 
1962 /*
1963  * itree_fini -- clean up any half-built itrees
1964  */
1965 void
1966 itree_fini(void)
1967 {
1968 	if (Ninfo.lut != NULL) {
1969 		itree_free(Ninfo.lut);
1970 		Ninfo.lut = NULL;
1971 	}
1972 	if (Ninfo.ex) {
1973 		lut_free(Ninfo.ex, iterinfo_destructor, NULL);
1974 		Ninfo.ex = NULL;
1975 	}
1976 }
1977