/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License, Version 1.0 only
 * (the "License").  You may not use this file except in compliance
 * with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2006 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 *
 * itree.c -- instance tree creation and manipulation
 *
 * this module provides the instance tree
 */

#pragma ident	"%Z%%M%	%I%	%E% SMI"

#include <stdio.h>
#include <ctype.h>
#include <string.h>
#include <strings.h>
#include "alloc.h"
#include "out.h"
#include "stable.h"
#include "literals.h"
#include "lut.h"
#include "tree.h"
#include "ptree.h"
#include "itree.h"
#include "ipath.h"
#include "iexpr.h"
#include "eval.h"
#include "config.h"

/*
 * struct info contains the state we keep when expanding a prop statement
 * as part of constructing the instance tree.  state kept in struct info
 * is the non-recursive stuff -- the stuff that doesn't need to be on
 * the stack.  the rest of the state that is passed between all the
 * mutually recursive functions is required to be on the stack since
 * we need to backtrack and recurse as we do the instance tree construction.
 */
struct info {
	struct lut *lut;
	struct node *anp;	/* arrow np */
	struct lut *ex;		/* dictionary of explicit iterators */
	struct config *croot;
} Ninfo;

/*
 * struct wildcardinfo is used to track wildcarded portions of paths.
 *
 * for example, if the epname of an event is "c/d" and the path "a/b/c/d"
 * exists, the wildcard path ewname is filled in with the path "a/b".  when
 * matching is done, epname is temporarily replaced with the concatenation
 * of ewname and epname.  cpstart is set to the (struct config *)
 * corresponding to component "c".
 *
 * a linked list of these structs is used to track the expansion of each
 * event node as it is processed in vmatch() --> vmatch_event() calls.
 */
struct wildcardinfo {
	struct node *nptop;		/* event node fed to vmatch */
	struct node *oldepname;		/* epname without the wildcard part */
	enum status {
		WC_UNDEFINED,		/* struct is not yet initialized */
		WC_UNDERCONSTRUCTION,	/* wildcard path not yet done */
		WC_COMPLETE		/* wildcard path done and is in use */
	} s;
	struct wildcardpath {
		struct node *ewname;	/* wildcard path */
		struct config *cpstart;	/* starting cp node for oldepname */
		int refcount;		/* number of event nodes using this */
	} *p;
	struct wildcardpath *matchwc;	/* ptr to wc path to be matched */
	struct wildcardinfo *next;
};

static void vmatch(struct info *infop, struct node *np,
    struct node *lnp, struct node *anp, struct wildcardinfo **wcproot);
static void hmatch(struct info *infop, struct node *np, struct node *nextnp);
static void itree_pbubble(int flags, struct bubble *bp);
static void itree_destructor(void *left, void *right, void *arg);
static int itree_set_arrow_traits(struct arrow *ap, struct node *fromev,
    struct node *toev, struct lut *ex);
static void itree_free_arrowlists(struct bubble *bubp, int arrows_too);
static void arrow_add_within(struct arrow *ap, struct node *xpr);
static struct arrow *itree_add_arrow(struct bubble *frombubblep,
    struct bubble *tobubblep, struct node *apnode, struct node *fromevent,
    struct node *toevent, struct lut *ex);
static struct constraintlist *itree_add_constraint(struct arrow *arrowp,
    struct node *c);
static struct bubble *itree_add_bubble(struct event *eventp,
    enum bubbletype btype, int nork, int gen);
static void itree_free_bubble(struct bubble *freeme);
static void itree_free_constraints(struct arrow *ap);
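/*
 * overview (a hypothetical example, not taken from any shipped rules):
 * given the prop statement
 *
 *	prop error.a@x[n] -> error.b@x[n]/y;
 *
 * and a config containing the paths x0/y0 and x1/y1, itree_create()
 * instances the events error.a@x0, error.b@x0/y0, error.a@x1 and
 * error.b@x1/y1, hangs a "from" bubble on each error.a and a "to"
 * bubble on each error.b, and connects each pair with an arrow.
 */
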
/*
 * the following struct contains the state we build up during
 * vertical and horizontal expansion so that generate()
 * has everything it needs to construct the appropriate arrows.
 * after setting up the state by calling:
 *	generate_arrownp()
 *	generate_nork()
 *	generate_new()
 *	generate_from()
 *	generate_to()
 * the actual arrow generation is done by calling:
 *	generate()
 */
static struct {
	int generation;		/* generation number of arrow set */
	struct node *arrownp;	/* top-level parse tree for arrow */
	int n;			/* n value associated with arrow */
	int k;			/* k value associated with arrow */
	struct node *fromnp;	/* left-hand-side event in parse tree */
	struct node *tonp;	/* right-hand-side event in parse tree */
	struct event *frome;	/* left-hand-side event in instance tree */
	struct event *toe;	/* right-hand-side event in instance tree */
	struct bubble *frombp;	/* bubble arrow comes from */
	struct bubble *tobp;	/* bubble arrow goes to */
} G;

static void
generate_arrownp(struct node *arrownp)
{
	G.arrownp = arrownp;
}

static void
generate_nork(int n, int k)
{
	G.n = n;
	G.k = k;
}

static void
generate_new(void)
{
	G.generation++;
}

static void
generate_from(struct node *fromeventnp, struct event *fromevent)
{
	G.fromnp = fromeventnp;
	G.frome = fromevent;

	out(O_ALTFP|O_VERB3|O_NONL, "from bubble on ");
	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.fromnp);
	out(O_ALTFP|O_VERB3, NULL);

	G.frombp = itree_add_bubble(G.frome, B_FROM, G.n, 0);
}

static void
generate_to(struct node *toeventnp, struct event *toevent)
{
	G.tonp = toeventnp;
	G.toe = toevent;

	out(O_ALTFP|O_VERB3|O_NONL, "to bubble (gen %d) on ", G.generation);
	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.tonp);
	out(O_ALTFP|O_VERB3, NULL);

	G.tobp = itree_add_bubble(G.toe, B_TO, G.k, G.generation);
}

static void
generate(struct lut *ex)
{
	ASSERT(G.arrownp != NULL);
	ASSERT(G.fromnp != NULL);
	ASSERT(G.frome != NULL);
	ASSERT(G.frombp != NULL);
	ASSERT(G.tonp != NULL);
	ASSERT(G.toe != NULL);
	ASSERT(G.tobp != NULL);

	out(O_ALTFP|O_VERB3|O_NONL, " Arrow \"");
	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.fromnp);
	out(O_ALTFP|O_VERB3|O_NONL, "\" -> \"");
	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.tonp);

	if (itree_add_arrow(G.frombp, G.tobp, G.arrownp,
	    G.fromnp, G.tonp, ex) == NULL) {
		out(O_ALTFP|O_VERB3, "\" (prevented by constraints)");
	} else {
		out(O_ALTFP|O_VERB3, "\"");
	}
}
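/*
 * the expansion code below drives the G state above roughly as follows
 * (a sketch of the calling sequence gathered from vmatch(), hmatch_event()
 * and itree_create() further down; see those routines for the real
 * control flow):
 *
 *	generate_arrownp(anp);		once per prop statement
 *	generate_nork(n, k);		per arrow in a cascade
 *	generate_new();			new set of arrows per cascade step
 *	generate_from(np, event);	per matched left-side event
 *	generate_to(np, event);		per matched right-side event
 *	generate(ex);			emits (at most) one arrow
 */
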
enum childnode_action {
	CN_NONE,
	CN_INSTANTIZE,
	CN_DUP
};

static struct node *
tname_dup(struct node *namep, enum childnode_action act)
{
	struct node *retp = NULL;
	const char *file;
	int line;

	if (namep == NULL)
		return (NULL);

	file = namep->file;
	line = namep->line;

	for (; namep != NULL; namep = namep->u.name.next) {
		struct node *newnp = newnode(T_NAME, file, line);

		newnp->u.name.t = namep->u.name.t;
		newnp->u.name.s = namep->u.name.s;
		newnp->u.name.last = newnp;
		newnp->u.name.it = namep->u.name.it;
		newnp->u.name.cp = namep->u.name.cp;

		if (act == CN_DUP) {
			struct node *npc;

			npc = namep->u.name.child;
			if (npc != NULL) {
				switch (npc->t) {
				case T_NUM:
					newnp->u.name.child =
					    newnode(T_NUM, file, line);
					newnp->u.name.child->u.ull =
					    npc->u.ull;
					break;
				case T_NAME:
					newnp->u.name.child =
					    tree_name(npc->u.name.s,
					    npc->u.name.it,
					    file, line);
					break;
				default:
					out(O_DIE, "tname_dup: "
					    "invalid child type %s",
					    ptree_nodetype2str(npc->t));
				}
			}
		} else if (act == CN_INSTANTIZE) {
			newnp->u.name.child = newnode(T_NUM, file, line);

			if (namep->u.name.child == NULL ||
			    namep->u.name.child->t != T_NUM) {
				int inum;

				ASSERT(newnp->u.name.cp != NULL);
				config_getcompname(newnp->u.name.cp,
				    NULL, &inum);
				newnp->u.name.child->u.ull =
				    (unsigned long long)inum;
			} else {
				newnp->u.name.child->u.ull =
				    namep->u.name.child->u.ull;
			}
		}

		if (retp == NULL) {
			retp = newnp;
		} else {
			retp->u.name.last->u.name.next = newnp;
			retp->u.name.last = newnp;
		}
	}

	return (retp);
}

struct prop_wlk_data {
	struct lut *props;
	struct node *epname;
};

static struct lut *props2instance(struct node *, struct node *);

/*
 * let oldepname be a subset of epname.  return the subsection of epname
 * that ends with oldepname.  make each component in the path explicitly
 * instanced (i.e., with a T_NUM child).
 */
static struct node *
tname_dup_to_epname(struct node *oldepname, struct node *epname)
{
	struct node *npref, *npend, *np1, *np2;
	struct node *ret = NULL;
	int foundmatch = 0;

	if (epname == NULL)
		return (NULL);

	/*
	 * search for the longest path in epname which contains
	 * oldnode->u.event.epname.  set npend to point to just past the
	 * end of this path.
	 */
	npend = NULL;
	for (npref = epname; npref; npref = npref->u.name.next) {
		if (npref->u.name.s == oldepname->u.name.s) {
			for (np1 = npref, np2 = oldepname;
			    np1 != NULL && np2 != NULL;
			    np1 = np1->u.name.next, np2 = np2->u.name.next) {
				if (np1->u.name.s != np2->u.name.s)
					break;
			}
			if (np2 == NULL) {
				foundmatch = 1;
				npend = np1;
				if (np1 == NULL) {
					/* oldepname matched npref up to end */
					break;
				}
			}
		}
	}

	if (foundmatch == 0) {
		/*
		 * if oldepname could not be found in epname, return a
		 * duplicate of the former.  do not try to instantize
		 * oldepname since it might not be a path.
		 */
		return (tname_dup(oldepname, CN_DUP));
	}

	/*
	 * dup (epname -- npend).  all children should be T_NUMs.
	 */
	for (npref = epname;
	    ! (npref == NULL || npref == npend);
	    npref = npref->u.name.next) {
		struct node *newnp = newnode(T_NAME, oldepname->file,
		    oldepname->line);

		newnp->u.name.t = npref->u.name.t;
		newnp->u.name.s = npref->u.name.s;
		newnp->u.name.last = newnp;
		newnp->u.name.it = npref->u.name.it;
		newnp->u.name.cp = npref->u.name.cp;

		newnp->u.name.child = newnode(T_NUM, oldepname->file,
		    oldepname->line);

		if (npref->u.name.child == NULL ||
		    npref->u.name.child->t != T_NUM) {
			int childnum;

			ASSERT(npref->u.name.cp != NULL);
			config_getcompname(npref->u.name.cp, NULL, &childnum);
			newnp->u.name.child->u.ull = childnum;
		} else {
			newnp->u.name.child->u.ull =
			    npref->u.name.child->u.ull;
		}

		if (ret == NULL) {
			ret = newnp;
		} else {
			ret->u.name.last->u.name.next = newnp;
			ret->u.name.last = newnp;
		}
	}

	return (ret);
}

/*
 * restriction: oldnode->u.event.epname has to be equivalent to or a subset
 * of epname
 */
static struct node *
tevent_dup_to_epname(struct node *oldnode, struct node *epname)
{
	struct node *ret;

	ret = newnode(T_EVENT, oldnode->file, oldnode->line);
	ret->u.event.ename = tname_dup(oldnode->u.event.ename, CN_NONE);
	ret->u.event.epname = tname_dup_to_epname(oldnode->u.event.epname,
	    epname);
	return (ret);
}

static void
nv_instantiate(void *name, void *val, void *arg)
{
	struct prop_wlk_data *pd = (struct prop_wlk_data *)arg;
	struct node *orhs = (struct node *)val;
	struct node *nrhs;

	/* handle engines by instantizing the entire engine */
	if (name == L_engine) {
		ASSERT(orhs->t == T_EVENT);
		ASSERT(orhs->u.event.ename->u.name.t == N_SERD);

		/* there are only SERD engines for now */

		nrhs = newnode(T_SERD, orhs->file, orhs->line);
		nrhs->u.stmt.np = tevent_dup_to_epname(orhs, pd->epname);
		nrhs->u.stmt.lutp = props2instance(orhs, pd->epname);
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		return;
	}

	switch (orhs->t) {
	case T_NUM:
		nrhs = newnode(T_NUM, orhs->file, orhs->line);
		nrhs->u.ull = orhs->u.ull;
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		break;
	case T_TIMEVAL:
		nrhs = newnode(T_TIMEVAL, orhs->file, orhs->line);
		nrhs->u.ull = orhs->u.ull;
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		break;
	case T_NAME:
		nrhs = tname_dup_to_epname(orhs, pd->epname);
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		break;
	case T_EVENT:
		nrhs = tevent_dup_to_epname(orhs, pd->epname);
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		break;
	case T_GLOBID:
		nrhs = newnode(T_GLOBID, orhs->file, orhs->line);
		nrhs->u.globid.s = orhs->u.globid.s;
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		break;
	case T_FUNC:
		/* for T_FUNC, we don't duplicate it, just point to node */
		pd->props = lut_add(pd->props, name, orhs, NULL);
		break;
	default:
		out(O_DIE, "unexpected nvpair value type %s",
		    ptree_nodetype2str(((struct node *)val)->t));
	}
}

static struct lut *
props2instance(struct node *eventnp, struct node *epname)
{
	struct prop_wlk_data pd;

	pd.props = NULL;
	pd.epname = epname;

	ASSERT(eventnp->u.event.declp != NULL);
	lut_walk(eventnp->u.event.declp->u.stmt.lutp, nv_instantiate, &pd);
	return (pd.props);
}
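/*
 * note on the instantiation above: props2instance() walks the property
 * lut attached to an event declaration and, via nv_instantiate(), builds
 * a per-instance copy of each value with pathnames rewritten against the
 * instanced epname.  serd engines get a freshly built T_SERD node whose
 * own property lut is instantiated recursively, while T_FUNC values are
 * shared with the parse tree rather than duplicated, which is why
 * instances_destructor() below must not free them.
 */
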
/*ARGSUSED*/
static void
instances_destructor(void *left, void *right, void *arg)
{
	struct node *dn = (struct node *)right;

	if (dn->t == T_SERD) {
		/* we allocated the lut during itree_create(), so free it */
		lut_free(dn->u.stmt.lutp, instances_destructor, NULL);
		dn->u.stmt.lutp = NULL;
	}
	if (dn->t != T_FUNC)	/* T_FUNC pointed to original node */
		tree_free(dn);
}

/*ARGSUSED*/
static void
payloadprops_destructor(void *left, void *right, void *arg)
{
	FREE(right);
}

/*
 * event_cmp -- used via lut_lookup/lut_add on instance tree lut
 */
static int
event_cmp(struct event *ep1, struct event *ep2)
{
	int diff;

	if ((diff = ep2->enode->u.event.ename->u.name.s -
	    ep1->enode->u.event.ename->u.name.s) != 0)
		return (diff);
	if ((diff = (char *)ep2->ipp - (char *)ep1->ipp) != 0)
		return (diff);
	return (0);
}

struct event *
itree_lookup(struct lut *itp, const char *ename, const struct ipath *ipp)
{
	struct event searchevent;	/* just used for searching */
	struct node searcheventnode;
	struct node searchenamenode;

	searchevent.enode = &searcheventnode;
	searcheventnode.t = T_EVENT;
	searcheventnode.u.event.ename = &searchenamenode;
	searchenamenode.t = T_NAME;
	searchenamenode.u.name.s = ename;
	searchevent.ipp = ipp;
	return (lut_lookup(itp, (void *)&searchevent, (lut_cmp)event_cmp));
}

static struct event *
find_or_add_event(struct info *infop, struct node *np)
{
	struct event *ret;
	struct event searchevent;	/* just used for searching */

	ASSERTeq(np->t, T_EVENT, ptree_nodetype2str);

	searchevent.enode = np;
	searchevent.ipp = ipath(np->u.event.epname);
	if ((ret = lut_lookup(infop->lut, (void *)&searchevent,
	    (lut_cmp)event_cmp)) != NULL)
		return (ret);

	/* wasn't already in tree, allocate it */
	ret = MALLOC(sizeof (*ret));
	bzero(ret, sizeof (*ret));

	ret->t = np->u.event.ename->u.name.t;
	ret->enode = np;
	ret->ipp = searchevent.ipp;
	ret->props = props2instance(np, np->u.event.epname);

	infop->lut = lut_add(infop->lut, (void *)ret, (void *)ret,
	    (lut_cmp)event_cmp);

	return (ret);
}
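/*
 * a note on event identity (inferred from event_cmp() and the headers
 * included above): instance-tree events are keyed by the ename string
 * and the ipath pointer.  both are expected to be interned -- names by
 * the stable() string table and paths by ipath() -- so event_cmp() can
 * compare raw pointers instead of calling strcmp() or walking the path.
 */
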
/*
 * hmatch_event -- perform any appropriate horizontal expansion on an event
 *
 * this routine is used to perform horizontal expansion on both the
 * left-hand-side events in a prop, and the right-hand-side events.
 * when called to handle a left-side event, nextnp points to the right
 * side of the prop that should be passed to hmatch() for each match
 * found by horizontal expansion.  when no horizontal expansion exists,
 * we will still "match" one event for every event found in the list on
 * the left-hand-side of the prop because vmatch() already found that
 * there's at least one match during vertical expansion.
 */
static void
hmatch_event(struct info *infop, struct node *eventnp, struct node *epname,
    struct config *ncp, struct node *nextnp, int rematch)
{
	if (epname == NULL) {
		/*
		 * end of pathname recursion, either we just located
		 * a left-hand-side event and we're ready to move on
		 * to expanding the right-hand-side events, or
		 * we're further down the recursion and we just located
		 * a right-hand-side event.  the passed-in parameter
		 * "nextnp" tells us whether we're working on the left
		 * side and need to move on to nextnp, or if nextnp is
		 * NULL, we're working on the right side.
		 */
		if (nextnp) {
			/*
			 * finished a left side expansion, move on to right.
			 * tell generate() what event we just matched so
			 * it can be used at the source of any arrows
			 * we generate as we match events on the right side.
			 */
			generate_from(eventnp,
			    find_or_add_event(infop, eventnp));
			hmatch(infop, nextnp, NULL);
		} else {
			/*
			 * finished a right side expansion.  tell generate
			 * the information about the destination and let
			 * it construct the arrows as appropriate.
			 */
			generate_to(eventnp,
			    find_or_add_event(infop, eventnp));
			generate(infop->ex);
		}

		return;
	}

	ASSERTeq(epname->t, T_NAME, ptree_nodetype2str);

	/*
	 * we only get here when eventnp already has a completely
	 * instanced epname in it.  so we first recurse
	 * down to the end of the name and as the recursion pops
	 * up, we look for opportunities to advance horizontal
	 * expansions on to the next match.  when we do advance
	 * horizontal expansions, we potentially render all cp
	 * pointers on all components to the right as invalid,
	 * so we pass in an "ncp" config handle so matching against
	 * the config can happen.
	 */
	if (rematch) {
		struct config *ocp = epname->u.name.cp;
		char *ncp_s;
		int ncp_num, num;

		for (; ncp; ncp = config_next(ncp)) {
			config_getcompname(ncp, &ncp_s, &ncp_num);

			if (ncp_s == epname->u.name.s) {
				/* found a matching component name */
				config_getcompname(epname->u.name.cp,
				    NULL, &num);

				if (epname->u.name.it != IT_HORIZONTAL &&
				    ncp_num != num)
					continue;

				epname->u.name.cp = ncp;
				hmatch_event(infop, eventnp,
				    epname->u.name.next, config_child(ncp),
				    nextnp, 1);
			}
		}

		epname->u.name.cp = ocp;

		return;	/* no more config to match against */

	} else {
		hmatch_event(infop, eventnp, epname->u.name.next, ncp,
		    nextnp, 0);
	}

	if (epname->u.name.it == IT_HORIZONTAL) {
		struct config *cp;
		struct config *ocp = epname->u.name.cp;
		char *cp_s;
		int cp_num;
		int ocp_num;
		struct iterinfo *iterinfop = NULL;
		const char *iters;

		config_getcompname(ocp, NULL, &ocp_num);

		for (cp = config_next(ocp); cp; cp = config_next(cp)) {
			config_getcompname(cp, &cp_s, &cp_num);

			if (cp_s == epname->u.name.s) {
				ASSERT(epname->u.name.child != NULL);

				iters = epname->u.name.child->u.name.s;
				if ((iterinfop = lut_lookup(infop->ex,
				    (void *)iters, NULL)) == NULL) {
					out(O_DIE,
					    "hmatch_event: internal error: "
					    "iterator \"%s\" undefined", iters);
				} else {
					/* advance dict entry to next match */
					iterinfop->num = cp_num;
				}
				epname->u.name.cp = cp;
				hmatch_event(infop, eventnp,
				    epname->u.name.next, config_child(cp),
				    nextnp, 1);
			}
		}

		if (iterinfop != NULL) {
			/* restore dict entry */
			iterinfop->num = ocp_num;
		}
		epname->u.name.cp = ocp;
	}
}
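/*
 * a brief illustration of the IT_HORIZONTAL case above (the component
 * names here are made up): suppose the component "y" in epname carries a
 * horizontal iterator and the config under the already-matched parent
 * holds y0, y1 and y2.  vmatch() will have bound the name to one of them
 * (say y0); the loop above then walks the remaining siblings y1 and y2,
 * re-binding the iterator value and the component's cp pointer for each
 * one and recursing, so arrows are generated for every matching sibling
 * rather than only the first match.
 */
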
/*
 * hmatch -- check for horizontal expansion matches
 *
 * np points to the things we're matching (like a T_LIST or a T_EVENT)
 * and if we're working on the left side of a prop, nextnp points to
 * the other side of the prop that we'll tackle next when this recursion
 * bottoms out.  when all the events in the entire prop arrow have been
 * horizontally expanded, generate() will be called to generate the
 * actual arrow.
 */
static void
hmatch(struct info *infop, struct node *np, struct node *nextnp)
{
	if (np == NULL)
		return;		/* all done */

	/*
	 * for each item in the list of events (which could just
	 * be a single event, or it could get larger in the loop
	 * below due to horizontal expansion), call hmatch on
	 * the right side and create arrows to each element.
	 */

	switch (np->t) {
	case T_LIST:
		/* loop through the list */
		if (np->u.expr.left)
			hmatch(infop, np->u.expr.left, nextnp);
		if (np->u.expr.right)
			hmatch(infop, np->u.expr.right, nextnp);
		break;

	case T_EVENT:
		hmatch_event(infop, np, np->u.event.epname,
		    NULL, nextnp, 0);
		break;

	default:
		outfl(O_DIE, np->file, np->line,
		    "hmatch: unexpected type: %s",
		    ptree_nodetype2str(np->t));
	}
}

static int
itree_np2nork(struct node *norknp)
{
	if (norknp == NULL)
		return (1);
	else if (norknp->t == T_NAME && norknp->u.name.s == L_A)
		return (-1);	/* our internal version of "all" */
	else if (norknp->t == T_NUM)
		return ((int)norknp->u.ull);
	else
		out(O_DIE, norknp->file, norknp->line,
		    "itree_np2nork: internal error type %s",
		    ptree_nodetype2str(norknp->t));
	/*NOTREACHED*/
}

static struct iterinfo *
newiterinfo(int num, struct node *np)
{
	struct iterinfo *ret = MALLOC(sizeof (*ret));

	ret->num = num;
	ret->np = np;

	return (ret);
}

/*ARGSUSED*/
static void
iterinfo_destructor(void *left, void *right, void *arg)
{
	struct iterinfo *iterinfop = (struct iterinfo *)right;

	bzero(iterinfop, sizeof (*iterinfop));
	FREE(iterinfop);
}

/*
 * return 1 if wildcard path for wcp matches another wildcard path;
 * return 0 otherwise.
 */
static int
wc_paths_match(struct wildcardinfo *wcp)
{
	struct node *np1, *np2;

	ASSERT(wcp->matchwc != NULL);

	for (np1 = wcp->p->ewname, np2 = wcp->matchwc->ewname;
	    np1 != NULL && np2 != NULL;
	    np1 = np1->u.name.next, np2 = np2->u.name.next) {
		/*
		 * names must match
		 */
		if (np1->u.name.s != np2->u.name.s)
			return (0);

		/*
		 * children must exist and have the same numerical value
		 */
		if (np1->u.name.child == NULL || np2->u.name.child == NULL)
			return (0);

		if (np1->u.name.child->t != T_NUM ||
		    np2->u.name.child->t != T_NUM)
			return (0);

		if (np1->u.name.child->u.ull != np2->u.name.child->u.ull)
			return (0);
	}

	/*
	 * return true only if we have matches for all entries of n1 and
	 * n2.  note that NULL wildcard paths (i.e., both wcp->p->ewname
	 * and wcp->matchwc->ewname are NULL) will be considered as
	 * matching paths.
	 */
	if (np1 == NULL && np2 == NULL)
		return (1);

	return (0);
}
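/*
 * note on the two helpers that follow: create_wildcardedpath() and
 * undo_wildcardedpath() bracket each recursion back into vmatch().  the
 * first temporarily replaces nptop->u.event.epname with a copy of the
 * wildcard prefix (ewname) followed by a copy of the original epname;
 * the second frees that copy and restores the original pointer.  the
 * instanced events created in between therefore carry the full,
 * wildcard-expanded pathname.
 */
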
/*
 * update epname to include the wildcarded portion
 */
static void
create_wildcardedpath(struct wildcardinfo **wcproot)
{
	struct wildcardinfo *wcp;
	struct node *nptop;

	wcp = *wcproot;

	if (wcp->s == WC_UNDERCONSTRUCTION) {
		ASSERT(wcp->p->refcount == 1);
		wcp->s = WC_COMPLETE;
	}

	/* path has no wildcard */
	if (wcp->p->ewname == NULL)
		return;

	/*
	 * get to this point if a wildcard portion of the path exists.
	 *
	 * first set oldepname to the start of the existing epname for use
	 * in future comparisons, then update epname to include the
	 * wildcard portion.
	 */
	nptop = wcp->nptop;

	ASSERT(wcp->oldepname == nptop->u.event.epname);

	nptop->u.event.epname = tname_dup(wcp->p->ewname, CN_DUP);
	nptop->u.event.epname = tree_name_append(nptop->u.event.epname,
	    tname_dup(wcp->oldepname, CN_DUP));
}

/*
 * restore epname to its former (nonwildcarded) state
 */
static void
undo_wildcardedpath(struct wildcardinfo **wcproot)
{
	struct wildcardinfo *wcp;

	wcp = *wcproot;

	if (wcp->s == WC_COMPLETE) {
		ASSERT(wcp->p->refcount == 1);
		wcp->s = WC_UNDERCONSTRUCTION;
	}

	/* path has no wildcard */
	if (wcp->p->ewname == NULL)
		return;

	ASSERT(wcp->oldepname != NULL);

	tree_free(wcp->nptop->u.event.epname);
	wcp->nptop->u.event.epname = wcp->oldepname;
}

enum wildcard_action {
	WA_NONE,	/* do not do any wildcards */
	WA_SINGLE,	/* do wildcard only for current cp node */
	WA_ALL		/* do wildcards for all cp nodes */
};

static void
vmatch_event(struct info *infop, struct config *cp, struct node *np,
    struct node *lnp, struct node *anp,
    struct wildcardinfo **wcproot, enum wildcard_action dowildcard)
{
	struct wildcardinfo *wcp;
	char *cp_s;
	int cp_num;

	wcp = *wcproot;

	if ((np == NULL && wcp->oldepname != NULL) ||
	    (cp == NULL && wcp->oldepname == NULL)) {
		/*
		 * get to this point if the pathname matched the config
		 * (but not necessarily a match at the end).  first check
		 * for any matching wildcard paths.
		 */
		if (wcp->matchwc != NULL && wc_paths_match(wcp) == 0)
			return;

		create_wildcardedpath(wcproot);
		vmatch(infop, np, lnp, anp, wcproot);
		undo_wildcardedpath(wcproot);

		return;
	}

	if (cp == NULL)
		return;	/* no more config to match against */

	for (; cp; cp = config_next(cp)) {
		config_getcompname(cp, &cp_s, &cp_num);

		if (cp_s == np->u.name.s &&
		    ! (wcp->s == WC_UNDERCONSTRUCTION &&
		    dowildcard == WA_SINGLE)) {
			/* found a matching component name */
			if (np->u.name.child &&
			    np->u.name.child->t == T_NUM) {
				/*
				 * an explicit instance number was given
				 * in the source.  so only consider this
				 * a configuration match if the number
				 * also matches.
				 */
				if (cp_num != np->u.name.child->u.ull)
					continue;

				np->u.name.cp = cp;
			} else {
				struct iterinfo *iterinfop;
				const char *iters;

				/*
				 * vertical iterator.  look it up in
				 * the appropriate lut and if we get
				 * back a value it is either one that we
				 * set earlier, in which case we record
				 * the new value for this iteration and
				 * keep matching, or it is one that was
				 * set by an earlier reference to the
				 * iterator, in which case we only consider
				 * this a configuration match if the number
				 * matches cp_num.
				 */

				ASSERT(np->u.name.child != NULL);
				ASSERT(np->u.name.child->t == T_NAME);
				iters = np->u.name.child->u.name.s;

				if ((iterinfop = lut_lookup(infop->ex,
				    (void *)iters, NULL)) == NULL) {
					/* we're the first use, record our np */
					infop->ex = lut_add(infop->ex,
					    (void *)iters,
					    newiterinfo(cp_num, np), NULL);
				} else if (np == iterinfop->np) {
					/*
					 * we're the first use, back again
					 * for another iteration.  so update
					 * the num bound to this iterator in
					 * the lut.
					 */
					iterinfop->num = cp_num;
				} else if (cp_num != iterinfop->num) {
					/*
					 * an earlier reference to this
					 * iterator bound it to a different
					 * instance number, so there's no
					 * match here after all.
					 *
					 * however, it's possible that this
					 * component should really be part of
					 * the wildcard.  we explore this by
					 * forcing this component into the
					 * wildcarded section.
					 *
					 * for more details of what's
					 * going to happen now, see the
					 * comment block below entitled
					 * "forcing components into
					 * wildcard path".
					 */
					if (dowildcard == WA_ALL &&
					    wcp->s == WC_UNDERCONSTRUCTION) {
						vmatch_event(infop, cp, np,
						    lnp, anp, wcproot,
						    WA_SINGLE);
					}
					continue;
				}
				np->u.name.cp = cp;
			}

			/*
			 * if wildcarding was done in a call earlier in the
			 * stack, record the current cp as the first
			 * matching and nonwildcarded cp.
			 */
			if (dowildcard == WA_ALL &&
			    wcp->s == WC_UNDERCONSTRUCTION)
				wcp->p->cpstart = cp;

			/*
			 * if this was an IT_HORIZONTAL name,
			 * hmatch() will use the cp to expand
			 * all matches horizontally into a list.
			 * we know the list will contain at least
			 * one element (the one we just matched),
			 * so we just store cp and let hmatch_event()
			 * do the rest.
			 *
			 * recurse on to next component.  note that
			 * wildcarding is now turned off.
			 */
			vmatch_event(infop, config_child(cp), np->u.name.next,
			    lnp, anp, wcproot, WA_NONE);

			/*
			 * forcing components into wildcard path:
			 *
			 * if this component is the first match, force it
			 * to be part of the wildcarded path and see if we
			 * can get additional matches.  repeat call to
			 * vmatch_event() with the same np, making sure
			 * wildcarding is forced for this component alone
			 * and not its peers by specifying vmatch_event(
			 * ..., WA_SINGLE).  in other words, in the call to
			 * vmatch_event() below, there should be no loop
			 * over cp's peers since that is being done in the
			 * current loop [i.e., the loop we're in now].
			 *
			 * here's an example.  suppose we have the
			 * definition
			 *	event foo@x/y
			 * and configuration
			 *	a0/x0/y0/a1/x1/y1
			 *
			 * the code up to this point will treat "a0" as the
			 * wildcarded part of the path and "x0/y0" as the
			 * nonwildcarded part, resulting in the instanced
			 * event
			 *	foo@a0/x0/y0
			 *
			 * in order to discover the next match (.../x1/y1)
			 * in the configuration we have to force "x0" into
			 * the wildcarded part of the path.  the following
			 * call to vmatch_event(..., WA_SINGLE) does this.
			 * by doing so, we discover the wildcarded part
			 * "a0/x0/y0/a1" and the nonwildcarded part "x1/y1"
			 *
			 * the following call to vmatch_event() is also
			 * needed to properly handle the configuration
			 *	b0/x0/b1/x1/y1
			 *
			 * the recursions into vmatch_event() will start
			 * off uncovering "b0" as the wildcarded part and
			 * "x0" as the start of the nonwildcarded path.
			 * however, the next recursion will not result in a
			 * match since there is no "y" following "x0".  the
			 * subsequent match of (wildcard = "b0/x0/b1" and
			 * nonwildcard = "x1/y1") will be discovered only
			 * if "x0" is forced to be a part of the wildcarded
			 * path.
			 */
			if (dowildcard == WA_ALL &&
			    wcp->s == WC_UNDERCONSTRUCTION) {
				vmatch_event(infop, cp, np, lnp, anp,
				    wcproot, WA_SINGLE);
			}

			if (np->u.name.it == IT_HORIZONTAL) {
				/*
				 * hmatch() finished iterating through
				 * the configuration as described above, so
				 * don't continue iterating here.
				 */
				return;
			}

		} else if ((dowildcard == WA_SINGLE || dowildcard == WA_ALL) &&
		    wcp->s == WC_UNDERCONSTRUCTION) {
			/*
			 * no matching cp, and we are constructing our own
			 * wildcard path.  (in other words, we are not
			 * referencing a wildcard path created for an
			 * earlier event.)
			 *
			 * add wildcard entry, then recurse on to config
			 * child
			 */
			struct node *cpnode, *prevlast;

			cpnode = tree_name(cp_s, IT_NONE, NULL, 0);
			cpnode->u.name.child = newnode(T_NUM, NULL, 0);
			cpnode->u.name.child->u.ull = cp_num;
			cpnode->u.name.cp = cp;

			if (wcp->p->ewname == NULL) {
				prevlast = NULL;
				wcp->p->ewname = cpnode;
			} else {
				prevlast = wcp->p->ewname->u.name.last;
				wcp->p->ewname =
				    tree_name_append(wcp->p->ewname,
				    cpnode);
			}

			vmatch_event(infop, config_child(cp), np, lnp, anp,
			    wcproot, WA_ALL);

			/*
			 * back out last addition to ewname and continue
			 * with loop
			 */
			tree_free(cpnode);
			if (prevlast == NULL) {
				wcp->p->ewname = NULL;
			} else {
				prevlast->u.name.next = NULL;
				wcp->p->ewname->u.name.last = prevlast;
			}

			/*
			 * return if wildcarding is done only for this cp
			 */
			if (dowildcard == WA_SINGLE)
				return;
		}
	}
}

/*
 * for the event node np, which will be subjected to pathname
 * expansion/matching, create a (struct wildcardinfo) to hold wildcard
 * information.  this struct will be inserted into the first location in
 * the list that starts with *wcproot.
 *
 * cp is the starting node of the configuration; cpstart, which is output,
 * is the starting node of the nonwildcarded portion of the path.
 */
static void
add_wildcardentry(struct wildcardinfo **wcproot, struct config *cp,
    struct node *np)
{
	struct wildcardinfo *wcpnew, *wcp;
	struct node *np1, *np2;

	/*
	 * create entry for np
	 */
	wcpnew = MALLOC(sizeof (struct wildcardinfo));
	bzero(wcpnew, sizeof (struct wildcardinfo));
	wcpnew->nptop = np;
	wcpnew->oldepname = np->u.event.epname;
	wcpnew->s = WC_UNDERCONSTRUCTION;

	wcpnew->p = MALLOC(sizeof (struct wildcardpath));
	bzero(wcpnew->p, sizeof (struct wildcardpath));
	wcpnew->p->cpstart = cp;
	wcpnew->p->refcount = 1;

	/*
	 * search all completed entries for an epname whose first entry
	 * matches.  note that NULL epnames are considered valid and can be
	 * matched.
	 */
	np2 = wcpnew->oldepname;
	for (wcp = *wcproot; wcp; wcp = wcp->next) {
		ASSERT(wcp->s == WC_COMPLETE);

		np1 = wcp->oldepname;
		if ((np1 && np2 && np1->u.name.s == np2->u.name.s) ||
		    (np1 == NULL && np2 == NULL)) {
			/*
			 * if we find a match in a completed entry, set
			 * matchwc to indicate that we would like to match
			 * it.  it is necessary to do this since wildcards
			 * for each event are constructed independently.
			 */
			wcpnew->matchwc = wcp->p;

			wcp->p->refcount++;
			break;
		}
	}

	wcpnew->next = *wcproot;
	*wcproot = wcpnew;
}

static void
delete_wildcardentry(struct wildcardinfo **wcproot)
{
	struct wildcardinfo *wcp;

	wcp = *wcproot;
	*wcproot = wcp->next;

	switch (wcp->s) {
	case WC_UNDERCONSTRUCTION:
	case WC_COMPLETE:
		if (wcp->matchwc != NULL)
			wcp->matchwc->refcount--;

		ASSERT(wcp->p->refcount == 1);
		tree_free(wcp->p->ewname);
		FREE(wcp->p);
		break;

	default:
		out(O_DIE, "deletewc: invalid status");
		break;
	}

	FREE(wcp);
}
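/*
 * note on the pair above: vmatch() calls add_wildcardentry() just before
 * expanding a T_EVENT and delete_wildcardentry() right after, so the
 * wildcardinfo list always mirrors the chain of events currently being
 * expanded.  when a later event's leading pathname component matches an
 * already-completed entry, matchwc points at that entry's wildcardpath
 * and the refcount records the reference; delete_wildcardentry() asserts
 * the count has dropped back to one before freeing the path.
 */
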
/*
 * vmatch -- find the next vertical expansion match in the config database
 *
 * this routine is called with three node pointers:
 *	np -- the parse we're matching
 *	lnp -- the rest of the list we're currently working on
 *	anp -- the rest of the arrow we're currently working on
 *
 * the expansion matching happens via three types of recursion:
 *
 *	- when given an arrow, handle the left-side and then recursively
 *	  handle the right side (which might be another cascaded arrow).
 *
 *	- when handling one side of an arrow, recurse through the T_LIST
 *	  to get to each event (or just move on to the event if there
 *	  is a single event instead of a list).  since the arrow parse
 *	  trees recurse left, we actually start with the right-most
 *	  event list in the prop statement and work our way towards
 *	  the left-most event list.
 *
 *	- when handling an event, recurse down each component of the
 *	  pathname, matching in the config database and recording the
 *	  matches in the explicit iterator dictionary as we go.
 *
 * when the bottom of this matching recursion is met, meaning we have
 * set the "cp" pointers on all the names in the entire statement,
 * we call hmatch() which does its own recursion to handle horizontal
 * expansion and then calls generate() to generate nodes, bubbles, and
 * arrows in the instance tree.  generate() looks at the cp pointers to
 * see what instance numbers were matched in the configuration database.
 *
 * when horizontal expansion appears, vmatch() finds only the first match
 * and hmatch() then takes the horizontal expansion through all the other
 * matches when generating the arrows in the instance tree.
 *
 * the "infop" passed down through the recursion contains a dictionary
 * of the explicit iterators (all the implicit iterators have been converted
 * to explicit iterators when the parse tree was created by tree.c), which
 * allows things like this to work correctly:
 *
 *	prop error.a@x[n]/y/z -> error.b@x/y[n]/z -> error.c@x/y/z[n];
 *
 * during the top level call, the explicit iterator "n" will match an
 * instance number in the config database, and the result will be recorded
 * in the explicit iterator dictionary and passed down via "infop".  so
 * when the recursive call tries to match y[n] in the config database, it
 * will only match the same instance number as x[n] did since the dictionary
 * is consulted to see if "n" took on a value already.
 *
 * at any point during the recursion, match*() can return to indicate
 * a match was not found in the config database and that the caller should
 * move on to the next potential match, if any.
 *
 * constraints are completely ignored by match(), so the statement:
 *
 *	prop error.a@x[n] -> error.b@x[n] {n != 0};
 *
 * might very well match x[0] if it appears in the config database.  it
 * is the generate() routine that takes that match and then decides what
 * arrow, if any, should be generated in the instance tree.  generate()
 * looks at the explicit iterator dictionary to get values like "n" in
 * the above example so that it can evaluate constraints.
 */
static void
vmatch(struct info *infop, struct node *np, struct node *lnp,
    struct node *anp, struct wildcardinfo **wcproot)
{
	if (np == NULL) {
		if (lnp)
			vmatch(infop, lnp, NULL, anp, wcproot);
		else if (anp)
			vmatch(infop, anp, NULL, NULL, wcproot);
		else {
			struct node *src;
			struct node *dst;

			/* end of vertical match recursion */
			outfl(O_ALTFP|O_VERB3|O_NONL,
			    infop->anp->file, infop->anp->line, "vmatch: ");
			ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, infop->anp);
			out(O_ALTFP|O_VERB3, NULL);

			generate_nork(
			    itree_np2nork(infop->anp->u.arrow.nnp),
			    itree_np2nork(infop->anp->u.arrow.knp));
			dst = infop->anp->u.arrow.rhs;
			src = infop->anp->u.arrow.lhs;
			for (;;) {
				generate_new();	/* new set of arrows */
				if (src->t == T_ARROW) {
					hmatch(infop, src->u.arrow.rhs, dst);
					generate_nork(
					    itree_np2nork(src->u.arrow.nnp),
					    itree_np2nork(src->u.arrow.knp));
					dst = src->u.arrow.rhs;
					src = src->u.arrow.lhs;
				} else {
					hmatch(infop, src, dst);
					break;
				}
			}
		}
		return;
	}

	switch (np->t) {
	case T_EVENT: {
		add_wildcardentry(wcproot, config_child(infop->croot), np);
		vmatch_event(infop, config_child(infop->croot),
		    np->u.event.epname, lnp, anp, wcproot, WA_ALL);
		delete_wildcardentry(wcproot);
		break;
	}
	case T_LIST:
		ASSERT(lnp == NULL);
		vmatch(infop, np->u.expr.right, np->u.expr.left, anp, wcproot);
		break;

	case T_ARROW:
		ASSERT(lnp == NULL && anp == NULL);
		vmatch(infop, np->u.arrow.rhs, NULL, np->u.arrow.lhs, wcproot);
		break;

	default:
		outfl(O_DIE, np->file, np->line,
		    "vmatch: unexpected type: %s",
		    ptree_nodetype2str(np->t));
	}
}

static void
cp_reset(struct node *np)
{
	if (np == NULL)
		return;
	switch (np->t) {
	case T_NAME:
		np->u.name.cp = NULL;
		cp_reset(np->u.name.next);
		break;

	case T_LIST:
		cp_reset(np->u.expr.left);
		cp_reset(np->u.expr.right);
		break;

	case T_ARROW:
		cp_reset(np->u.arrow.lhs);
		cp_reset(np->u.arrow.rhs);
		break;

	case T_EVENT:
		cp_reset(np->u.event.epname);
		break;
	}
}
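/*
 * a note on cp_reset() above: the u.name.cp pointers set during
 * vmatch()/hmatch_event() are scratch state hung off the shared parse
 * tree, so itree_create() clears them after each prop statement is
 * expanded, presumably to keep stale config pointers from leaking into
 * the next expansion (or into a later itree_create() call against a
 * different config).
 */
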
/*
 * itree_create -- apply the current config to the current parse tree
 *
 * returns a lut mapping fully-instance-qualified names to struct events.
 */
struct lut *
itree_create(struct config *croot)
{
	struct lut *retval;
	struct node *propnp;

	Ninfo.lut = NULL;
	Ninfo.croot = croot;
	for (propnp = Props; propnp; propnp = propnp->u.stmt.next) {
		struct node *anp = propnp->u.stmt.np;
		struct wildcardinfo *wcproot = NULL;

		ASSERTeq(anp->t, T_ARROW, ptree_nodetype2str);

		Ninfo.anp = anp;
		Ninfo.ex = NULL;

		generate_arrownp(anp);
		vmatch(&Ninfo, anp, NULL, NULL, &wcproot);

		if (Ninfo.ex) {
			lut_free(Ninfo.ex, iterinfo_destructor, NULL);
			Ninfo.ex = NULL;
		}
		ASSERT(wcproot == NULL);
		cp_reset(anp);
	}

	retval = Ninfo.lut;
	Ninfo.lut = NULL;
	return (retval);
}

void
itree_free(struct lut *lutp)
{
	lut_free(lutp, itree_destructor, NULL);
}

int
itree_nameinstancecmp(struct node *np1, struct node *np2)
{
	int np1type = (int)np1->u.name.t;
	int np2type = (int)np2->u.name.t;
	int num1;
	int num2;

	while (np1 && np2 && np1->u.name.s == np2->u.name.s) {
		if (np1->u.name.next != NULL && np2->u.name.next != NULL) {
			if (np1->u.name.cp != NULL) {
				config_getcompname(np1->u.name.cp, NULL, &num1);
			} else {
				ASSERT(np1->u.name.child != NULL);
				ASSERT(np1->u.name.child->t == T_NUM);
				num1 = (int)np1->u.name.child->u.ull;
			}

			if (np2->u.name.cp != NULL) {
				config_getcompname(np2->u.name.cp, NULL, &num2);
			} else {
				ASSERT(np2->u.name.child != NULL);
				ASSERT(np2->u.name.child->t == T_NUM);
				num2 = (int)np2->u.name.child->u.ull;
			}

			if (num1 != num2)
				return (num1 - num2);
		}

		np1 = np1->u.name.next;
		np2 = np2->u.name.next;
	}
	if (np1 == NULL)
		if (np2 == NULL)
			return (np1type - np2type);
		else
			return (-1);
	else if (np2 == NULL)
		return (1);
	else
		return (strcmp(np1->u.name.s, np2->u.name.s));
}

void
itree_pevent_brief(int flags, struct event *ep)
{
	ASSERT(ep != NULL);
	ASSERT(ep->enode != NULL);
	ASSERT(ep->ipp != NULL);

	ipath_print(flags, ep->enode->u.event.ename->u.name.s, ep->ipp);
}

/*ARGSUSED*/
static void
itree_pevent(struct event *lhs, struct event *ep, void *arg)
{
	struct plut_wlk_data propd;
	struct bubble *bp;
	int flags = (int)arg;

	itree_pevent_brief(flags, ep);
	if (ep->t == N_EREPORT)
		out(flags, " (count %d)", ep->count);
	else
		out(flags, NULL);

	if (ep->props) {
		propd.flags = flags;
		propd.first = 1;
		out(flags, "Properties:");
		lut_walk(ep->props, ptree_plut, (void *)&propd);
	}

	for (bp = itree_next_bubble(ep, NULL); bp;
	    bp = itree_next_bubble(ep, bp)) {
		/* Print only TO bubbles in this loop */
		if (bp->t != B_TO)
			continue;
		itree_pbubble(flags, bp);
	}

	for (bp = itree_next_bubble(ep, NULL); bp;
	    bp = itree_next_bubble(ep, bp)) {
		/* Print only INHIBIT bubbles in this loop */
		if (bp->t != B_INHIBIT)
			continue;
		itree_pbubble(flags, bp);
	}

	for (bp = itree_next_bubble(ep, NULL); bp;
	    bp = itree_next_bubble(ep, bp)) {
		/* Print only FROM bubbles in this loop */
		if (bp->t != B_FROM)
			continue;
		itree_pbubble(flags, bp);
	}
}

static void
itree_pbubble(int flags, struct bubble *bp)
{
	struct constraintlist *cp;
	struct arrowlist *ap;

	ASSERT(bp != NULL);

	out(flags|O_NONL, " ");
	if (bp->mark)
		out(flags|O_NONL, "*");
	else
		out(flags|O_NONL, " ");
	if (bp->t == B_FROM)
		out(flags|O_NONL, "N=%d to:", bp->nork);
	else if (bp->t == B_TO)
		out(flags|O_NONL, "K=%d from:", bp->nork);
	else
		out(flags|O_NONL, "K=%d masked from:", bp->nork);

	if (bp->t == B_TO || bp->t == B_INHIBIT) {
		for (ap = itree_next_arrow(bp, NULL); ap;
		    ap = itree_next_arrow(bp, ap)) {
			ASSERT(ap->arrowp->head == bp);
			ASSERT(ap->arrowp->tail != NULL);
			ASSERT(ap->arrowp->tail->myevent != NULL);
			out(flags|O_NONL, " ");
			itree_pevent_brief(flags, ap->arrowp->tail->myevent);
		}
		out(flags, NULL);
		return;
	}

	for (ap = itree_next_arrow(bp, NULL); ap;
	    ap = itree_next_arrow(bp, ap)) {
		ASSERT(ap->arrowp->tail == bp);
		ASSERT(ap->arrowp->head != NULL);
		ASSERT(ap->arrowp->head->myevent != NULL);

		out(flags|O_NONL, " ");
		itree_pevent_brief(flags, ap->arrowp->head->myevent);

		out(flags|O_NONL, " ");
		ptree_timeval(flags, &ap->arrowp->mindelay);
		out(flags|O_NONL, ",");
		ptree_timeval(flags, &ap->arrowp->maxdelay);

		/* Display anything from the propagation node? */
		out(O_VERB3|O_NONL, " <%s:%d>",
		    ap->arrowp->pnode->file, ap->arrowp->pnode->line);

		if (itree_next_constraint(ap->arrowp, NULL))
			out(flags|O_NONL, " {");

		for (cp = itree_next_constraint(ap->arrowp, NULL); cp;
		    cp = itree_next_constraint(ap->arrowp, cp)) {
			ptree(flags, cp->cnode, 1, 0);
			if (itree_next_constraint(ap->arrowp, cp))
				out(flags|O_NONL, ", ");
		}

		if (itree_next_constraint(ap->arrowp, NULL))
			out(flags|O_NONL, "}");
	}
	out(flags, NULL);
}

void
itree_ptree(int flags, struct lut *itp)
{
	lut_walk(itp, (lut_cb)itree_pevent, (void *)flags);
}

/*ARGSUSED*/
static void
itree_destructor(void *left, void *right, void *arg)
{
	struct event *ep = (struct event *)right;
	struct bubble *nextbub, *bub;

	/* Free the properties */
	lut_free(ep->props, instances_destructor, NULL);

	/* Free the payload properties */
	lut_free(ep->payloadprops, payloadprops_destructor, NULL);

	/* Free my bubbles */
	for (bub = ep->bubbles; bub != NULL; ) {
		nextbub = bub->next;
		/*
		 * Free arrows if they are FROM me.  Free arrowlists on
		 * other types of bubbles (but not the attached arrows,
		 * which will be freed when we free the originating
		 * bubble).
		 */
		if (bub->t == B_FROM)
			itree_free_arrowlists(bub, 1);
		else
			itree_free_arrowlists(bub, 0);
		itree_free_bubble(bub);
		bub = nextbub;
	}

	if (ep->nvp != NULL)
		nvlist_free(ep->nvp);
	bzero(ep, sizeof (*ep));
	FREE(ep);
}

static void
itree_free_bubble(struct bubble *freeme)
{
	bzero(freeme, sizeof (*freeme));
	FREE(freeme);
}

static struct bubble *
itree_add_bubble(struct event *eventp, enum bubbletype btype, int nork, int gen)
{
	struct bubble *prev = NULL;
	struct bubble *curr;
	struct bubble *newb;

	/* Use existing bubbles as appropriate when possible */
	for (curr = eventp->bubbles;
	    curr != NULL;
	    prev = curr, curr = curr->next) {
		if (btype == B_TO && curr->t == B_TO) {
			/* see if an existing "to" bubble works for us */
			if (gen == curr->gen)
				return (curr);	/* matched gen number */
			else if (nork == 1 && curr->nork == 1) {
				curr->gen = gen;
				return (curr);	/* coalesce K==1 bubbles */
			}
		} else if (btype == B_FROM && curr->t == B_FROM) {
			/* see if an existing "from" bubble works for us */
			if ((nork == N_IS_ALL && curr->nork == N_IS_ALL) ||
			    (nork == 0 && curr->nork == 0))
				return (curr);
		}
	}

	newb = MALLOC(sizeof (struct bubble));
	newb->next = NULL;
	newb->t = btype;
	newb->myevent = eventp;
	newb->nork = nork;
	newb->mark = 0;
	newb->gen = gen;
	newb->arrows = NULL;

	if (prev == NULL)
		eventp->bubbles = newb;
	else
		prev->next = newb;

	return (newb);
}

struct bubble *
itree_next_bubble(struct event *eventp, struct bubble *last)
{
	struct bubble *next;

	for (;;) {
		if (last != NULL)
			next = last->next;
		else
			next = eventp->bubbles;

		if (next == NULL || next->arrows != NULL)
			return (next);

		/* bubble was empty, skip it */
		last = next;
	}
}

static void
add_arrow(struct bubble *bp, struct arrow *ap)
{
	struct arrowlist *prev = NULL;
	struct arrowlist *curr;
	struct arrowlist *newal;

	newal = MALLOC(sizeof (struct arrowlist));
	bzero(newal, sizeof (struct arrowlist));
	newal->arrowp = ap;

	curr = itree_next_arrow(bp, NULL);
	while (curr != NULL) {
		prev = curr;
		curr = itree_next_arrow(bp, curr);
	}

	if (prev == NULL)
		bp->arrows = newal;
	else
		prev->next = newal;
}
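/*
 * note on itree_add_arrow() below: each arrow runs from a FROM bubble
 * (its tail) to a TO or INHIBIT bubble (its head) and is linked onto the
 * arrowlists of both.  itree_set_arrow_traits() is consulted first; if
 * the constraints on either end rule the arrow out it returns 0, the
 * arrow is never linked in, and the caller (generate()) logs it as
 * "prevented by constraints".
 */
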
static struct arrow *
itree_add_arrow(struct bubble *frombubblep, struct bubble *tobubblep,
    struct node *apnode, struct node *fromevent, struct node *toevent,
    struct lut *ex)
{
	struct arrow *newa;

	ASSERTeq(frombubblep->t, B_FROM, itree_bubbletype2str);
	ASSERTinfo(tobubblep->t == B_TO || tobubblep->t == B_INHIBIT,
	    itree_bubbletype2str(tobubblep->t));
	newa = MALLOC(sizeof (struct arrow));
	bzero(newa, sizeof (struct arrow));
	newa->tail = frombubblep;
	newa->head = tobubblep;
	newa->pnode = apnode;
	newa->constraints = NULL;

	/*
	 * Set default delays, then try to re-set them from
	 * any within() constraints.
	 */
	newa->mindelay = newa->maxdelay = 0ULL;
	if (itree_set_arrow_traits(newa, fromevent, toevent, ex) == 0) {
		FREE(newa);
		return (NULL);
	}

	add_arrow(frombubblep, newa);
	add_arrow(tobubblep, newa);
	return (newa);
}

/* returns false if traits show that arrow should not be added after all */
static int
itree_set_arrow_traits(struct arrow *ap, struct node *fromev,
    struct node *toev, struct lut *ex)
{
	struct node *epnames[] = { NULL, NULL, NULL };
	struct node *newc = NULL;

	ASSERTeq(fromev->t, T_EVENT, ptree_nodetype2str);
	ASSERTeq(toev->t, T_EVENT, ptree_nodetype2str);

	/*
	 * search for the within values first on the declaration of
	 * the destination event, and then on the prop.  this allows
	 * one to specify a "default" within by putting it on the
	 * declaration, but then allow overriding on the prop statement.
	 */
	arrow_add_within(ap, toev->u.event.declp->u.stmt.np->u.event.eexprlist);
	arrow_add_within(ap, toev->u.event.eexprlist);

	/*
	 * handle any global constraints inherited from the
	 * "fromev" event's declaration
	 */
	ASSERT(fromev->u.event.declp != NULL);
	ASSERT(fromev->u.event.declp->u.stmt.np != NULL);

#ifdef	notdef
	/* XXX not quite ready to evaluate constraints from decls yet */
	if (fromev->u.event.declp->u.stmt.np->u.event.eexprlist)
		(void) itree_add_constraint(ap,
		    fromev->u.event.declp->u.stmt.np->u.event.eexprlist);
#endif	/* notdef */

	/* handle constraints on the from event in the prop statement */
	epnames[0] = fromev->u.event.epname;
	epnames[1] = toev->u.event.epname;
	if (eval_potential(fromev->u.event.eexprlist, ex, epnames, &newc) == 0)
		return (0);	/* constraint disallows arrow */

	/*
	 * handle any global constraints inherited from the
	 * "toev" event's declaration
	 */
	ASSERT(toev->u.event.declp != NULL);
	ASSERT(toev->u.event.declp->u.stmt.np != NULL);

#ifdef	notdef
	/* XXX not quite ready to evaluate constraints from decls yet */
	if (toev->u.event.declp->u.stmt.np->u.event.eexprlist)
		(void) itree_add_constraint(ap,
		    toev->u.event.declp->u.stmt.np->u.event.eexprlist);
#endif	/* notdef */

	/* handle constraints on the to event in the prop statement */
	epnames[0] = toev->u.event.epname;
	epnames[1] = fromev->u.event.epname;
	if (eval_potential(toev->u.event.eexprlist, ex, epnames, &newc) == 0)
		return (0);	/* constraint disallows arrow */

	/* if we came up with any deferred constraints, add them to arrow */
	if (newc != NULL)
		(void) itree_add_constraint(ap, iexpr(newc));

	return (1);	/* constraints allow arrow */
}
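/*
 * summary of the within() argument shapes handled by arrow_add_within()
 * below (the time values are illustrative only, not from any real rule
 * set):
 *
 *	within(10ms)		single T_TIMEVAL: mindelay 0, maxdelay 10ms
 *	within(infinity)	T_NAME: maxdelay becomes TIMEVAL_EVENTUALLY
 *	within(1s, 10s)		T_LIST: mindelay 1s, maxdelay 10s
 *	within(1s, infinity)	T_LIST: mindelay 1s, maxdelay eventually
 *
 * an unexpected argument type inside within() trips one of the O_DIE
 * paths.
 */
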
/*
 * Set within() constraint.  If the constraint were set multiple times,
 * the last one would "win".
 */
static void
arrow_add_within(struct arrow *ap, struct node *xpr)
{
	struct node *arglist;

	/* end of expressions list */
	if (xpr == NULL)
		return;

	switch (xpr->t) {
	case T_LIST:
		arrow_add_within(ap, xpr->u.expr.left);
		arrow_add_within(ap, xpr->u.expr.right);
		return;
	case T_FUNC:
		if (xpr->u.func.s != L_within)
			return;
		arglist = xpr->u.func.arglist;
		switch (arglist->t) {
		case T_TIMEVAL:
			ap->mindelay = 0;
			ap->maxdelay = arglist->u.ull;
			break;
		case T_NAME:
			ASSERT(arglist->u.name.s == L_infinity);
			ap->mindelay = 0;
			ap->maxdelay = TIMEVAL_EVENTUALLY;
			break;
		case T_LIST:
			ASSERT(arglist->u.expr.left->t == T_TIMEVAL);
			ap->mindelay = arglist->u.expr.left->u.ull;
			switch (arglist->u.expr.right->t) {
			case T_TIMEVAL:
				/* max delay comes from the second argument */
				ap->maxdelay = arglist->u.expr.right->u.ull;
				break;
			case T_NAME:
				ASSERT(arglist->u.expr.right->u.name.s ==
				    L_infinity);
				ap->maxdelay = TIMEVAL_EVENTUALLY;
				break;
			default:
				out(O_DIE, "within: unexpected 2nd arg type");
			}
			break;
		default:
			out(O_DIE, "within: unexpected 1st arg type");
		}
		break;
	default:
		return;
	}
}

static void
itree_free_arrowlists(struct bubble *bubp, int arrows_too)
{
	struct arrowlist *al, *nal;

	al = bubp->arrows;
	while (al != NULL) {
		nal = al->next;
		if (arrows_too) {
			itree_free_constraints(al->arrowp);
			bzero(al->arrowp, sizeof (struct arrow));
			FREE(al->arrowp);
		}
		bzero(al, sizeof (*al));
		FREE(al);
		al = nal;
	}
}

struct arrowlist *
itree_next_arrow(struct bubble *bubble, struct arrowlist *last)
{
	struct arrowlist *next;

	if (last != NULL)
		next = last->next;
	else
		next = bubble->arrows;
	return (next);
}

static struct constraintlist *
itree_add_constraint(struct arrow *arrowp, struct node *c)
{
	struct constraintlist *prev = NULL;
	struct constraintlist *curr;
	struct constraintlist *newc;

	for (curr = arrowp->constraints;
	    curr != NULL;
	    prev = curr, curr = curr->next);

	newc = MALLOC(sizeof (struct constraintlist));
	newc->next = NULL;
	newc->cnode = c;

	if (prev == NULL)
		arrowp->constraints = newc;
	else
		prev->next = newc;

	return (newc);
}

struct constraintlist *
itree_next_constraint(struct arrow *arrowp, struct constraintlist *last)
{
	struct constraintlist *next;

	if (last != NULL)
		next = last->next;
	else
		next = arrowp->constraints;
	return (next);
}

static void
itree_free_constraints(struct arrow *ap)
{
	struct constraintlist *cl, *ncl;

	cl = ap->constraints;
	while (cl != NULL) {
		ncl = cl->next;
		ASSERT(cl->cnode != NULL);
		if (!iexpr_cached(cl->cnode))
			tree_free(cl->cnode);
		bzero(cl, sizeof (*cl));
		FREE(cl);
		cl = ncl;
	}
}

const char *
itree_bubbletype2str(enum bubbletype t)
{
	static char buf[100];

	switch (t) {
	case B_FROM:	return L_from;
	case B_TO:	return L_to;
	case B_INHIBIT:	return L_inhibit;
	default:
		(void) sprintf(buf, "[unexpected bubbletype: %d]", t);
		return (buf);
	}
}

/*
 * itree_fini -- clean up any half-built itrees
 */
void
itree_fini(void)
{
	if (Ninfo.lut != NULL) {
		itree_free(Ninfo.lut);
		Ninfo.lut = NULL;
	}
	if (Ninfo.ex) {
		lut_free(Ninfo.ex, iterinfo_destructor, NULL);
		Ninfo.ex = NULL;
	}
}
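/*
 * typical use of this module by the rest of eft, sketched only from the
 * entry points defined above (the exact call sites live in the callers
 * and are not shown here):
 *
 *	struct lut *itp = itree_create(croot);	expand props vs. the config
 *	itree_ptree(flags, itp);		dump the resulting itree
 *	ep = itree_lookup(itp, ename, ipp);	find an instanced event
 *	itree_free(itp);			tear the whole tree down
 *	itree_fini();				clean up a partial build
 */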