/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License (the "License").
 * You may not use this file except in compliance with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2006 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 *
 * itree.c -- instance tree creation and manipulation
 *
 * this module provides the instance tree
 */

#pragma ident	"%Z%%M% %I% %E% SMI"

#include <stdio.h>
#include <ctype.h>
#include <string.h>
#include <strings.h>
#include "alloc.h"
#include "out.h"
#include "stable.h"
#include "literals.h"
#include "lut.h"
#include "tree.h"
#include "ptree.h"
#include "itree.h"
#include "ipath.h"
#include "iexpr.h"
#include "eval.h"
#include "config.h"

/*
 * struct info contains the state we keep when expanding a prop statement
 * as part of constructing the instance tree.  state kept in struct info
 * is the non-recursive stuff -- the stuff that doesn't need to be on
 * the stack.  the rest of the state that is passed between all the
 * mutually recursive functions, is required to be on the stack since
 * we need to backtrack and recurse as we do the instance tree construction.
 */
struct info {
	struct lut *lut;
	struct node *anp;	/* arrow np */
	struct lut *ex;		/* dictionary of explicit iterators */
	struct config *croot;
} Ninfo;

/*
 * struct wildcardinfo is used to track wildcarded portions of paths.
 *
 * for example, if the epname of an event is "c/d" and the path "a/b/c/d"
 * exists, the wildcard path ewname is filled in with the path "a/b".  when
 * matching is done, epname is temporarily replaced with the concatenation
 * of ewname and epname.  cpstart is set to the (struct config *)
 * corresponding to component "c".
 *
 * a linked list of these structs is used to track the expansion of each
 * event node as it is processed in vmatch() --> vmatch_event() calls.
 */
struct wildcardinfo {
	struct node *nptop;		/* event node fed to vmatch */
	struct node *oldepname;		/* epname without the wildcard part */
	enum status {
		WC_UNDEFINED,		/* struct is not yet initialized */
		WC_UNDERCONSTRUCTION,	/* wildcard path not yet done */
		WC_COMPLETE		/* wildcard path done and is in use */
	} s;
	struct wildcardpath {
		struct node *ewname;	/* wildcard path */
		struct config *cpstart;	/* starting cp node for oldepname */
		int refcount;		/* number of event nodes using this */
	} *p;
	struct wildcardpath *matchwc;	/* ptr to wc path to be matched */
	struct wildcardinfo *next;
};

static void vmatch(struct info *infop, struct node *np,
    struct node *lnp, struct node *anp, struct wildcardinfo **wcproot);
static void hmatch(struct info *infop, struct node *np, struct node *nextnp);
static void itree_pbubble(int flags, struct bubble *bp);
static void itree_destructor(void *left, void *right, void *arg);
static int itree_set_arrow_traits(struct arrow *ap, struct node *fromev,
    struct node *toev, struct lut *ex);
static void itree_free_arrowlists(struct bubble *bubp, int arrows_too);
static void arrow_add_within(struct arrow *ap, struct node *xpr);
static struct arrow *itree_add_arrow(struct bubble *frombubblep,
    struct bubble *tobubblep, struct node *apnode, struct node *fromevent,
    struct node *toevent, struct lut *ex);
static struct constraintlist *itree_add_constraint(struct arrow *arrowp,
    struct node *c);
static struct bubble *itree_add_bubble(struct event *eventp,
    enum bubbletype btype, int nork, int gen);
static void itree_free_bubble(struct bubble *freeme);
static void itree_free_constraints(struct arrow *ap);

/*
 * the following struct contains the state we build up during
 * vertical and horizontal expansion so that generate()
 * has everything it needs to construct the appropriate arrows.
 * after setting up the state by calling:
 *	generate_arrownp()
 *	generate_nork()
 *	generate_new()
 *	generate_from()
 *	generate_to()
 * the actual arrow generation is done by calling:
 *	generate()
 */
static struct {
	int generation;		/* generation number of arrow set */
	struct node *arrownp;	/* top-level parse tree for arrow */
	int n;			/* n value associated with arrow */
	int k;			/* k value associated with arrow */
	struct node *fromnp;	/* left-hand-side event in parse tree */
	struct node *tonp;	/* right-hand-side event in parse tree */
	struct event *frome;	/* left-hand-side event in instance tree */
	struct event *toe;	/* right-hand-side event in instance tree */
	struct bubble *frombp;	/* bubble arrow comes from */
	struct bubble *tobp;	/* bubble arrow goes to */
} G;

static void
generate_arrownp(struct node *arrownp)
{
	G.arrownp = arrownp;
}

static void
generate_nork(int n, int k)
{
	G.n = n;
	G.k = k;
}

static void
generate_new(void)
{
	G.generation++;
}

static void
generate_from(struct node *fromeventnp, struct event *fromevent)
{
	G.fromnp = fromeventnp;
	G.frome = fromevent;

	out(O_ALTFP|O_VERB3|O_NONL, "from bubble on ");
	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.fromnp);
	out(O_ALTFP|O_VERB3, NULL);

	G.frombp = itree_add_bubble(G.frome, B_FROM, G.n, 0);
}

static void
generate_to(struct node *toeventnp, struct event *toevent)
{
	G.tonp = toeventnp;
	G.toe = toevent;

	out(O_ALTFP|O_VERB3|O_NONL, "to bubble (gen %d) on ", G.generation);
	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.tonp);
	out(O_ALTFP|O_VERB3, NULL);

	G.tobp = itree_add_bubble(G.toe, B_TO, G.k, G.generation);
}

static void
generate(struct lut *ex)
{
	ASSERT(G.arrownp != NULL);
	ASSERT(G.fromnp != NULL);
	ASSERT(G.frome != NULL);
	ASSERT(G.frombp != NULL);
	ASSERT(G.tonp != NULL);
	ASSERT(G.toe != NULL);
	ASSERT(G.tobp != NULL);

	out(O_ALTFP|O_VERB3|O_NONL, " Arrow \"");
	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.fromnp);
	out(O_ALTFP|O_VERB3|O_NONL, "\" -> \"");
	ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, G.tonp);

	if (itree_add_arrow(G.frombp, G.tobp, G.arrownp,
	    G.fromnp, G.tonp, ex) == NULL) {
		out(O_ALTFP|O_VERB3, "\" (prevented by constraints)");
	} else {
		out(O_ALTFP|O_VERB3, "\"");
	}
}

enum childnode_action {
	CN_NONE,
	CN_INSTANTIZE,
	CN_DUP
};

static struct node *
tname_dup(struct node *namep, enum childnode_action act)
{
	struct node *retp = NULL;
	const char *file;
	int line;

	if (namep == NULL)
		return (NULL);

	file = namep->file;
	line = namep->line;

	for (; namep != NULL; namep = namep->u.name.next) {
		struct node *newnp = newnode(T_NAME, file, line);

		newnp->u.name.t = namep->u.name.t;
		newnp->u.name.s = namep->u.name.s;
		newnp->u.name.last = newnp;
		newnp->u.name.it = namep->u.name.it;
		newnp->u.name.cp = namep->u.name.cp;

		if (act == CN_DUP) {
			struct node *npc;

			npc = namep->u.name.child;
			if (npc != NULL) {
				switch (npc->t) {
				case T_NUM:
					newnp->u.name.child =
					    newnode(T_NUM, file, line);
					newnp->u.name.child->u.ull =
					    npc->u.ull;
					break;
				case T_NAME:
					newnp->u.name.child =
					    tree_name(npc->u.name.s,
					    npc->u.name.it,
					    file, line);
					break;
				default:
					out(O_DIE, "tname_dup: "
					    "invalid child type %s",
					    ptree_nodetype2str(npc->t));
				}
			}
		} else if (act == CN_INSTANTIZE) {
			newnp->u.name.child = newnode(T_NUM, file, line);

			if (namep->u.name.child == NULL ||
			    namep->u.name.child->t != T_NUM) {
				int inum;

				ASSERT(newnp->u.name.cp != NULL);
				config_getcompname(newnp->u.name.cp,
				    NULL, &inum);
				newnp->u.name.child->u.ull =
				    (unsigned long long)inum;
			} else {
				newnp->u.name.child->u.ull =
				    namep->u.name.child->u.ull;
			}
		}

		if (retp == NULL) {
			retp = newnp;
		} else {
			retp->u.name.last->u.name.next = newnp;
			retp->u.name.last = newnp;
		}
	}

	return (retp);
}

struct prop_wlk_data {
	struct lut *props;
	struct node *epname;
};

static struct lut *props2instance(struct node *, struct node *);

/*
 * let oldepname be a subset of epname.  return the subsection of epname
 * that ends with oldepname.  make each component in the path explicitly
 * instanced (i.e., with a T_NUM child).
 */
static struct node *
tname_dup_to_epname(struct node *oldepname, struct node *epname)
{
	struct node *npref, *npend, *np1, *np2;
	struct node *ret = NULL;
	int foundmatch = 0;

	if (epname == NULL)
		return (NULL);

	/*
	 * search for the longest path in epname which contains
	 * oldnode->u.event.epname.  set npend to point to just past the
	 * end of this path.
	 */
	npend = NULL;
	for (npref = epname; npref; npref = npref->u.name.next) {
		if (npref->u.name.s == oldepname->u.name.s) {
			for (np1 = npref, np2 = oldepname;
			    np1 != NULL && np2 != NULL;
			    np1 = np1->u.name.next, np2 = np2->u.name.next) {
				if (np1->u.name.s != np2->u.name.s)
					break;
			}
			if (np2 == NULL) {
				foundmatch = 1;
				npend = np1;
				if (np1 == NULL) {
					/* oldepname matched npref up to end */
					break;
				}
			}
		}
	}

	if (foundmatch == 0) {
		/*
		 * if oldepname could not be found in epname, return a
		 * duplicate of the former.  do not try to instantize
		 * oldepname since it might not be a path.
		 */
		return (tname_dup(oldepname, CN_DUP));
	}

	/*
	 * dup (epname -- npend).  all children should be T_NUMs.
	 */
	for (npref = epname;
	    !(npref == NULL || npref == npend);
	    npref = npref->u.name.next) {
		struct node *newnp = newnode(T_NAME, oldepname->file,
		    oldepname->line);

		newnp->u.name.t = npref->u.name.t;
		newnp->u.name.s = npref->u.name.s;
		newnp->u.name.last = newnp;
		newnp->u.name.it = npref->u.name.it;
		newnp->u.name.cp = npref->u.name.cp;

		newnp->u.name.child = newnode(T_NUM, oldepname->file,
		    oldepname->line);

		if (npref->u.name.child == NULL ||
		    npref->u.name.child->t != T_NUM) {
			int childnum;

			ASSERT(npref->u.name.cp != NULL);
			config_getcompname(npref->u.name.cp, NULL, &childnum);
			newnp->u.name.child->u.ull = childnum;
		} else {
			newnp->u.name.child->u.ull =
			    npref->u.name.child->u.ull;
		}

		if (ret == NULL) {
			ret = newnp;
		} else {
			ret->u.name.last->u.name.next = newnp;
			ret->u.name.last = newnp;
		}
	}

	return (ret);
}

/*
 * restriction: oldnode->u.event.epname has to be equivalent to or a subset
 * of epname
 */
static struct node *
tevent_dup_to_epname(struct node *oldnode, struct node *epname)
{
	struct node *ret;

	ret = newnode(T_EVENT, oldnode->file, oldnode->line);
	ret->u.event.ename = tname_dup(oldnode->u.event.ename, CN_NONE);
	ret->u.event.epname = tname_dup_to_epname(oldnode->u.event.epname,
	    epname);
	return (ret);
}

static void
nv_instantiate(void *name, void *val, void *arg)
{
	struct prop_wlk_data *pd = (struct prop_wlk_data *)arg;
	struct node *orhs = (struct node *)val;
	struct node *nrhs;

	/* handle engines by instantizing the entire engine */
	if (name == L_engine) {
		ASSERT(orhs->t == T_EVENT);
		ASSERT(orhs->u.event.ename->u.name.t == N_SERD);

		/* there are only SERD engines for now */

		nrhs = newnode(T_SERD, orhs->file, orhs->line);
		nrhs->u.stmt.np = tevent_dup_to_epname(orhs, pd->epname);
		nrhs->u.stmt.lutp = props2instance(orhs, pd->epname);
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		return;
	}

	switch (orhs->t) {
	case T_NUM:
		nrhs = newnode(T_NUM, orhs->file, orhs->line);
		nrhs->u.ull = orhs->u.ull;
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		break;
	case T_TIMEVAL:
		nrhs = newnode(T_TIMEVAL, orhs->file, orhs->line);
		nrhs->u.ull = orhs->u.ull;
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		break;
	case T_NAME:
		nrhs = tname_dup_to_epname(orhs, pd->epname);
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		break;
	case T_EVENT:
		nrhs = tevent_dup_to_epname(orhs, pd->epname);
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		break;
	case T_GLOBID:
		nrhs = newnode(T_GLOBID, orhs->file, orhs->line);
		nrhs->u.globid.s = orhs->u.globid.s;
		pd->props = lut_add(pd->props, name, nrhs, NULL);
		break;
	case T_FUNC:
		/* for T_FUNC, we don't duplicate it, just point to node */
		pd->props = lut_add(pd->props, name, orhs, NULL);
		break;
	default:
		out(O_DIE, "unexpected nvpair value type %s",
		    ptree_nodetype2str(((struct node *)val)->t));
	}
}

static struct lut *
props2instance(struct node *eventnp, struct node *epname)
{
	struct prop_wlk_data pd;

	pd.props = NULL;
	pd.epname = epname;

	ASSERT(eventnp->u.event.declp != NULL);
	lut_walk(eventnp->u.event.declp->u.stmt.lutp, nv_instantiate, &pd);
	return (pd.props);
}

/*ARGSUSED*/
static void
instances_destructor(void *left, void *right, void *arg)
{
	struct node *dn = (struct node *)right;

	if (dn->t == T_SERD) {
		/* we allocated the lut during itree_create(), so free it */
		lut_free(dn->u.stmt.lutp, instances_destructor, NULL);
		dn->u.stmt.lutp = NULL;
	}
	if (dn->t != T_FUNC)	/* T_FUNC pointed to original node */
		tree_free(dn);
}

/*ARGSUSED*/
static void
payloadprops_destructor(void *left, void *right, void *arg)
{
	FREE(right);
}

/*
 * event_cmp -- used via lut_lookup/lut_add on instance tree lut
 */
static int
event_cmp(struct event *ep1, struct event *ep2)
{
	int diff;

	if ((diff = ep2->enode->u.event.ename->u.name.s -
	    ep1->enode->u.event.ename->u.name.s) != 0)
		return (diff);
	if ((diff = (char *)ep2->ipp - (char *)ep1->ipp) != 0)
		return (diff);
	return (0);
}

struct event *
itree_lookup(struct lut *itp, const char *ename, const struct ipath *ipp)
{
	struct event searchevent;	/* just used for searching */
	struct node searcheventnode;
	struct node searchenamenode;

	searchevent.enode = &searcheventnode;
	searcheventnode.t = T_EVENT;
	searcheventnode.u.event.ename = &searchenamenode;
	searchenamenode.t = T_NAME;
	searchenamenode.u.name.s = ename;
	searchevent.ipp = ipp;
	return (lut_lookup(itp, (void *)&searchevent, (lut_cmp)event_cmp));
}

static struct event *
find_or_add_event(struct info *infop, struct node *np)
{
	struct event *ret;
	struct event searchevent;	/* just used for searching */

	ASSERTeq(np->t, T_EVENT, ptree_nodetype2str);

	searchevent.enode = np;
	searchevent.ipp = ipath(np->u.event.epname);
	if ((ret = lut_lookup(infop->lut, (void *)&searchevent,
	    (lut_cmp)event_cmp)) != NULL)
		return (ret);

	/* wasn't already in tree, allocate it */
	ret = MALLOC(sizeof (*ret));
	bzero(ret, sizeof (*ret));

	ret->t = np->u.event.ename->u.name.t;
	ret->enode = np;
	ret->ipp = searchevent.ipp;
	ret->props = props2instance(np, np->u.event.epname);

	infop->lut = lut_add(infop->lut, (void *)ret, (void *)ret,
	    (lut_cmp)event_cmp);

	return (ret);
}

/*
 * hmatch_event -- perform any appropriate horizontal expansion on an event
 *
 * this routine is used to perform horizontal expansion on both the
 * left-hand-side events in a prop, and the right-hand-side events.
 * when called to handle a left-side event, nextnp points to the right
 * side of the prop that should be passed to hmatch() for each match
 * found by horizontal expansion.  when no horizontal expansion exists,
 * we will still "match" one event for every event found in the list on
 * the left-hand-side of the prop because vmatch() already found that
 * there's at least one match during vertical expansion.
 */
static void
hmatch_event(struct info *infop, struct node *eventnp, struct node *epname,
    struct config *ncp, struct node *nextnp, int rematch)
{
	if (epname == NULL) {
		/*
		 * end of pathname recursion, either we just located
		 * a left-hand-side event and we're ready to move on
		 * to expanding the right-hand-side events, or
		 * we're further down the recursion and we just located
		 * a right-hand-side event.  the passed-in parameter
		 * "nextnp" tells us whether we're working on the left
		 * side and need to move on to nextnp, or if nextnp is
		 * NULL, we're working on the right side.
		 */
		if (nextnp) {
			/*
			 * finished a left side expansion, move on to right.
			 * tell generate() what event we just matched so
			 * it can be used at the source of any arrows
			 * we generate as we match events on the right side.
			 */
			generate_from(eventnp,
			    find_or_add_event(infop, eventnp));
			hmatch(infop, nextnp, NULL);
		} else {
			/*
			 * finished a right side expansion.  tell generate
			 * the information about the destination and let
			 * it construct the arrows as appropriate.
			 */
			generate_to(eventnp,
			    find_or_add_event(infop, eventnp));
			generate(infop->ex);
		}

		return;
	}

	ASSERTeq(epname->t, T_NAME, ptree_nodetype2str);

	/*
	 * we only get here when eventnp already has a completely
	 * instanced epname in it.  so we first recurse
	 * down to the end of the name and as the recursion pops
	 * up, we look for opportunities to advance horizontal
	 * expansions on to the next match.  when we do advance
	 * horizontal expansions, we potentially render all cp
	 * pointers on all components to the right as invalid,
	 * so we pass in an "ncp" config handle so matching against
	 * the config can happen.
	 */
	if (rematch) {
		struct config *ocp = epname->u.name.cp;
		char *ncp_s;
		int ncp_num, num;

		for (; ncp; ncp = config_next(ncp)) {
			config_getcompname(ncp, &ncp_s, &ncp_num);

			if (ncp_s == epname->u.name.s) {
				/* found a matching component name */
				config_getcompname(epname->u.name.cp,
				    NULL, &num);

				if (epname->u.name.it != IT_HORIZONTAL &&
				    ncp_num != num)
					continue;

				epname->u.name.cp = ncp;
				hmatch_event(infop, eventnp,
				    epname->u.name.next, config_child(ncp),
				    nextnp, 1);
			}
		}

		epname->u.name.cp = ocp;

		return;	/* no more config to match against */

	} else {
		hmatch_event(infop, eventnp, epname->u.name.next, ncp,
		    nextnp, 0);
	}

	if (epname->u.name.it == IT_HORIZONTAL) {
		struct config *cp;
		struct config *ocp = epname->u.name.cp;
		char *cp_s;
		int cp_num;
		int ocp_num;
		struct iterinfo *iterinfop = NULL;
		const char *iters;

		config_getcompname(ocp, NULL, &ocp_num);

		for (cp = config_next(ocp); cp; cp = config_next(cp)) {
			config_getcompname(cp, &cp_s, &cp_num);

			if (cp_s == epname->u.name.s) {
				ASSERT(epname->u.name.child != NULL);

				iters = epname->u.name.child->u.name.s;
				if ((iterinfop = lut_lookup(infop->ex,
				    (void *)iters, NULL)) == NULL) {
					out(O_DIE,
					    "hmatch_event: internal error: "
					    "iterator \"%s\" undefined", iters);
				} else {
					/* advance dict entry to next match */
					iterinfop->num = cp_num;
				}
				epname->u.name.cp = cp;
				hmatch_event(infop, eventnp,
				    epname->u.name.next, config_child(cp),
				    nextnp, 1);
			}
		}

		if (iterinfop != NULL) {
			/* restore dict entry */
			iterinfop->num = ocp_num;
		}
		epname->u.name.cp = ocp;
	}
}

/*
 * hmatch -- check for horizontal expansion matches
 *
 * np points to the things we're matching (like a T_LIST or a T_EVENT)
 * and if we're working on a left-side of a prop, nextnp points to
 * the other side of the prop that we'll tackle next when this recursion
 * bottoms out.  when all the events in the entire prop arrow have been
 * horizontally expanded, generate() will be called to generate the
 * actual arrow.
 */
static void
hmatch(struct info *infop, struct node *np, struct node *nextnp)
{
	if (np == NULL)
		return;		/* all done */

	/*
	 * for each item in the list of events (which could just
	 * be a single event, or it could get larger in the loop
	 * below due to horizontal expansion), call hmatch on
	 * the right side and create arrows to each element.
	 */

	switch (np->t) {
	case T_LIST:
		/* loop through the list */
		if (np->u.expr.left)
			hmatch(infop, np->u.expr.left, nextnp);
		if (np->u.expr.right)
			hmatch(infop, np->u.expr.right, nextnp);
		break;

	case T_EVENT:
		hmatch_event(infop, np, np->u.event.epname,
		    NULL, nextnp, 0);
		break;

	default:
		outfl(O_DIE, np->file, np->line,
		    "hmatch: unexpected type: %s",
		    ptree_nodetype2str(np->t));
	}
}

static int
itree_np2nork(struct node *norknp)
{
	if (norknp == NULL)
		return (1);
	else if (norknp->t == T_NAME && norknp->u.name.s == L_A)
		return (-1);	/* our internal version of "all" */
	else if (norknp->t == T_NUM)
		return ((int)norknp->u.ull);
	else
		out(O_DIE, norknp->file, norknp->line,
		    "itree_np2nork: internal error type %s",
		    ptree_nodetype2str(norknp->t));
	/*NOTREACHED*/
	return (1);
}

static struct iterinfo *
newiterinfo(int num, struct node *np)
{
	struct iterinfo *ret = MALLOC(sizeof (*ret));

	ret->num = num;
	ret->np = np;

	return (ret);
}

/*ARGSUSED*/
static void
iterinfo_destructor(void *left, void *right, void *arg)
{
	struct iterinfo *iterinfop = (struct iterinfo *)right;

	bzero(iterinfop, sizeof (*iterinfop));
	FREE(iterinfop);
}

/*
 * return 1 if wildcard path for wcp matches another wildcard path;
 * return 0 otherwise.
 */
static int
wc_paths_match(struct wildcardinfo *wcp)
{
	struct node *np1, *np2;

	ASSERT(wcp->matchwc != NULL);

	for (np1 = wcp->p->ewname, np2 = wcp->matchwc->ewname;
	    np1 != NULL && np2 != NULL;
	    np1 = np1->u.name.next, np2 = np2->u.name.next) {
		/*
		 * names must match
		 */
		if (np1->u.name.s != np2->u.name.s)
			return (0);

		/*
		 * children must exist and have the same numerical value
		 */
		if (np1->u.name.child == NULL || np2->u.name.child == NULL)
			return (0);

		if (np1->u.name.child->t != T_NUM ||
		    np2->u.name.child->t != T_NUM)
			return (0);

		if (np1->u.name.child->u.ull != np2->u.name.child->u.ull)
			return (0);
	}

	/*
	 * return true only if we have matches for all entries of n1 and
	 * n2.  note that NULL wildcard paths (i.e., both wcp->p->ewname
	 * and wcp->matchwc->ewname are NULL) will be considered as
	 * matching paths.
	 */
	if (np1 == NULL && np2 == NULL)
		return (1);

	return (0);
}

/*
 * update epname to include the wildcarded portion
 */
static void
create_wildcardedpath(struct wildcardinfo **wcproot)
{
	struct wildcardinfo *wcp;
	struct node *nptop;

	wcp = *wcproot;

	if (wcp->s == WC_UNDERCONSTRUCTION) {
		ASSERT(wcp->p->refcount == 1);
		wcp->s = WC_COMPLETE;
	}

	/* path has no wildcard */
	if (wcp->p->ewname == NULL)
		return;

	/*
	 * get to this point if a wildcard portion of the path exists.
	 *
	 * first set oldepname to the start of the existing epname for use
	 * in future comparisons, then update epname to include the
	 * wildcard portion.
	 */
	nptop = wcp->nptop;

	ASSERT(wcp->oldepname == nptop->u.event.epname);

	nptop->u.event.epname = tname_dup(wcp->p->ewname, CN_DUP);
	nptop->u.event.epname = tree_name_append(nptop->u.event.epname,
	    tname_dup(wcp->oldepname, CN_DUP));
}

/*
 * restore epname to its former (nonwildcarded) state
 */
static void
undo_wildcardedpath(struct wildcardinfo **wcproot)
{
	struct wildcardinfo *wcp;

	wcp = *wcproot;

	if (wcp->s == WC_COMPLETE) {
		ASSERT(wcp->p->refcount == 1);
		wcp->s = WC_UNDERCONSTRUCTION;
	}

	/* path has no wildcard */
	if (wcp->p->ewname == NULL)
		return;

	ASSERT(wcp->oldepname != NULL);

	tree_free(wcp->nptop->u.event.epname);
	wcp->nptop->u.event.epname = wcp->oldepname;
}

enum wildcard_action {
	WA_NONE,	/* do not do any wildcards */
	WA_SINGLE,	/* do wildcard only for current cp node */
	WA_ALL		/* do wildcards for all cp nodes */
};

static void
vmatch_event(struct info *infop, struct config *cp, struct node *np,
    struct node *lnp, struct node *anp,
    struct wildcardinfo **wcproot, enum wildcard_action dowildcard)
{
	struct wildcardinfo *wcp;
	char *cp_s;
	int cp_num;

	wcp = *wcproot;

	if ((np == NULL && wcp->oldepname != NULL) ||
	    (cp == NULL && wcp->oldepname == NULL)) {
		/*
		 * get to this point if the pathname matched the config
		 * (but not necessarily a match at the end).  first check
		 * for any matching wildcard paths.
		 */
		if (wcp->matchwc != NULL && wc_paths_match(wcp) == 0)
			return;

		create_wildcardedpath(wcproot);
		vmatch(infop, np, lnp, anp, wcproot);
		undo_wildcardedpath(wcproot);

		return;
	}

	if (cp == NULL)
		return;	/* no more config to match against */

	for (; cp; cp = config_next(cp)) {
		config_getcompname(cp, &cp_s, &cp_num);

		if (cp_s == np->u.name.s &&
		    !(wcp->s == WC_UNDERCONSTRUCTION &&
		    dowildcard == WA_SINGLE)) {
			/* found a matching component name */
			if (np->u.name.child &&
			    np->u.name.child->t == T_NUM) {
				/*
				 * an explicit instance number was given
				 * in the source.  so only consider this
				 * a configuration match if the number
				 * also matches.
				 */
				if (cp_num != np->u.name.child->u.ull)
					continue;

				np->u.name.cp = cp;
			} else {
				struct iterinfo *iterinfop;
				const char *iters;

				/*
				 * vertical iterator.  look it up in
				 * the appropriate lut and if we get
				 * back a value it is either one that we
				 * set earlier, in which case we record
				 * the new value for this iteration and
				 * keep matching, or it is one that was
				 * set by an earlier reference to the
				 * iterator, in which case we only consider
				 * this a configuration match if the number
				 * matches cp_num.
				 */

				ASSERT(np->u.name.child != NULL);
				ASSERT(np->u.name.child->t == T_NAME);
				iters = np->u.name.child->u.name.s;

				if ((iterinfop = lut_lookup(infop->ex,
				    (void *)iters, NULL)) == NULL) {
					/* we're the first use, record our np */
					infop->ex = lut_add(infop->ex,
					    (void *)iters,
					    newiterinfo(cp_num, np), NULL);
				} else if (np == iterinfop->np) {
					/*
					 * we're the first use, back again
					 * for another iteration.  so update
					 * the num bound to this iterator in
					 * the lut.
					 */
					iterinfop->num = cp_num;
				} else if (cp_num != iterinfop->num) {
					/*
					 * an earlier reference to this
					 * iterator bound it to a different
					 * instance number, so there's no
					 * match here after all.
					 *
					 * however, it's possible that this
					 * component should really be part of
					 * the wildcard.  we explore this by
					 * forcing this component into the
					 * wildcarded section.
					 *
					 * for more details of what's
					 * going to happen now, see the
					 * comment block below entitled
					 * "forcing components into
					 * wildcard path".
					 */
					if (dowildcard == WA_ALL &&
					    wcp->s == WC_UNDERCONSTRUCTION) {
						vmatch_event(infop, cp, np,
						    lnp, anp, wcproot,
						    WA_SINGLE);
					}
					continue;
				}
				np->u.name.cp = cp;
			}

			/*
			 * if wildcarding was done in a call earlier in the
			 * stack, record the current cp as the first
			 * matching and nonwildcarded cp.
			 */
			if (dowildcard == WA_ALL &&
			    wcp->s == WC_UNDERCONSTRUCTION)
				wcp->p->cpstart = cp;

			/*
			 * if this was an IT_HORIZONTAL name,
			 * hmatch() will use the cp to expand
			 * all matches horizontally into a list.
			 * we know the list will contain at least
			 * one element (the one we just matched),
			 * so we just store cp and let hmatch_event()
			 * do the rest.
			 *
			 * recurse on to next component.  note that
			 * wildcarding is now turned off.
			 */
			vmatch_event(infop, config_child(cp), np->u.name.next,
			    lnp, anp, wcproot, WA_NONE);

			/*
			 * forcing components into wildcard path:
			 *
			 * if this component is the first match, force it
			 * to be part of the wildcarded path and see if we
			 * can get additional matches.  repeat call to
			 * vmatch_event() with the same np, making sure
			 * wildcarding is forced for this component alone
			 * and not its peers by specifying vmatch_event(
			 * ..., WA_SINGLE).  in other words, in the call to
			 * vmatch_event() below, there should be no loop
			 * over cp's peers since that is being done in the
			 * current loop [i.e., the loop we're in now].
			 *
			 * here's an example.  suppose we have the
			 * definition
			 *	event foo@x/y
			 * and configuration
			 *	a0/x0/y0/a1/x1/y1
			 *
			 * the code up to this point will treat "a0" as the
			 * wildcarded part of the path and "x0/y0" as the
			 * nonwildcarded part, resulting in the instanced
			 * event
			 *	foo@a0/x0/y0
			 *
			 * in order to discover the next match (.../x1/y1)
			 * in the configuration we have to force "x0" into
			 * the wildcarded part of the path.  the following
			 * call to vmatch_event(..., WA_SINGLE) does this.
			 * by doing so, we discover the wildcarded part
			 * "a0/x0/y0/a1" and the nonwildcarded part "x1/y1"
			 *
			 * the following call to vmatch_event() is also
			 * needed to properly handle the configuration
			 *	b0/x0/b1/x1/y1
			 *
			 * the recursions into vmatch_event() will start
			 * off uncovering "b0" as the wildcarded part and
			 * "x0" as the start of the nonwildcarded path.
			 * however, the next recursion will not result in a
			 * match since there is no "y" following "x0".  the
			 * subsequent match of (wildcard = "b0/x0/b1" and
			 * nonwildcard = "x1/y1") will be discovered only
			 * if "x0" is forced to be a part of the wildcarded
			 * path.
			 */
			if (dowildcard == WA_ALL &&
			    wcp->s == WC_UNDERCONSTRUCTION) {
				vmatch_event(infop, cp, np, lnp, anp,
				    wcproot, WA_SINGLE);
			}

			if (np->u.name.it == IT_HORIZONTAL) {
				/*
				 * hmatch() finished iterating through
				 * the configuration as described above, so
				 * don't continue iterating here.
				 */
				return;
			}

		} else if ((dowildcard == WA_SINGLE || dowildcard == WA_ALL) &&
		    wcp->s == WC_UNDERCONSTRUCTION) {
			/*
			 * no matching cp, and we are constructing our own
			 * wildcard path.  (in other words, we are not
			 * referencing a wildcard path created for an
			 * earlier event.)
			 *
			 * add wildcard entry, then recurse on to config
			 * child
			 */
			struct node *cpnode, *prevlast;

			cpnode = tree_name(cp_s, IT_NONE, NULL, 0);
			cpnode->u.name.child = newnode(T_NUM, NULL, 0);
			cpnode->u.name.child->u.ull = cp_num;
			cpnode->u.name.cp = cp;

			if (wcp->p->ewname == NULL) {
				prevlast = NULL;
				wcp->p->ewname = cpnode;
			} else {
				prevlast = wcp->p->ewname->u.name.last;
				wcp->p->ewname =
				    tree_name_append(wcp->p->ewname,
				    cpnode);
			}

			vmatch_event(infop, config_child(cp), np, lnp, anp,
			    wcproot, WA_ALL);

			/*
			 * back out last addition to ewname and continue
			 * with loop
			 */
			tree_free(cpnode);
			if (prevlast == NULL) {
				wcp->p->ewname = NULL;
			} else {
				prevlast->u.name.next = NULL;
				wcp->p->ewname->u.name.last = prevlast;
			}

			/*
			 * return if wildcarding is done only for this cp
			 */
			if (dowildcard == WA_SINGLE)
				return;
		}
	}
}

/*
 * for the event node np, which will be subjected to pathname
 * expansion/matching, create a (struct wildcardinfo) to hold wildcard
 * information.  this struct will be inserted into the first location in
 * the list that starts with *wcproot.
 *
 * cp is the starting node of the configuration; cpstart, which is output,
 * is the starting node of the nonwildcarded portion of the path.
 */
static void
add_wildcardentry(struct wildcardinfo **wcproot, struct config *cp,
    struct node *np)
{
	struct wildcardinfo *wcpnew, *wcp;
	struct node *np1, *np2;

	/*
	 * create entry for np
	 */
	wcpnew = MALLOC(sizeof (struct wildcardinfo));
	bzero(wcpnew, sizeof (struct wildcardinfo));
	wcpnew->nptop = np;
	wcpnew->oldepname = np->u.event.epname;
	wcpnew->s = WC_UNDERCONSTRUCTION;

	wcpnew->p = MALLOC(sizeof (struct wildcardpath));
	bzero(wcpnew->p, sizeof (struct wildcardpath));
	wcpnew->p->cpstart = cp;
	wcpnew->p->refcount = 1;

	/*
	 * search all completed entries for an epname whose first entry
	 * matches.  note that NULL epnames are considered valid and can be
	 * matched.
	 */
	np2 = wcpnew->oldepname;
	for (wcp = *wcproot; wcp; wcp = wcp->next) {
		ASSERT(wcp->s == WC_COMPLETE);

		np1 = wcp->oldepname;
		if ((np1 && np2 && np1->u.name.s == np2->u.name.s) ||
		    (np1 == NULL && np2 == NULL)) {
			/*
			 * if we find a match in a completed entry, set
			 * matchwc to indicate that we would like to match
			 * it.  it is necessary to do this since wildcards
			 * for each event are constructed independently.
			 */
			wcpnew->matchwc = wcp->p;

			wcp->p->refcount++;
			break;
		}
	}

	wcpnew->next = *wcproot;
	*wcproot = wcpnew;
}

static void
delete_wildcardentry(struct wildcardinfo **wcproot)
{
	struct wildcardinfo *wcp;

	wcp = *wcproot;
	*wcproot = wcp->next;

	switch (wcp->s) {
	case WC_UNDERCONSTRUCTION:
	case WC_COMPLETE:
		if (wcp->matchwc != NULL)
			wcp->matchwc->refcount--;

		ASSERT(wcp->p->refcount == 1);
		tree_free(wcp->p->ewname);
		FREE(wcp->p);
		break;

	default:
		out(O_DIE, "deletewc: invalid status");
		break;
	}

	FREE(wcp);
}

/*
 * vmatch -- find the next vertical expansion match in the config database
 *
 * this routine is called with three node pointers:
 *	np -- the parse we're matching
 *	lnp -- the rest of the list we're currently working on
 *	anp -- the rest of the arrow we're currently working on
 *
 * the expansion matching happens via three types of recursion:
 *
 *	- when given an arrow, handle the left-side and then recursively
 *	  handle the right side (which might be another cascaded arrow).
 *
 *	- when handling one side of an arrow, recurse through the T_LIST
 *	  to get to each event (or just move on to the event if there
 *	  is a single event instead of a list).  since the arrow parse
 *	  trees recurse left, we actually start with the right-most
 *	  event list in the prop statement and work our way towards
 *	  the left-most event list.
 *
 *	- when handling an event, recurse down each component of the
 *	  pathname, matching in the config database and recording the
 *	  matches in the explicit iterator dictionary as we go.
 *
 * when the bottom of this matching recursion is met, meaning we have
 * set the "cp" pointers on all the names in the entire statement,
 * we call hmatch() which does its own recursion to handle horizontal
 * expansion and then call generate() to generate nodes, bubbles, and
 * arrows in the instance tree.  generate() looks at the cp pointers to
 * see what instance numbers were matched in the configuration database.
 *
 * when horizontal expansion appears, vmatch() finds only the first match
 * and hmatch() then takes the horizontal expansion through all the other
 * matches when generating the arrows in the instance tree.
 *
 * the "infop" passed down through the recursion contains a dictionary
 * of the explicit iterators (all the implicit iterators have been converted
 * to explicit iterators when the parse tree was created by tree.c), which
 * allows things like this to work correctly:
 *
 *	prop error.a@x[n]/y/z -> error.b@x/y[n]/z -> error.c@x/y/z[n];
 *
 * during the top level call, the explicit iterator "n" will match an
 * instance number in the config database, and the result will be recorded
 * in the explicit iterator dictionary and passed down via "infop".  so
 * when the recursive call tries to match y[n] in the config database, it
 * will only match the same instance number as x[n] did since the dictionary
 * is consulted to see if "n" took on a value already.
 *
 * at any point during the recursion, match*() can return to indicate
 * a match was not found in the config database and that the caller should
 * move on to the next potential match, if any.
 *
 * constraints are completely ignored by match(), so the statement:
 *
 *	prop error.a@x[n] -> error.b@x[n] {n != 0};
 *
 * might very well match x[0] if it appears in the config database.  it
 * is the generate() routine that takes that match and then decides what
 * arrow, if any, should be generated in the instance tree.  generate()
 * looks at the explicit iterator dictionary to get values like "n" in
 * the above example so that it can evaluate constraints.
 *
 */
static void
vmatch(struct info *infop, struct node *np, struct node *lnp,
    struct node *anp, struct wildcardinfo **wcproot)
{
	if (np == NULL) {
		if (lnp)
			vmatch(infop, lnp, NULL, anp, wcproot);
		else if (anp)
			vmatch(infop, anp, NULL, NULL, wcproot);
		else {
			struct node *src;
			struct node *dst;

			/* end of vertical match recursion */
			outfl(O_ALTFP|O_VERB3|O_NONL,
			    infop->anp->file, infop->anp->line, "vmatch: ");
			ptree_name_iter(O_ALTFP|O_VERB3|O_NONL, infop->anp);
			out(O_ALTFP|O_VERB3, NULL);

			generate_nork(
			    itree_np2nork(infop->anp->u.arrow.nnp),
			    itree_np2nork(infop->anp->u.arrow.knp));
			dst = infop->anp->u.arrow.rhs;
			src = infop->anp->u.arrow.lhs;
			for (;;) {
				generate_new();	/* new set of arrows */
				if (src->t == T_ARROW) {
					hmatch(infop, src->u.arrow.rhs, dst);
					generate_nork(
					    itree_np2nork(src->u.arrow.nnp),
					    itree_np2nork(src->u.arrow.knp));
					dst = src->u.arrow.rhs;
					src = src->u.arrow.lhs;
				} else {
					hmatch(infop, src, dst);
					break;
				}
			}
		}
		return;
	}

	switch (np->t) {
	case T_EVENT: {
		add_wildcardentry(wcproot, config_child(infop->croot), np);
		vmatch_event(infop, config_child(infop->croot),
		    np->u.event.epname, lnp, anp, wcproot, WA_ALL);
		delete_wildcardentry(wcproot);
		break;
	}
	case T_LIST:
		ASSERT(lnp == NULL);
		vmatch(infop, np->u.expr.right, np->u.expr.left, anp, wcproot);
		break;

	case T_ARROW:
		ASSERT(lnp == NULL && anp == NULL);
		vmatch(infop, np->u.arrow.rhs, NULL, np->u.arrow.lhs, wcproot);
		break;

	default:
		outfl(O_DIE, np->file, np->line,
		    "vmatch: unexpected type: %s",
		    ptree_nodetype2str(np->t));
	}
}

static void
cp_reset(struct node *np)
{
	if (np == NULL)
		return;
	switch (np->t) {
	case T_NAME:
		np->u.name.cp = NULL;
		cp_reset(np->u.name.next);
		break;

	case T_LIST:
		cp_reset(np->u.expr.left);
		cp_reset(np->u.expr.right);
		break;

	case T_ARROW:
		cp_reset(np->u.arrow.lhs);
		cp_reset(np->u.arrow.rhs);
		break;

	case T_EVENT:
		cp_reset(np->u.event.epname);
		break;
	}
}

/*
 * itree_create -- apply the current config to the current parse tree
 *
 * returns a lut mapping fully-instance-qualified names to struct events.
 *
 */
struct lut *
itree_create(struct config *croot)
{
	struct lut *retval;
	struct node *propnp;

	Ninfo.lut = NULL;
	Ninfo.croot = croot;
	for (propnp = Props; propnp; propnp = propnp->u.stmt.next) {
		struct node *anp = propnp->u.stmt.np;
		struct wildcardinfo *wcproot = NULL;

		ASSERTeq(anp->t, T_ARROW, ptree_nodetype2str);

		Ninfo.anp = anp;
		Ninfo.ex = NULL;

		generate_arrownp(anp);
		vmatch(&Ninfo, anp, NULL, NULL, &wcproot);

		if (Ninfo.ex) {
			lut_free(Ninfo.ex, iterinfo_destructor, NULL);
			Ninfo.ex = NULL;
		}
		ASSERT(wcproot == NULL);
		cp_reset(anp);
	}

	retval = Ninfo.lut;
	Ninfo.lut = NULL;
	return (retval);
}

void
itree_free(struct lut *lutp)
{
	lut_free(lutp, itree_destructor, NULL);
}

int
itree_nameinstancecmp(struct node *np1, struct node *np2)
{
	int np1type = (int)np1->u.name.t;
	int np2type = (int)np2->u.name.t;
	int num1;
	int num2;

	while (np1 && np2 && np1->u.name.s == np2->u.name.s) {
		if (np1->u.name.next != NULL && np2->u.name.next != NULL) {
			if (np1->u.name.cp != NULL) {
				config_getcompname(np1->u.name.cp, NULL, &num1);
			} else {
				ASSERT(np1->u.name.child != NULL);
				ASSERT(np1->u.name.child->t == T_NUM);
				num1 = (int)np1->u.name.child->u.ull;
			}

			if (np2->u.name.cp != NULL) {
				config_getcompname(np2->u.name.cp, NULL, &num2);
			} else {
				ASSERT(np2->u.name.child != NULL);
				ASSERT(np2->u.name.child->t == T_NUM);
				num2 = (int)np2->u.name.child->u.ull;
			}

			if (num1 != num2)
				return (num1 - num2);
		}

		np1 = np1->u.name.next;
		np2 = np2->u.name.next;
	}
	if (np1 == NULL)
		if (np2 == NULL)
			return (np1type - np2type);
		else
			return (-1);
	else if (np2 == NULL)
		return (1);
	else
		return (strcmp(np1->u.name.s, np2->u.name.s));
}

void
itree_pevent_brief(int flags, struct event *ep)
{
	ASSERT(ep != NULL);
	ASSERT(ep->enode != NULL);
	ASSERT(ep->ipp != NULL);

	ipath_print(flags, ep->enode->u.event.ename->u.name.s, ep->ipp);
}

/*ARGSUSED*/
static void
itree_pevent(struct event *lhs, struct event *ep, void *arg)
{
	struct plut_wlk_data propd;
	struct bubble *bp;
	int flags = (int)arg;

	itree_pevent_brief(flags, ep);
	if (ep->t == N_EREPORT)
		out(flags, " (count %d)", ep->count);
	else
		out(flags, NULL);

	if (ep->props) {
		propd.flags = flags;
		propd.first = 1;
		out(flags, "Properties:");
		lut_walk(ep->props, ptree_plut, (void *)&propd);
	}

	for (bp = itree_next_bubble(ep, NULL); bp;
	    bp = itree_next_bubble(ep, bp)) {
		/* Print only TO bubbles in this loop */
		if (bp->t != B_TO)
			continue;
		itree_pbubble(flags, bp);
	}

	for (bp = itree_next_bubble(ep, NULL); bp;
	    bp = itree_next_bubble(ep, bp)) {
		/* Print only INHIBIT bubbles in this loop */
		if (bp->t != B_INHIBIT)
			continue;
		itree_pbubble(flags, bp);
	}

	for (bp = itree_next_bubble(ep, NULL); bp;
	    bp = itree_next_bubble(ep, bp)) {
		/* Print only FROM bubbles in this loop */
		if (bp->t != B_FROM)
			continue;
		itree_pbubble(flags, bp);
	}
}

static void
itree_pbubble(int flags, struct bubble *bp)
{
	struct constraintlist *cp;
	struct arrowlist *ap;

	ASSERT(bp != NULL);

	out(flags|O_NONL, " ");
	if (bp->mark)
		out(flags|O_NONL, "*");
	else
		out(flags|O_NONL, " ");
	if (bp->t == B_FROM)
		out(flags|O_NONL, "N=%d to:", bp->nork);
	else if (bp->t == B_TO)
		out(flags|O_NONL, "K=%d from:", bp->nork);
	else
		out(flags|O_NONL, "K=%d masked from:", bp->nork);

	if (bp->t == B_TO || bp->t == B_INHIBIT) {
		for (ap = itree_next_arrow(bp, NULL); ap;
		    ap = itree_next_arrow(bp, ap)) {
			ASSERT(ap->arrowp->head == bp);
			ASSERT(ap->arrowp->tail != NULL);
			ASSERT(ap->arrowp->tail->myevent != NULL);
			out(flags|O_NONL, " ");
			itree_pevent_brief(flags, ap->arrowp->tail->myevent);
		}
		out(flags, NULL);
		return;
	}

	for (ap = itree_next_arrow(bp, NULL); ap;
	    ap = itree_next_arrow(bp, ap)) {
		ASSERT(ap->arrowp->tail == bp);
		ASSERT(ap->arrowp->head != NULL);
		ASSERT(ap->arrowp->head->myevent != NULL);

		out(flags|O_NONL, " ");
		itree_pevent_brief(flags, ap->arrowp->head->myevent);

		out(flags|O_NONL, " ");
		ptree_timeval(flags, &ap->arrowp->mindelay);
		out(flags|O_NONL, ",");
		ptree_timeval(flags, &ap->arrowp->maxdelay);

		/* Display anything from the propagation node? */
		out(O_VERB3|O_NONL, " <%s:%d>",
		    ap->arrowp->pnode->file, ap->arrowp->pnode->line);

		if (itree_next_constraint(ap->arrowp, NULL))
			out(flags|O_NONL, " {");

		for (cp = itree_next_constraint(ap->arrowp, NULL); cp;
		    cp = itree_next_constraint(ap->arrowp, cp)) {
			ptree(flags, cp->cnode, 1, 0);
			if (itree_next_constraint(ap->arrowp, cp))
				out(flags|O_NONL, ", ");
		}

		if (itree_next_constraint(ap->arrowp, NULL))
			out(flags|O_NONL, "}");
	}
	out(flags, NULL);
}

void
itree_ptree(int flags, struct lut *itp)
{
	lut_walk(itp, (lut_cb)itree_pevent, (void *)flags);
}

/*ARGSUSED*/
static void
itree_destructor(void *left, void *right, void *arg)
{
	struct event *ep = (struct event *)right;
	struct bubble *nextbub, *bub;

	/* Free the properties */
	lut_free(ep->props, instances_destructor, NULL);

	/* Free the payload properties */
	lut_free(ep->payloadprops, payloadprops_destructor, NULL);

	/* Free my bubbles */
	for (bub = ep->bubbles; bub != NULL; ) {
		nextbub = bub->next;
		/*
		 * Free arrows if they are FROM me.  Free arrowlists on
		 * other types of bubbles (but not the attached arrows,
		 * which will be freed when we free the originating
		 * bubble).
		 */
		if (bub->t == B_FROM)
			itree_free_arrowlists(bub, 1);
		else
			itree_free_arrowlists(bub, 0);
		itree_free_bubble(bub);
		bub = nextbub;
	}

	if (ep->nvp != NULL)
		nvlist_free(ep->nvp);
	bzero(ep, sizeof (*ep));
	FREE(ep);
}

static void
itree_free_bubble(struct bubble *freeme)
{
	bzero(freeme, sizeof (*freeme));
	FREE(freeme);
}

static struct bubble *
itree_add_bubble(struct event *eventp, enum bubbletype btype, int nork, int gen)
{
	struct bubble *prev = NULL;
	struct bubble *curr;
	struct bubble *newb;

	/* Use existing bubbles as appropriate when possible */
	for (curr = eventp->bubbles;
	    curr != NULL;
	    prev = curr, curr = curr->next) {
		if (btype == B_TO && curr->t == B_TO) {
			/* see if an existing "to" bubble works for us */
			if (gen == curr->gen)
				return (curr);	/* matched gen number */
			else if (nork == 1 && curr->nork == 1) {
				curr->gen = gen;
				return (curr);	/* coalesce K==1 bubbles */
			}
		} else if (btype == B_FROM && curr->t == B_FROM) {
			/* see if an existing "from" bubble works for us */
			if ((nork == N_IS_ALL && curr->nork == N_IS_ALL) ||
			    (nork == 0 && curr->nork == 0))
				return (curr);
		}
	}

	newb = MALLOC(sizeof (struct bubble));
	newb->next = NULL;
	newb->t = btype;
	newb->myevent = eventp;
	newb->nork = nork;
	newb->mark = 0;
	newb->gen = gen;
	newb->arrows = NULL;

	if (prev == NULL)
		eventp->bubbles = newb;
	else
		prev->next = newb;

	return (newb);
}

struct bubble *
itree_next_bubble(struct event *eventp, struct bubble *last)
{
	struct bubble *next;

	for (;;) {
		if (last != NULL)
			next = last->next;
		else
			next = eventp->bubbles;

		if (next == NULL || next->arrows != NULL)
			return (next);

		/* bubble was empty, skip it */
		last = next;
	}
}

static void
add_arrow(struct bubble *bp, struct arrow *ap)
{
	struct arrowlist *prev = NULL;
	struct arrowlist *curr;
	struct arrowlist *newal;

	newal = MALLOC(sizeof (struct arrowlist));
	bzero(newal, sizeof (struct arrowlist));
	newal->arrowp = ap;

	curr = itree_next_arrow(bp, NULL);
	while (curr != NULL) {
		prev = curr;
		curr = itree_next_arrow(bp, curr);
	}

	if (prev == NULL)
		bp->arrows = newal;
	else
		prev->next = newal;
}

static struct arrow *
itree_add_arrow(struct bubble *frombubblep, struct bubble *tobubblep,
    struct node *apnode, struct node *fromevent, struct node *toevent,
    struct lut *ex)
{
	struct arrow *newa;

	ASSERTeq(frombubblep->t, B_FROM, itree_bubbletype2str);
	ASSERTinfo(tobubblep->t == B_TO || tobubblep->t == B_INHIBIT,
	    itree_bubbletype2str(tobubblep->t));
	newa = MALLOC(sizeof (struct arrow));
	bzero(newa, sizeof (struct arrow));
	newa->tail = frombubblep;
	newa->head = tobubblep;
	newa->pnode = apnode;
	newa->constraints = NULL;

	/*
	 * Set default delays, then try to re-set them from
	 * any within() constraints.
	 */
	newa->mindelay = newa->maxdelay = 0ULL;
	if (itree_set_arrow_traits(newa, fromevent, toevent, ex) == 0) {
		FREE(newa);
		return (NULL);
	}

	add_arrow(frombubblep, newa);
	add_arrow(tobubblep, newa);
	return (newa);
}

/* returns false if traits show that arrow should not be added after all */
static int
itree_set_arrow_traits(struct arrow *ap, struct node *fromev,
    struct node *toev, struct lut *ex)
{
	struct node *epnames[] = { NULL, NULL, NULL };
	struct node *newc = NULL;

	ASSERTeq(fromev->t, T_EVENT, ptree_nodetype2str);
	ASSERTeq(toev->t, T_EVENT, ptree_nodetype2str);

	/*
	 * search for the within values first on the declaration of
	 * the destination event, and then on the prop.  this allows
	 * one to specify a "default" within by putting it on the
	 * declaration, but then allow overriding on the prop statement.
	 */
	arrow_add_within(ap, toev->u.event.declp->u.stmt.np->u.event.eexprlist);
	arrow_add_within(ap, toev->u.event.eexprlist);

	/*
	 * handle any global constraints inherited from the
	 * "fromev" event's declaration
	 */
	ASSERT(fromev->u.event.declp != NULL);
	ASSERT(fromev->u.event.declp->u.stmt.np != NULL);

#ifdef	notdef
	/* XXX not quite ready to evaluate constraints from decls yet */
	if (fromev->u.event.declp->u.stmt.np->u.event.eexprlist)
		(void) itree_add_constraint(ap,
		    fromev->u.event.declp->u.stmt.np->u.event.eexprlist);
#endif	/* notdef */

	/* handle constraints on the from event in the prop statement */
	epnames[0] = fromev->u.event.epname;
	epnames[1] = toev->u.event.epname;
	if (eval_potential(fromev->u.event.eexprlist, ex, epnames, &newc) == 0)
		return (0);	/* constraint disallows arrow */

	/*
	 * handle any global constraints inherited from the
	 * "toev" event's declaration
	 */
	ASSERT(toev->u.event.declp != NULL);
	ASSERT(toev->u.event.declp->u.stmt.np != NULL);

#ifdef	notdef
	/* XXX not quite ready to evaluate constraints from decls yet */
	if (toev->u.event.declp->u.stmt.np->u.event.eexprlist)
		(void) itree_add_constraint(ap,
		    toev->u.event.declp->u.stmt.np->u.event.eexprlist);
#endif	/* notdef */

	/* handle constraints on the to event in the prop statement */
	epnames[0] = toev->u.event.epname;
	epnames[1] = fromev->u.event.epname;
	if (eval_potential(toev->u.event.eexprlist, ex, epnames, &newc) == 0)
		return (0);	/* constraint disallows arrow */

	/* if we came up with any deferred constraints, add them to arrow */
	if (newc != NULL)
		(void) itree_add_constraint(ap, iexpr(newc));

	return (1);	/* constraints allow arrow */
}

/*
 * Set within() constraint.  If the constraint were set multiple times,
 * the last one would "win".
 */
static void
arrow_add_within(struct arrow *ap, struct node *xpr)
{
	struct node *arglist;

	/* end of expressions list */
	if (xpr == NULL)
		return;

	switch (xpr->t) {
	case T_LIST:
		arrow_add_within(ap, xpr->u.expr.left);
		arrow_add_within(ap, xpr->u.expr.right);
		return;
	case T_FUNC:
		if (xpr->u.func.s != L_within)
			return;
		arglist = xpr->u.func.arglist;
		switch (arglist->t) {
		case T_TIMEVAL:
			ap->mindelay = 0;
			ap->maxdelay = arglist->u.ull;
			break;
		case T_NAME:
			ASSERT(arglist->u.name.s == L_infinity);
			ap->mindelay = 0;
			ap->maxdelay = TIMEVAL_EVENTUALLY;
			break;
		case T_LIST:
			ASSERT(arglist->u.expr.left->t == T_TIMEVAL);
			ap->mindelay = arglist->u.expr.left->u.ull;
			switch (arglist->u.expr.right->t) {
			case T_TIMEVAL:
				ap->maxdelay = arglist->u.expr.right->u.ull;
				break;
			case T_NAME:
				ASSERT(arglist->u.expr.right->u.name.s ==
				    L_infinity);
				ap->maxdelay = TIMEVAL_EVENTUALLY;
				break;
			default:
				out(O_DIE, "within: unexpected 2nd arg type");
			}
			break;
		default:
			out(O_DIE, "within: unexpected 1st arg type");
		}
		break;
	default:
		return;
	}
}

static void
itree_free_arrowlists(struct bubble *bubp, int arrows_too)
{
	struct arrowlist *al, *nal;

	al = bubp->arrows;
	while (al != NULL) {
		nal = al->next;
		if (arrows_too) {
			itree_free_constraints(al->arrowp);
			bzero(al->arrowp, sizeof (struct arrow));
			FREE(al->arrowp);
		}
		bzero(al, sizeof (*al));
		FREE(al);
		al = nal;
	}
}

struct arrowlist *
itree_next_arrow(struct bubble *bubble, struct arrowlist *last)
{
	struct arrowlist *next;

	if (last != NULL)
		next = last->next;
	else
		next = bubble->arrows;
	return (next);
}

static struct constraintlist *
itree_add_constraint(struct arrow *arrowp, struct node *c)
{
	struct constraintlist *prev = NULL;
	struct constraintlist *curr;
	struct constraintlist *newc;

	for (curr = arrowp->constraints;
	    curr != NULL;
	    prev = curr, curr = curr->next);

	newc = MALLOC(sizeof (struct constraintlist));
	newc->next = NULL;
	newc->cnode = c;

	if (prev == NULL)
		arrowp->constraints = newc;
	else
		prev->next = newc;

	return (newc);
}

struct constraintlist *
itree_next_constraint(struct arrow *arrowp, struct constraintlist *last)
{
	struct constraintlist *next;

	if (last != NULL)
		next = last->next;
	else
		next = arrowp->constraints;
	return (next);
}

static void
itree_free_constraints(struct arrow *ap)
{
	struct constraintlist *cl, *ncl;

	cl = ap->constraints;
	while (cl != NULL) {
		ncl = cl->next;
		ASSERT(cl->cnode != NULL);
		if (!iexpr_cached(cl->cnode))
			tree_free(cl->cnode);
		bzero(cl, sizeof (*cl));
		FREE(cl);
		cl = ncl;
	}
}

const char *
itree_bubbletype2str(enum bubbletype t)
{
	static char buf[100];

	switch (t) {
	case B_FROM: return L_from;
	case B_TO: return L_to;
	case B_INHIBIT: return L_inhibit;
	default:
		(void) sprintf(buf, "[unexpected bubbletype: %d]", t);
		return (buf);
	}
}

/*
 * itree_fini -- clean up any half-built itrees
 */
void
itree_fini(void)
{
	if (Ninfo.lut != NULL) {
		itree_free(Ninfo.lut);
		Ninfo.lut = NULL;
	}
	if (Ninfo.ex) {
		lut_free(Ninfo.ex, iterinfo_destructor, NULL);
		Ninfo.ex = NULL;
	}
}