/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License, Version 1.0 only
 * (the "License").  You may not use this file except in compliance
 * with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */

/*
 * Copyright 2005 Sun Microsystems, Inc.  All rights reserved.
 * Use is subject to license terms.
 */

/*
 * Copyright (c) 2012 by Delphix. All rights reserved.
 * Copyright 2017 Joyent, Inc.
 * Copyright 2022 Oxide Computer Company
 */

#include <sys/types.h>
#include <sys/sysmacros.h>
#include <sys/isa_defs.h>

#include <strings.h>
#include <stdlib.h>
#include <setjmp.h>
#include <assert.h>
#include <errno.h>

#include <dt_impl.h>
#include <dt_grammar.h>
#include <dt_parser.h>
#include <dt_provider.h>

/*
 * Forward declaration of the recursive expression code generator: most of the
 * helpers below call back into it for their operand subtrees.
 */
static void dt_cg_node(dt_node_t *, dt_irlist_t *, dt_regset_t *);

/*
 * Allocate a single IR list node carrying one DIF instruction and an optional
 * label.  On allocation failure we longjmp back to the compiler's error
 * context rather than returning NULL, so callers never need to check the
 * return value.
 */
static dt_irnode_t *
dt_cg_node_alloc(uint_t label, dif_instr_t instr)
{
	dt_irnode_t *dip = malloc(sizeof (dt_irnode_t));

	if (dip == NULL)
		longjmp(yypcb->pcb_jmpbuf, EDT_NOMEM);

	dip->di_label = label;
	dip->di_instr = instr;
	dip->di_extern = NULL;	/* no external symbol reference by default */
	dip->di_next = NULL;

	return (dip);
}

/*
 * Code generator wrapper function for ctf_member_info.
 * If we are given a
 * reference to a forward declaration tag, search the entire type space for
 * the actual definition and then call ctf_member_info on the result.
 */
static ctf_file_t *
dt_cg_membinfo(ctf_file_t *fp, ctf_id_t type, const char *s, ctf_membinfo_t *mp)
{
	/*
	 * Chase forward tags: each iteration attempts to replace (fp, type)
	 * with the container and type of the real definition, stopping as
	 * soon as a lookup fails or no longer makes progress.
	 */
	while (ctf_type_kind(fp, type) == CTF_K_FORWARD) {
		char n[DT_TYPE_NAMELEN];
		dtrace_typeinfo_t dtt;

		if (ctf_type_name(fp, type, n, sizeof (n)) == NULL ||
		    dt_type_lookup(n, &dtt) == -1 || (
		    dtt.dtt_ctfp == fp && dtt.dtt_type == type))
			break; /* unable to improve our position */

		fp = dtt.dtt_ctfp;
		type = ctf_type_resolve(fp, dtt.dtt_type);
	}

	if (ctf_member_info(fp, type, s, mp) == CTF_ERR)
		return (NULL); /* ctf_errno is set for us */

	return (fp);
}

/*
 * Emit a setx instruction, carrying label 'lbl', that loads the 64-bit
 * immediate 'x' into register 'reg'.  The constant is interned in the
 * compiler's integer table; when 'idp' is non-NULL the table slot is private
 * and the identifier is recorded in di_extern of the emitted node so the
 * constant can be relocated later.
 */
static void
dt_cg_xsetx(dt_irlist_t *dlp, dt_ident_t *idp, uint_t lbl, int reg, uint64_t x)
{
	int flag = idp != NULL ? DT_INT_PRIVATE : DT_INT_SHARED;
	int intoff = dt_inttab_insert(yypcb->pcb_inttab, x, flag);
	/*
	 * Note: 'instr' is built up front from intoff; it is only appended
	 * after both validity checks below have passed.
	 */
	dif_instr_t instr = DIF_INSTR_SETX((uint_t)intoff, reg);

	if (intoff == -1)
		longjmp(yypcb->pcb_jmpbuf, EDT_NOMEM);

	if (intoff > DIF_INTOFF_MAX)
		longjmp(yypcb->pcb_jmpbuf, EDT_INT2BIG);

	dt_irlist_append(dlp, dt_cg_node_alloc(lbl, instr));

	if (idp != NULL)
		dlp->dl_last->di_extern = idp;
}

/*
 * Convenience wrapper around dt_cg_xsetx(): unlabeled setx of shared
 * constant 'x' into register 'reg'.
 */
static void
dt_cg_setx(dt_irlist_t *dlp, int reg, uint64_t x)
{
	dt_cg_xsetx(dlp, NULL, DT_LBL_NONE, reg, x);
}

/*
 * When loading bit-fields, we want to convert a byte count in the range
 * 1-8 to the closest power of 2 (e.g. 3->4, 5->8, etc).  The clp2() function
 * is a clever implementation from "Hacker's Delight" by Henry Warren, Jr.
 */
static size_t
clp2(size_t x)
{
	/*
	 * Smear the highest set bit of (x - 1) into all lower bit positions,
	 * then add one to produce the next power of two >= x.  Smearing down
	 * 16 bits is sufficient here because callers only pass byte counts
	 * in the 1-8 range.
	 */
	x--;

	x |= (x >> 1);
	x |= (x >> 2);
	x |= (x >> 4);
	x |= (x >> 8);
	x |= (x >> 16);

	return (x + 1);
}

/*
 * Lookup the correct load opcode to use for the specified node and CTF type.
 * We determine the size and convert it to a 3-bit index.  Our lookup table
 * is constructed to use a 5-bit index, consisting of the 3-bit size 0-7, a
 * bit for the sign, and a bit for userland address.  For example, a 4-byte
 * signed load from userland would be at the following table index:
 * user=1 sign=1 size=4 => binary index 11011 = decimal index 27
 */
static uint_t
dt_cg_load(dt_node_t *dnp, ctf_file_t *ctfp, ctf_id_t type)
{
	static const uint_t ops[] = {
		DIF_OP_LDUB,	DIF_OP_LDUH,	0,	DIF_OP_LDUW,
		0,		0,		0,	DIF_OP_LDX,
		DIF_OP_LDSB,	DIF_OP_LDSH,	0,	DIF_OP_LDSW,
		0,		0,		0,	DIF_OP_LDX,
		DIF_OP_ULDUB,	DIF_OP_ULDUH,	0,	DIF_OP_ULDUW,
		0,		0,		0,	DIF_OP_ULDX,
		DIF_OP_ULDSB,	DIF_OP_ULDSH,	0,	DIF_OP_ULDSW,
		0,		0,		0,	DIF_OP_ULDX,
	};

	ctf_encoding_t e;
	ssize_t size;

	/*
	 * If we're loading a bit-field, we find the power-of-two that spans the
	 * full value.  To do this we count the number of bytes that contain a
	 * portion of the bit-field.
	 */
	if ((dnp->dn_flags & DT_NF_BITFIELD) &&
	    ctf_type_encoding(ctfp, type, &e) != CTF_ERR) {
		uint_t nbits = e.cte_bits + (dnp->dn_bitoff % NBBY);
		size = clp2(P2ROUNDUP(nbits, NBBY) / NBBY);
	} else {
		size = ctf_type_size(ctfp, type);
	}

	/* Only power-of-two sizes 1-8 can be loaded by value. */
	if (size < 1 || size > 8 || (size & (size - 1)) != 0) {
		xyerror(D_UNKNOWN, "internal error -- cg cannot load "
		    "size %ld when passed by value\n", (long)size);
	}

	size--; /* convert size to 3-bit index */

	if (dnp->dn_flags & DT_NF_SIGNED)
		size |= 0x08;
	if (dnp->dn_flags & DT_NF_USERLAND)
		size |= 0x10;

	return (ops[size]);
}

/*
 * Emit code to scale register 'dreg' by the size of the type that 'dnp' (a
 * pointer or array node) refers to, using arithmetic opcode 'op' (multiply
 * for pointer+integer arithmetic, divide for pointer-pointer subtraction).
 * Scaling by one is omitted entirely.
 */
static void
dt_cg_ptrsize(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp,
    uint_t op, int dreg)
{
	ctf_file_t *ctfp = dnp->dn_ctfp;
	ctf_arinfo_t r;
	dif_instr_t instr;
	ctf_id_t type;
	uint_t kind;
	ssize_t size;
	int sreg;

	type = ctf_type_resolve(ctfp, dnp->dn_type);
	kind = ctf_type_kind(ctfp, type);
	assert(kind == CTF_K_POINTER || kind == CTF_K_ARRAY);

	if (kind == CTF_K_ARRAY) {
		if (ctf_array_info(ctfp, type, &r) != 0) {
			yypcb->pcb_hdl->dt_ctferr = ctf_errno(ctfp);
			longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
		}
		type = r.ctr_contents;
	} else
		type = ctf_type_reference(ctfp, type);

	if ((size = ctf_type_size(ctfp, type)) == 1)
		return; /* multiply or divide by one can be omitted */

	sreg = dt_regset_alloc(drp);
	dt_cg_setx(dlp, sreg, size);
	instr = DIF_INSTR_FMT(op, dreg, sreg, dreg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, sreg);
}

/*
 * If the result of a "." or "->" operation is a bit-field, we use this routine
 * to generate an epilogue to the load instruction that extracts the value.  In
 * the diagrams below the "ld??" is the load instruction that is generated to
 * load the containing word that is generating prior to calling this function.
 *
 * Epilogue for unsigned fields:	Epilogue for signed fields:
 *
 * ldu? [r1], r1			lds? [r1], r1
 * setx USHIFT, r2			setx 64 - SSHIFT, r2
 * srl  r1, r2, r1			sll  r1, r2, r1
 * setx (1 << bits) - 1, r2		setx 64 - bits, r2
 * and  r1, r2, r1			sra  r1, r2, r1
 *
 * The *SHIFT constants above changes value depending on the endian-ness of our
 * target architecture.  Refer to the comments below for more details.
 */
static void
dt_cg_field_get(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp,
    ctf_file_t *fp, const ctf_membinfo_t *mp)
{
	ctf_encoding_t e;
	dif_instr_t instr;
	uint64_t shift;
	int r1, r2;

	if (ctf_type_encoding(fp, mp->ctm_type, &e) != 0 || e.cte_bits > 64) {
		xyerror(D_UNKNOWN, "cg: bad field: off %lu type <%ld> "
		    "bits %u\n", mp->ctm_offset, mp->ctm_type, e.cte_bits);
	}

	assert(dnp->dn_op == DT_TOK_PTR || dnp->dn_op == DT_TOK_DOT);
	r1 = dnp->dn_left->dn_reg;	/* register holding the loaded word */
	r2 = dt_regset_alloc(drp);	/* scratch register for shift counts */

	/*
	 * On little-endian architectures, ctm_offset counts from the right so
	 * ctm_offset % NBBY itself is the amount we want to shift right to
	 * move the value bits to the little end of the register to mask them.
	 * On big-endian architectures, ctm_offset counts from the left so we
	 * must subtract (ctm_offset % NBBY + cte_bits) from the size in bits
	 * we used for the load.  The size of our load in turn is found by
	 * rounding cte_bits up to a byte boundary and then finding the
	 * nearest power of two to this value (see clp2(), above).  These
	 * properties are used to compute shift as USHIFT or SSHIFT, below.
	 */
	if (dnp->dn_flags & DT_NF_SIGNED) {
#ifdef _BIG_ENDIAN
		shift = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY) * NBBY -
		    mp->ctm_offset % NBBY;
#else
		shift = mp->ctm_offset % NBBY + e.cte_bits;
#endif
		/* Shift field to the top of the register ... */
		dt_cg_setx(dlp, r2, 64 - shift);
		instr = DIF_INSTR_FMT(DIF_OP_SLL, r1, r2, r1);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		/* ... then arithmetic-shift back down to sign-extend. */
		dt_cg_setx(dlp, r2, 64 - e.cte_bits);
		instr = DIF_INSTR_FMT(DIF_OP_SRA, r1, r2, r1);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	} else {
#ifdef _BIG_ENDIAN
		shift = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY) * NBBY -
		    (mp->ctm_offset % NBBY + e.cte_bits);
#else
		shift = mp->ctm_offset % NBBY;
#endif
		/* Shift field down to bit zero, then mask to cte_bits wide. */
		dt_cg_setx(dlp, r2, shift);
		instr = DIF_INSTR_FMT(DIF_OP_SRL, r1, r2, r1);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		dt_cg_setx(dlp, r2, (1ULL << e.cte_bits) - 1);
		instr = DIF_INSTR_FMT(DIF_OP_AND, r1, r2, r1);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	}

	dt_regset_free(drp, r2);
}

/*
 * If the destination of a store operation is a bit-field, we use this routine
 * to generate a prologue to the store instruction that loads the surrounding
 * bits, clears the destination field, and ORs in the new value of the field.
 * In the diagram below the "st?" is the store instruction that is generated to
 * store the containing word that is generating after calling this function.
 *
 * ld	[dst->dn_reg], r1
 * setx	~(((1 << cte_bits) - 1) << (ctm_offset % NBBY)), r2
 * and	r1, r2, r1
 *
 * setx	(1 << cte_bits) - 1, r2
 * and	src->dn_reg, r2, r2
 * setx ctm_offset % NBBY, r3
 * sll	r2, r3, r2
 *
 * or	r1, r2, r1
 * st?	r1, [dst->dn_reg]
 *
 * This routine allocates a new register to hold the value to be stored and
 * returns it.  The caller is responsible for freeing this register later.
 */
static int
dt_cg_field_set(dt_node_t *src, dt_irlist_t *dlp,
    dt_regset_t *drp, dt_node_t *dst)
{
	uint64_t cmask, fmask, shift;
	dif_instr_t instr;
	int r1, r2, r3;

	ctf_membinfo_t m;
	ctf_encoding_t e;
	ctf_file_t *fp, *ofp;
	ctf_id_t type;

	assert(dst->dn_op == DT_TOK_PTR || dst->dn_op == DT_TOK_DOT);
	assert(dst->dn_right->dn_kind == DT_NODE_IDENT);

	fp = dst->dn_left->dn_ctfp;
	type = ctf_type_resolve(fp, dst->dn_left->dn_type);

	/* For "->" we must first dereference the pointer type. */
	if (dst->dn_op == DT_TOK_PTR) {
		type = ctf_type_reference(fp, type);
		type = ctf_type_resolve(fp, type);
	}

	if ((fp = dt_cg_membinfo(ofp = fp, type,
	    dst->dn_right->dn_string, &m)) == NULL) {
		yypcb->pcb_hdl->dt_ctferr = ctf_errno(ofp);
		longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
	}

	if (ctf_type_encoding(fp, m.ctm_type, &e) != 0 || e.cte_bits > 64) {
		xyerror(D_UNKNOWN, "cg: bad field: off %lu type <%ld> "
		    "bits %u\n", m.ctm_offset, m.ctm_type, e.cte_bits);
	}

	r1 = dt_regset_alloc(drp);	/* merged word to store; returned */
	r2 = dt_regset_alloc(drp);	/* scratch: masks */
	r3 = dt_regset_alloc(drp);	/* scratch: shift count */

	/*
	 * Compute shifts and masks.  We need to compute "shift" as the amount
	 * we need to shift left to position our field in the containing word.
	 * Refer to the comments in dt_cg_field_get(), above, for more info.
	 * We then compute fmask as the mask that truncates the value in the
	 * input register to width cte_bits, and cmask as the mask used to
	 * pass through the containing bits and zero the field bits.
	 */
#ifdef _BIG_ENDIAN
	shift = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY) * NBBY -
	    (m.ctm_offset % NBBY + e.cte_bits);
#else
	shift = m.ctm_offset % NBBY;
#endif
	fmask = (1ULL << e.cte_bits) - 1;
	cmask = ~(fmask << shift);

	/* Load the containing word and clear the field's bit positions. */
	instr = DIF_INSTR_LOAD(
	    dt_cg_load(dst, fp, m.ctm_type), dst->dn_reg, r1);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_setx(dlp, r2, cmask);
	instr = DIF_INSTR_FMT(DIF_OP_AND, r1, r2, r1);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/* Truncate the new value to cte_bits and shift it into position. */
	dt_cg_setx(dlp, r2, fmask);
	instr = DIF_INSTR_FMT(DIF_OP_AND, src->dn_reg, r2, r2);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_cg_setx(dlp, r3, shift);
	instr = DIF_INSTR_FMT(DIF_OP_SLL, r2, r3, r2);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/* Merge the field bits back into the containing word. */
	instr = DIF_INSTR_FMT(DIF_OP_OR, r1, r2, r1);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	dt_regset_free(drp, r3);
	dt_regset_free(drp, r2);

	return (r1);	/* caller frees r1 after emitting the store */
}

/*
 * Generate a store of src's value (in src->dn_reg) through the destination
 * address held in dst->dn_reg.  By-reference values are copied with copys;
 * by-value stores use the st[bhwx] opcode matching the destination size, with
 * a bit-field prologue from dt_cg_field_set() when needed.
 */
static void
dt_cg_store(dt_node_t *src, dt_irlist_t *dlp, dt_regset_t *drp, dt_node_t *dst)
{
	ctf_encoding_t e;
	dif_instr_t instr;
	size_t size;
	int reg;

	/*
	 * If we're loading a bit-field, the size of our store is found by
	 * rounding dst's cte_bits up to a byte boundary and then finding the
	 * nearest power of two to this value (see clp2(), above).
	 */
	if ((dst->dn_flags & DT_NF_BITFIELD) &&
	    ctf_type_encoding(dst->dn_ctfp, dst->dn_type, &e) != CTF_ERR)
		size = clp2(P2ROUNDUP(e.cte_bits, NBBY) / NBBY);
	else
		size = dt_node_type_size(src);

	if (src->dn_flags & DT_NF_REF) {
		/* By-reference: block-copy 'size' bytes with copys. */
		reg = dt_regset_alloc(drp);
		dt_cg_setx(dlp, reg, size);
		instr = DIF_INSTR_COPYS(src->dn_reg, reg, dst->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		dt_regset_free(drp, reg);
	} else {
		/*
		 * For bit-fields, merge the new value into the containing
		 * word first; dt_cg_field_set() returns the register to store.
		 */
		if (dst->dn_flags & DT_NF_BITFIELD)
			reg = dt_cg_field_set(src, dlp, drp, dst);
		else
			reg = src->dn_reg;

		switch (size) {
		case 1:
			instr = DIF_INSTR_STORE(DIF_OP_STB, reg, dst->dn_reg);
			break;
		case 2:
			instr = DIF_INSTR_STORE(DIF_OP_STH, reg, dst->dn_reg);
			break;
		case 4:
			instr = DIF_INSTR_STORE(DIF_OP_STW, reg, dst->dn_reg);
			break;
		case 8:
			instr = DIF_INSTR_STORE(DIF_OP_STX, reg, dst->dn_reg);
			break;
		default:
			xyerror(D_UNKNOWN, "internal error -- cg cannot store "
			    "size %lu when passed by value\n", (ulong_t)size);
		}
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		if (dst->dn_flags & DT_NF_BITFIELD)
			dt_regset_free(drp, reg);
	}
}

/*
 * Generate code for a typecast or for argument promotion from the type of the
 * actual to the type of the formal.  We need to generate code for casts when
 * a scalar type is being narrowed or changing signed-ness.  We first shift the
 * desired bits high (losing excess bits if narrowing) and then shift them down
 * using logical shift (unsigned result) or arithmetic shift (signed result).
 */
static void
dt_cg_typecast(const dt_node_t *src, const dt_node_t *dst,
    dt_irlist_t *dlp, dt_regset_t *drp)
{
	size_t srcsize = dt_node_type_size(src);
	size_t dstsize = dt_node_type_size(dst);

	dif_instr_t instr;
	int rg;

	if (!dt_node_is_scalar(dst))
		return; /* not a scalar */
	if (dstsize == srcsize &&
	    ((src->dn_flags ^ dst->dn_flags) & DT_NF_SIGNED) == 0)
		return; /* not narrowing or changing signed-ness */
	if (dstsize > srcsize && (src->dn_flags & DT_NF_SIGNED) == 0)
		return; /* nothing to do in this case */

	rg = dt_regset_alloc(drp);

	if (dstsize > srcsize) {
		/*
		 * Widening a signed source: n positions the source's top bit
		 * at bit 63; s is the width difference in bits.
		 */
		int n = sizeof (uint64_t) * NBBY - srcsize * NBBY;
		int s = (dstsize - srcsize) * NBBY;

		dt_cg_setx(dlp, rg, n);

		instr = DIF_INSTR_FMT(DIF_OP_SLL, src->dn_reg, rg, dst->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		if ((dst->dn_flags & DT_NF_SIGNED) || n == s) {
			/* Sign-extend straight back down. */
			instr = DIF_INSTR_FMT(DIF_OP_SRA,
			    dst->dn_reg, rg, dst->dn_reg);
			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));
		} else {
			/*
			 * Unsigned destination: sign-extend to the destination
			 * width first (sra by s), then zero the bits above the
			 * destination width (srl by n - s).
			 */
			dt_cg_setx(dlp, rg, s);
			instr = DIF_INSTR_FMT(DIF_OP_SRA,
			    dst->dn_reg, rg, dst->dn_reg);
			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));
			dt_cg_setx(dlp, rg, n - s);
			instr = DIF_INSTR_FMT(DIF_OP_SRL,
			    dst->dn_reg, rg, dst->dn_reg);
			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));
		}
	} else if (dstsize != sizeof (uint64_t)) {
		/* Narrowing: shift high to truncate, then back down. */
		int n = sizeof (uint64_t) * NBBY - dstsize * NBBY;

		dt_cg_setx(dlp, rg, n);

		instr = DIF_INSTR_FMT(DIF_OP_SLL, src->dn_reg, rg, dst->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		instr = DIF_INSTR_FMT((dst->dn_flags & DT_NF_SIGNED) ?
		    DIF_OP_SRA : DIF_OP_SRL, dst->dn_reg, rg, dst->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	}

	dt_regset_free(drp, rg);
}

/*
 * Generate code to push the specified argument list on to the tuple stack.
 * We use this routine for handling subroutine calls and associative arrays.
 * We must first generate code for all subexpressions before loading the stack
 * because any subexpression could itself require the use of the tuple stack.
 * This holds a number of registers equal to the number of arguments, but this
 * is not a huge problem because the number of arguments can't exceed the
 * number of tuple register stack elements anyway.  At most one extra register
 * is required (either by dt_cg_typecast() or for dtdt_size, below).  This
 * implies that a DIF implementation should offer a number of general purpose
 * registers at least one greater than the number of tuple registers.
 */
static void
dt_cg_arglist(dt_ident_t *idp, dt_node_t *args,
    dt_irlist_t *dlp, dt_regset_t *drp)
{
	const dt_idsig_t *isp = idp->di_data;
	dt_node_t *dnp;
	int i = 0;

	/* First pass: evaluate every argument expression. */
	for (dnp = args; dnp != NULL; dnp = dnp->dn_list)
		dt_cg_node(dnp, dlp, drp);

	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, DIF_INSTR_FLUSHTS));

	/* Second pass: promote each value and push it onto the tuple stack. */
	for (dnp = args; dnp != NULL; dnp = dnp->dn_list, i++) {
		dtrace_diftype_t t;
		dif_instr_t instr;
		uint_t op;
		int reg;

		dt_node_diftype(yypcb->pcb_hdl, dnp, &t);

		isp->dis_args[i].dn_reg = dnp->dn_reg; /* re-use register */
		dt_cg_typecast(dnp, &isp->dis_args[i], dlp, drp);
		isp->dis_args[i].dn_reg = -1;

		if (t.dtdt_flags & DIF_TF_BYREF) {
			op = DIF_OP_PUSHTR;
			if (t.dtdt_size != 0) {
				/* pushtr carries the value's size too */
				reg = dt_regset_alloc(drp);
				dt_cg_setx(dlp, reg, t.dtdt_size);
			} else {
				reg = DIF_REG_R0;
			}
		} else {
			op = DIF_OP_PUSHTV;
			reg = DIF_REG_R0;
		}

		instr = DIF_INSTR_PUSHTS(op, t.dtdt_kind, reg, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		dt_regset_free(drp, dnp->dn_reg);

		if (reg != DIF_REG_R0)
			dt_regset_free(drp, reg);
	}

	/* The tuple-depth check happens after the loop, once 'i' is final. */
	if (i > yypcb->pcb_hdl->dt_conf.dtc_diftupregs)
		longjmp(yypcb->pcb_jmpbuf, EDT_NOTUPREG);
}

/*
 * Generate code for a binary arithmetic operator.  Pointer arithmetic is
 * scaled: for ptr +/- integer the integer operand is multiplied by the
 * referent size, and for ptr - ptr the byte difference is divided by the
 * referent size afterward.  The result is left in the left operand's
 * register, which becomes dnp->dn_reg.
 */
static void
dt_cg_arithmetic_op(dt_node_t *dnp, dt_irlist_t *dlp,
    dt_regset_t *drp, uint_t op)
{
	int is_ptr_op = (dnp->dn_op == DT_TOK_ADD || dnp->dn_op == DT_TOK_SUB ||
	    dnp->dn_op == DT_TOK_ADD_EQ || dnp->dn_op == DT_TOK_SUB_EQ);

	int lp_is_ptr = dt_node_is_pointer(dnp->dn_left);
	int rp_is_ptr = dt_node_is_pointer(dnp->dn_right);

	dif_instr_t instr;

	if (lp_is_ptr && rp_is_ptr) {
		/* ptr - ptr: no operand pre-scaling; divide the result. */
		assert(dnp->dn_op == DT_TOK_SUB);
		is_ptr_op = 0;
	}

	dt_cg_node(dnp->dn_left, dlp, drp);
	if (is_ptr_op && rp_is_ptr)
		dt_cg_ptrsize(dnp, dlp, drp, DIF_OP_MUL, dnp->dn_left->dn_reg);

	dt_cg_node(dnp->dn_right, dlp, drp);
	if (is_ptr_op && lp_is_ptr)
		dt_cg_ptrsize(dnp, dlp, drp, DIF_OP_MUL, dnp->dn_right->dn_reg);

	instr = DIF_INSTR_FMT(op, dnp->dn_left->dn_reg,
	    dnp->dn_right->dn_reg, dnp->dn_left->dn_reg);

	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, dnp->dn_right->dn_reg);
	dnp->dn_reg = dnp->dn_left->dn_reg;

	if (lp_is_ptr && rp_is_ptr)
		dt_cg_ptrsize(dnp->dn_right,
		    dlp, drp, DIF_OP_UDIV, dnp->dn_reg);
}

/*
 * Select the DIF store opcode for writing the given variable identifier:
 * scalar vs. associative-array store, crossed with global/TLS/local scope.
 * Index bit 0 is TLS, bit 1 is local.  NOTE(review): the tables have three
 * entries, so this assumes DT_IDFLG_LOCAL and DT_IDFLG_TLS are never both
 * set (index 3 would overrun) -- confirm against the identifier flags'
 * invariants.  Local associative arrays map to DIF_OP_NOP.
 */
static uint_t
dt_cg_stvar(const dt_ident_t *idp)
{
	static const uint_t aops[] = { DIF_OP_STGAA, DIF_OP_STTAA, DIF_OP_NOP };
	static const uint_t sops[] = { DIF_OP_STGS, DIF_OP_STTS, DIF_OP_STLS };

	uint_t i = (((idp->di_flags & DT_IDFLG_LOCAL) != 0) << 1) |
	    ((idp->di_flags & DT_IDFLG_TLS) != 0);

	return (idp->di_kind == DT_IDENT_ARRAY ? aops[i] : sops[i]);
}

/*
 * Generate code for a pre-increment/pre-decrement operator: compute the new
 * value (scaled by referent size for pointers), write it back to the operand,
 * and leave the new value in dnp->dn_reg.
 */
static void
dt_cg_prearith_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp, uint_t op)
{
	ctf_file_t *ctfp = dnp->dn_ctfp;
	dif_instr_t instr;
	ctf_id_t type;
	ssize_t size = 1;
	int reg;

	if (dt_node_is_pointer(dnp)) {
		type = ctf_type_resolve(ctfp, dnp->dn_type);
		assert(ctf_type_kind(ctfp, type) == CTF_K_POINTER);
		size = ctf_type_size(ctfp, ctf_type_reference(ctfp, type));
	}

	dt_cg_node(dnp->dn_child, dlp, drp);
	dnp->dn_reg = dnp->dn_child->dn_reg;

	reg = dt_regset_alloc(drp);
	dt_cg_setx(dlp, reg, size);

	instr = DIF_INSTR_FMT(op, dnp->dn_reg, reg, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, reg);

	/*
	 * If we are modifying a variable, generate an stv instruction from
	 * the variable specified by the identifier.  If we are storing to a
	 * memory address, generate code again for the left-hand side using
	 * DT_NF_REF to get the address, and then generate a store to it.
	 * In both paths, we store the value in dnp->dn_reg (the new value).
	 */
	if (dnp->dn_child->dn_kind == DT_NODE_VAR) {
		dt_ident_t *idp = dt_ident_resolve(dnp->dn_child->dn_ident);

		idp->di_flags |= DT_IDFLG_DIFW;
		instr = DIF_INSTR_STV(dt_cg_stvar(idp),
		    idp->di_id, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	} else {
		uint_t rbit = dnp->dn_child->dn_flags & DT_NF_REF;

		assert(dnp->dn_child->dn_flags & DT_NF_WRITABLE);
		assert(dnp->dn_child->dn_flags & DT_NF_LVALUE);

		dnp->dn_child->dn_flags |= DT_NF_REF; /* force pass-by-ref */
		dt_cg_node(dnp->dn_child, dlp, drp);

		dt_cg_store(dnp, dlp, drp, dnp->dn_child);
		dt_regset_free(drp, dnp->dn_child->dn_reg);

		/*
		 * NOTE(review): the operand of this unary node is dn_child,
		 * yet the flag restore below goes through dn_left --
		 * presumably dn_left and dn_child share a union slot in
		 * dt_node_t, making these equivalent; confirm in dt_parser.h.
		 */
		dnp->dn_left->dn_flags &= ~DT_NF_REF;
		dnp->dn_left->dn_flags |= rbit;
	}
}

/*
 * Generate code for a post-increment/post-decrement operator: the original
 * value remains in dnp->dn_reg while the updated value is computed into a
 * separate register ('nreg') and written back to the operand.
 */
static void
dt_cg_postarith_op(dt_node_t *dnp, dt_irlist_t *dlp,
    dt_regset_t *drp, uint_t op)
{
	ctf_file_t *ctfp = dnp->dn_ctfp;
	dif_instr_t instr;
	ctf_id_t type;
	ssize_t size = 1;
	int nreg;

	if (dt_node_is_pointer(dnp)) {
		type = ctf_type_resolve(ctfp, dnp->dn_type);
		assert(ctf_type_kind(ctfp, type) == CTF_K_POINTER);
		size = ctf_type_size(ctfp, ctf_type_reference(ctfp, type));
	}

	dt_cg_node(dnp->dn_child, dlp, drp);
	dnp->dn_reg = dnp->dn_child->dn_reg;

	nreg = dt_regset_alloc(drp);
	dt_cg_setx(dlp, nreg, size);
	instr = DIF_INSTR_FMT(op, dnp->dn_reg, nreg, nreg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/*
	 * If we are modifying a variable, generate an stv instruction from
	 * the variable specified by the identifier.  If we are storing to a
	 * memory address, generate code again for the left-hand side using
	 * DT_NF_REF to get the address, and then generate a store to it.
	 * In both paths, we store the value from 'nreg' (the new value).
	 */
	if (dnp->dn_child->dn_kind == DT_NODE_VAR) {
		dt_ident_t *idp = dt_ident_resolve(dnp->dn_child->dn_ident);

		idp->di_flags |= DT_IDFLG_DIFW;
		instr = DIF_INSTR_STV(dt_cg_stvar(idp), idp->di_id, nreg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	} else {
		uint_t rbit = dnp->dn_child->dn_flags & DT_NF_REF;
		int oreg = dnp->dn_reg;

		assert(dnp->dn_child->dn_flags & DT_NF_WRITABLE);
		assert(dnp->dn_child->dn_flags & DT_NF_LVALUE);

		dnp->dn_child->dn_flags |= DT_NF_REF; /* force pass-by-ref */
		dt_cg_node(dnp->dn_child, dlp, drp);

		/* Temporarily expose nreg as dn_reg so dt_cg_store writes it */
		dnp->dn_reg = nreg;
		dt_cg_store(dnp, dlp, drp, dnp->dn_child);
		dnp->dn_reg = oreg;

		dt_regset_free(drp, dnp->dn_child->dn_reg);
		/*
		 * NOTE(review): flag restore uses dn_left for a unary node;
		 * see the matching note in dt_cg_prearith_op(), above.
		 */
		dnp->dn_left->dn_flags &= ~DT_NF_REF;
		dnp->dn_left->dn_flags |= rbit;
	}

	dt_regset_free(drp, nreg);
}

/*
 * Determine if we should perform signed or unsigned comparison for an OP2.
 * If both operands are of arithmetic type, perform the usual arithmetic
 * conversions to determine the common real type for comparison [ISOC 6.5.8.3].
 */
static int
dt_cg_compare_signed(dt_node_t *dnp)
{
	dt_node_t dn;

	if (dt_node_is_string(dnp->dn_left) ||
	    dt_node_is_string(dnp->dn_right))
		return (1); /* strings always compare signed */
	else if (!dt_node_is_arith(dnp->dn_left) ||
	    !dt_node_is_arith(dnp->dn_right))
		return (0); /* non-arithmetic types always compare unsigned */

	/* Promote into a scratch node and inspect the resulting sign flag. */
	bzero(&dn, sizeof (dn));
	dt_node_promote(dnp->dn_left, dnp->dn_right, &dn);
	return (dn.dn_flags & DT_NF_SIGNED);
}

/*
 * Generate code for a relational operator: compare the operands (scmp for
 * strings, cmp otherwise), then branch on 'op' to materialize a 0/1 result
 * in dnp->dn_reg.
 */
static void
dt_cg_compare_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp, uint_t op)
{
	uint_t lbl_true = dt_irlist_label(dlp);
	uint_t lbl_post = dt_irlist_label(dlp);

	dif_instr_t instr;
	uint_t opc;

	dt_cg_node(dnp->dn_left, dlp, drp);
	dt_cg_node(dnp->dn_right, dlp, drp);

	if (dt_node_is_string(dnp->dn_left) || dt_node_is_string(dnp->dn_right))
		opc = DIF_OP_SCMP;
	else
		opc = DIF_OP_CMP;

	instr = DIF_INSTR_CMP(opc, dnp->dn_left->dn_reg, dnp->dn_right->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, dnp->dn_right->dn_reg);
	dnp->dn_reg = dnp->dn_left->dn_reg;

	/* Conditional branch to the "true" arm based on the caller's op. */
	instr = DIF_INSTR_BRANCH(op, lbl_true);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/* False path: result = 0 */
	instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/* True path: result = 1 */
	dt_cg_xsetx(dlp, NULL, lbl_true, dnp->dn_reg, 1);
	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
}

/*
 * Code generation for the ternary op requires some trickery with the assembler
 * in order to conserve registers.  We generate code for dn_expr and dn_left
 * and free their registers so they do not have to be consumed across codegen
 * for dn_right.
We insert a dummy MOV at the end of dn_left into the destination 820 * register, which is not yet known because we haven't done dn_right yet, and 821 * save the pointer to this instruction node. We then generate code for 822 * dn_right and use its register as our output. Finally, we reach back and 823 * patch the instruction for dn_left to move its output into this register. 824 */ 825 static void 826 dt_cg_ternary_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp) 827 { 828 uint_t lbl_false = dt_irlist_label(dlp); 829 uint_t lbl_post = dt_irlist_label(dlp); 830 831 dif_instr_t instr; 832 dt_irnode_t *dip; 833 834 dt_cg_node(dnp->dn_expr, dlp, drp); 835 instr = DIF_INSTR_TST(dnp->dn_expr->dn_reg); 836 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 837 dt_regset_free(drp, dnp->dn_expr->dn_reg); 838 839 instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false); 840 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 841 842 dt_cg_node(dnp->dn_left, dlp, drp); 843 instr = DIF_INSTR_MOV(dnp->dn_left->dn_reg, DIF_REG_R0); 844 dip = dt_cg_node_alloc(DT_LBL_NONE, instr); /* save dip for below */ 845 dt_irlist_append(dlp, dip); 846 dt_regset_free(drp, dnp->dn_left->dn_reg); 847 848 instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post); 849 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 850 851 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_false, DIF_INSTR_NOP)); 852 dt_cg_node(dnp->dn_right, dlp, drp); 853 dnp->dn_reg = dnp->dn_right->dn_reg; 854 855 /* 856 * Now that dn_reg is assigned, reach back and patch the correct MOV 857 * instruction into the tail of dn_left. We know dn_reg was unused 858 * at that point because otherwise dn_right couldn't have allocated it. 
859 */ 860 dip->di_instr = DIF_INSTR_MOV(dnp->dn_left->dn_reg, dnp->dn_reg); 861 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP)); 862 } 863 864 static void 865 dt_cg_logical_and(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp) 866 { 867 uint_t lbl_false = dt_irlist_label(dlp); 868 uint_t lbl_post = dt_irlist_label(dlp); 869 870 dif_instr_t instr; 871 872 dt_cg_node(dnp->dn_left, dlp, drp); 873 instr = DIF_INSTR_TST(dnp->dn_left->dn_reg); 874 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 875 dt_regset_free(drp, dnp->dn_left->dn_reg); 876 877 instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false); 878 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 879 880 dt_cg_node(dnp->dn_right, dlp, drp); 881 instr = DIF_INSTR_TST(dnp->dn_right->dn_reg); 882 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 883 dnp->dn_reg = dnp->dn_right->dn_reg; 884 885 instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false); 886 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 887 888 dt_cg_setx(dlp, dnp->dn_reg, 1); 889 890 instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post); 891 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 892 893 instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg); 894 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_false, instr)); 895 896 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP)); 897 } 898 899 static void 900 dt_cg_logical_xor(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp) 901 { 902 uint_t lbl_next = dt_irlist_label(dlp); 903 uint_t lbl_tail = dt_irlist_label(dlp); 904 905 dif_instr_t instr; 906 907 dt_cg_node(dnp->dn_left, dlp, drp); 908 instr = DIF_INSTR_TST(dnp->dn_left->dn_reg); 909 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 910 911 instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_next); 912 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 913 dt_cg_setx(dlp, dnp->dn_left->dn_reg, 1); 914 915 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_next, DIF_INSTR_NOP)); 916 
dt_cg_node(dnp->dn_right, dlp, drp); 917 918 instr = DIF_INSTR_TST(dnp->dn_right->dn_reg); 919 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 920 921 instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_tail); 922 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 923 dt_cg_setx(dlp, dnp->dn_right->dn_reg, 1); 924 925 instr = DIF_INSTR_FMT(DIF_OP_XOR, dnp->dn_left->dn_reg, 926 dnp->dn_right->dn_reg, dnp->dn_left->dn_reg); 927 928 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_tail, instr)); 929 930 dt_regset_free(drp, dnp->dn_right->dn_reg); 931 dnp->dn_reg = dnp->dn_left->dn_reg; 932 } 933 934 static void 935 dt_cg_logical_or(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp) 936 { 937 uint_t lbl_true = dt_irlist_label(dlp); 938 uint_t lbl_false = dt_irlist_label(dlp); 939 uint_t lbl_post = dt_irlist_label(dlp); 940 941 dif_instr_t instr; 942 943 dt_cg_node(dnp->dn_left, dlp, drp); 944 instr = DIF_INSTR_TST(dnp->dn_left->dn_reg); 945 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 946 dt_regset_free(drp, dnp->dn_left->dn_reg); 947 948 instr = DIF_INSTR_BRANCH(DIF_OP_BNE, lbl_true); 949 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 950 951 dt_cg_node(dnp->dn_right, dlp, drp); 952 instr = DIF_INSTR_TST(dnp->dn_right->dn_reg); 953 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 954 dnp->dn_reg = dnp->dn_right->dn_reg; 955 956 instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_false); 957 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 958 959 dt_cg_xsetx(dlp, NULL, lbl_true, dnp->dn_reg, 1); 960 961 instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post); 962 dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr)); 963 964 instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg); 965 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_false, instr)); 966 967 dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP)); 968 } 969 970 static void 971 dt_cg_logical_neg(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp) 972 { 973 uint_t 
lbl_zero = dt_irlist_label(dlp);
	uint_t lbl_post = dt_irlist_label(dlp);

	dif_instr_t instr;

	dt_cg_node(dnp->dn_child, dlp, drp);
	dnp->dn_reg = dnp->dn_child->dn_reg;

	instr = DIF_INSTR_TST(dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_BRANCH(DIF_OP_BE, lbl_zero);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/* non-zero child: result is zero (move %r0) */
	instr = DIF_INSTR_MOV(DIF_REG_R0, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_BRANCH(DIF_OP_BA, lbl_post);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/* zero child: result is one */
	dt_cg_xsetx(dlp, NULL, lbl_zero, dnp->dn_reg, 1);
	dt_irlist_append(dlp, dt_cg_node_alloc(lbl_post, DIF_INSTR_NOP));
}

/*
 * Generate the store half of an assignment.  On entry dnp->dn_reg already
 * holds the new value (the caller has generated code for the right-hand
 * side); this function stores it through the left-hand side, which is either
 * a memory reference or a variable (scalar, array, or associative).
 */
static void
dt_cg_asgn_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	dif_instr_t instr;
	dt_ident_t *idp;

	/*
	 * If we are performing a structure assignment of a translated type,
	 * we must instantiate all members and create a snapshot of the object
	 * in scratch space. We allocs a chunk of memory, generate code for
	 * each member, and then set dnp->dn_reg to the scratch object address.
	 */
	if ((idp = dt_node_resolve(dnp->dn_right, DT_IDENT_XLSOU)) != NULL) {
		ctf_membinfo_t ctm;
		dt_xlator_t *dxp = idp->di_data;
		dt_node_t *mnp, dn, mn;
		int r1, r2;

		/*
		 * Create two fake dt_node_t's representing operator "." and a
		 * right-hand identifier child node. These will be repeatedly
		 * modified according to each instantiated member so that we
		 * can pass them to dt_cg_store() and effect a member store.
		 */
		bzero(&dn, sizeof (dt_node_t));
		dn.dn_kind = DT_NODE_OP2;
		dn.dn_op = DT_TOK_DOT;
		dn.dn_left = dnp;
		dn.dn_right = &mn;

		bzero(&mn, sizeof (dt_node_t));
		mn.dn_kind = DT_NODE_IDENT;
		mn.dn_op = DT_TOK_IDENT;

		/*
		 * Allocate a register for our scratch data pointer. First we
		 * set it to the size of our data structure, and then replace
		 * it with the result of an allocs of the specified size.
		 */
		r1 = dt_regset_alloc(drp);
		dt_cg_setx(dlp, r1,
		    ctf_type_size(dxp->dx_dst_ctfp, dxp->dx_dst_base));

		instr = DIF_INSTR_ALLOCS(r1, r1);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		/*
		 * When dt_cg_asgn_op() is called, we have already generated
		 * code for dnp->dn_right, which is the translator input. We
		 * now associate this register with the translator's input
		 * identifier so it can be referenced during our member loop.
		 */
		dxp->dx_ident->di_flags |= DT_IDFLG_CGREG;
		dxp->dx_ident->di_id = dnp->dn_right->dn_reg;

		for (mnp = dxp->dx_members; mnp != NULL; mnp = mnp->dn_list) {
			/*
			 * Generate code for the translator member expression,
			 * and then cast the result to the member type.
			 */
			dt_cg_node(mnp->dn_membexpr, dlp, drp);
			mnp->dn_reg = mnp->dn_membexpr->dn_reg;
			dt_cg_typecast(mnp->dn_membexpr, mnp, dlp, drp);

			/*
			 * Ask CTF for the offset of the member so we can store
			 * to the appropriate offset. This call has already
			 * been done once by the parser, so it should succeed.
			 */
			if (ctf_member_info(dxp->dx_dst_ctfp, dxp->dx_dst_base,
			    mnp->dn_membname, &ctm) == CTF_ERR) {
				yypcb->pcb_hdl->dt_ctferr =
				    ctf_errno(dxp->dx_dst_ctfp);
				longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
			}

			/*
			 * If the destination member is at offset 0, store the
			 * result directly to r1 (the scratch buffer address).
			 * Otherwise allocate another temporary for the offset
			 * and add r1 to it before storing the result.
			 */
			if (ctm.ctm_offset != 0) {
				r2 = dt_regset_alloc(drp);

				/*
				 * Add the member offset rounded down to the
				 * nearest byte. If the offset was not aligned
				 * on a byte boundary, this member is a bit-
				 * field and dt_cg_store() will handle masking.
				 */
				dt_cg_setx(dlp, r2, ctm.ctm_offset / NBBY);
				instr = DIF_INSTR_FMT(DIF_OP_ADD, r1, r2, r2);
				dt_irlist_append(dlp,
				    dt_cg_node_alloc(DT_LBL_NONE, instr));

				dt_node_type_propagate(mnp, &dn);
				dn.dn_right->dn_string = mnp->dn_membname;
				dn.dn_reg = r2;

				dt_cg_store(mnp, dlp, drp, &dn);
				dt_regset_free(drp, r2);

			} else {
				dt_node_type_propagate(mnp, &dn);
				dn.dn_right->dn_string = mnp->dn_membname;
				dn.dn_reg = r1;

				dt_cg_store(mnp, dlp, drp, &dn);
			}

			dt_regset_free(drp, mnp->dn_reg);
		}

		dxp->dx_ident->di_flags &= ~DT_IDFLG_CGREG;
		dxp->dx_ident->di_id = 0;

		/* dn_reg may be -1 if a dynamic translator supplied it */
		if (dnp->dn_right->dn_reg != -1)
			dt_regset_free(drp, dnp->dn_right->dn_reg);

		assert(dnp->dn_reg == dnp->dn_right->dn_reg);
		dnp->dn_reg = r1;
	}

	/*
	 * If we are storing to a memory address, generate code again for the
	 * left-hand side using DT_NF_REF to get the address, and then generate
	 * a store to it.
	 *
	 * Both here and the other variable-store paths, we assume dnp->dn_reg
	 * already has the new value.
	 */
	if (dnp->dn_left->dn_kind != DT_NODE_VAR) {
		uint_t rbit = dnp->dn_left->dn_flags & DT_NF_REF;

		assert(dnp->dn_left->dn_flags & DT_NF_WRITABLE);
		assert(dnp->dn_left->dn_flags & DT_NF_LVALUE);

		dnp->dn_left->dn_flags |= DT_NF_REF; /* force pass-by-ref */

		dt_cg_node(dnp->dn_left, dlp, drp);
		dt_cg_store(dnp, dlp, drp, dnp->dn_left);
		dt_regset_free(drp, dnp->dn_left->dn_reg);

		dnp->dn_left->dn_flags &= ~DT_NF_REF;
		dnp->dn_left->dn_flags |= rbit;
		return;
	}

	idp = dt_ident_resolve(dnp->dn_left->dn_ident);
	idp->di_flags |= DT_IDFLG_DIFW;

	/*
	 * Storing to an array variable is a special case.
	 * Only 'uregs[]' supports this for the time being.
	 */
	if (idp->di_kind == DT_IDENT_ARRAY &&
	    idp->di_id <= DIF_VAR_ARRAY_MAX) {
		dt_node_t *idx = dnp->dn_left->dn_args;

		dt_cg_node(idx, dlp, drp);
		instr = DIF_INSTR_FMT(DIF_OP_STGA, idp->di_id, idx->dn_reg,
		    dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		dt_regset_free(drp, idx->dn_reg);
		return;
	}

	/* associative array: push the key tuple before the store */
	if (idp->di_kind == DT_IDENT_ARRAY)
		dt_cg_arglist(idp, dnp->dn_left->dn_args, dlp, drp);

	instr = DIF_INSTR_STV(dt_cg_stvar(idp), idp->di_id, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
}

/*
 * Generate code to load an associative array element: push the key tuple,
 * then issue a ld?aa (thread-local or global) into a fresh register.
 */
static void
dt_cg_assoc_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	dif_instr_t instr;
	uint_t op;

	assert(dnp->dn_kind == DT_NODE_VAR);
	assert(!(dnp->dn_ident->di_flags & DT_IDFLG_LOCAL));
	assert(dnp->dn_args != NULL);

	dt_cg_arglist(dnp->dn_ident, dnp->dn_args, dlp, drp);

	dnp->dn_reg = dt_regset_alloc(drp);

	if (dnp->dn_ident->di_flags & DT_IDFLG_TLS)
		op = DIF_OP_LDTAA;
	else
		op = DIF_OP_LDGAA;

	dnp->dn_ident->di_flags |= DT_IDFLG_DIFR;
	instr = DIF_INSTR_LDV(op,
dnp->dn_ident->di_id, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/*
	 * If the associative array is a pass-by-reference type, then we are
	 * loading its value as a pointer to either load or store through it.
	 * The array element in question may not have been faulted in yet, in
	 * which case DIF_OP_LD*AA will return zero. We append an epilogue
	 * of instructions similar to the following:
	 *
	 *	  ld?aa	 id, %r1	! base ld?aa instruction above
	 *	  tst	 %r1		! start of epilogue
	 *   +--- bne	 label
	 *   |	  setx	 size, %r1
	 *   |	  allocs %r1, %r1
	 *   |	  st?aa	 id, %r1
	 *   |	  ld?aa	 id, %r1
	 *   v
	 * label: < rest of code >
	 *
	 * The idea is that we allocs a zero-filled chunk of scratch space and
	 * do a DIF_OP_ST*AA to fault in and initialize the array element, and
	 * then reload it to get the faulted-in address of the new variable
	 * storage. This isn't cheap, but pass-by-ref associative array values
	 * are (thus far) uncommon and the allocs cost only occurs once. If
	 * this path becomes important to DTrace users, we can improve things
	 * by adding a new DIF opcode to fault in associative array elements.
	 */
	if (dnp->dn_flags & DT_NF_REF) {
		uint_t stvop = op == DIF_OP_LDTAA ?
		    DIF_OP_STTAA : DIF_OP_STGAA;
		uint_t label = dt_irlist_label(dlp);

		instr = DIF_INSTR_TST(dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		instr = DIF_INSTR_BRANCH(DIF_OP_BNE, label);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		dt_cg_setx(dlp, dnp->dn_reg, dt_node_type_size(dnp));
		instr = DIF_INSTR_ALLOCS(dnp->dn_reg, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		dnp->dn_ident->di_flags |= DT_IDFLG_DIFW;
		instr = DIF_INSTR_STV(stvop, dnp->dn_ident->di_id, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		instr = DIF_INSTR_LDV(op, dnp->dn_ident->di_id, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

		dt_irlist_append(dlp, dt_cg_node_alloc(label, DIF_INSTR_NOP));
	}
}

/*
 * Generate code to index a built-in array variable (e.g. args[], uregs[])
 * with a constant index: generate code for the index expression and issue a
 * ld?a, then for args[] mask off any junk bits beyond the argument's type
 * size (see the comment below).
 */
static void
dt_cg_array_op(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	dt_probe_t *prp = yypcb->pcb_probe;
	uintmax_t saved = dnp->dn_args->dn_value;
	dt_ident_t *idp = dnp->dn_ident;

	dif_instr_t instr;
	uint_t op;
	size_t size;
	int reg, n;

	assert(dnp->dn_kind == DT_NODE_VAR);
	assert(!(idp->di_flags & DT_IDFLG_LOCAL));

	assert(dnp->dn_args->dn_kind == DT_NODE_INT);
	assert(dnp->dn_args->dn_list == NULL);

	/*
	 * If this is a reference in the args[] array, temporarily modify the
	 * array index according to the static argument mapping (if any),
	 * unless the argument reference is provided by a dynamic translator.
	 * If we're using a dynamic translator for args[], then just set dn_reg
	 * to an invalid reg and return: DIF_OP_XLARG will fetch the arg later.
	 */
	if (idp->di_id == DIF_VAR_ARGS) {
		if ((idp->di_kind == DT_IDENT_XLPTR ||
		    idp->di_kind == DT_IDENT_XLSOU) &&
		    dt_xlator_dynamic(idp->di_data)) {
			dnp->dn_reg = -1;
			return;
		}
		dnp->dn_args->dn_value = prp->pr_mapping[saved];
	}

	dt_cg_node(dnp->dn_args, dlp, drp);
	dnp->dn_args->dn_value = saved;	/* restore the original index */

	dnp->dn_reg = dnp->dn_args->dn_reg;

	if (idp->di_flags & DT_IDFLG_TLS)
		op = DIF_OP_LDTA;
	else
		op = DIF_OP_LDGA;

	idp->di_flags |= DT_IDFLG_DIFR;

	instr = DIF_INSTR_LDA(op, idp->di_id,
	    dnp->dn_args->dn_reg, dnp->dn_reg);

	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/*
	 * If this is a reference to the args[] array, we need to take the
	 * additional step of explicitly eliminating any bits larger than the
	 * type size: the DIF interpreter in the kernel will always give us
	 * the raw (64-bit) argument value, and any bits larger than the type
	 * size may be junk. As a practical matter, this arises only on 64-bit
	 * architectures and only when the argument index is larger than the
	 * number of arguments passed directly to DTrace: if an 8-, 16- or
	 * 32-bit argument must be retrieved from the stack, it is possible
	 * (and in some cases, likely) that the upper bits will be garbage.
	 */
	if (idp->di_id != DIF_VAR_ARGS || !dt_node_is_scalar(dnp))
		return;

	if ((size = dt_node_type_size(dnp)) == sizeof (uint64_t))
		return;

	reg = dt_regset_alloc(drp);
	assert(size < sizeof (uint64_t));
	n = sizeof (uint64_t) * NBBY - size * NBBY;

	/* shift left then arithmetic/logical right to sign- or zero-extend */
	dt_cg_setx(dlp, reg, n);

	instr = DIF_INSTR_FMT(DIF_OP_SLL, dnp->dn_reg, reg, dnp->dn_reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_FMT((dnp->dn_flags & DT_NF_SIGNED) ?
	    DIF_OP_SRA : DIF_OP_SRL, dnp->dn_reg, reg, dnp->dn_reg);

	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
	dt_regset_free(drp, reg);
}

/*
 * Generate code for an inlined variable reference. Inlines can be used to
 * define either scalar or associative array substitutions. For scalars, we
 * simply generate code for the parse tree saved in the identifier's din_root,
 * and then cast the resulting expression to the inline's declaration type.
 * For arrays, we take the input parameter subtrees from dnp->dn_args and
 * temporarily store them in the din_root of each din_argv[i] identifier,
 * which are themselves inlines and were set up for us by the parser. The
 * result is that any reference to the inlined parameter inside the top-level
 * din_root will turn into a recursive call to dt_cg_inline() for a scalar
 * inline whose din_root will refer to the subtree pointed to by the argument.
 */
static void
dt_cg_inline(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	dt_ident_t *idp = dnp->dn_ident;
	dt_idnode_t *inp = idp->di_iarg;

	dt_idnode_t *pinp;
	dt_node_t *pnp;
	int i;

	assert(idp->di_flags & DT_IDFLG_INLINE);
	assert(idp->di_ops == &dt_idops_inline);

	if (idp->di_kind == DT_IDENT_ARRAY) {
		for (i = 0, pnp = dnp->dn_args;
		    pnp != NULL; pnp = pnp->dn_list, i++) {
			if (inp->din_argv[i] != NULL) {
				pinp = inp->din_argv[i]->di_iarg;
				pinp->din_root = pnp;
			}
		}
	}

	dt_cg_node(inp->din_root, dlp, drp);
	dnp->dn_reg = inp->din_root->dn_reg;
	dt_cg_typecast(inp->din_root, dnp, dlp, drp);

	/* clear the parameter bindings we installed above */
	if (idp->di_kind == DT_IDENT_ARRAY) {
		for (i = 0; i < inp->din_argc; i++) {
			pinp = inp->din_argv[i]->di_iarg;
			pinp->din_root = NULL;
		}
	}
}

/*
 * Context passed through ctf_member_iter() to dt_cg_xlate_member().
 */
typedef struct dt_xlmemb {
	dt_ident_t *dtxl_idp;		/* translated ident */
	dt_irlist_t
*dtxl_dlp;		/* instruction list */
	dt_regset_t *dtxl_drp;		/* register set */
	int dtxl_sreg;			/* location of the translation input */
	int dtxl_dreg;			/* location of our allocated buffer */
} dt_xlmemb_t;

/*
 * ctf_member_iter() callback: generate code that translates one member of a
 * translated type and stores the result at the member's offset inside the
 * buffer held in dtxl_dreg.  Members with no translator are skipped.
 */
/*ARGSUSED*/
static int
dt_cg_xlate_member(const char *name, ctf_id_t type, ulong_t off, void *arg)
{
	dt_xlmemb_t *dx = arg;
	dt_ident_t *idp = dx->dtxl_idp;
	dt_irlist_t *dlp = dx->dtxl_dlp;
	dt_regset_t *drp = dx->dtxl_drp;

	dt_node_t *mnp;
	dt_xlator_t *dxp;

	int reg, treg;
	uint32_t instr;
	size_t size;

	/* Generate code for the translation. */
	dxp = idp->di_data;
	mnp = dt_xlator_member(dxp, name);

	/* If there's no translator for the given member, skip it. */
	if (mnp == NULL)
		return (0);

	/* bind the translator input identifier to the source register */
	dxp->dx_ident->di_flags |= DT_IDFLG_CGREG;
	dxp->dx_ident->di_id = dx->dtxl_sreg;

	dt_cg_node(mnp->dn_membexpr, dlp, drp);

	dxp->dx_ident->di_flags &= ~DT_IDFLG_CGREG;
	dxp->dx_ident->di_id = 0;

	treg = mnp->dn_membexpr->dn_reg;

	/* Compute the offset into our buffer and store the result there. */
	reg = dt_regset_alloc(drp);

	dt_cg_setx(dlp, reg, off / NBBY);
	instr = DIF_INSTR_FMT(DIF_OP_ADD, dx->dtxl_dreg, reg, reg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	size = ctf_type_size(mnp->dn_membexpr->dn_ctfp,
	    mnp->dn_membexpr->dn_type);
	if (dt_node_is_scalar(mnp->dn_membexpr)) {
		/*
		 * Copying scalars is simple.
		 */
		switch (size) {
		case 1:
			instr = DIF_INSTR_STORE(DIF_OP_STB, treg, reg);
			break;
		case 2:
			instr = DIF_INSTR_STORE(DIF_OP_STH, treg, reg);
			break;
		case 4:
			instr = DIF_INSTR_STORE(DIF_OP_STW, treg, reg);
			break;
		case 8:
			instr = DIF_INSTR_STORE(DIF_OP_STX, treg, reg);
			break;
		default:
			xyerror(D_UNKNOWN, "internal error -- unexpected "
			    "size: %lu\n", (ulong_t)size);
		}

		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	} else if (dt_node_is_string(mnp->dn_membexpr)) {
		int szreg;

		/*
		 * Use the copys instruction for strings.
		 */
		szreg = dt_regset_alloc(drp);
		dt_cg_setx(dlp, szreg, size);
		instr = DIF_INSTR_COPYS(treg, szreg, reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		dt_regset_free(drp, szreg);
	} else {
		int szreg;

		/*
		 * If it's anything else then we'll just bcopy it: flush the
		 * tuple stack, push (src, dst, size), and call bcopy().
		 */
		szreg = dt_regset_alloc(drp);
		dt_cg_setx(dlp, szreg, size);
		dt_irlist_append(dlp,
		    dt_cg_node_alloc(DT_LBL_NONE, DIF_INSTR_FLUSHTS));
		instr = DIF_INSTR_PUSHTS(DIF_OP_PUSHTV, DIF_TYPE_CTF,
		    DIF_REG_R0, treg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		instr = DIF_INSTR_PUSHTS(DIF_OP_PUSHTV, DIF_TYPE_CTF,
		    DIF_REG_R0, reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		instr = DIF_INSTR_PUSHTS(DIF_OP_PUSHTV, DIF_TYPE_CTF,
		    DIF_REG_R0, szreg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		instr = DIF_INSTR_CALL(DIF_SUBR_BCOPY, szreg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		dt_regset_free(drp, szreg);
	}

	dt_regset_free(drp, reg);
	dt_regset_free(drp, treg);

	return (0);
}

/*
 * If we're expanding a translated type, we create an appropriately sized
 * buffer with alloca() and then translate each member
into it.
 */
static int
dt_cg_xlate_expand(dt_node_t *dnp, dt_ident_t *idp, dt_irlist_t *dlp,
    dt_regset_t *drp)
{
	dt_xlmemb_t dlm;
	uint32_t instr;
	int dreg;
	size_t size;

	dreg = dt_regset_alloc(drp);
	size = ctf_type_size(dnp->dn_ident->di_ctfp, dnp->dn_ident->di_type);

	/* Call alloca() to create the buffer. */
	dt_cg_setx(dlp, dreg, size);

	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, DIF_INSTR_FLUSHTS));

	instr = DIF_INSTR_PUSHTS(DIF_OP_PUSHTV, DIF_TYPE_CTF, DIF_REG_R0, dreg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	instr = DIF_INSTR_CALL(DIF_SUBR_ALLOCA, dreg);
	dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/* Generate the translation for each member. */
	dlm.dtxl_idp = idp;
	dlm.dtxl_dlp = dlp;
	dlm.dtxl_drp = drp;
	dlm.dtxl_sreg = dnp->dn_reg;
	dlm.dtxl_dreg = dreg;
	(void) ctf_member_iter(dnp->dn_ident->di_ctfp,
	    dnp->dn_ident->di_type, dt_cg_xlate_member,
	    &dlm);

	/* returns the register holding the translated buffer's address */
	return (dreg);
}

/*
 * Central code-generation dispatch: recursively generate DIF for the
 * expression tree rooted at dnp, appending instructions to dlp and managing
 * registers via drp.  On return dnp->dn_reg holds the register containing
 * the expression's result (or -1 on the dynamic-translator args[] path).
 */
static void
dt_cg_node(dt_node_t *dnp, dt_irlist_t *dlp, dt_regset_t *drp)
{
	ctf_file_t *ctfp = dnp->dn_ctfp;
	ctf_file_t *octfp;
	ctf_membinfo_t m;
	ctf_id_t type;

	dif_instr_t instr;
	dt_ident_t *idp;
	ssize_t stroff;
	uint_t op;

	switch (dnp->dn_op) {
	case DT_TOK_COMMA:
		dt_cg_node(dnp->dn_left, dlp, drp);
		dt_regset_free(drp, dnp->dn_left->dn_reg);
		dt_cg_node(dnp->dn_right, dlp, drp);
		dnp->dn_reg = dnp->dn_right->dn_reg;
		break;

	case DT_TOK_ASGN:
		dt_cg_node(dnp->dn_right, dlp, drp);
		dnp->dn_reg = dnp->dn_right->dn_reg;
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_ADD_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_ADD);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_SUB_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SUB);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_MUL_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_MUL);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_DIV_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp,
		    (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SDIV : DIF_OP_UDIV);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_MOD_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp,
		    (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SREM : DIF_OP_UREM);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_AND_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_AND);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_XOR_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_XOR);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_OR_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_OR);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_LSH_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SLL);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_RSH_EQ:
		dt_cg_arithmetic_op(dnp, dlp, drp,
		    (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SRA : DIF_OP_SRL);
		dt_cg_asgn_op(dnp, dlp, drp);
		break;

	case DT_TOK_QUESTION:
		dt_cg_ternary_op(dnp, dlp, drp);
		break;

	case DT_TOK_LOR:
		dt_cg_logical_or(dnp, dlp, drp);
		break;

	case DT_TOK_LXOR:
		dt_cg_logical_xor(dnp, dlp, drp);
		break;

	case DT_TOK_LAND:
		dt_cg_logical_and(dnp, dlp, drp);
		break;

	case DT_TOK_BOR:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_OR);
		break;

	case DT_TOK_XOR:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_XOR);
		break;

	case DT_TOK_BAND:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_AND);
		break;

	case DT_TOK_EQU:
		dt_cg_compare_op(dnp, dlp, drp, DIF_OP_BE);
		break;

	case DT_TOK_NEQ:
		dt_cg_compare_op(dnp, dlp, drp, DIF_OP_BNE);
		break;

	case DT_TOK_LT:
		dt_cg_compare_op(dnp, dlp, drp,
		    dt_cg_compare_signed(dnp) ? DIF_OP_BL : DIF_OP_BLU);
		break;

	case DT_TOK_LE:
		dt_cg_compare_op(dnp, dlp, drp,
		    dt_cg_compare_signed(dnp) ? DIF_OP_BLE : DIF_OP_BLEU);
		break;

	case DT_TOK_GT:
		dt_cg_compare_op(dnp, dlp, drp,
		    dt_cg_compare_signed(dnp) ? DIF_OP_BG : DIF_OP_BGU);
		break;

	case DT_TOK_GE:
		dt_cg_compare_op(dnp, dlp, drp,
		    dt_cg_compare_signed(dnp) ? DIF_OP_BGE : DIF_OP_BGEU);
		break;

	case DT_TOK_LSH:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SLL);
		break;

	case DT_TOK_RSH:
		dt_cg_arithmetic_op(dnp, dlp, drp,
		    (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SRA : DIF_OP_SRL);
		break;

	case DT_TOK_ADD:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_ADD);
		break;

	case DT_TOK_SUB:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_SUB);
		break;

	case DT_TOK_MUL:
		dt_cg_arithmetic_op(dnp, dlp, drp, DIF_OP_MUL);
		break;

	case DT_TOK_DIV:
		dt_cg_arithmetic_op(dnp, dlp, drp,
		    (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SDIV : DIF_OP_UDIV);
		break;

	case DT_TOK_MOD:
		dt_cg_arithmetic_op(dnp, dlp, drp,
		    (dnp->dn_flags & DT_NF_SIGNED) ? DIF_OP_SREM : DIF_OP_UREM);
		break;

	case DT_TOK_LNEG:
		dt_cg_logical_neg(dnp, dlp, drp);
		break;

	case DT_TOK_BNEG:
		dt_cg_node(dnp->dn_child, dlp, drp);
		dnp->dn_reg = dnp->dn_child->dn_reg;
		instr = DIF_INSTR_NOT(dnp->dn_reg, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		break;

	case DT_TOK_PREINC:
		dt_cg_prearith_op(dnp, dlp, drp, DIF_OP_ADD);
		break;

	case DT_TOK_POSTINC:
		dt_cg_postarith_op(dnp, dlp, drp, DIF_OP_ADD);
		break;

	case DT_TOK_PREDEC:
		dt_cg_prearith_op(dnp, dlp, drp, DIF_OP_SUB);
		break;

	case DT_TOK_POSTDEC:
		dt_cg_postarith_op(dnp, dlp, drp, DIF_OP_SUB);
		break;

	case DT_TOK_IPOS:
		/* unary + is a no-op; just forward the child's register */
		dt_cg_node(dnp->dn_child, dlp, drp);
		dnp->dn_reg = dnp->dn_child->dn_reg;
		break;

	case DT_TOK_INEG:
		dt_cg_node(dnp->dn_child, dlp, drp);
		dnp->dn_reg = dnp->dn_child->dn_reg;

		/* negate by subtracting from %r0 (zero) */
		instr = DIF_INSTR_FMT(DIF_OP_SUB, DIF_REG_R0,
		    dnp->dn_reg, dnp->dn_reg);

		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		break;

	case DT_TOK_DEREF:
		dt_cg_node(dnp->dn_child, dlp, drp);
		dnp->dn_reg = dnp->dn_child->dn_reg;

		if (dt_node_is_dynamic(dnp->dn_child)) {
			int reg;
			idp = dt_node_resolve(dnp->dn_child, DT_IDENT_XLPTR);
			assert(idp != NULL);
			reg = dt_cg_xlate_expand(dnp, idp, dlp, drp);

			dt_regset_free(drp, dnp->dn_child->dn_reg);
			dnp->dn_reg = reg;

		} else if (!(dnp->dn_flags & DT_NF_REF)) {
			uint_t ubit = dnp->dn_flags & DT_NF_USERLAND;

			/*
			 * Save and restore DT_NF_USERLAND across dt_cg_load():
			 * we need the sign bit from dnp and the user bit from
			 * dnp->dn_child in order to get the proper opcode.
			 */
			dnp->dn_flags |=
			    (dnp->dn_child->dn_flags & DT_NF_USERLAND);

			instr = DIF_INSTR_LOAD(dt_cg_load(dnp, ctfp,
			    dnp->dn_type), dnp->dn_reg, dnp->dn_reg);

			dnp->dn_flags &= ~DT_NF_USERLAND;
			dnp->dn_flags |= ubit;

			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));
		}
		break;

	case DT_TOK_ADDROF: {
		uint_t rbit = dnp->dn_child->dn_flags & DT_NF_REF;

		dnp->dn_child->dn_flags |= DT_NF_REF; /* force pass-by-ref */
		dt_cg_node(dnp->dn_child, dlp, drp);
		dnp->dn_reg = dnp->dn_child->dn_reg;

		dnp->dn_child->dn_flags &= ~DT_NF_REF;
		dnp->dn_child->dn_flags |= rbit;
		break;
	}

	case DT_TOK_SIZEOF: {
		size_t size = dt_node_sizeof(dnp->dn_child);
		dnp->dn_reg = dt_regset_alloc(drp);
		assert(size != 0);
		dt_cg_setx(dlp, dnp->dn_reg, size);
		break;
	}

	case DT_TOK_STRINGOF:
		dt_cg_node(dnp->dn_child, dlp, drp);
		dnp->dn_reg = dnp->dn_child->dn_reg;
		break;

	case DT_TOK_XLATE:
		/*
		 * An xlate operator appears in either an XLATOR, indicating a
		 * reference to a dynamic translator, or an OP2, indicating
		 * use of the xlate operator in the user's program. For the
		 * dynamic case, generate an xlate opcode with a reference to
		 * the corresponding member, pre-computed for us in dn_members.
		 */
		if (dnp->dn_kind == DT_NODE_XLATOR) {
			dt_xlator_t *dxp = dnp->dn_xlator;

			assert(dxp->dx_ident->di_flags & DT_IDFLG_CGREG);
			assert(dxp->dx_ident->di_id != 0);

			dnp->dn_reg = dt_regset_alloc(drp);

			if (dxp->dx_arg == -1) {
				instr = DIF_INSTR_MOV(
				    dxp->dx_ident->di_id, dnp->dn_reg);
				dt_irlist_append(dlp,
				    dt_cg_node_alloc(DT_LBL_NONE, instr));
				op = DIF_OP_XLATE;
			} else
				op = DIF_OP_XLARG;

			instr = DIF_INSTR_XLATE(op, 0, dnp->dn_reg);
			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));

			dlp->dl_last->di_extern = dnp->dn_xmember;
			break;
		}

		assert(dnp->dn_kind == DT_NODE_OP2);
		dt_cg_node(dnp->dn_right, dlp, drp);
		dnp->dn_reg = dnp->dn_right->dn_reg;
		break;

	case DT_TOK_LPAR:
		/* parenthesized cast: generate the operand, then convert */
		dt_cg_node(dnp->dn_right, dlp, drp);
		dnp->dn_reg = dnp->dn_right->dn_reg;
		dt_cg_typecast(dnp->dn_right, dnp, dlp, drp);
		break;

	case DT_TOK_PTR:
	case DT_TOK_DOT:
		assert(dnp->dn_right->dn_kind == DT_NODE_IDENT);
		dt_cg_node(dnp->dn_left, dlp, drp);

		/*
		 * If the left-hand side of PTR or DOT is a dynamic variable,
		 * we expect it to be the output of a D translator. In this
		 * case, we look up the parse tree corresponding to the member
		 * that is being accessed and run the code generator over it.
		 * We then cast the result as if by the assignment operator.
		 */
		if ((idp = dt_node_resolve(
		    dnp->dn_left, DT_IDENT_XLSOU)) != NULL ||
		    (idp = dt_node_resolve(
		    dnp->dn_left, DT_IDENT_XLPTR)) != NULL) {

			dt_xlator_t *dxp;
			dt_node_t *mnp;

			dxp = idp->di_data;
			mnp = dt_xlator_member(dxp, dnp->dn_right->dn_string);
			assert(mnp != NULL);

			dxp->dx_ident->di_flags |= DT_IDFLG_CGREG;
			dxp->dx_ident->di_id = dnp->dn_left->dn_reg;

			dt_cg_node(mnp->dn_membexpr, dlp, drp);
			dnp->dn_reg = mnp->dn_membexpr->dn_reg;
			dt_cg_typecast(mnp->dn_membexpr, dnp, dlp, drp);

			dxp->dx_ident->di_flags &= ~DT_IDFLG_CGREG;
			dxp->dx_ident->di_id = 0;

			/* dn_reg may be -1 on the dynamic args[] path */
			if (dnp->dn_left->dn_reg != -1)
				dt_regset_free(drp, dnp->dn_left->dn_reg);
			break;
		}

		ctfp = dnp->dn_left->dn_ctfp;
		type = ctf_type_resolve(ctfp, dnp->dn_left->dn_type);

		if (dnp->dn_op == DT_TOK_PTR) {
			type = ctf_type_reference(ctfp, type);
			type = ctf_type_resolve(ctfp, type);
		}

		if ((ctfp = dt_cg_membinfo(octfp = ctfp, type,
		    dnp->dn_right->dn_string, &m)) == NULL) {
			yypcb->pcb_hdl->dt_ctferr = ctf_errno(octfp);
			longjmp(yypcb->pcb_jmpbuf, EDT_CTF);
		}

		if (m.ctm_offset != 0) {
			int reg;

			reg = dt_regset_alloc(drp);

			/*
			 * If the offset is not aligned on a byte boundary, it
			 * is a bit-field member and we will extract the value
			 * bits below after we generate the appropriate load.
			 */
			dt_cg_setx(dlp, reg, m.ctm_offset / NBBY);

			instr = DIF_INSTR_FMT(DIF_OP_ADD,
			    dnp->dn_left->dn_reg, reg, dnp->dn_left->dn_reg);

			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));
			dt_regset_free(drp, reg);
		}

		if (!(dnp->dn_flags & DT_NF_REF)) {
			uint_t ubit = dnp->dn_flags & DT_NF_USERLAND;

			/*
			 * Save and restore DT_NF_USERLAND across dt_cg_load():
			 * we need the sign bit from dnp and the user bit from
			 * dnp->dn_left in order to get the proper opcode.
			 */
			dnp->dn_flags |=
			    (dnp->dn_left->dn_flags & DT_NF_USERLAND);

			instr = DIF_INSTR_LOAD(dt_cg_load(dnp,
			    ctfp, m.ctm_type), dnp->dn_left->dn_reg,
			    dnp->dn_left->dn_reg);

			dnp->dn_flags &= ~DT_NF_USERLAND;
			dnp->dn_flags |= ubit;

			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));

			if (dnp->dn_flags & DT_NF_BITFIELD)
				dt_cg_field_get(dnp, dlp, drp, ctfp, &m);
		}

		dnp->dn_reg = dnp->dn_left->dn_reg;
		break;

	case DT_TOK_STRING:
		dnp->dn_reg = dt_regset_alloc(drp);

		assert(dnp->dn_kind == DT_NODE_STRING);
		stroff = dt_strtab_insert(yypcb->pcb_strtab, dnp->dn_string);

		if (stroff == -1L)
			longjmp(yypcb->pcb_jmpbuf, EDT_NOMEM);
		if (stroff > DIF_STROFF_MAX)
			longjmp(yypcb->pcb_jmpbuf, EDT_STR2BIG);

		instr = DIF_INSTR_SETS((ulong_t)stroff, dnp->dn_reg);
		dt_irlist_append(dlp, dt_cg_node_alloc(DT_LBL_NONE, instr));
		break;

	case DT_TOK_IDENT:
		/*
		 * If the specified identifier is a variable on which we have
		 * set the code generator register flag, then this variable
		 * has already had code generated for it and saved in di_id.
		 * Allocate a new register and copy the existing value to it.
		 */
		if (dnp->dn_kind == DT_NODE_VAR &&
		    (dnp->dn_ident->di_flags & DT_IDFLG_CGREG)) {
			dnp->dn_reg = dt_regset_alloc(drp);
			instr = DIF_INSTR_MOV(dnp->dn_ident->di_id,
			    dnp->dn_reg);
			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));
			break;
		}

		/*
		 * Identifiers can represent function calls, variable refs, or
		 * symbols. First we check for inlined variables, and handle
		 * them by generating code for the inline parse tree.
		 */
		if (dnp->dn_kind == DT_NODE_VAR &&
		    (dnp->dn_ident->di_flags & DT_IDFLG_INLINE)) {
			dt_cg_inline(dnp, dlp, drp);
			break;
		}

		switch (dnp->dn_kind) {
		case DT_NODE_FUNC:
			if ((idp = dnp->dn_ident)->di_kind != DT_IDENT_FUNC) {
				dnerror(dnp, D_CG_EXPR, "%s %s( ) may not be "
				    "called from a D expression (D program "
				    "context required)\n",
				    dt_idkind_name(idp->di_kind), idp->di_name);
			}

			dt_cg_arglist(dnp->dn_ident, dnp->dn_args, dlp, drp);

			dnp->dn_reg = dt_regset_alloc(drp);
			instr = DIF_INSTR_CALL(dnp->dn_ident->di_id,
			    dnp->dn_reg);

			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));

			break;

		case DT_NODE_VAR:
			if (dnp->dn_ident->di_kind == DT_IDENT_XLSOU ||
			    dnp->dn_ident->di_kind == DT_IDENT_XLPTR) {
				/*
				 * This can only happen if we have translated
				 * args[]. See dt_idcook_args() for details.
				 */
				assert(dnp->dn_ident->di_id == DIF_VAR_ARGS);
				dt_cg_array_op(dnp, dlp, drp);
				break;
			}

			if (dnp->dn_ident->di_kind == DT_IDENT_ARRAY) {
				if (dnp->dn_ident->di_id > DIF_VAR_ARRAY_MAX)
					dt_cg_assoc_op(dnp, dlp, drp);
				else
					dt_cg_array_op(dnp, dlp, drp);
				break;
			}

			dnp->dn_reg = dt_regset_alloc(drp);

			if (dnp->dn_ident->di_flags & DT_IDFLG_LOCAL)
				op = DIF_OP_LDLS;
			else if (dnp->dn_ident->di_flags & DT_IDFLG_TLS)
				op = DIF_OP_LDTS;
			else
				op = DIF_OP_LDGS;

			dnp->dn_ident->di_flags |= DT_IDFLG_DIFR;

			instr = DIF_INSTR_LDV(op,
			    dnp->dn_ident->di_id, dnp->dn_reg);

			dt_irlist_append(dlp,
			    dt_cg_node_alloc(DT_LBL_NONE, instr));
			break;

		case DT_NODE_SYM: {
			dtrace_hdl_t *dtp = yypcb->pcb_hdl;
			dtrace_syminfo_t *sip = dnp->dn_ident->di_data;
			GElf_Sym sym;

			if (dtrace_lookup_by_name(dtp,
			    sip->dts_object, sip->dts_name, &sym, NULL) == -1) {
				xyerror(D_UNKNOWN, "cg failed for symbol %s`%s:"
				    " %s\n", sip->dts_object, sip->dts_name,
				    dtrace_errmsg(dtp, dtrace_errno(dtp)));
			}

			dnp->dn_reg = dt_regset_alloc(drp);
			dt_cg_xsetx(dlp, dnp->dn_ident,
			    DT_LBL_NONE, dnp->dn_reg, sym.st_value);

			if (!(dnp->dn_flags & DT_NF_REF)) {
				instr = DIF_INSTR_LOAD(dt_cg_load(dnp, ctfp,
				    dnp->dn_type), dnp->dn_reg, dnp->dn_reg);
				dt_irlist_append(dlp,
				    dt_cg_node_alloc(DT_LBL_NONE, instr));
			}
			break;
		}

		default:
			xyerror(D_UNKNOWN, "internal error -- node type %u is "
			    "not valid for an identifier\n", dnp->dn_kind);
		}
		break;

	case DT_TOK_INT:
		dnp->dn_reg = dt_regset_alloc(drp);
		dt_cg_setx(dlp, dnp->dn_reg, dnp->dn_value);
		break;

	default:
		xyerror(D_UNKNOWN, "internal error -- token type %u is not a "
		    "valid D compilation token\n", dnp->dn_op);
	}
}

void
dt_cg(dt_pcb_t *pcb, dt_node_t *dnp)
{
	dif_instr_t instr;
	dt_xlator_t *dxp;
	dt_ident_t *idp;

	/*
	 * Entry point for DIF code generation: compile the parse tree rooted
	 * at 'dnp' into the per-pcb IR list (pcb_ir), rebuilding the integer
	 * and string tables as we go.  All allocation failures bail out via
	 * longjmp() to the pcb's error buffer with EDT_NOMEM.
	 *
	 * Lazily create the register set sized to the number of DIF integer
	 * registers the kernel advertises; it is reused across calls.
	 */
	if (pcb->pcb_regs == NULL && (pcb->pcb_regs =
	    dt_regset_create(pcb->pcb_hdl->dt_conf.dtc_difintregs)) == NULL)
		longjmp(pcb->pcb_jmpbuf, EDT_NOMEM);

	dt_regset_reset(pcb->pcb_regs);
	(void) dt_regset_alloc(pcb->pcb_regs); /* allocate %r0 */

	/*
	 * Discard any integer/string tables left over from a previous
	 * compilation and start fresh for this clause.
	 */
	if (pcb->pcb_inttab != NULL)
		dt_inttab_destroy(pcb->pcb_inttab);

	/*
	 * NOTE(review): this uses yypcb->pcb_hdl where the regset creation
	 * above uses pcb->pcb_hdl -- presumably pcb == yypcb here; confirm
	 * before unifying.
	 */
	if ((pcb->pcb_inttab = dt_inttab_create(yypcb->pcb_hdl)) == NULL)
		longjmp(pcb->pcb_jmpbuf, EDT_NOMEM);

	if (pcb->pcb_strtab != NULL)
		dt_strtab_destroy(pcb->pcb_strtab);

	if ((pcb->pcb_strtab = dt_strtab_create(BUFSIZ)) == NULL)
		longjmp(pcb->pcb_jmpbuf, EDT_NOMEM);

	/* Reset the IR list and record the root node of the result. */
	dt_irlist_destroy(&pcb->pcb_ir);
	dt_irlist_create(&pcb->pcb_ir);

	assert(pcb->pcb_dret == NULL);
	pcb->pcb_dret = dnp;

	/*
	 * A translated pointer has no materializable value at probe time, so
	 * it cannot be the result of an expression; reject it up front.
	 */
	if (dt_node_resolve(dnp, DT_IDENT_XLPTR) != NULL) {
		dnerror(dnp, D_CG_DYN, "expression cannot evaluate to result "
		    "of a translated pointer\n");
	}

	/*
	 * If we're generating code for a translator body, assign the input
	 * parameter to the first available register (i.e. caller passes %r1).
	 */
	if (dnp->dn_kind == DT_NODE_MEMBER) {
		dxp = dnp->dn_membxlator;
		dnp = dnp->dn_membexpr;

		dxp->dx_ident->di_flags |= DT_IDFLG_CGREG;
		dxp->dx_ident->di_id = dt_regset_alloc(pcb->pcb_regs);
	}

	/* Generate code for the (possibly unwrapped) expression tree. */
	dt_cg_node(dnp, &pcb->pcb_ir, pcb->pcb_regs);

	/*
	 * If the result resolves to a translated struct/union, expand the
	 * translation in-line and move the result into the new register.
	 */
	if ((idp = dt_node_resolve(dnp, DT_IDENT_XLSOU)) != NULL) {
		int reg = dt_cg_xlate_expand(dnp, idp,
		    &pcb->pcb_ir, pcb->pcb_regs);
		dt_regset_free(pcb->pcb_regs, dnp->dn_reg);
		dnp->dn_reg = reg;
	}

	/* Emit the final 'ret' of the result register and release it. */
	instr = DIF_INSTR_RET(dnp->dn_reg);
	dt_regset_free(pcb->pcb_regs, dnp->dn_reg);
	dt_irlist_append(&pcb->pcb_ir, dt_cg_node_alloc(DT_LBL_NONE, instr));

	/*
	 * NOTE(review): 'dnp' was reassigned to dn_membexpr above, so this
	 * tests the translated expression's kind, not the original root's --
	 * confirm this matches the intended translator-register cleanup
	 * condition ('dxp' is only initialized on the earlier MEMBER path).
	 */
	if (dnp->dn_kind == DT_NODE_MEMBER) {
		dt_regset_free(pcb->pcb_regs, dxp->dx_ident->di_id);
		dxp->dx_ident->di_id = 0;
		dxp->dx_ident->di_flags &= ~DT_IDFLG_CGREG;
	}

	/* Release %r0 and verify every register was returned to the set. */
	dt_regset_free(pcb->pcb_regs, 0);
	dt_regset_assert_free(pcb->pcb_regs);
}