//===--- InterpBuiltin.cpp - Interpreter for the constexpr VM ---*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "../ExprConstShared.h"
#include "Boolean.h"
#include "Interp.h"
#include "PrimType.h"
#include "clang/AST/RecordLayout.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/TargetInfo.h"

namespace clang {
namespace interp {

template <typename T>
static T getParam(const InterpFrame *Frame, unsigned Index) {
  assert(Frame->getFunction()->getNumParams() > Index);
  unsigned Offset = Frame->getFunction()->getParamOffset(Index);
  return Frame->getParam<T>(Offset);
}

PrimType getIntPrimType(const InterpState &S) {
  const TargetInfo &TI = S.getCtx().getTargetInfo();
  unsigned IntWidth = TI.getIntWidth();

  if (IntWidth == 32)
    return PT_Sint32;
  else if (IntWidth == 16)
    return PT_Sint16;
  llvm_unreachable("Int isn't 16 or 32 bit?");
}

PrimType getLongPrimType(const InterpState &S) {
  const TargetInfo &TI = S.getCtx().getTargetInfo();
  unsigned LongWidth = TI.getLongWidth();

  if (LongWidth == 64)
    return PT_Sint64;
  else if (LongWidth == 32)
    return PT_Sint32;
  else if (LongWidth == 16)
    return PT_Sint16;
  llvm_unreachable("long isn't 16, 32 or 64 bit?");
}

/// Peek an integer value from the stack into an APSInt.
static APSInt peekToAPSInt(InterpStack &Stk, PrimType T, size_t Offset = 0) {
  if (Offset == 0)
    Offset = align(primSize(T));

  APSInt R;
  INT_TYPE_SWITCH(T, {
    T Val = Stk.peek<T>(Offset);
    R = APSInt(
        APInt(Val.bitWidth(), static_cast<uint64_t>(Val), T::isSigned()));
  });

  return R;
}

/// Pushes \p Val to the stack, as a target-dependent 'int'.
static void pushInt(InterpState &S, int32_t Val) {
  PrimType IntType = getIntPrimType(S);
  if (IntType == PT_Sint32)
    S.Stk.push<Integral<32, true>>(Integral<32, true>::from(Val));
  else if (IntType == PT_Sint16)
    S.Stk.push<Integral<16, true>>(Integral<16, true>::from(Val));
  else
    llvm_unreachable("Int isn't 16 or 32 bit?");
}

static void pushAPSInt(InterpState &S, const APSInt &Val) {
  bool Signed = Val.isSigned();

  if (Signed) {
    switch (Val.getBitWidth()) {
    case 64:
      S.Stk.push<Integral<64, true>>(
          Integral<64, true>::from(Val.getSExtValue()));
      break;
    case 32:
      S.Stk.push<Integral<32, true>>(
          Integral<32, true>::from(Val.getSExtValue()));
      break;
    case 16:
      S.Stk.push<Integral<16, true>>(
          Integral<16, true>::from(Val.getSExtValue()));
      break;
    case 8:
      S.Stk.push<Integral<8, true>>(
          Integral<8, true>::from(Val.getSExtValue()));
      break;
    default:
      llvm_unreachable("Invalid integer bitwidth");
    }
    return;
  }

  // Unsigned.
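  // Same dispatch as the signed cases above, but using the unsigned
  // Integral specializations and zero-extension.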
  switch (Val.getBitWidth()) {
  case 64:
    S.Stk.push<Integral<64, false>>(
        Integral<64, false>::from(Val.getZExtValue()));
    break;
  case 32:
    S.Stk.push<Integral<32, false>>(
        Integral<32, false>::from(Val.getZExtValue()));
    break;
  case 16:
    S.Stk.push<Integral<16, false>>(
        Integral<16, false>::from(Val.getZExtValue()));
    break;
  case 8:
    S.Stk.push<Integral<8, false>>(
        Integral<8, false>::from(Val.getZExtValue()));
    break;
  default:
    llvm_unreachable("Invalid integer bitwidth");
  }
}

/// Pushes \p Val to the stack, as a target-dependent 'long'.
static void pushLong(InterpState &S, int64_t Val) {
  PrimType LongType = getLongPrimType(S);
  if (LongType == PT_Sint64)
    S.Stk.push<Integral<64, true>>(Integral<64, true>::from(Val));
  else if (LongType == PT_Sint32)
    S.Stk.push<Integral<32, true>>(Integral<32, true>::from(Val));
  else if (LongType == PT_Sint16)
    S.Stk.push<Integral<16, true>>(Integral<16, true>::from(Val));
  else
    llvm_unreachable("Long isn't 16, 32 or 64 bit?");
}

static void pushSizeT(InterpState &S, uint64_t Val) {
  const TargetInfo &TI = S.getCtx().getTargetInfo();
  unsigned SizeTWidth = TI.getTypeWidth(TI.getSizeType());

  switch (SizeTWidth) {
  case 64:
    S.Stk.push<Integral<64, false>>(Integral<64, false>::from(Val));
    break;
  case 32:
    S.Stk.push<Integral<32, false>>(Integral<32, false>::from(Val));
    break;
  case 16:
    S.Stk.push<Integral<16, false>>(Integral<16, false>::from(Val));
    break;
  default:
    llvm_unreachable("We don't handle this size_t size.");
  }
}

static bool retPrimValue(InterpState &S, CodePtr OpPC, APValue &Result,
                         std::optional<PrimType> &T) {
  if (!T)
    return RetVoid(S, OpPC, Result);

#define RET_CASE(X)                                                            \
  case X:                                                                      \
    return Ret<X>(S, OpPC, Result);
  switch (*T) {
    RET_CASE(PT_Ptr);
    RET_CASE(PT_FnPtr);
    RET_CASE(PT_Float);
    RET_CASE(PT_Bool);
    RET_CASE(PT_Sint8);
    RET_CASE(PT_Uint8);
    RET_CASE(PT_Sint16);
    RET_CASE(PT_Uint16);
    RET_CASE(PT_Sint32);
    RET_CASE(PT_Uint32);
    RET_CASE(PT_Sint64);
    RET_CASE(PT_Uint64);
  default:
    llvm_unreachable("Unsupported return type for builtin function");
  }
#undef RET_CASE
}

static bool interp__builtin_strcmp(InterpState &S, CodePtr OpPC,
                                   const InterpFrame *Frame) {
  const Pointer &A = getParam<Pointer>(Frame, 0);
  const Pointer &B = getParam<Pointer>(Frame, 1);

  if (!CheckLive(S, OpPC, A, AK_Read) || !CheckLive(S, OpPC, B, AK_Read))
    return false;

  assert(A.getFieldDesc()->isPrimitiveArray());
  assert(B.getFieldDesc()->isPrimitiveArray());

  unsigned IndexA = A.getIndex();
  unsigned IndexB = B.getIndex();
  int32_t Result = 0;
  for (;; ++IndexA, ++IndexB) {
    const Pointer &PA = A.atIndex(IndexA);
    const Pointer &PB = B.atIndex(IndexB);
    if (!CheckRange(S, OpPC, PA, AK_Read) ||
        !CheckRange(S, OpPC, PB, AK_Read)) {
      return false;
    }
    uint8_t CA = PA.deref<uint8_t>();
    uint8_t CB = PB.deref<uint8_t>();

    if (CA > CB) {
      Result = 1;
      break;
    } else if (CA < CB) {
      Result = -1;
      break;
    }
    if (CA == 0 || CB == 0)
      break;
  }

  pushInt(S, Result);
  return true;
}

static bool interp__builtin_strlen(InterpState &S, CodePtr OpPC,
                                   const InterpFrame *Frame) {
  const Pointer &StrPtr = getParam<Pointer>(Frame, 0);

  if (!CheckArray(S, OpPC, StrPtr))
    return false;

  if (!CheckLive(S, OpPC, StrPtr, AK_Read))
    return false;

  if (!CheckDummy(S, OpPC, StrPtr))
    return false;

  assert(StrPtr.getFieldDesc()->isPrimitiveArray());

  size_t Len = 0;
  for (size_t I = StrPtr.getIndex();; ++I, ++Len) {
    const Pointer &ElemPtr = StrPtr.atIndex(I);

    if (!CheckRange(S, OpPC, ElemPtr, AK_Read))
      return false;

    uint8_t Val = ElemPtr.deref<uint8_t>();
    if (Val == 0)
      break;
  }

  pushSizeT(S, Len);
  return true;
}

static bool interp__builtin_nan(InterpState &S, CodePtr OpPC,
                                const InterpFrame *Frame, const Function *F,
                                bool Signaling) {
  const Pointer &Arg = getParam<Pointer>(Frame, 0);

  if (!CheckLoad(S, OpPC, Arg))
    return false;

  assert(Arg.getFieldDesc()->isPrimitiveArray());

  // Convert the given string to an integer using StringRef's API.
  llvm::APInt Fill;
  std::string Str;
  assert(Arg.getNumElems() >= 1);
  for (unsigned I = 0;; ++I) {
    const Pointer &Elem = Arg.atIndex(I);

    if (!CheckLoad(S, OpPC, Elem))
      return false;

    if (Elem.deref<int8_t>() == 0)
      break;

    Str += Elem.deref<char>();
  }

  // Treat empty strings as if they were zero.
  if (Str.empty())
    Fill = llvm::APInt(32, 0);
  else if (StringRef(Str).getAsInteger(0, Fill))
    return false;

  const llvm::fltSemantics &TargetSemantics =
      S.getCtx().getFloatTypeSemantics(F->getDecl()->getReturnType());

  Floating Result;
  if (S.getCtx().getTargetInfo().isNan2008()) {
    if (Signaling)
      Result = Floating(
          llvm::APFloat::getSNaN(TargetSemantics, /*Negative=*/false, &Fill));
    else
      Result = Floating(
          llvm::APFloat::getQNaN(TargetSemantics, /*Negative=*/false, &Fill));
  } else {
    // Prior to IEEE 754-2008, architectures were allowed to choose whether
    // the first bit of their significand was set for qNaN or sNaN. MIPS chose
    // a different encoding to what became a standard in 2008, and for pre-
    // 2008 revisions, MIPS interpreted sNaN-2008 as qNaN and qNaN-2008 as
    // sNaN. This is now known as "legacy NaN" encoding.
    if (Signaling)
      Result = Floating(
          llvm::APFloat::getQNaN(TargetSemantics, /*Negative=*/false, &Fill));
    else
      Result = Floating(
          llvm::APFloat::getSNaN(TargetSemantics, /*Negative=*/false, &Fill));
  }

  S.Stk.push<Floating>(Result);
  return true;
}

static bool interp__builtin_inf(InterpState &S, CodePtr OpPC,
                                const InterpFrame *Frame, const Function *F) {
  const llvm::fltSemantics &TargetSemantics =
      S.getCtx().getFloatTypeSemantics(F->getDecl()->getReturnType());

  S.Stk.push<Floating>(Floating::getInf(TargetSemantics));
  return true;
}

static bool interp__builtin_copysign(InterpState &S, CodePtr OpPC,
                                     const InterpFrame *Frame,
                                     const Function *F) {
  const Floating &Arg1 = getParam<Floating>(Frame, 0);
  const Floating &Arg2 = getParam<Floating>(Frame, 1);

  APFloat Copy = Arg1.getAPFloat();
  Copy.copySign(Arg2.getAPFloat());
  S.Stk.push<Floating>(Floating(Copy));

  return true;
}

static bool interp__builtin_fmin(InterpState &S, CodePtr OpPC,
                                 const InterpFrame *Frame, const Function *F) {
  const Floating &LHS = getParam<Floating>(Frame, 0);
  const Floating &RHS = getParam<Floating>(Frame, 1);

  Floating Result;

  // When comparing zeroes, return -0.0 if one of the zeroes is negative.
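  // For example, both __builtin_fmin(+0.0, -0.0) and
  // __builtin_fmin(-0.0, +0.0) evaluate to -0.0 here.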
  if (LHS.isZero() && RHS.isZero() && RHS.isNegative())
    Result = RHS;
  else if (LHS.isNan() || RHS < LHS)
    Result = RHS;
  else
    Result = LHS;

  S.Stk.push<Floating>(Result);
  return true;
}

static bool interp__builtin_fmax(InterpState &S, CodePtr OpPC,
                                 const InterpFrame *Frame,
                                 const Function *Func) {
  const Floating &LHS = getParam<Floating>(Frame, 0);
  const Floating &RHS = getParam<Floating>(Frame, 1);

  Floating Result;

  // When comparing zeroes, return +0.0 if one of the zeroes is positive.
  if (LHS.isZero() && RHS.isZero() && LHS.isNegative())
    Result = RHS;
  else if (LHS.isNan() || RHS > LHS)
    Result = RHS;
  else
    Result = LHS;

  S.Stk.push<Floating>(Result);
  return true;
}

/// Defined as __builtin_isnan(...), to accommodate the fact that it can
/// take a float, double, long double, etc.
/// But for us, that's all a Floating anyway.
static bool interp__builtin_isnan(InterpState &S, CodePtr OpPC,
                                  const InterpFrame *Frame, const Function *F) {
  const Floating &Arg = S.Stk.peek<Floating>();

  pushInt(S, Arg.isNan());
  return true;
}

static bool interp__builtin_issignaling(InterpState &S, CodePtr OpPC,
                                        const InterpFrame *Frame,
                                        const Function *F) {
  const Floating &Arg = S.Stk.peek<Floating>();

  pushInt(S, Arg.isSignaling());
  return true;
}

static bool interp__builtin_isinf(InterpState &S, CodePtr OpPC,
                                  const InterpFrame *Frame, const Function *F,
                                  bool CheckSign) {
  const Floating &Arg = S.Stk.peek<Floating>();
  bool IsInf = Arg.isInf();

  if (CheckSign)
    pushInt(S, IsInf ? (Arg.isNegative() ? -1 : 1) : 0);
  else
    pushInt(S, Arg.isInf());
  return true;
}

static bool interp__builtin_isfinite(InterpState &S, CodePtr OpPC,
                                     const InterpFrame *Frame,
                                     const Function *F) {
  const Floating &Arg = S.Stk.peek<Floating>();

  pushInt(S, Arg.isFinite());
  return true;
}

static bool interp__builtin_isnormal(InterpState &S, CodePtr OpPC,
                                     const InterpFrame *Frame,
                                     const Function *F) {
  const Floating &Arg = S.Stk.peek<Floating>();

  pushInt(S, Arg.isNormal());
  return true;
}

static bool interp__builtin_issubnormal(InterpState &S, CodePtr OpPC,
                                        const InterpFrame *Frame,
                                        const Function *F) {
  const Floating &Arg = S.Stk.peek<Floating>();

  pushInt(S, Arg.isDenormal());
  return true;
}

static bool interp__builtin_iszero(InterpState &S, CodePtr OpPC,
                                   const InterpFrame *Frame,
                                   const Function *F) {
  const Floating &Arg = S.Stk.peek<Floating>();

  pushInt(S, Arg.isZero());
  return true;
}

/// First parameter to __builtin_isfpclass is the floating value, the
/// second one is an integral value.
static bool interp__builtin_isfpclass(InterpState &S, CodePtr OpPC,
                                      const InterpFrame *Frame,
                                      const Function *Func,
                                      const CallExpr *Call) {
  PrimType FPClassArgT = *S.getContext().classify(Call->getArg(1)->getType());
  APSInt FPClassArg = peekToAPSInt(S.Stk, FPClassArgT);
  const Floating &F =
      S.Stk.peek<Floating>(align(primSize(FPClassArgT) + primSize(PT_Float)));

  int32_t Result =
      static_cast<int32_t>((F.classify() & FPClassArg).getZExtValue());
  pushInt(S, Result);

  return true;
}

/// Five int values followed by one floating value.
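/// __builtin_fpclassify(nan, inf, normal, subnormal, zero, fp): the arguments
/// are pushed left to right, so the Floating (the last argument) sits on top
/// of the stack and the five int arguments are below it; see the offset
/// computation below.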
static bool interp__builtin_fpclassify(InterpState &S, CodePtr OpPC,
                                       const InterpFrame *Frame,
                                       const Function *Func) {
  const Floating &Val = S.Stk.peek<Floating>();

  unsigned Index;
  switch (Val.getCategory()) {
  case APFloat::fcNaN:
    Index = 0;
    break;
  case APFloat::fcInfinity:
    Index = 1;
    break;
  case APFloat::fcNormal:
    Index = Val.isDenormal() ? 3 : 2;
    break;
  case APFloat::fcZero:
    Index = 4;
    break;
  }

  // The last argument is first on the stack.
  assert(Index <= 4);
  unsigned IntSize = primSize(getIntPrimType(S));
  unsigned Offset =
      align(primSize(PT_Float)) + ((1 + (4 - Index)) * align(IntSize));

  APSInt I = peekToAPSInt(S.Stk, getIntPrimType(S), Offset);
  pushInt(S, I.getZExtValue());
  return true;
}

// The C standard says "fabs raises no floating-point exceptions,
// even if x is a signaling NaN. The returned value is independent of
// the current rounding direction mode." Therefore constant folding can
// proceed without regard to the floating point settings.
// Reference, WG14 N2478 F.10.4.3
static bool interp__builtin_fabs(InterpState &S, CodePtr OpPC,
                                 const InterpFrame *Frame,
                                 const Function *Func) {
  const Floating &Val = getParam<Floating>(Frame, 0);

  S.Stk.push<Floating>(Floating::abs(Val));
  return true;
}

static bool interp__builtin_popcount(InterpState &S, CodePtr OpPC,
                                     const InterpFrame *Frame,
                                     const Function *Func,
                                     const CallExpr *Call) {
  PrimType ArgT = *S.getContext().classify(Call->getArg(0)->getType());
  APSInt Val = peekToAPSInt(S.Stk, ArgT);
  pushInt(S, Val.popcount());
  return true;
}

static bool interp__builtin_parity(InterpState &S, CodePtr OpPC,
                                   const InterpFrame *Frame,
                                   const Function *Func, const CallExpr *Call) {
  PrimType ArgT = *S.getContext().classify(Call->getArg(0)->getType());
  APSInt Val = peekToAPSInt(S.Stk, ArgT);
  pushInt(S, Val.popcount() % 2);
  return true;
}

static bool interp__builtin_clrsb(InterpState &S, CodePtr OpPC,
                                  const InterpFrame *Frame,
                                  const Function *Func, const CallExpr *Call) {
  PrimType ArgT = *S.getContext().classify(Call->getArg(0)->getType());
  APSInt Val = peekToAPSInt(S.Stk, ArgT);
  pushInt(S, Val.getBitWidth() - Val.getSignificantBits());
  return true;
}

static bool interp__builtin_bitreverse(InterpState &S, CodePtr OpPC,
                                       const InterpFrame *Frame,
                                       const Function *Func,
                                       const CallExpr *Call) {
  PrimType ArgT = *S.getContext().classify(Call->getArg(0)->getType());
  APSInt Val = peekToAPSInt(S.Stk, ArgT);
  pushAPSInt(S, APSInt(Val.reverseBits(), /*IsUnsigned=*/true));
  return true;
}

static bool interp__builtin_classify_type(InterpState &S, CodePtr OpPC,
                                          const InterpFrame *Frame,
                                          const Function *Func,
                                          const CallExpr *Call) {
  // This is an unevaluated call, so there are no arguments on the stack.
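  // Instead, read the argument's type directly off the CallExpr.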
  assert(Call->getNumArgs() == 1);
  const Expr *Arg = Call->getArg(0);

  GCCTypeClass ResultClass =
      EvaluateBuiltinClassifyType(Arg->getType(), S.getLangOpts());
  int32_t ReturnVal = static_cast<int32_t>(ResultClass);
  pushInt(S, ReturnVal);
  return true;
}

// __builtin_expect(long, long)
// __builtin_expect_with_probability(long, long, double)
static bool interp__builtin_expect(InterpState &S, CodePtr OpPC,
                                   const InterpFrame *Frame,
                                   const Function *Func, const CallExpr *Call) {
  // The return value is simply the value of the first parameter.
  // We ignore the probability.
  unsigned NumArgs = Call->getNumArgs();
  assert(NumArgs == 2 || NumArgs == 3);

  PrimType ArgT = *S.getContext().classify(Call->getArg(0)->getType());
  unsigned Offset = align(primSize(getLongPrimType(S))) * 2;
  if (NumArgs == 3)
    Offset += align(primSize(PT_Float));

  APSInt Val = peekToAPSInt(S.Stk, ArgT, Offset);
  pushLong(S, Val.getSExtValue());
  return true;
}

/// rotateleft(value, amount)
static bool interp__builtin_rotate(InterpState &S, CodePtr OpPC,
                                   const InterpFrame *Frame,
                                   const Function *Func, const CallExpr *Call,
                                   bool Right) {
  PrimType ArgT = *S.getContext().classify(Call->getArg(0)->getType());
  assert(ArgT == *S.getContext().classify(Call->getArg(1)->getType()));

  APSInt Amount = peekToAPSInt(S.Stk, ArgT);
  APSInt Value = peekToAPSInt(S.Stk, ArgT, align(primSize(ArgT)) * 2);

  APSInt Result;
  if (Right)
    Result = APSInt(Value.rotr(Amount.urem(Value.getBitWidth())),
                    /*IsUnsigned=*/true);
  else // Left.
    Result = APSInt(Value.rotl(Amount.urem(Value.getBitWidth())),
                    /*IsUnsigned=*/true);

  pushAPSInt(S, Result);
  return true;
}

static bool interp__builtin_ffs(InterpState &S, CodePtr OpPC,
                                const InterpFrame *Frame, const Function *Func,
                                const CallExpr *Call) {
  PrimType ArgT = *S.getContext().classify(Call->getArg(0)->getType());
  APSInt Value = peekToAPSInt(S.Stk, ArgT);

  uint64_t N = Value.countr_zero();
  pushInt(S, N == Value.getBitWidth() ? 0 : N + 1);
  return true;
}

static bool interp__builtin_addressof(InterpState &S, CodePtr OpPC,
                                      const InterpFrame *Frame,
                                      const Function *Func,
                                      const CallExpr *Call) {
  PrimType PtrT =
      S.getContext().classify(Call->getArg(0)->getType()).value_or(PT_Ptr);

  if (PtrT == PT_FnPtr) {
    const FunctionPointer &Arg = S.Stk.peek<FunctionPointer>();
    S.Stk.push<FunctionPointer>(Arg);
  } else if (PtrT == PT_Ptr) {
    const Pointer &Arg = S.Stk.peek<Pointer>();
    S.Stk.push<Pointer>(Arg);
  } else {
    assert(false && "Unsupported pointer type passed to __builtin_addressof()");
  }
  return true;
}

bool InterpretBuiltin(InterpState &S, CodePtr OpPC, const Function *F,
                      const CallExpr *Call) {
  InterpFrame *Frame = S.Current;
  APValue Dummy;

  std::optional<PrimType> ReturnT = S.getContext().classify(Call->getType());

  // If classify failed, we assume void.
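  // (e.g. for __builtin_assume, whose call expression has type void).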
  assert(ReturnT || Call->getType()->isVoidType());

  switch (F->getBuiltinID()) {
  case Builtin::BI__builtin_is_constant_evaluated:
    S.Stk.push<Boolean>(Boolean::from(S.inConstantContext()));
    break;
  case Builtin::BI__builtin_assume:
    break;
  case Builtin::BI__builtin_strcmp:
    if (!interp__builtin_strcmp(S, OpPC, Frame))
      return false;
    break;
  case Builtin::BI__builtin_strlen:
    if (!interp__builtin_strlen(S, OpPC, Frame))
      return false;
    break;
  case Builtin::BI__builtin_nan:
  case Builtin::BI__builtin_nanf:
  case Builtin::BI__builtin_nanl:
  case Builtin::BI__builtin_nanf16:
  case Builtin::BI__builtin_nanf128:
    if (!interp__builtin_nan(S, OpPC, Frame, F, /*Signaling=*/false))
      return false;
    break;
  case Builtin::BI__builtin_nans:
  case Builtin::BI__builtin_nansf:
  case Builtin::BI__builtin_nansl:
  case Builtin::BI__builtin_nansf16:
  case Builtin::BI__builtin_nansf128:
    if (!interp__builtin_nan(S, OpPC, Frame, F, /*Signaling=*/true))
      return false;
    break;

  case Builtin::BI__builtin_huge_val:
  case Builtin::BI__builtin_huge_valf:
  case Builtin::BI__builtin_huge_vall:
  case Builtin::BI__builtin_huge_valf16:
  case Builtin::BI__builtin_huge_valf128:
  case Builtin::BI__builtin_inf:
  case Builtin::BI__builtin_inff:
  case Builtin::BI__builtin_infl:
  case Builtin::BI__builtin_inff16:
  case Builtin::BI__builtin_inff128:
    if (!interp__builtin_inf(S, OpPC, Frame, F))
      return false;
    break;
  case Builtin::BI__builtin_copysign:
  case Builtin::BI__builtin_copysignf:
  case Builtin::BI__builtin_copysignl:
  case Builtin::BI__builtin_copysignf128:
    if (!interp__builtin_copysign(S, OpPC, Frame, F))
      return false;
    break;

  case Builtin::BI__builtin_fmin:
  case Builtin::BI__builtin_fminf:
  case Builtin::BI__builtin_fminl:
  case Builtin::BI__builtin_fminf16:
  case Builtin::BI__builtin_fminf128:
    if (!interp__builtin_fmin(S, OpPC, Frame, F))
      return false;
    break;

  case Builtin::BI__builtin_fmax:
  case Builtin::BI__builtin_fmaxf:
  case Builtin::BI__builtin_fmaxl:
  case Builtin::BI__builtin_fmaxf16:
  case Builtin::BI__builtin_fmaxf128:
    if (!interp__builtin_fmax(S, OpPC, Frame, F))
      return false;
    break;

  case Builtin::BI__builtin_isnan:
    if (!interp__builtin_isnan(S, OpPC, Frame, F))
      return false;
    break;
  case Builtin::BI__builtin_issignaling:
    if (!interp__builtin_issignaling(S, OpPC, Frame, F))
      return false;
    break;

  case Builtin::BI__builtin_isinf:
    if (!interp__builtin_isinf(S, OpPC, Frame, F, /*CheckSign=*/false))
      return false;
    break;

  case Builtin::BI__builtin_isinf_sign:
    if (!interp__builtin_isinf(S, OpPC, Frame, F, /*CheckSign=*/true))
      return false;
    break;

  case Builtin::BI__builtin_isfinite:
    if (!interp__builtin_isfinite(S, OpPC, Frame, F))
      return false;
    break;
  case Builtin::BI__builtin_isnormal:
    if (!interp__builtin_isnormal(S, OpPC, Frame, F))
      return false;
    break;
  case Builtin::BI__builtin_issubnormal:
    if (!interp__builtin_issubnormal(S, OpPC, Frame, F))
      return false;
    break;
  case Builtin::BI__builtin_iszero:
    if (!interp__builtin_iszero(S, OpPC, Frame, F))
      return false;
    break;
  case Builtin::BI__builtin_isfpclass:
    if (!interp__builtin_isfpclass(S, OpPC, Frame, F, Call))
      return false;
    break;
  case Builtin::BI__builtin_fpclassify:
    if (!interp__builtin_fpclassify(S, OpPC, Frame, F))
      return false;
    break;

  case Builtin::BI__builtin_fabs:
  case Builtin::BI__builtin_fabsf:
  case Builtin::BI__builtin_fabsl:
  case Builtin::BI__builtin_fabsf128:
    if (!interp__builtin_fabs(S, OpPC, Frame, F))
      return false;
    break;

  case Builtin::BI__builtin_popcount:
  case Builtin::BI__builtin_popcountl:
  case Builtin::BI__builtin_popcountll:
  case Builtin::BI__popcnt16: // Microsoft variants of popcount
  case Builtin::BI__popcnt:
  case Builtin::BI__popcnt64:
    if (!interp__builtin_popcount(S, OpPC, Frame, F, Call))
      return false;
    break;

  case Builtin::BI__builtin_parity:
  case Builtin::BI__builtin_parityl:
  case Builtin::BI__builtin_parityll:
    if (!interp__builtin_parity(S, OpPC, Frame, F, Call))
      return false;
    break;

  case Builtin::BI__builtin_clrsb:
  case Builtin::BI__builtin_clrsbl:
  case Builtin::BI__builtin_clrsbll:
    if (!interp__builtin_clrsb(S, OpPC, Frame, F, Call))
      return false;
    break;

  case Builtin::BI__builtin_bitreverse8:
  case Builtin::BI__builtin_bitreverse16:
  case Builtin::BI__builtin_bitreverse32:
  case Builtin::BI__builtin_bitreverse64:
    if (!interp__builtin_bitreverse(S, OpPC, Frame, F, Call))
      return false;
    break;

  case Builtin::BI__builtin_classify_type:
    if (!interp__builtin_classify_type(S, OpPC, Frame, F, Call))
      return false;
    break;

  case Builtin::BI__builtin_expect:
  case Builtin::BI__builtin_expect_with_probability:
    if (!interp__builtin_expect(S, OpPC, Frame, F, Call))
      return false;
    break;

  case Builtin::BI__builtin_rotateleft8:
  case Builtin::BI__builtin_rotateleft16:
  case Builtin::BI__builtin_rotateleft32:
  case Builtin::BI__builtin_rotateleft64:
  case Builtin::BI_rotl8: // Microsoft variants of rotate left
  case Builtin::BI_rotl16:
  case Builtin::BI_rotl:
  case Builtin::BI_lrotl:
  case Builtin::BI_rotl64:
    if (!interp__builtin_rotate(S, OpPC, Frame, F, Call, /*Right=*/false))
      return false;
    break;

  case Builtin::BI__builtin_rotateright8:
  case Builtin::BI__builtin_rotateright16:
  case Builtin::BI__builtin_rotateright32:
  case Builtin::BI__builtin_rotateright64:
  case Builtin::BI_rotr8: // Microsoft variants of rotate right
  case Builtin::BI_rotr16:
  case Builtin::BI_rotr:
  case Builtin::BI_lrotr:
  case Builtin::BI_rotr64:
    if (!interp__builtin_rotate(S, OpPC, Frame, F, Call, /*Right=*/true))
      return false;
    break;

  case Builtin::BI__builtin_ffs:
  case Builtin::BI__builtin_ffsl:
  case Builtin::BI__builtin_ffsll:
    if (!interp__builtin_ffs(S, OpPC, Frame, F, Call))
      return false;
    break;
  case Builtin::BIaddressof:
  case Builtin::BI__addressof:
  case Builtin::BI__builtin_addressof:
    if (!interp__builtin_addressof(S, OpPC, Frame, F, Call))
      return false;
    break;

  default:
    return false;
  }

  return retPrimValue(S, OpPC, Dummy, ReturnT);
}

bool InterpretOffsetOf(InterpState &S, CodePtr OpPC, const OffsetOfExpr *E,
                       llvm::ArrayRef<int64_t> ArrayIndices,
                       int64_t &IntResult) {
  CharUnits Result;
  unsigned N = E->getNumComponents();
  assert(N > 0);

  unsigned ArrayIndex = 0;
  QualType CurrentType = E->getTypeSourceInfo()->getType();
  for (unsigned I = 0; I != N; ++I) {
    const OffsetOfNode &Node = E->getComponent(I);
    switch (Node.getKind()) {
    case OffsetOfNode::Field: {
      const FieldDecl *MemberDecl = Node.getField();
      const RecordType *RT = CurrentType->getAs<RecordType>();
      if (!RT)
        return false;
      RecordDecl *RD = RT->getDecl();
      if (RD->isInvalidDecl())
        return false;
      const ASTRecordLayout &RL = S.getCtx().getASTRecordLayout(RD);
      unsigned FieldIndex = MemberDecl->getFieldIndex();
      assert(FieldIndex < RL.getFieldCount() && "offsetof field in wrong type");
      Result += S.getCtx().toCharUnitsFromBits(RL.getFieldOffset(FieldIndex));
      CurrentType = MemberDecl->getType().getNonReferenceType();
      break;
    }
    case OffsetOfNode::Array: {
      // When generating bytecode, we put all the index expressions as Sint64
      // on the stack.
      int64_t Index = ArrayIndices[ArrayIndex];
      const ArrayType *AT = S.getCtx().getAsArrayType(CurrentType);
      if (!AT)
        return false;
      CurrentType = AT->getElementType();
      CharUnits ElementSize = S.getCtx().getTypeSizeInChars(CurrentType);
      Result += Index * ElementSize;
      ++ArrayIndex;
      break;
    }
    case OffsetOfNode::Base: {
      const CXXBaseSpecifier *BaseSpec = Node.getBase();
      if (BaseSpec->isVirtual())
        return false;

      // Find the layout of the class whose base we are looking into.
      const RecordType *RT = CurrentType->getAs<RecordType>();
      if (!RT)
        return false;
      const RecordDecl *RD = RT->getDecl();
      if (RD->isInvalidDecl())
        return false;
      const ASTRecordLayout &RL = S.getCtx().getASTRecordLayout(RD);

      // Find the base class itself.
      CurrentType = BaseSpec->getType();
      const RecordType *BaseRT = CurrentType->getAs<RecordType>();
      if (!BaseRT)
        return false;

      // Add the offset to the base.
      Result += RL.getBaseClassOffset(cast<CXXRecordDecl>(BaseRT->getDecl()));
      break;
    }
    case OffsetOfNode::Identifier:
      llvm_unreachable("Dependent OffsetOfExpr?");
    }
  }

  IntResult = Result.getQuantity();

  return true;
}

bool SetThreeWayComparisonField(InterpState &S, CodePtr OpPC,
                                const Pointer &Ptr, const APSInt &IntValue) {

  const Record *R = Ptr.getRecord();
  assert(R);
  assert(R->getNumFields() == 1);

  unsigned FieldOffset = R->getField(0u)->Offset;
  const Pointer &FieldPtr = Ptr.atField(FieldOffset);
  PrimType FieldT = *S.getContext().classify(FieldPtr.getType());

  INT_TYPE_SWITCH(FieldT,
                  FieldPtr.deref<T>() = T::from(IntValue.getSExtValue()));
  FieldPtr.initialize();
  return true;
}

} // namespace interp
} // namespace clang