//===--- Program.cpp - Bytecode for the constexpr VM ------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Program.h"
#include "ByteCodeStmtGen.h"
#include "Context.h"
#include "Function.h"
#include "Opcode.h"
#include "PrimType.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"

using namespace clang;
using namespace clang::interp;

unsigned Program::getOrCreateNativePointer(const void *Ptr) {
  auto It = NativePointerIndices.find(Ptr);
  if (It != NativePointerIndices.end())
    return It->second;

  unsigned Idx = NativePointers.size();
  NativePointers.push_back(Ptr);
  NativePointerIndices[Ptr] = Idx;
  return Idx;
}

const void *Program::getNativePointer(unsigned Idx) {
  return NativePointers[Idx];
}

unsigned Program::createGlobalString(const StringLiteral *S) {
  const size_t CharWidth = S->getCharByteWidth();
  const size_t BitWidth = CharWidth * Ctx.getCharBit();

  PrimType CharType;
  switch (CharWidth) {
  case 1:
    CharType = PT_Sint8;
    break;
  case 2:
    CharType = PT_Uint16;
    break;
  case 4:
    CharType = PT_Uint32;
    break;
  default:
    llvm_unreachable("unsupported character width");
  }

  // Create a descriptor for the string.
  Descriptor *Desc =
      allocateDescriptor(S, CharType, std::nullopt, S->getLength() + 1,
                         /*isConst=*/true,
                         /*isTemporary=*/false,
                         /*isMutable=*/false);

  // Allocate storage for the string.
  // The byte length does not include the null terminator.
  unsigned I = Globals.size();
  unsigned Sz = Desc->getAllocSize();
  auto *G = new (Allocator, Sz) Global(Desc, /*isStatic=*/true,
                                       /*isExtern=*/false);
  G->block()->invokeCtor();
  Globals.push_back(G);

  // Construct the string in storage.
  const Pointer Ptr(G->block());
  for (unsigned I = 0, N = S->getLength(); I <= N; ++I) {
    Pointer Field = Ptr.atIndex(I).narrow();
    const uint32_t CodePoint = I == N ? 0 : S->getCodeUnit(I);
    switch (CharType) {
    case PT_Sint8: {
      using T = PrimConv<PT_Sint8>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      break;
    }
    case PT_Uint16: {
      using T = PrimConv<PT_Uint16>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      break;
    }
    case PT_Uint32: {
      using T = PrimConv<PT_Uint32>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      break;
    }
    default:
      llvm_unreachable("unsupported character type");
    }
  }
  return I;
}

Pointer Program::getPtrGlobal(unsigned Idx) {
  assert(Idx < Globals.size());
  return Pointer(Globals[Idx]->block());
}

std::optional<unsigned> Program::getGlobal(const ValueDecl *VD) {
  auto It = GlobalIndices.find(VD);
  if (It != GlobalIndices.end())
    return It->second;

  // Find any previous declarations which were already evaluated.
  std::optional<unsigned> Index;
  for (const Decl *P = VD; P; P = P->getPreviousDecl()) {
    auto It = GlobalIndices.find(P);
    if (It != GlobalIndices.end()) {
      Index = It->second;
      break;
    }
  }

  // Map the decl to the existing index.
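  // Later lookups of the same declaration then hit the cache directly.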
  if (Index) {
    GlobalIndices[VD] = *Index;
    return {};
  }

  return Index;
}

std::optional<unsigned> Program::getOrCreateGlobal(const ValueDecl *VD,
                                                   const Expr *Init) {
  if (auto Idx = getGlobal(VD))
    return Idx;

  if (auto Idx = createGlobal(VD, Init)) {
    GlobalIndices[VD] = *Idx;
    return Idx;
  }
  return {};
}

std::optional<unsigned> Program::getOrCreateDummy(const ParmVarDecl *PD) {
  auto &ASTCtx = Ctx.getASTContext();

  // Create a pointer to an incomplete array of the specified elements.
  QualType ElemTy = PD->getType()->castAs<PointerType>()->getPointeeType();
  QualType Ty = ASTCtx.getIncompleteArrayType(ElemTy, ArrayType::Normal, 0);

  // Dedup blocks since they are immutable and pointers cannot be compared.
  auto It = DummyParams.find(PD);
  if (It != DummyParams.end())
    return It->second;

  if (auto Idx = createGlobal(PD, Ty, /*isStatic=*/true, /*isExtern=*/true)) {
    DummyParams[PD] = *Idx;
    return Idx;
  }
  return {};
}

std::optional<unsigned> Program::createGlobal(const ValueDecl *VD,
                                              const Expr *Init) {
  assert(!getGlobal(VD));
  bool IsStatic, IsExtern;
  if (auto *Var = dyn_cast<VarDecl>(VD)) {
    IsStatic = !Var->hasLocalStorage();
    IsExtern = !Var->getAnyInitializer();
  } else {
    IsStatic = false;
    IsExtern = true;
  }
  if (auto Idx = createGlobal(VD, VD->getType(), IsStatic, IsExtern, Init)) {
    for (const Decl *P = VD; P; P = P->getPreviousDecl())
      GlobalIndices[P] = *Idx;
    return *Idx;
  }
  return {};
}

std::optional<unsigned> Program::createGlobal(const Expr *E) {
  return createGlobal(E, E->getType(), /*isStatic=*/true, /*isExtern=*/false);
}

std::optional<unsigned> Program::createGlobal(const DeclTy &D, QualType Ty,
                                              bool IsStatic, bool IsExtern,
                                              const Expr *Init) {
  // Create a descriptor for the global.
  Descriptor *Desc;
  const bool IsConst = Ty.isConstQualified();
  const bool IsTemporary = D.dyn_cast<const Expr *>();
  if (auto T = Ctx.classify(Ty)) {
    Desc = createDescriptor(D, *T, std::nullopt, IsConst, IsTemporary);
  } else {
    Desc = createDescriptor(D, Ty.getTypePtr(), std::nullopt, IsConst,
                            IsTemporary);
  }
  if (!Desc)
    return {};

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize())
      Global(getCurrentDecl(), Desc, IsStatic, IsExtern);
  G->block()->invokeCtor();

  Globals.push_back(G);

  return I;
}

Function *Program::getFunction(const FunctionDecl *F) {
  F = F->getCanonicalDecl();
  assert(F);
  auto It = Funcs.find(F);
  return It == Funcs.end() ? nullptr : It->second.get();
}

Record *Program::getOrCreateRecord(const RecordDecl *RD) {
  // Use the actual definition as a key.
  RD = RD->getDefinition();
  if (!RD)
    return nullptr;

  // Deduplicate records.
  auto It = Records.find(RD);
  if (It != Records.end()) {
    return It->second;
  }

  // We insert nullptr now and replace that later, so recursive calls
  // to this function with the same RecordDecl don't run into
  // infinite recursion.
  Records.insert({RD, nullptr});

  // Number of bytes required by fields and base classes.
  unsigned BaseSize = 0;
  // Number of bytes required by virtual bases.
  unsigned VirtSize = 0;

  // Helper to get a base descriptor.
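  // Returns a null descriptor if the base record itself could not be created;
  // the callers below treat that as a failure for the whole record.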
  auto GetBaseDesc = [this](const RecordDecl *BD, Record *BR) -> Descriptor * {
    if (!BR)
      return nullptr;
    return allocateDescriptor(BD, BR, std::nullopt, /*isConst=*/false,
                              /*isTemporary=*/false,
                              /*isMutable=*/false);
  };

  // Reserve space for base classes.
  Record::BaseList Bases;
  Record::VirtualBaseList VirtBases;
  if (auto *CD = dyn_cast<CXXRecordDecl>(RD)) {
    for (const CXXBaseSpecifier &Spec : CD->bases()) {
      if (Spec.isVirtual())
        continue;

      const RecordDecl *BD = Spec.getType()->castAs<RecordType>()->getDecl();
      Record *BR = getOrCreateRecord(BD);
      if (Descriptor *Desc = GetBaseDesc(BD, BR)) {
        BaseSize += align(sizeof(InlineDescriptor));
        Bases.push_back({BD, BaseSize, Desc, BR});
        BaseSize += align(BR->getSize());
        continue;
      }
      return nullptr;
    }

    for (const CXXBaseSpecifier &Spec : CD->vbases()) {
      const RecordDecl *BD = Spec.getType()->castAs<RecordType>()->getDecl();
      Record *BR = getOrCreateRecord(BD);

      if (Descriptor *Desc = GetBaseDesc(BD, BR)) {
        VirtSize += align(sizeof(InlineDescriptor));
        VirtBases.push_back({BD, VirtSize, Desc, BR});
        VirtSize += align(BR->getSize());
        continue;
      }
      return nullptr;
    }
  }

  // Reserve space for fields.
  Record::FieldList Fields;
  for (const FieldDecl *FD : RD->fields()) {
    // Reserve space for the field's descriptor and the offset.
    BaseSize += align(sizeof(InlineDescriptor));

    // Classify the field and add its metadata.
    QualType FT = FD->getType();
    const bool IsConst = FT.isConstQualified();
    const bool IsMutable = FD->isMutable();
    Descriptor *Desc;
    if (std::optional<PrimType> T = Ctx.classify(FT)) {
      Desc = createDescriptor(FD, *T, std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    } else {
      Desc = createDescriptor(FD, FT.getTypePtr(), std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    }
    if (!Desc)
      return nullptr;
    Fields.push_back({FD, BaseSize, Desc});
    BaseSize += align(Desc->getAllocSize());
  }

  Record *R = new (Allocator) Record(RD, std::move(Bases), std::move(Fields),
                                     std::move(VirtBases), VirtSize, BaseSize);
  Records[RD] = R;
  return R;
}

Descriptor *Program::createDescriptor(const DeclTy &D, const Type *Ty,
                                      Descriptor::MetadataSize MDSize,
                                      bool IsConst, bool IsTemporary,
                                      bool IsMutable, const Expr *Init) {
  // Classes and structures.
  if (auto *RT = Ty->getAs<RecordType>()) {
    if (auto *Record = getOrCreateRecord(RT->getDecl()))
      return allocateDescriptor(D, Record, MDSize, IsConst, IsTemporary,
                                IsMutable);
  }

  // Arrays.
  if (auto ArrayType = Ty->getAsArrayTypeUnsafe()) {
    QualType ElemTy = ArrayType->getElementType();
    // Array of well-known bounds.
    if (auto CAT = dyn_cast<ConstantArrayType>(ArrayType)) {
      size_t NumElems = CAT->getSize().getZExtValue();
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        // Arrays of primitives.
        unsigned ElemSize = primSize(*T);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems) {
          return {};
        }
        return allocateDescriptor(D, *T, MDSize, NumElems, IsConst, IsTemporary,
                                  IsMutable);
      } else {
        // Arrays of composites. In this case, the array is a list of pointers,
        // followed by the actual elements.
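        // Each element also carries its own InlineDescriptor, so the
        // per-element size computed below includes sizeof(InlineDescriptor).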
        Descriptor *ElemDesc = createDescriptor(
            D, ElemTy.getTypePtr(), std::nullopt, IsConst, IsTemporary);
        if (!ElemDesc)
          return nullptr;
        InterpSize ElemSize =
            ElemDesc->getAllocSize() + sizeof(InlineDescriptor);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems)
          return {};
        return allocateDescriptor(D, ElemDesc, MDSize, NumElems, IsConst,
                                  IsTemporary, IsMutable);
      }
    }

    // Array of unknown bounds - cannot be accessed and pointer arithmetic
    // is forbidden on pointers to such objects.
    if (isa<IncompleteArrayType>(ArrayType)) {
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        return allocateDescriptor(D, *T, IsTemporary,
                                  Descriptor::UnknownSize{});
      } else {
        Descriptor *Desc = createDescriptor(D, ElemTy.getTypePtr(), MDSize,
                                            IsConst, IsTemporary);
        if (!Desc)
          return nullptr;
        return allocateDescriptor(D, Desc, IsTemporary,
                                  Descriptor::UnknownSize{});
      }
    }
  }

  // Atomic types.
  if (auto *AT = Ty->getAs<AtomicType>()) {
    const Type *InnerTy = AT->getValueType().getTypePtr();
    return createDescriptor(D, InnerTy, MDSize, IsConst, IsTemporary,
                            IsMutable);
  }

  // Complex types - represented as arrays of elements.
  if (auto *CT = Ty->getAs<ComplexType>()) {
    PrimType ElemTy = *Ctx.classify(CT->getElementType());
    return allocateDescriptor(D, ElemTy, MDSize, 2, IsConst, IsTemporary,
                              IsMutable);
  }

  return nullptr;
}