//===--- Program.cpp - Bytecode for the constexpr VM ------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "Program.h"
#include "ByteCodeStmtGen.h"
#include "Context.h"
#include "Function.h"
#include "Integral.h"
#include "Opcode.h"
#include "PrimType.h"
#include "clang/AST/Decl.h"
#include "clang/AST/DeclCXX.h"

using namespace clang;
using namespace clang::interp;

unsigned Program::getOrCreateNativePointer(const void *Ptr) {
  auto It = NativePointerIndices.find(Ptr);
  if (It != NativePointerIndices.end())
    return It->second;

  unsigned Idx = NativePointers.size();
  NativePointers.push_back(Ptr);
  NativePointerIndices[Ptr] = Idx;
  return Idx;
}

const void *Program::getNativePointer(unsigned Idx) {
  return NativePointers[Idx];
}

unsigned Program::createGlobalString(const StringLiteral *S) {
  const size_t CharWidth = S->getCharByteWidth();
  const size_t BitWidth = CharWidth * Ctx.getCharBit();

  PrimType CharType;
  switch (CharWidth) {
  case 1:
    CharType = PT_Sint8;
    break;
  case 2:
    CharType = PT_Uint16;
    break;
  case 4:
    CharType = PT_Uint32;
    break;
  default:
    llvm_unreachable("unsupported character width");
  }

  // Create a descriptor for the string.
  Descriptor *Desc =
      allocateDescriptor(S, CharType, std::nullopt, S->getLength() + 1,
                         /*isConst=*/true,
                         /*isTemporary=*/false,
                         /*isMutable=*/false);

  // Allocate storage for the string.
  // The byte length does not include the null terminator.
  unsigned I = Globals.size();
  unsigned Sz = Desc->getAllocSize();
  auto *G = new (Allocator, Sz) Global(Desc, /*isStatic=*/true,
                                       /*isExtern=*/false);
  G->block()->invokeCtor();
  Globals.push_back(G);

  // Construct the string in storage.
  const Pointer Ptr(G->block());
  for (unsigned I = 0, N = S->getLength(); I <= N; ++I) {
    Pointer Field = Ptr.atIndex(I).narrow();
    const uint32_t CodePoint = I == N ? 0 : S->getCodeUnit(I);
    switch (CharType) {
    case PT_Sint8: {
      using T = PrimConv<PT_Sint8>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      break;
    }
    case PT_Uint16: {
      using T = PrimConv<PT_Uint16>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      break;
    }
    case PT_Uint32: {
      using T = PrimConv<PT_Uint32>::T;
      Field.deref<T>() = T::from(CodePoint, BitWidth);
      break;
    }
    default:
      llvm_unreachable("unsupported character type");
    }
  }
  return I;
}

Pointer Program::getPtrGlobal(unsigned Idx) {
  assert(Idx < Globals.size());
  return Pointer(Globals[Idx]->block());
}

std::optional<unsigned> Program::getGlobal(const ValueDecl *VD) {
  auto It = GlobalIndices.find(VD);
  if (It != GlobalIndices.end())
    return It->second;

  // Find any previous declarations which were already evaluated.
  std::optional<unsigned> Index;
  for (const Decl *P = VD; P; P = P->getPreviousDecl()) {
    auto It = GlobalIndices.find(P);
    if (It != GlobalIndices.end()) {
      Index = It->second;
      break;
    }
  }

  // Map the decl to the existing index.
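  // Later lookups of this declaration will then hit the fast path above.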
  if (Index) {
    GlobalIndices[VD] = *Index;
    return std::nullopt;
  }

  return Index;
}

std::optional<unsigned> Program::getOrCreateGlobal(const ValueDecl *VD,
                                                   const Expr *Init) {
  if (auto Idx = getGlobal(VD))
    return Idx;

  if (auto Idx = createGlobal(VD, Init)) {
    GlobalIndices[VD] = *Idx;
    return Idx;
  }
  return std::nullopt;
}

std::optional<unsigned> Program::getOrCreateDummy(const ParmVarDecl *PD) {

  // Dedup blocks since they are immutable and pointers cannot be compared.
  if (auto It = DummyParams.find(PD);
      It != DummyParams.end())
    return It->second;

  auto &ASTCtx = Ctx.getASTContext();
  // Create a pointer to an incomplete array of the specified elements.
  QualType ElemTy = PD->getType()->castAs<PointerType>()->getPointeeType();
  QualType Ty = ASTCtx.getIncompleteArrayType(ElemTy, ArrayType::Normal, 0);

  if (auto Idx = createGlobal(PD, Ty, /*isStatic=*/true, /*isExtern=*/true)) {
    DummyParams[PD] = *Idx;
    return Idx;
  }
  return std::nullopt;
}

std::optional<unsigned> Program::createGlobal(const ValueDecl *VD,
                                              const Expr *Init) {
  assert(!getGlobal(VD));
  bool IsStatic, IsExtern;
  if (auto *Var = dyn_cast<VarDecl>(VD)) {
    IsStatic = Context::shouldBeGloballyIndexed(VD);
    IsExtern = !Var->getAnyInitializer();
  } else {
    IsStatic = false;
    IsExtern = true;
  }
  if (auto Idx = createGlobal(VD, VD->getType(), IsStatic, IsExtern, Init)) {
    for (const Decl *P = VD; P; P = P->getPreviousDecl())
      GlobalIndices[P] = *Idx;
    return *Idx;
  }
  return std::nullopt;
}

std::optional<unsigned> Program::createGlobal(const Expr *E) {
  return createGlobal(E, E->getType(), /*isStatic=*/true, /*isExtern=*/false);
}

std::optional<unsigned> Program::createGlobal(const DeclTy &D, QualType Ty,
                                              bool IsStatic, bool IsExtern,
                                              const Expr *Init) {
  // Create a descriptor for the global.
  Descriptor *Desc;
  const bool IsConst = Ty.isConstQualified();
  const bool IsTemporary = D.dyn_cast<const Expr *>();
  if (auto T = Ctx.classify(Ty)) {
    Desc = createDescriptor(D, *T, std::nullopt, IsConst, IsTemporary);
  } else {
    Desc = createDescriptor(D, Ty.getTypePtr(), std::nullopt, IsConst,
                            IsTemporary);
  }
  if (!Desc)
    return std::nullopt;

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize())
      Global(getCurrentDecl(), Desc, IsStatic, IsExtern);
  G->block()->invokeCtor();

  Globals.push_back(G);

  return I;
}

Function *Program::getFunction(const FunctionDecl *F) {
  F = F->getCanonicalDecl();
  assert(F);
  auto It = Funcs.find(F);
  return It == Funcs.end() ? nullptr : It->second.get();
}

Record *Program::getOrCreateRecord(const RecordDecl *RD) {
  // Use the actual definition as a key.
  RD = RD->getDefinition();
  if (!RD)
    return nullptr;

  // Deduplicate records.
  if (auto It = Records.find(RD); It != Records.end())
    return It->second;

  // We insert nullptr now and replace that later, so recursive calls
  // to this function with the same RecordDecl don't run into
  // infinite recursion.
  Records.insert({RD, nullptr});

  // Number of bytes required by fields and base classes.
  unsigned BaseSize = 0;
  // Number of bytes required by virtual bases.
  unsigned VirtSize = 0;

  // Helper to get a base descriptor.
  auto GetBaseDesc = [this](const RecordDecl *BD, Record *BR) -> Descriptor * {
    if (!BR)
      return nullptr;
    return allocateDescriptor(BD, BR, std::nullopt, /*isConst=*/false,
                              /*isTemporary=*/false,
                              /*isMutable=*/false);
  };

  // Reserve space for base classes.
  Record::BaseList Bases;
  Record::VirtualBaseList VirtBases;
  if (auto *CD = dyn_cast<CXXRecordDecl>(RD)) {
    for (const CXXBaseSpecifier &Spec : CD->bases()) {
      if (Spec.isVirtual())
        continue;

      const RecordDecl *BD = Spec.getType()->castAs<RecordType>()->getDecl();
      Record *BR = getOrCreateRecord(BD);
      if (Descriptor *Desc = GetBaseDesc(BD, BR)) {
        BaseSize += align(sizeof(InlineDescriptor));
        Bases.push_back({BD, BaseSize, Desc, BR});
        BaseSize += align(BR->getSize());
        continue;
      }
      return nullptr;
    }

    for (const CXXBaseSpecifier &Spec : CD->vbases()) {
      const RecordDecl *BD = Spec.getType()->castAs<RecordType>()->getDecl();
      Record *BR = getOrCreateRecord(BD);

      if (Descriptor *Desc = GetBaseDesc(BD, BR)) {
        VirtSize += align(sizeof(InlineDescriptor));
        VirtBases.push_back({BD, VirtSize, Desc, BR});
        VirtSize += align(BR->getSize());
        continue;
      }
      return nullptr;
    }
  }

  // Reserve space for fields.
  Record::FieldList Fields;
  for (const FieldDecl *FD : RD->fields()) {
    // Reserve space for the field's descriptor and the offset.
    BaseSize += align(sizeof(InlineDescriptor));

    // Classify the field and add its metadata.
    QualType FT = FD->getType();
    const bool IsConst = FT.isConstQualified();
    const bool IsMutable = FD->isMutable();
    Descriptor *Desc;
    if (std::optional<PrimType> T = Ctx.classify(FT)) {
      Desc = createDescriptor(FD, *T, std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    } else {
      Desc = createDescriptor(FD, FT.getTypePtr(), std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable);
    }
    if (!Desc)
      return nullptr;
    Fields.push_back({FD, BaseSize, Desc});
    BaseSize += align(Desc->getAllocSize());
  }

  Record *R = new (Allocator) Record(RD, std::move(Bases), std::move(Fields),
                                     std::move(VirtBases), VirtSize, BaseSize);
  Records[RD] = R;
  return R;
}

Descriptor *Program::createDescriptor(const DeclTy &D, const Type *Ty,
                                      Descriptor::MetadataSize MDSize,
                                      bool IsConst, bool IsTemporary,
                                      bool IsMutable, const Expr *Init) {
  // Classes and structures.
  if (auto *RT = Ty->getAs<RecordType>()) {
    if (auto *Record = getOrCreateRecord(RT->getDecl()))
      return allocateDescriptor(D, Record, MDSize, IsConst, IsTemporary,
                                IsMutable);
  }

  // Arrays.
  if (auto ArrayType = Ty->getAsArrayTypeUnsafe()) {
    QualType ElemTy = ArrayType->getElementType();
    // Array of well-known bounds.
    if (auto CAT = dyn_cast<ConstantArrayType>(ArrayType)) {
      size_t NumElems = CAT->getSize().getZExtValue();
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        // Arrays of primitives.
        unsigned ElemSize = primSize(*T);
        // Bail out if the total allocation size would overflow 'unsigned'.
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems) {
          return {};
        }
        return allocateDescriptor(D, *T, MDSize, NumElems, IsConst, IsTemporary,
                                  IsMutable);
      } else {
        // Arrays of composites. In this case, the array is a list of pointers,
        // followed by the actual elements.
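        // The per-element allocation below adds sizeof(InlineDescriptor) to
        // account for each element's metadata.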
        Descriptor *ElemDesc = createDescriptor(
            D, ElemTy.getTypePtr(), std::nullopt, IsConst, IsTemporary);
        if (!ElemDesc)
          return nullptr;
        unsigned ElemSize =
            ElemDesc->getAllocSize() + sizeof(InlineDescriptor);
        // Bail out if the total allocation size would overflow 'unsigned'.
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems)
          return {};
        return allocateDescriptor(D, ElemDesc, MDSize, NumElems, IsConst,
                                  IsTemporary, IsMutable);
      }
    }

    // Array of unknown bounds - cannot be accessed and pointer arithmetic
    // is forbidden on pointers to such objects.
    if (isa<IncompleteArrayType>(ArrayType)) {
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        return allocateDescriptor(D, *T, IsTemporary,
                                  Descriptor::UnknownSize{});
      } else {
        Descriptor *Desc = createDescriptor(D, ElemTy.getTypePtr(), MDSize,
                                            IsConst, IsTemporary);
        if (!Desc)
          return nullptr;
        return allocateDescriptor(D, Desc, IsTemporary,
                                  Descriptor::UnknownSize{});
      }
    }
  }

  // Atomic types.
  if (auto *AT = Ty->getAs<AtomicType>()) {
    const Type *InnerTy = AT->getValueType().getTypePtr();
    return createDescriptor(D, InnerTy, MDSize, IsConst, IsTemporary,
                            IsMutable);
  }

  // Complex types - represented as arrays of two elements.
  if (auto *CT = Ty->getAs<ComplexType>()) {
    PrimType ElemTy = *Ctx.classify(CT->getElementType());
    return allocateDescriptor(D, ElemTy, MDSize, 2, IsConst, IsTemporary,
                              IsMutable);
  }

  return nullptr;
}