xref: /freebsd/contrib/llvm-project/clang/lib/AST/ByteCode/Program.cpp (revision 770cf0a5f02dc8983a89c6568d741fbc25baa999)
1 //===--- Program.cpp - Bytecode for the constexpr VM ------------*- C++ -*-===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 
9 #include "Program.h"
10 #include "Context.h"
11 #include "Function.h"
12 #include "Integral.h"
13 #include "PrimType.h"
14 #include "clang/AST/Decl.h"
15 #include "clang/AST/DeclCXX.h"
16 
17 using namespace clang;
18 using namespace clang::interp;
19 
20 unsigned Program::getOrCreateNativePointer(const void *Ptr) {
21   auto [It, Inserted] =
22       NativePointerIndices.try_emplace(Ptr, NativePointers.size());
23   if (Inserted)
24     NativePointers.push_back(Ptr);
25 
26   return It->second;
27 }
28 
29 const void *Program::getNativePointer(unsigned Idx) {
30   return NativePointers[Idx];
31 }
32 
/// Create a global block holding the contents of the string literal \p S,
/// including a null terminator, and return its index in the Globals table.
/// \p Base, when non-null, is used as the source of the descriptor instead
/// of the literal itself.
unsigned Program::createGlobalString(const StringLiteral *S, const Expr *Base) {
  const size_t CharWidth = S->getCharByteWidth();
  const size_t BitWidth = CharWidth * Ctx.getCharBit();
  unsigned StringLength = S->getLength();

  // Map the literal's code-unit width (in bytes) to a primitive VM type.
  PrimType CharType;
  switch (CharWidth) {
  case 1:
    CharType = PT_Sint8;
    break;
  case 2:
    CharType = PT_Uint16;
    break;
  case 4:
    CharType = PT_Uint32;
    break;
  default:
    llvm_unreachable("unsupported character width");
  }

  if (!Base)
    Base = S;

  // Create a descriptor for the string: a const array of StringLength + 1
  // code units (the extra element is the null terminator).
  Descriptor *Desc =
      allocateDescriptor(Base, CharType, Descriptor::GlobalMD, StringLength + 1,
                         /*isConst=*/true,
                         /*isTemporary=*/false,
                         /*isMutable=*/false);

  // Allocate storage for the string.
  // The byte length does not include the null terminator.
  unsigned GlobalIndex = Globals.size();
  unsigned Sz = Desc->getAllocSize();
  auto *G = new (Allocator, Sz) Global(Ctx.getEvalID(), Desc, /*isStatic=*/true,
                                       /*isExtern=*/false);
  G->block()->invokeCtor();

  // String globals are created fully initialized; mark the inline
  // descriptor accordingly before any pointer observes the block.
  new (G->block()->rawData())
      GlobalInlineDescriptor{GlobalInitState::Initialized};
  Globals.push_back(G);

  const Pointer Ptr(G->block());
  if (CharWidth == 1) {
    // Narrow strings are copied in one shot. NOTE(review): only
    // StringLength bytes are written; presumably invokeCtor() leaves the
    // trailing element zeroed so the terminator is implicit — confirm.
    std::memcpy(&Ptr.atIndex(0).deref<char>(), S->getString().data(),
                StringLength);
  } else {
    // Construct the string in storage, one code unit at a time; index
    // StringLength receives an explicit 0 terminator.
    for (unsigned I = 0; I <= StringLength; ++I) {
      Pointer Field = Ptr.atIndex(I);
      const uint32_t CodePoint = I == StringLength ? 0 : S->getCodeUnit(I);
      switch (CharType) {
      case PT_Sint8: {
        using T = PrimConv<PT_Sint8>::T;
        Field.deref<T>() = T::from(CodePoint, BitWidth);
        break;
      }
      case PT_Uint16: {
        using T = PrimConv<PT_Uint16>::T;
        Field.deref<T>() = T::from(CodePoint, BitWidth);
        break;
      }
      case PT_Uint32: {
        using T = PrimConv<PT_Uint32>::T;
        Field.deref<T>() = T::from(CodePoint, BitWidth);
        break;
      }
      default:
        llvm_unreachable("unsupported character type");
      }
    }
  }
  Ptr.initialize();

  return GlobalIndex;
}
109 
110 Pointer Program::getPtrGlobal(unsigned Idx) const {
111   assert(Idx < Globals.size());
112   return Pointer(Globals[Idx]->block());
113 }
114 
115 std::optional<unsigned> Program::getGlobal(const ValueDecl *VD) {
116   if (auto It = GlobalIndices.find(VD); It != GlobalIndices.end())
117     return It->second;
118 
119   // Find any previous declarations which were already evaluated.
120   std::optional<unsigned> Index;
121   for (const Decl *P = VD->getPreviousDecl(); P; P = P->getPreviousDecl()) {
122     if (auto It = GlobalIndices.find(P); It != GlobalIndices.end()) {
123       Index = It->second;
124       break;
125     }
126   }
127 
128   // Map the decl to the existing index.
129   if (Index)
130     GlobalIndices[VD] = *Index;
131 
132   return std::nullopt;
133 }
134 
135 std::optional<unsigned> Program::getGlobal(const Expr *E) {
136   if (auto It = GlobalIndices.find(E); It != GlobalIndices.end())
137     return It->second;
138   return std::nullopt;
139 }
140 
141 std::optional<unsigned> Program::getOrCreateGlobal(const ValueDecl *VD,
142                                                    const Expr *Init) {
143   if (auto Idx = getGlobal(VD))
144     return Idx;
145 
146   if (auto Idx = createGlobal(VD, Init)) {
147     GlobalIndices[VD] = *Idx;
148     return Idx;
149   }
150   return std::nullopt;
151 }
152 
/// Return the index of a dummy global block for \p D, creating one on
/// first use. Dummy blocks provide addressable storage for entities whose
/// value cannot be read (the descriptor is explicitly marked dummy), so
/// pointers to them can still be formed and compared.
unsigned Program::getOrCreateDummy(const DeclTy &D) {
  assert(D);
  // Dedup blocks since they are immutable and pointers cannot be compared.
  if (auto It = DummyVariables.find(D.getOpaqueValue());
      It != DummyVariables.end())
    return It->second;

  // Determine the type the dummy describes. For declarations, strip an
  // outer reference so the dummy models the referenced object; weakness
  // is only meaningful for declarations, not expressions.
  QualType QT;
  bool IsWeak = false;
  if (const auto *E = dyn_cast<const Expr *>(D)) {
    QT = E->getType();
  } else {
    const auto *VD = cast<ValueDecl>(cast<const Decl *>(D));
    IsWeak = VD->isWeak();
    QT = VD->getType();
    if (const auto *RT = QT->getAs<ReferenceType>())
      QT = RT->getPointeeType();
  }
  assert(!QT.isNull());

  // Build a descriptor for the type: primitive path first, composite path
  // otherwise, and a bare descriptor as a last resort when neither works.
  Descriptor *Desc;
  if (std::optional<PrimType> T = Ctx.classify(QT))
    Desc = createDescriptor(D, *T, /*SourceTy=*/nullptr, std::nullopt,
                            /*IsConst=*/QT.isConstQualified());
  else
    Desc = createDescriptor(D, QT.getTypePtr(), std::nullopt,
                            /*IsConst=*/QT.isConstQualified());
  if (!Desc)
    Desc = allocateDescriptor(D);

  assert(Desc);
  Desc->makeDummy();

  assert(Desc->isDummy());

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize())
      Global(Ctx.getEvalID(), getCurrentDecl(), Desc, /*IsStatic=*/true,
             /*IsExtern=*/false, IsWeak);
  G->block()->invokeCtor();

  // Register and memoize under the opaque DeclTy key.
  Globals.push_back(G);
  DummyVariables[D.getOpaqueValue()] = I;
  return I;
}
200 
/// Create a global for declaration \p VD with optional initializer
/// \p Init, and register the resulting index for VD and every previous
/// declaration in its redeclaration chain. Returns std::nullopt when no
/// descriptor/storage could be created.
std::optional<unsigned> Program::createGlobal(const ValueDecl *VD,
                                              const Expr *Init) {
  // Classify storage: variables follow the globally-indexed rules and
  // their own storage class; a few special declaration kinds are always
  // static and non-extern; anything else is treated as extern/non-static.
  bool IsStatic, IsExtern;
  bool IsWeak = VD->isWeak();
  if (const auto *Var = dyn_cast<VarDecl>(VD)) {
    IsStatic = Context::shouldBeGloballyIndexed(VD);
    IsExtern = Var->hasExternalStorage();
  } else if (isa<UnnamedGlobalConstantDecl, MSGuidDecl,
                 TemplateParamObjectDecl>(VD)) {
    IsStatic = true;
    IsExtern = false;
  } else {
    IsStatic = false;
    IsExtern = true;
  }

  // Register all previous declarations as well. For extern blocks, just replace
  // the index with the new variable.
  if (auto Idx =
          createGlobal(VD, VD->getType(), IsStatic, IsExtern, IsWeak, Init)) {
    for (const Decl *P = VD; P; P = P->getPreviousDecl()) {
      unsigned &PIdx = GlobalIndices[P];
      if (P != VD) {
        // A previous declaration that was only backed by an extern block
        // now shares the newly created block.
        if (Globals[PIdx]->block()->isExtern())
          Globals[PIdx] = Globals[*Idx];
      }
      PIdx = *Idx;
    }
    return *Idx;
  }
  return std::nullopt;
}
233 
234 std::optional<unsigned> Program::createGlobal(const Expr *E) {
235   if (auto Idx = getGlobal(E))
236     return Idx;
237   if (auto Idx = createGlobal(E, E->getType(), /*isStatic=*/true,
238                               /*isExtern=*/false, /*IsWeak=*/false)) {
239     GlobalIndices[E] = *Idx;
240     return *Idx;
241   }
242   return std::nullopt;
243 }
244 
/// Create a new global block for \p D of type \p Ty with the given storage
/// attributes and return its index, or std::nullopt if no descriptor could
/// be created for the type. A null \p Init marks the global as having no
/// initializer.
std::optional<unsigned> Program::createGlobal(const DeclTy &D, QualType Ty,
                                              bool IsStatic, bool IsExtern,
                                              bool IsWeak, const Expr *Init) {
  // Create a descriptor for the global.
  Descriptor *Desc;
  const bool IsConst = Ty.isConstQualified();
  // Expression-backed globals (rather than declarations) are temporaries.
  const bool IsTemporary = D.dyn_cast<const Expr *>();
  const bool IsVolatile = Ty.isVolatileQualified();
  // Primitive types get a primitive descriptor; everything else goes
  // through the composite-type path.
  if (std::optional<PrimType> T = Ctx.classify(Ty))
    Desc = createDescriptor(D, *T, nullptr, Descriptor::GlobalMD, IsConst,
                            IsTemporary, /*IsMutable=*/false, IsVolatile);
  else
    Desc = createDescriptor(D, Ty.getTypePtr(), Descriptor::GlobalMD, IsConst,
                            IsTemporary, /*IsMutable=*/false, IsVolatile);

  if (!Desc)
    return std::nullopt;

  // Allocate a block for storage.
  unsigned I = Globals.size();

  auto *G = new (Allocator, Desc->getAllocSize()) Global(
      Ctx.getEvalID(), getCurrentDecl(), Desc, IsStatic, IsExtern, IsWeak);
  G->block()->invokeCtor();

  // Initialize InlineDescriptor fields.
  auto *GD = new (G->block()->rawData()) GlobalInlineDescriptor();
  if (!Init)
    GD->InitState = GlobalInitState::NoInitializer;
  Globals.push_back(G);

  return I;
}
278 
279 Function *Program::getFunction(const FunctionDecl *F) {
280   F = F->getCanonicalDecl();
281   assert(F);
282   auto It = Funcs.find(F);
283   return It == Funcs.end() ? nullptr : It->second.get();
284 }
285 
/// Return the interpreter Record for \p RD, creating it on first use.
/// Returns nullptr for records without a complete definition or when any
/// base/field descriptor cannot be created. The record layout computed
/// here places non-virtual bases and fields in BaseSize bytes and virtual
/// bases in a separate VirtSize region, each element preceded by an
/// InlineDescriptor.
Record *Program::getOrCreateRecord(const RecordDecl *RD) {
  // Use the actual definition as a key.
  RD = RD->getDefinition();
  if (!RD)
    return nullptr;

  if (!RD->isCompleteDefinition())
    return nullptr;

  // Return an existing record if available.  Otherwise, we insert nullptr now
  // and replace that later, so recursive calls to this function with the same
  // RecordDecl don't run into infinite recursion.
  auto [It, Inserted] = Records.try_emplace(RD);
  if (!Inserted)
    return It->second;

  // Number of bytes required by fields and base classes.
  unsigned BaseSize = 0;
  // Number of bytes required by virtual base.
  unsigned VirtSize = 0;

  // Helper to get a base descriptor.
  auto GetBaseDesc = [this](const RecordDecl *BD,
                            const Record *BR) -> const Descriptor * {
    if (!BR)
      return nullptr;
    return allocateDescriptor(BD, BR, std::nullopt, /*isConst=*/false,
                              /*isTemporary=*/false,
                              /*isMutable=*/false, /*IsVolatile=*/false);
  };

  // Reserve space for base classes.
  Record::BaseList Bases;
  Record::VirtualBaseList VirtBases;
  if (const auto *CD = dyn_cast<CXXRecordDecl>(RD)) {
    // Non-virtual bases first; virtual bases are collected separately
    // below via vbases().
    for (const CXXBaseSpecifier &Spec : CD->bases()) {
      if (Spec.isVirtual())
        continue;

      // In error cases, the base might not be a RecordType.
      const auto *RT = Spec.getType()->getAs<RecordType>();
      if (!RT)
        return nullptr;
      const RecordDecl *BD = RT->getDecl();
      const Record *BR = getOrCreateRecord(BD);

      const Descriptor *Desc = GetBaseDesc(BD, BR);
      if (!Desc)
        return nullptr;

      // Each base is preceded by an InlineDescriptor; the recorded offset
      // points just past it, at the base's storage.
      BaseSize += align(sizeof(InlineDescriptor));
      Bases.push_back({BD, BaseSize, Desc, BR});
      BaseSize += align(BR->getSize());
    }

    for (const CXXBaseSpecifier &Spec : CD->vbases()) {
      const auto *RT = Spec.getType()->getAs<RecordType>();
      if (!RT)
        return nullptr;

      const RecordDecl *BD = RT->getDecl();
      const Record *BR = getOrCreateRecord(BD);

      const Descriptor *Desc = GetBaseDesc(BD, BR);
      if (!Desc)
        return nullptr;

      // Virtual bases use the same InlineDescriptor + storage layout, but
      // offsets are relative to the separate virtual-base region.
      VirtSize += align(sizeof(InlineDescriptor));
      VirtBases.push_back({BD, VirtSize, Desc, BR});
      VirtSize += align(BR->getSize());
    }
  }

  // Reserve space for fields.
  Record::FieldList Fields;
  for (const FieldDecl *FD : RD->fields()) {
    FD = FD->getFirstDecl();
    // Note that we DO create fields and descriptors
    // for unnamed bitfields here, even though we later ignore
    // them everywhere. That's so the FieldDecl's getFieldIndex() matches.

    // Reserve space for the field's descriptor and the offset.
    BaseSize += align(sizeof(InlineDescriptor));

    // Classify the field and add its metadata.
    QualType FT = FD->getType();
    const bool IsConst = FT.isConstQualified();
    const bool IsMutable = FD->isMutable();
    const bool IsVolatile = FT.isVolatileQualified();
    const Descriptor *Desc;
    if (std::optional<PrimType> T = Ctx.classify(FT)) {
      Desc = createDescriptor(FD, *T, nullptr, std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable, IsVolatile);
    } else {
      Desc = createDescriptor(FD, FT.getTypePtr(), std::nullopt, IsConst,
                              /*isTemporary=*/false, IsMutable, IsVolatile);
    }
    if (!Desc)
      return nullptr;
    Fields.push_back({FD, BaseSize, Desc});
    BaseSize += align(Desc->getAllocSize());
  }

  // Replace the nullptr placeholder inserted above with the real record.
  Record *R = new (Allocator) Record(RD, std::move(Bases), std::move(Fields),
                                     std::move(VirtBases), VirtSize, BaseSize);
  Records[RD] = R;
  return R;
}
394 
/// Create a Descriptor for a composite (non-primitive) type \p Ty owned by
/// \p D. Handles records, arrays (fixed and unknown bounds), atomics,
/// complex, and vector types; returns nullptr for anything else or when a
/// required element descriptor cannot be built. \p Init is currently
/// unused in this overload.
Descriptor *Program::createDescriptor(const DeclTy &D, const Type *Ty,
                                      Descriptor::MetadataSize MDSize,
                                      bool IsConst, bool IsTemporary,
                                      bool IsMutable, bool IsVolatile,
                                      const Expr *Init) {

  // Classes and structures.
  if (const auto *RT = Ty->getAs<RecordType>()) {
    if (const auto *Record = getOrCreateRecord(RT->getDecl()))
      return allocateDescriptor(D, Record, MDSize, IsConst, IsTemporary,
                                IsMutable, IsVolatile);
    // Record creation failed (e.g. incomplete type): fall back to a
    // metadata-only descriptor.
    return allocateDescriptor(D, MDSize);
  }

  // Arrays.
  if (const auto *ArrayType = Ty->getAsArrayTypeUnsafe()) {
    QualType ElemTy = ArrayType->getElementType();
    // Array of well-known bounds.
    if (const auto *CAT = dyn_cast<ConstantArrayType>(ArrayType)) {
      size_t NumElems = CAT->getZExtSize();
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        // Arrays of primitives.
        unsigned ElemSize = primSize(*T);
        // Guard against unsigned overflow of ElemSize * NumElems.
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems) {
          return {};
        }
        return allocateDescriptor(D, *T, MDSize, NumElems, IsConst, IsTemporary,
                                  IsMutable);
      } else {
        // Arrays of composites. In this case, the array is a list of pointers,
        // followed by the actual elements.
        const Descriptor *ElemDesc = createDescriptor(
            D, ElemTy.getTypePtr(), std::nullopt, IsConst, IsTemporary);
        if (!ElemDesc)
          return nullptr;
        // Each element carries an InlineDescriptor; include it in the
        // per-element size before the overflow check.
        unsigned ElemSize = ElemDesc->getAllocSize() + sizeof(InlineDescriptor);
        if (std::numeric_limits<unsigned>::max() / ElemSize <= NumElems)
          return {};
        return allocateDescriptor(D, Ty, ElemDesc, MDSize, NumElems, IsConst,
                                  IsTemporary, IsMutable);
      }
    }

    // Array of unknown bounds - cannot be accessed and pointer arithmetic
    // is forbidden on pointers to such objects.
    if (isa<IncompleteArrayType>(ArrayType) ||
        isa<VariableArrayType>(ArrayType)) {
      if (std::optional<PrimType> T = Ctx.classify(ElemTy)) {
        return allocateDescriptor(D, *T, MDSize, IsConst, IsTemporary,
                                  Descriptor::UnknownSize{});
      } else {
        const Descriptor *Desc = createDescriptor(
            D, ElemTy.getTypePtr(), std::nullopt, IsConst, IsTemporary);
        if (!Desc)
          return nullptr;
        return allocateDescriptor(D, Desc, MDSize, IsTemporary,
                                  Descriptor::UnknownSize{});
      }
    }
  }

  // Atomic types: describe the underlying value type.
  if (const auto *AT = Ty->getAs<AtomicType>()) {
    const Type *InnerTy = AT->getValueType().getTypePtr();
    return createDescriptor(D, InnerTy, MDSize, IsConst, IsTemporary,
                            IsMutable);
  }

  // Complex types - represented as arrays of elements.
  if (const auto *CT = Ty->getAs<ComplexType>()) {
    std::optional<PrimType> ElemTy = Ctx.classify(CT->getElementType());
    if (!ElemTy)
      return nullptr;

    // Two elements: real and imaginary part.
    return allocateDescriptor(D, *ElemTy, MDSize, 2, IsConst, IsTemporary,
                              IsMutable);
  }

  // Same with vector types.
  if (const auto *VT = Ty->getAs<VectorType>()) {
    std::optional<PrimType> ElemTy = Ctx.classify(VT->getElementType());
    if (!ElemTy)
      return nullptr;

    return allocateDescriptor(D, *ElemTy, MDSize, VT->getNumElements(), IsConst,
                              IsTemporary, IsMutable);
  }

  // Unsupported type for the bytecode interpreter.
  return nullptr;
}
485